linux/arch/blackfin/include/asm/cmpxchg.h
/*
 * Copyright 2004-2011 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_CMPXCHG__
#define __ARCH_BLACKFIN_CMPXCHG__

#ifdef CONFIG_SMP

#include <linux/linkage.h>

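/*
 * Out-of-line assembly helpers that do the actual atomic exchange and
 * compare-and-swap, one variant per operand size (1, 2 or 4 bytes).
 */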
asmlinkage unsigned long __raw_xchg_1_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_2_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_4_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_cmpxchg_1_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_2_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_4_asm(volatile void *ptr,
					unsigned long new, unsigned long old);

static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;

	switch (size) {
	case 1:
		tmp = __raw_xchg_1_asm(ptr, x);
		break;
	case 2:
		tmp = __raw_xchg_2_asm(ptr, x);
		break;
	case 4:
		tmp = __raw_xchg_4_asm(ptr, x);
		break;
	}

	return tmp;
}

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long tmp = 0;

	switch (size) {
	case 1:
		tmp = __raw_cmpxchg_1_asm(ptr, new, old);
		break;
	case 2:
		tmp = __raw_cmpxchg_2_asm(ptr, new, old);
		break;
	case 4:
		tmp = __raw_cmpxchg_4_asm(ptr, new, old);
		break;
	}

	return tmp;
}
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))
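
/*
 * Illustrative sketch (not part of the original header): the canonical
 * cmpxchg() retry loop.  cmpxchg() returns the value that was in memory
 * before the operation, so success is detected by comparing the return
 * value with the expected old value.  The function and variable names
 * below are hypothetical.
 */
#if 0
static inline void example_atomic_add(volatile unsigned long *counter,
				      unsigned long delta)
{
	unsigned long old, prev;

	do {
		old = *counter;			/* snapshot current value */
		prev = cmpxchg(counter, old, old + delta);
	} while (prev != old);			/* lost a race; retry */
}
#endif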

#else /* !CONFIG_SMP */

#include <mach/blackfin.h>
#include <asm/irqflags.h>

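/*
 * __xg() casts the target to a fictitious, very large type so that the
 * "m" constraint in the asm below covers a wide region of memory; this
 * keeps GCC from assuming only a few bytes are touched and caching the
 * old contents in a register across the exchange.
 */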
struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	/* With interrupts off, the load/store pair below is atomic on UP. */
	flags = hard_local_irq_save();

	switch (size) {
	case 1:
		__asm__ __volatile__
			("%0 = b%2 (z);\n\t"	/* zero-extending byte load */
			 "b%2 = %1;\n\t"	/* byte store */
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("%0 = w%2 (z);\n\t"	/* zero-extending 16-bit load */
			 "w%2 = %1;\n\t"	/* 16-bit store */
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("%0 = %2;\n\t"		/* 32-bit load */
			 "%2 = %1;\n\t"		/* 32-bit store */
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	hard_local_irq_restore(flags);
	return tmp;
}

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

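/*
 * With a single CPU, the interrupt disabling done by the generic _local
 * helpers already makes these operations atomic, so the full API simply
 * maps to the _local variants.
 */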
#define cmpxchg(ptr, o, n)	cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))

#endif /* !CONFIG_SMP */

#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
#define tas(ptr) ((void)xchg((ptr), 1))
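
/*
 * Illustrative sketch (not part of the original header): xchg() returns
 * the previous value, so a minimal test-and-set busy-wait lock can be
 * built on it.  The lock variable and function names are hypothetical.
 */
#if 0
static inline void example_lock(volatile unsigned long *lock)
{
	while (xchg(lock, 1))	/* old value 0 means we took the lock */
		cpu_relax();
}

static inline void example_unlock(volatile unsigned long *lock)
{
	*lock = 0;
}
#endif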

#endif /* __ARCH_BLACKFIN_CMPXCHG__ */