/* linux/arch/m32r/include/asm/cmpxchg.h */
#ifndef _ASM_M32R_CMPXCHG_H
#define _ASM_M32R_CMPXCHG_H

/*
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/irqflags.h>
#include <asm/assembler.h>
#include <asm/dcache_clear.h>

extern void  __xchg_called_with_bad_pointer(void);

/*
 * __xchg - atomically exchange a value with the object at @ptr.
 * @x:    new value to store
 * @ptr:  object to exchange with (1, 2 or 4 bytes)
 * @size: sizeof(*ptr)
 *
 * Returns the previous value at @ptr.
 *
 * On UP the load/store pair is made atomic w.r.t. this CPU simply by
 * disabling interrupts around it.  On SMP only the 4-byte case is
 * implemented, using the M32R lock/unlock bus-lock instruction pair;
 * 1- and 2-byte SMP exchanges fall through to the default case.  An
 * unsupported size calls __xchg_called_with_bad_pointer(), which is
 * never defined, so misuse is caught as a link-time error.
 */
static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
        unsigned long flags;
        unsigned long tmp = 0;

        /* IRQs off: the ld/st pair below must not be interrupted */
        local_irq_save(flags);

        switch (size) {
#ifndef CONFIG_SMP
        case 1:
                __asm__ __volatile__ (
                        "ldb    %0, @%2 \n\t"   /* tmp = *(u8 *)ptr */
                        "stb    %1, @%2 \n\t"   /* *(u8 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 2:
                __asm__ __volatile__ (
                        "ldh    %0, @%2 \n\t"   /* tmp = *(u16 *)ptr */
                        "sth    %1, @%2 \n\t"   /* *(u16 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 4:
                __asm__ __volatile__ (
                        "ld     %0, @%2 \n\t"   /* tmp = *(u32 *)ptr */
                        "st     %1, @%2 \n\t"   /* *(u32 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
#else  /* CONFIG_SMP */
        case 4:
                /*
                 * lock/unlock hold the bus lock between the load and
                 * the store, making the exchange atomic across CPUs.
                 * DCACHE_CLEAR is presumably a data-cache workaround
                 * for the M32700 TS1 chip; it clobbers r4 there, hence
                 * the conditional clobber entry below.
                 */
                __asm__ __volatile__ (
                        DCACHE_CLEAR("%0", "r4", "%2")
                        "lock   %0, @%2;        \n\t"
                        "unlock %1, @%2;        \n\t"
                        : "=&r" (tmp) : "r" (x), "r" (ptr)
                        : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                        , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
                );
                break;
#endif  /* CONFIG_SMP */
        default:
                /* undefined extern: invalid size becomes a link error */
                __xchg_called_with_bad_pointer();
        }

        local_irq_restore(flags);

        return (tmp);
}

/*
 * xchg - type-safe wrapper around __xchg(); evaluates to the previous
 * value of *ptr, cast back to the pointed-to type.
 */
#define xchg(ptr, x)                                                    \
        ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))

/*
 * __xchg_local - exchange a value with *@ptr, atomic only w.r.t. the
 * current CPU (no bus lock; not safe against other processors).
 * @x:    new value to store
 * @ptr:  object to exchange with (1, 2 or 4 bytes)
 * @size: sizeof(*ptr)
 *
 * Returns the previous value at @ptr.  Disabling interrupts around the
 * plain load/store pair is sufficient for CPU-local atomicity on every
 * configuration, so there is no SMP variant here.  An unsupported size
 * calls the deliberately-undefined __xchg_called_with_bad_pointer()
 * for a link-time error.
 */
static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
        unsigned long flags;
        unsigned long tmp = 0;

        local_irq_save(flags);

        switch (size) {
        case 1:
                __asm__ __volatile__ (
                        "ldb    %0, @%2 \n\t"   /* tmp = *(u8 *)ptr */
                        "stb    %1, @%2 \n\t"   /* *(u8 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 2:
                __asm__ __volatile__ (
                        "ldh    %0, @%2 \n\t"   /* tmp = *(u16 *)ptr */
                        "sth    %1, @%2 \n\t"   /* *(u16 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        case 4:
                __asm__ __volatile__ (
                        "ld     %0, @%2 \n\t"   /* tmp = *(u32 *)ptr */
                        "st     %1, @%2 \n\t"   /* *(u32 *)ptr = x */
                        : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
                break;
        default:
                /* undefined extern: invalid size becomes a link error */
                __xchg_called_with_bad_pointer();
        }

        local_irq_restore(flags);

        return (tmp);
}

/*
 * xchg_local - xchg() that is only atomic w.r.t. the current CPU.
 * Evaluates to the previous value of *ptr, in the pointed-to type.
 */
#define xchg_local(ptr, x)                                              \
        ((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr),    \
                        sizeof(*(ptr))))

/*
 * __cmpxchg_u32 - SMP-safe 32-bit compare-and-exchange.
 * @p:   word to operate on
 * @old: value expected at *@p
 * @new: value to store when *@p == @old
 *
 * Returns the value actually read from @p (equal to @old on success).
 *
 * The M32R_LOCK/M32R_UNLOCK assembler macros hold the bus lock between
 * the load and the conditional store.  On the mismatch path the value
 * just read is stored back: an unlock-type store is still needed to
 * release the bus lock, and rewriting the observed value leaves memory
 * unchanged.  IRQs are disabled so the sequence is also atomic w.r.t.
 * this CPU.  DCACHE_CLEAR is presumably a data-cache workaround for
 * the M32700 TS1 chip; it clobbers r4 there, hence the conditional
 * clobber entry.  "cbit" is clobbered by the bne comparison.
 */
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
        unsigned long flags;
        unsigned int retval;

        local_irq_save(flags);
        __asm__ __volatile__ (
                        DCACHE_CLEAR("%0", "r4", "%1")
                        M32R_LOCK" %0, @%1;     \n"     /* retval = *p (locked) */
                "       bne     %0, %2, 1f;     \n"     /* mismatch? -> 1: */
                        M32R_UNLOCK" %3, @%1;   \n"     /* *p = new (unlocks) */
                "       bra     2f;             \n"
                "       .fillinsn               \n"
                "1:"
                        M32R_UNLOCK" %0, @%1;   \n"     /* store back old value to unlock */
                "       .fillinsn               \n"
                "2:"
                        : "=&r" (retval)
                        : "r" (p), "r" (old), "r" (new)
                        : "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                        , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
                );
        local_irq_restore(flags);

        return retval;
}

/*
 * __cmpxchg_local_u32 - 32-bit compare-and-exchange, atomic only
 * w.r.t. the current CPU.
 * @p:   word to operate on
 * @old: value expected at *@p
 * @new: value to store when *@p == @old
 *
 * Returns the value actually read from @p (equal to @old on success).
 *
 * Same structure as __cmpxchg_u32() but with plain ld/st instead of
 * the lock/unlock pair: with IRQs disabled that is sufficient for
 * CPU-local atomicity.  The mismatch path stores back the value just
 * read, mirroring the locked variant; with plain st this write is
 * redundant but harmless under IRQs-off — NOTE(review): confirm it is
 * intentional rather than a leftover from the locked version.
 */
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
                        unsigned int new)
{
        unsigned long flags;
        unsigned int retval;

        local_irq_save(flags);
        __asm__ __volatile__ (
                        DCACHE_CLEAR("%0", "r4", "%1")
                        "ld %0, @%1;            \n"     /* retval = *p */
                "       bne     %0, %2, 1f;     \n"     /* mismatch? -> 1: */
                        "st %3, @%1;            \n"     /* *p = new */
                "       bra     2f;             \n"
                "       .fillinsn               \n"
                "1:"
                        "st %0, @%1;            \n"     /* write back observed value */
                "       .fillinsn               \n"
                "2:"
                        : "=&r" (retval)
                        : "r" (p), "r" (old), "r" (new)
                        : "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                        , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
                );
        local_irq_restore(flags);

        return retval;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

 175static inline unsigned long
 176__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 177{
 178        switch (size) {
 179        case 4:
 180                return __cmpxchg_u32(ptr, old, new);
 181#if 0   /* we don't have __cmpxchg_u64 */
 182        case 8:
 183                return __cmpxchg_u64(ptr, old, new);
 184#endif /* 0 */
 185        }
 186        __cmpxchg_called_with_bad_pointer();
 187        return old;
 188}
 189
/*
 * cmpxchg - type-safe wrapper around __cmpxchg(); evaluates to the
 * previous value of *ptr, cast back to the pointed-to type.
 */
#define cmpxchg(ptr, o, n)                                               \
        ((__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)(o),       \
                        (unsigned long)(n), sizeof(*(ptr))))

#include <asm-generic/cmpxchg-local.h>

 196static inline unsigned long __cmpxchg_local(volatile void *ptr,
 197                                      unsigned long old,
 198                                      unsigned long new, int size)
 199{
 200        switch (size) {
 201        case 4:
 202                return __cmpxchg_local_u32(ptr, old, new);
 203        default:
 204                return __cmpxchg_local_generic(ptr, old, new, size);
 205        }
 206
 207        return old;
 208}
 209
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                            \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),     \
                        (unsigned long)(n), sizeof(*(ptr))))
/* no native 64-bit cmpxchg on m32r: always use the asm-generic version */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#endif /* _ASM_M32R_CMPXCHG_H */