linux/arch/arc/include/asm/cmpxchg.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/build_bug.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/smp.h>

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * if (*ptr == @old)
 *      *ptr = @new
 */
#define __cmpxchg(ptr, old, new)                                        \
({                                                                      \
        __typeof__(*(ptr)) _prev;                                       \
                                                                        \
        __asm__ __volatile__(                                           \
        "1:     llock  %0, [%1] \n"                                     \
        "       brne   %0, %2, 2f       \n"                             \
        "       scond  %3, [%1] \n"                                     \
        "       bnz     1b              \n"                             \
        "2:                             \n"                             \
        : "=&r"(_prev)  /* Early clobber prevents reg reuse */          \
        : "r"(ptr),     /* Not "m": llock only supports reg */          \
          "ir"(old),                                                    \
          "r"(new)      /* Not "ir": scond can't take LIMM */           \
        : "cc",                                                         \
          "memory");    /* gcc knows memory is clobbered */             \
                                                                        \
        _prev;                                                          \
})
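
/*
 * A rough C rendering of the LLOCK/SCOND loop above, for illustration
 * only (plain C cannot express the atomicity the hardware provides);
 * store_conditional() is a hypothetical helper standing in for scond:
 *
 *      retry:
 *              prev = *ptr;                            // llock: load, open reservation
 *              if (prev != old)
 *                      goto done;                      // brne: mismatch, give up
 *              if (!store_conditional(ptr, new))       // scond: store iff still reserved
 *                      goto retry;                     // bnz: reservation lost, retry
 *      done:
 *              return prev;
 */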

#define arch_cmpxchg_relaxed(ptr, old, new)                             \
({                                                                      \
        __typeof__(ptr) _p_ = (ptr);                                    \
        __typeof__(*(ptr)) _o_ = (old);                                 \
        __typeof__(*(ptr)) _n_ = (new);                                 \
        __typeof__(*(ptr)) _prev_;                                      \
                                                                        \
        switch (sizeof(*(_p_))) {                                       \
        case 4:                                                         \
                _prev_ = __cmpxchg(_p_, _o_, _n_);                      \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
        _prev_;                                                         \
})
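
/*
 * Usage sketch (illustrative, not part of this header): a lock-free
 * increment built as the canonical cmpxchg retry loop; example_inc()
 * is a hypothetical name.
 *
 *      static inline void example_inc(u32 *p)
 *      {
 *              u32 old;
 *
 *              do {
 *                      old = READ_ONCE(*p);
 *              } while (arch_cmpxchg_relaxed(p, old, old + 1) != old);
 *      }
 */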

#else

#define arch_cmpxchg(ptr, old, new)                                     \
({                                                                      \
        volatile __typeof__(ptr) _p_ = (ptr);                           \
        __typeof__(*(ptr)) _o_ = (old);                                 \
        __typeof__(*(ptr)) _n_ = (new);                                 \
        __typeof__(*(ptr)) _prev_;                                      \
        unsigned long __flags;                                          \
                                                                        \
        BUILD_BUG_ON(sizeof(*(_p_)) != 4);                              \
                                                                        \
        /*                                                              \
         * spin lock/unlock provide the needed smp_mb() before/after    \
         */                                                             \
        atomic_ops_lock(__flags);                                       \
        _prev_ = *_p_;                                                  \
        if (_prev_ == _o_)                                              \
                *_p_ = _n_;                                             \
        atomic_ops_unlock(__flags);                                     \
        _prev_;                                                         \
})

#endif

/*
 * xchg
 */
#ifdef CONFIG_ARC_HAS_LLSC

#define __xchg(ptr, val)                                                \
({                                                                      \
        __asm__ __volatile__(                                           \
        "       ex  %0, [%1]    \n"     /* swap @val into *ptr */       \
        : "+r"(val)     /* updated in place with the old value */       \
        : "r"(ptr)                                                      \
        : "memory");                                                    \
        val;            /* old value read back by EX */                 \
})

#define arch_xchg_relaxed(ptr, val)                                     \
({                                                                      \
        __typeof__(ptr) _p_ = (ptr);                                    \
        __typeof__(*(ptr)) _val_ = (val);                               \
                                                                        \
        switch (sizeof(*(_p_))) {                                       \
        case 4:                                                         \
                _val_ = __xchg(_p_, _val_);                             \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
        _val_;                                                          \
})
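
/*
 * Usage sketch (illustrative): atomically claim a flag so that only one
 * CPU acts on it; @work_pending and example_process_work() are
 * hypothetical names.
 *
 *      if (arch_xchg_relaxed(&work_pending, 1) == 0)
 *              example_process_work();
 */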

#else  /* !CONFIG_ARC_HAS_LLSC */

/*
 * The EX instruction is baseline and thus present in !LLSC builds too.
 * But in this regime it still needs to take the @atomic_ops_lock
 * spinlock to interoperate with cmpxchg(), which uses the spinlock in
 * !LLSC (llist.h uses xchg and cmpxchg on the same data).
 */

#define arch_xchg(ptr, val)                                             \
({                                                                      \
        __typeof__(ptr) _p_ = (ptr);                                    \
        __typeof__(*(ptr)) _val_ = (val);                               \
                                                                        \
        unsigned long __flags;                                          \
                                                                        \
        atomic_ops_lock(__flags);                                       \
                                                                        \
        __asm__ __volatile__(                                           \
        "       ex  %0, [%1]    \n"                                     \
        : "+r"(_val_)                                                   \
        : "r"(_p_)                                                      \
        : "memory");                                                    \
                                                                        \
        atomic_ops_unlock(__flags);                                     \
        _val_;                                                          \
})
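
/*
 * Interop sketch (illustrative): llist-style usage where one CPU swaps
 * out the list head with xchg() while another updates it with
 * cmpxchg(). Both must take @atomic_ops_lock here, else the bare EX
 * could land between cmpxchg()'s load and store and its update be lost:
 *
 *      first = arch_xchg(&head, NULL);         // consumer: grab whole list
 *      arch_cmpxchg(&head, first, node);       // producer: push a node
 */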

#endif

#endif