/* linux/arch/s390/include/asm/percpu.h */
   1#ifndef __ARCH_S390_PERCPU__
   2#define __ARCH_S390_PERCPU__
   3
   4#include <linux/preempt.h>
   5#include <asm/cmpxchg.h>
   6
/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 * Defining __my_cpu_offset here overrides the generic offset lookup
 * in asm-generic/percpu.h (included at the bottom of this file).
 */
#define __my_cpu_offset S390_lowcore.percpu_offset
  12
/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, use weak definitions to force the compiler to
 * generate external references.
 * NOTE(review): presumably consumed by the DEFINE_PER_CPU machinery in
 * linux/percpu-defs.h to emit weak symbol definitions — confirm there.
 */
#if defined(CONFIG_SMP) && defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif
  21
/*
 * We use a compare-and-swap loop since that uses less cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 *
 * Generic read-modify-write on a per cpu variable: applies "op"
 * (e.g. +, &, |) with "val" to the cpu-local instance of "pcp" and
 * yields the updated value as the statement-expression result.
 * Preemption is disabled so the task cannot migrate to another cpu
 * between raw_cpu_ptr() and the cmpxchg; the loop retries until no
 * concurrent modification happened between the read and the swap.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;	/* initial read; re-read via cmpxchg */	\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable();						\
	new__;		/* value of the statement expression */		\
})
  42
/*
 * 1 and 2 byte variants always use the cmpxchg loop; the interlocked-
 * access instructions below only exist for 4 and 8 byte operands.
 */
#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
  51
  52#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES
  53
/*
 * Without the z196 interlocked-access facility the 4 and 8 byte
 * variants fall back to the cmpxchg loop as well.
 */
#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
  62
  63#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
  64
  65#define arch_this_cpu_add(pcp, val, op1, op2, szcast)                   \
  66{                                                                       \
  67        typedef typeof(pcp) pcp_op_T__;                                 \
  68        pcp_op_T__ val__ = (val);                                       \
  69        pcp_op_T__ old__, *ptr__;                                       \
  70        preempt_disable();                                              \
  71        ptr__ = raw_cpu_ptr(&(pcp));                            \
  72        if (__builtin_constant_p(val__) &&                              \
  73            ((szcast)val__ > -129) && ((szcast)val__ < 128)) {          \
  74                asm volatile(                                           \
  75                        op2 "   %[ptr__],%[val__]\n"                    \
  76                        : [ptr__] "+Q" (*ptr__)                         \
  77                        : [val__] "i" ((szcast)val__)                   \
  78                        : "cc");                                        \
  79        } else {                                                        \
  80                asm volatile(                                           \
  81                        op1 "   %[old__],%[val__],%[ptr__]\n"           \
  82                        : [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)   \
  83                        : [val__] "d" (val__)                           \
  84                        : "cc");                                        \
  85        }                                                               \
  86        preempt_enable();                                               \
  87}
  88
/* 32/64 bit add: load-and-add resp. add-(halfword-)immediate forms. */
#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)
  91
/*
 * Atomic per cpu add that also yields the new value.
 * The instruction ("laa"/"laag") returns the previous contents of the
 * memory location in old__, so old__ + val__ is the updated value and
 * becomes the statement-expression result.
 */
#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "	%[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable();						\
	old__ + val__;		/* new value of the per cpu variable */	\
})
 107
/* 32/64 bit add-and-return via load-and-add. */
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")
 110
 111#define arch_this_cpu_to_op(pcp, val, op)                               \
 112{                                                                       \
 113        typedef typeof(pcp) pcp_op_T__;                                 \
 114        pcp_op_T__ val__ = (val);                                       \
 115        pcp_op_T__ old__, *ptr__;                                       \
 116        preempt_disable();                                              \
 117        ptr__ = raw_cpu_ptr(&(pcp));                                    \
 118        asm volatile(                                                   \
 119                op "    %[old__],%[val__],%[ptr__]\n"                   \
 120                : [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)           \
 121                : [val__] "d" (val__)                                   \
 122                : "cc");                                                \
 123        preempt_enable();                                               \
 124}
 125
/* 32/64 bit atomic and/or via load-and-and / load-and-or. */
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")
 130
 131#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
 132
/*
 * Per cpu compare-and-swap: returns the previous contents of the
 * cpu-local instance of "pcp" (equal to "oval" iff the swap happened).
 * Preemption is disabled only to keep the task on its cpu while the
 * address is computed and used; cmpxchg() itself provides the
 * atomicity for all four operand sizes.
 */
#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
 149
/*
 * Per cpu exchange: stores "nval" into the cpu-local instance of
 * "pcp" and returns the previous contents. As above, preemption is
 * disabled only to pin the task to its cpu; xchg() is atomic.
 */
#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)
 165
/*
 * Double-width per cpu compare-and-swap on the pair (pcp1, pcp2);
 * returns nonzero int on success (both old values matched and both
 * new values were stored).
 * Only the 8 byte variant is wired up below. NOTE(review):
 * __cmpxchg_double presumably requires the two locations to be
 * adjacent and suitably aligned for the cdsg instruction — confirm
 * against asm/cmpxchg.h and the percpu definitions of the callers.
 */
#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	\
({									\
	typeof(pcp1) o1__ = (o1), n1__ = (n1);				\
	typeof(pcp2) o2__ = (o2), n2__ = (n2);				\
	typeof(pcp1) *p1__;						\
	typeof(pcp2) *p2__;						\
	int ret__;							\
	preempt_disable();						\
	p1__ = raw_cpu_ptr(&(pcp1));					\
	p2__ = raw_cpu_ptr(&(pcp2));					\
	ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);	\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double
 182
 183#include <asm-generic/percpu.h>
 184
 185#endif /* __ARCH_S390_PERCPU__ */
 186