linux/arch/m32r/include/asm/local.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __M32R_LOCAL_H
#define __M32R_LOCAL_H

/*
 *  linux/include/asm-m32r/local.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 *    Copyright (C) 2007  Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
 */

#include <linux/percpu.h>
#include <asm/assembler.h>
#include <asm/local.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } local_t;

#define LOCAL_INIT(i)   { (i) }

/**
 * local_read - read local variable
 * @l: pointer of type local_t
 *
 * Atomically reads the value of @l.
 */
#define local_read(l)   ((l)->counter)

/**
 * local_set - set local variable
 * @l: pointer of type local_t
 * @i: required value
 *
 * Atomically sets the value of @l to @i.
 */
#define local_set(l, i) (((l)->counter) = (i))

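/*
 * Usage sketch (added for illustration, not part of the original header;
 * the per-cpu variable name "my_count" is hypothetical): local_t is meant
 * to be declared per cpu and only ever modified by its owning CPU.
 *
 *	static DEFINE_PER_CPU(local_t, my_count) = LOCAL_INIT(0);
 *
 *	local_set(&get_cpu_var(my_count), 0);
 *	put_cpu_var(my_count);
 *
 *	printk("count on this cpu: %d\n", local_read(&get_cpu_var(my_count)));
 *	put_cpu_var(my_count);
 */
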
/**
 * local_add_return - add long to local variable and return it
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l and returns (@i + @l).
 */
static inline long local_add_return(long i, local_t *l)
{
        unsigned long flags;
        long result;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_add_return             \n\t"
                DCACHE_CLEAR("%0", "r4", "%1")
                "ld %0, @%1;                    \n\t"
                "add    %0, %2;                 \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (result)
                : "r" (&l->counter), "r" (i)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return result;
}

/**
 * local_sub_return - subtract long from local variable and return it
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns (@l - @i).
 */
static inline long local_sub_return(long i, local_t *l)
{
        unsigned long flags;
        long result;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_sub_return             \n\t"
                DCACHE_CLEAR("%0", "r4", "%1")
                "ld %0, @%1;                    \n\t"
                "sub    %0, %2;                 \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (result)
                : "r" (&l->counter), "r" (i)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return result;
}

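/*
 * Usage sketch (illustrative; "my_count", THRESHOLD and
 * do_something_about_it() are hypothetical names): the *_return variants
 * hand back the updated value, so the caller can test the new count
 * without a second read.
 *
 *	long new;
 *
 *	new = local_add_return(16, &get_cpu_var(my_count));
 *	if (new > THRESHOLD)
 *		do_something_about_it();
 *	put_cpu_var(my_count);
 */
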
/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l.
 */
#define local_add(i, l) ((void) local_add_return((i), (l)))

/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l.
 */
#define local_sub(i, l) ((void) local_sub_return((i), (l)))

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

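/*
 * Usage sketch (illustrative; "my_budget" and refill_budget() are
 * hypothetical): local_sub_and_test() folds the subtraction and the zero
 * test into one call, e.g. for a per-cpu byte budget.
 *
 *	if (local_sub_and_test(nbytes, &get_cpu_var(my_budget)))
 *		refill_budget();
 *	put_cpu_var(my_budget);
 */
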
/**
 * local_inc_return - increment local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1 and returns the result.
 */
static inline long local_inc_return(local_t *l)
{
        unsigned long flags;
        long result;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_inc_return             \n\t"
                DCACHE_CLEAR("%0", "r4", "%1")
                "ld %0, @%1;                    \n\t"
                "addi   %0, #1;                 \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (result)
                : "r" (&l->counter)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return result;
}

/**
 * local_dec_return - decrement local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and returns the result.
 */
static inline long local_dec_return(local_t *l)
{
        unsigned long flags;
        long result;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_dec_return             \n\t"
                DCACHE_CLEAR("%0", "r4", "%1")
                "ld %0, @%1;                    \n\t"
                "addi   %0, #-1;                \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (result)
                : "r" (&l->counter)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);

        return result;
}

/**
 * local_inc - increment local variable
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1.
 */
#define local_inc(l) ((void)local_inc_return(l))

/**
 * local_dec - decrement local variable
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1.
 */
#define local_dec(l) ((void)local_dec_return(l))

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define local_dec_and_test(l) (local_dec_return(l) == 0)

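/*
 * Usage sketch (illustrative; "my_nesting" and flush_this_cpu() are
 * hypothetical): a per-cpu nesting counter, incremented on entry and
 * tested on exit so that only the outermost exit does the cleanup.
 *
 *	static DEFINE_PER_CPU(local_t, my_nesting);
 *
 *	local_inc(&get_cpu_var(my_nesting));
 *	put_cpu_var(my_nesting);
 *	...
 *	if (local_dec_and_test(&get_cpu_var(my_nesting)))
 *		flush_this_cpu();
 *	put_cpu_var(my_nesting);
 */
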
/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return((i), (l)) < 0)

#define local_cmpxchg(l, o, n) (cmpxchg_local(&((l)->counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->counter), (n)))

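/*
 * Usage sketch (illustrative; "my_index" is hypothetical): local_cmpxchg()
 * gives a read-modify-write that is safe against interrupts on the same
 * CPU without disabling them, here claiming the next slot index.
 *
 *	local_t *idx = &get_cpu_var(my_index);
 *	long old;
 *
 *	do {
 *		old = local_read(idx);
 *	} while (local_cmpxchg(idx, old, old + 1) != old);
 *	put_cpu_var(my_index);
 */
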
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
static inline int local_add_unless(local_t *l, long a, long u)
{
        long c, old;
        c = local_read(l);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = local_cmpxchg((l), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

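/*
 * Usage sketch (illustrative; "my_refs" is hypothetical): the usual
 * "take a reference only while the count is still non-zero" pattern.
 *
 *	if (!local_inc_not_zero(&get_cpu_var(my_refs))) {
 *		put_cpu_var(my_refs);
 *		return -ENOENT;
 *	}
 *	put_cpu_var(my_refs);
 */
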
static inline void local_clear_mask(unsigned long mask, local_t *addr)
{
        unsigned long flags;
        unsigned long tmp;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_clear_mask             \n\t"
                DCACHE_CLEAR("%0", "r5", "%1")
                "ld %0, @%1;                    \n\t"
                "and    %0, %2;                 \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (tmp)
                : "r" (addr), "r" (~mask)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r5"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);
}

static inline void local_set_mask(unsigned long mask, local_t *addr)
{
        unsigned long flags;
        unsigned long tmp;

        local_irq_save(flags);
        __asm__ __volatile__ (
                "# local_set_mask               \n\t"
                DCACHE_CLEAR("%0", "r5", "%1")
                "ld %0, @%1;                    \n\t"
                "or     %0, %2;                 \n\t"
                "st %0, @%1;                    \n\t"
                : "=&r" (tmp)
                : "r" (addr), "r" (mask)
                : "memory"
#ifdef CONFIG_CHIP_M32700_TS1
                , "r5"
#endif  /* CONFIG_CHIP_M32700_TS1 */
        );
        local_irq_restore(flags);
}

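/*
 * Usage sketch (illustrative; MY_FLAG_BUSY and "my_flags" are hypothetical):
 * the mask helpers let a local_t double as a per-cpu flag word that is
 * updated atomically with respect to interrupts.
 *
 *	#define MY_FLAG_BUSY	0x1
 *
 *	local_set_mask(MY_FLAG_BUSY, &get_cpu_var(my_flags));
 *	put_cpu_var(my_flags);
 *	...
 *	local_clear_mask(MY_FLAG_BUSY, &get_cpu_var(my_flags));
 *	put_cpu_var(my_flags);
 */
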
/* Atomic operations are already serializing on m32r */
#define smp_mb__before_local_dec()      barrier()
#define smp_mb__after_local_dec()       barrier()
#define smp_mb__before_local_inc()      barrier()
#define smp_mb__after_local_inc()       barrier()

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note that,
 * like the functions above, they take a pointer of type local_t, and
 * that they are not irq-safe: the caller must ensure no interrupt can
 * touch the same counter.
 */

#define __local_inc(l)          ((l)->counter++)
#define __local_dec(l)          ((l)->counter--)
#define __local_add(i, l)       ((l)->counter += (i))
#define __local_sub(i, l)       ((l)->counter -= (i))

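/*
 * Usage sketch (illustrative; "my_count" is hypothetical): the __local_*
 * forms skip the irq-save/restore sequence used above, so they are only
 * safe when interrupts cannot touch the same counter, e.g.:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_inc(&get_cpu_var(my_count));
 *	put_cpu_var(my_count);
 *	local_irq_restore(flags);
 */
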
#endif /* __M32R_LOCAL_H */