linux/arch/mips/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ACCESS_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)                ((v)->counter = (i))
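
/*
 * A minimal usage sketch (illustrative only; "hits" and the helpers are
 * hypothetical, not part of this header):
 *
 *      static atomic_t hits = ATOMIC_INIT(0);
 *
 *      static void reset_hits(void)
 *      {
 *              atomic_set(&hits, 0);
 *      }
 *
 *      static int read_hits(void)
 *      {
 *              return atomic_read(&hits);
 *      }
 *
 * atomic_read() and atomic_set() are single tear-free loads and stores;
 * by themselves they imply no ordering against other memory accesses.
 */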

#define ATOMIC_OP(op, c_op, asm_op)                                           \
static __inline__ void atomic_##op(int i, atomic_t * v)                       \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %0, %1          # atomic_" #op "        \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %0, %1          # atomic_" #op "\n"   \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       sc      %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}
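
/*
 * At the C level the LL/SC sequence emitted by ATOMIC_OP() behaves
 * roughly like the sketch below; ll()/sc() are pseudo-helpers standing
 * in for the actual instructions, not real functions:
 *
 *      do {
 *              temp = ll(&v->counter);                 load-linked
 *              temp = temp <op> i;                     addu/subu on temp
 *      } while (!sc(&v->counter, temp));               store-conditional
 *
 * sc yields 0 when another CPU touched the line since the ll. Under
 * R10000_LLSC_WAR the retry is the branch-likely beqzl inside the asm
 * itself; in the plain LL/SC case the C do/while re-issues the whole
 * sequence whenever sc fails.
 */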

#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
static __inline__ int atomic_##op##_return(int i, atomic_t * v)               \
{                                                                             \
        int result;                                                           \
                                                                              \
        smp_mb__before_llsc();                                                \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_" #op "_return \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        smp_llsc_mb();                                                        \
                                                                              \
        return result;                                                        \
}

#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
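
/*
 * The two ATOMIC_OPS() expansions above generate atomic_add(),
 * atomic_sub(), atomic_add_return() and atomic_sub_return(). A short
 * usage sketch (the "pending" counter is hypothetical):
 *
 *      static atomic_t pending = ATOMIC_INIT(0);
 *
 *      atomic_add(3, &pending);
 *      if (atomic_sub_return(1, &pending) == 2)
 *              ...
 *
 * Only the _return variants imply full memory barriers, via the
 * smp_mb__before_llsc()/smp_llsc_mb() pair; the void variants do not.
 */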

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * Returns the old value of @v minus @i, whether or not the subtraction
 * was performed.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
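
/*
 * atomic_sub_if_positive() backs atomic_dec_if_positive() below and can
 * implement a simple credit counter, e.g. (hypothetical):
 *
 *      static atomic_t credits = ATOMIC_INIT(8);
 *
 *      static int take_credit(void)
 *      {
 *              return atomic_sub_if_positive(1, &credits) >= 0;
 *      }
 *
 * A negative return value means the subtraction was refused and the
 * counter is unchanged.
 */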

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
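
/*
 * atomic_cmpxchg() allows arbitrary read-modify-write operations to be
 * built in C. A typical compare-and-swap loop, here an atomic maximum
 * (a hypothetical helper, not a kernel API):
 *
 *      static void atomic_max(atomic_t *v, int n)
 *      {
 *              int old = atomic_read(v);
 *
 *              while (old < n) {
 *                      int seen = atomic_cmpxchg(v, old, n);
 *                      if (seen == old)
 *                              break;
 *                      old = seen;
 *              }
 *      }
 *
 * On failure atomic_cmpxchg() returns the value actually seen, which
 * seeds the next iteration.
 */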

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
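
/*
 * __atomic_add_unless() is the building block for the generic
 * atomic_add_unless() and atomic_inc_not_zero() wrappers in
 * <linux/atomic.h>. The usual "take a reference unless the object is
 * dying" idiom amounts to (obj is hypothetical):
 *
 *      if (__atomic_add_unless(&obj->refcnt, 1, 0) == 0)
 *              obj = NULL;
 *
 * Note the return value is the old counter value, not a success flag.
 */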

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
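
/*
 * The helpers above compose into the classic refcounting pattern, e.g.
 * (struct obj is hypothetical, kfree() shown for illustration):
 *
 *      static void obj_get(struct obj *o)
 *      {
 *              atomic_inc(&o->refcnt);
 *      }
 *
 *      static void obj_put(struct obj *o)
 *      {
 *              if (atomic_dec_and_test(&o->refcnt))
 *                      kfree(o);
 *      }
 *
 * atomic_dec_and_test() expands to atomic_sub_return() and therefore
 * implies the full barrier this pattern depends on.
 */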

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)                                         \
static __inline__ void atomic64_##op(long i, atomic64_t * v)                  \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %0, %1          # atomic64_" #op "\n" \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       scd     %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)         \
{                                                                             \
        long result;                                                          \
                                                                              \
        smp_mb__before_llsc();                                                \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_" #op "_return\n"  \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        smp_llsc_mb();                                                        \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
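
/*
 * The atomic64_* ops generated above mirror the 32-bit versions, using
 * lld/scd and daddu/dsubu, and exist only on CONFIG_64BIT kernels.
 * Usage is identical, e.g. (hypothetical names):
 *
 *      static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *      atomic64_add(len, &bytes_rx);
 *      total = atomic64_read(&bytes_rx);
 */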

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * Returns the old value of @v minus @i, whether or not the subtraction
 * was performed.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed, zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}
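
/*
 * Note the difference in return convention: __atomic_add_unless() above
 * returns the old counter value, while atomic64_add_unless() returns
 * whether the add was performed, e.g. (v64 is hypothetical):
 *
 *      if (atomic64_add_unless(&v64, 1, 0))
 *              ...
 *
 * where the branch is taken iff the counter was not zero and has now
 * been incremented. atomic64_inc_not_zero() below relies on exactly
 * this semantic.
 */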

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */