linux/arch/mips/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)        WRITE_ONCE((v)->counter, (i))

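/*
 * Illustrative usage (a minimal sketch; the counter and function names
 * here are hypothetical, not part of this header):
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void record_event(void)
 *	{
 *		atomic_inc(&nr_events);
 *	}
 *
 *	int events_seen(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 *
 * atomic_read()/atomic_set() are plain single loads/stores and impose no
 * memory ordering; the read-modify-write operations defined below are what
 * make concurrent updates from multiple CPUs safe.
 */
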
#define ATOMIC_OP(op, c_op, asm_op)                                           \
static __inline__ void atomic_##op(int i, atomic_t * v)                       \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %0, %1          # atomic_" #op "        \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %0, %1          # atomic_" #op "\n"   \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       sc      %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

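/*
 * For reference, a hand-expanded sketch of what ATOMIC_OP(add, +=, addu)
 * generates on an ordinary LL/SC-capable CPU (illustration only; the macro
 * above is the authoritative version):
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v)
 *	{
 *		int temp;
 *
 *		do {
 *			__asm__ __volatile__(
 *			"	ll	%0, %1	\n"	// load-linked old value
 *			"	addu	%0, %2	\n"	// temp = old + i
 *			"	sc	%0, %1	\n"	// store-conditional
 *			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)
 *			: "Ir" (i));
 *		} while (unlikely(!temp));
 *	}
 *
 * sc leaves 1 in %0 on success and 0 if another CPU touched the cache line
 * between the ll and the sc, in which case the C-level loop retries.  On
 * R10000_LLSC_WAR CPUs the retry branch (beqzl) sits inside the asm instead.
 */
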
#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)      \
{                                                                             \
        int result;                                                           \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_" #op "_return \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op)                                     \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)        \
{                                                                             \
        int result;                                                           \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_fetch_" #op "  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       move    %0, %1                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_fetch_" #op "  \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp;                                                \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

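/*
 * Only the _relaxed forms are defined here.  The fully ordered
 * atomic_add_return() etc. are generated from them by <linux/atomic.h>,
 * roughly as follows (sketch of the generic wrapper of that era, shown
 * here for orientation only):
 *
 *	#define __atomic_op_fence(op, args...)				\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret;			\
 *		smp_mb__before_atomic();				\
 *		__ret = op##_relaxed(args);				\
 *		smp_mb__after_atomic();					\
 *		__ret;							\
 *	})
 */
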
#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

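/*
 * The difference between the _return and fetch_ flavours, with hypothetical
 * values (sketch):
 *
 *	atomic_t v = ATOMIC_INIT(5);
 *
 *	int new = atomic_add_return_relaxed(3, &v);	// new == 8, v == 8
 *	int old = atomic_fetch_add_relaxed(3, &v);	// old == 8, v == 11
 *
 * _return hands back the value after the operation, fetch_ the value
 * before it.
 */
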
#define atomic_add_return_relaxed       atomic_add_return_relaxed
#define atomic_sub_return_relaxed       atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed        atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed         atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

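/*
 * Typical use is a semaphore-style down counter (sketch; take_token() is a
 * hypothetical caller, not part of this header):
 *
 *	static atomic_t tokens = ATOMIC_INIT(4);
 *
 *	int take_token(void)
 *	{
 *		// Claims a token only when one is available; returns the
 *		// (possibly negative) result of the attempted subtraction.
 *		return atomic_sub_if_positive(1, &tokens);
 *	}
 *
 * A negative return value means the subtraction was refused and @v was
 * left unchanged.
 */
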
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}

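/*
 * <linux/atomic.h> builds the more familiar helpers on top of this;
 * roughly (sketch of the generic wrappers, shown for orientation):
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 *
 *	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 *
 * Note that __atomic_add_unless() returns the old value, while
 * atomic_add_unless() returns whether the add actually happened.
 */
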
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

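/*
 * atomic_dec_and_test() is the backbone of kernel-style reference counting
 * (sketch; struct obj and release() are hypothetical):
 *
 *	struct obj {
 *		atomic_t refcount;
 *	};
 *
 *	void obj_put(struct obj *o)
 *	{
 *		// Only the thread that drops the count to zero frees it.
 *		if (atomic_dec_and_test(&o->refcount))
 *			release(o);
 *	}
 */
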
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)      WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op)                                         \
static __inline__ void atomic64_##op(long i, atomic64_t * v)                  \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %0, %1          # atomic64_" #op "\n" \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       scd     %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{                                                                             \
        long result;                                                          \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_" #op "_return\n"  \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)                                   \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v)  \
{                                                                             \
        long result;                                                          \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_fetch_" #op "\n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       move    %0, %1                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_fetch_" #op "\n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp;                                                \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed     atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed     atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed      atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
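
/*
 * Example: the classic lookup-and-get pattern (sketch; the object type and
 * helper names are hypothetical):
 *
 *	struct obj *obj_get(struct obj *o)
 *	{
 *		// Refuse to take a reference on an object whose count has
 *		// already dropped to zero, i.e. one being torn down.
 *		if (!atomic64_inc_not_zero(&o->refcount))
 *			return NULL;
 *		return o;
 *	}
 *
 * Unlike the 32-bit __atomic_add_unless() above, atomic64_add_unless()
 * returns whether the add happened rather than the old value.
 */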

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */