linux/arch/mips/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          (*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)                ((v)->counter = (i))

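/*
 * Example (illustrative sketch only, not part of this header): declaring,
 * initialising, reading and (re)setting an atomic counter with the
 * accessors above.  The names below are hypothetical.
 */
static atomic_t example_users = ATOMIC_INIT(0);

static __inline__ int example_current_users(void)
{
        /* plain, unordered read of the counter */
        return atomic_read(&example_users);
}

static __inline__ void example_reset_users(void)
{
        /* plain store; no memory barrier is implied */
        atomic_set(&example_users, 0);
}
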
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %1          # atomic_add    \n"
                        "       addu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %1          # atomic_sub    \n"
                        "       subu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

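/*
 * Example (illustrative sketch only): atomic_add()/atomic_sub() used as an
 * in-flight request counter.  Neither call implies a memory barrier; use
 * the *_return variants below when ordering matters.  Names are
 * hypothetical.
 */
static __inline__ void example_request_start(atomic_t *in_flight)
{
        atomic_add(1, in_flight);       /* equivalent to atomic_inc(), defined below */
}

static __inline__ void example_request_done(atomic_t *in_flight)
{
        atomic_sub(1, in_flight);       /* equivalent to atomic_dec(), defined below */
}
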
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_add_return     \n"
                        "       addu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

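/*
 * Example (illustrative sketch only): atomic_add_return() as a trivial
 * monotonic ID allocator.  Unlike plain atomic_add(), the *_return
 * variants are fully ordered (smp_mb__before_llsc()/smp_llsc_mb() around
 * the ll/sc sequence).  The name is hypothetical and wrap-around is not
 * handled.
 */
static __inline__ int example_next_id(atomic_t *last_id)
{
        return atomic_add_return(1, last_id);
}
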
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");

                result = temp - i;
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_sub_return     \n"
                        "       subu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

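/*
 * Example (illustrative sketch only): atomic_sub_if_positive() as a
 * try-acquire on a pool of credits.  The subtraction is committed only if
 * the result would stay >= 0; a negative return value means the counter
 * was left untouched.  Names are hypothetical.
 */
static __inline__ int example_get_credits(atomic_t *credits, int want)
{
        return atomic_sub_if_positive(want, credits) >= 0;
}
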
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

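/*
 * Example (illustrative sketch only): the usual atomic_cmpxchg() retry
 * loop, here keeping a running maximum.  This is the same
 * read/compute/try-to-commit/retry pattern the ll/sc loops above
 * implement in assembly.  Names are hypothetical.
 */
static __inline__ void example_track_max(atomic_t *max, int value)
{
        int old = atomic_read(max);

        while (old < value) {
                int seen = atomic_cmpxchg(max, old, value);

                if (seen == old)
                        break;          /* our value was stored */
                old = seen;             /* another CPU updated it; retry */
        }
}
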
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}

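/*
 * Example (illustrative sketch only): __atomic_add_unless() used to take a
 * reference only while an object is still live - the classic
 * "inc-not-zero" pattern (the generic atomic_inc_not_zero() wrapper in
 * <linux/atomic.h> is built on this helper).  The name is hypothetical.
 */
static __inline__ int example_try_get(atomic_t *refs)
{
        /* the old value is returned; 0 means the object was already dying */
        return __atomic_add_unless(refs, 1, 0) != 0;
}
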
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

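/*
 * Example (illustrative sketch only): the usual reference-count release
 * built on atomic_dec_and_test(); exactly one caller sees the count reach
 * zero and tears the object down.  The structure and names are
 * hypothetical.
 */
struct example_object {
        atomic_t refs;
};

static __inline__ void example_put(struct example_object *obj,
                                   void (*release)(struct example_object *))
{
        if (atomic_dec_and_test(&obj->refs))
                release(obj);
}
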
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        (*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %1          # atomic64_add  \n"
                        "       daddu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

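/*
 * Example (illustrative sketch only, CONFIG_64BIT): a byte counter that
 * would overflow 32 bits, kept with atomic64_add()/atomic64_read().
 * Names are hypothetical.
 */
static __inline__ void example_account_bytes(atomic64_t *total, long bytes)
{
        atomic64_add(bytes, total);
}

static __inline__ long example_bytes_so_far(atomic64_t *total)
{
        return atomic64_read(total);
}
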
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %1          # atomic64_sub  \n"
                        "       dsubu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_add_return   \n"
                        "       daddu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_sub_return   \n"
                        "       dsubu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed (@v was not @u), zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

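/*
 * Example (illustrative sketch only, CONFIG_64BIT): taking a reference on
 * a 64-bit counted object only while it is still live, mirroring the
 * 32-bit example above.  The name is hypothetical.
 */
static __inline__ int example_try_get64(atomic64_t *refs)
{
        /* non-zero if the increment happened, 0 if *refs was already 0 */
        return atomic64_inc_not_zero(refs);
}
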
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * The atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()     smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()      smp_llsc_mb()
#define smp_mb__before_atomic_inc()     smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()      smp_llsc_mb()

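/*
 * Example (illustrative sketch only): making a plain atomic_dec() ordered
 * against an earlier store with smp_mb__before_atomic_dec().  Without the
 * barrier, a CPU that observes the decremented count is not guaranteed to
 * also observe w->done = 1.  The structure and names are hypothetical.
 */
struct example_work {
        int done;
        atomic_t pending;
};

static __inline__ void example_complete(struct example_work *w)
{
        w->done = 1;
        smp_mb__before_atomic_dec();
        atomic_dec(&w->pending);
}
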
#endif /* _ASM_ATOMIC_H */