linux/arch/x86/include/asm/refcount.h
#ifndef __ASM_X86_REFCOUNT_H
#define __ASM_X86_REFCOUNT_H
/*
 * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
 * PaX/grsecurity.
 */
#include <linux/refcount.h>
#include <asm/bug.h>

/*
 * This is the first portion of the refcount error handling, which lives in
 * the out-of-line .text..refcount section and is jumped to from the CPU
 * flag check (in the following macros). It saves the address of the
 * refcount variable into CX for the exception handler to use (in
 * mm/extable.c), and then triggers the central refcount exception. The
 * fixup address for the exception points back to the regular execution
 * flow in .text.
 */
#define _REFCOUNT_EXCEPTION				\
	".pushsection .text..refcount\n"		\
	"111:\tlea %[var], %%" _ASM_CX "\n"		\
	"112:\t" ASM_UD2 "\n"				\
	ASM_UNREACHABLE					\
	".popsection\n"					\
	"113:\n"					\
	_ASM_EXTABLE_REFCOUNT(112b, 113b)

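/*
 * Illustrative expansion (not literal compiler output), assuming
 * REFCOUNT_CHECK_LT_ZERO below is appended to a LOCK-prefixed add on
 * x86-64:
 *
 *		lock addl $1, (mem)		# fast path, in .text
 *		js 111f				# SF set: take the slow path
 *	.pushsection .text..refcount
 *	111:	lea (mem), %rcx			# counter address into CX
 *	112:	ud2				# trap; extable entry below
 *	.popsection
 *	113:					# fixup target, back in .text
 *
 * The exception table entry maps the ud2 at 112 to the fixup at 113, so
 * once the handler in mm/extable.c has reported the error and saturated
 * the counter, execution resumes on the regular path.
 */
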
/* Trigger refcount exception if refcount result is negative. */
#define REFCOUNT_CHECK_LT_ZERO				\
	"js 111f\n\t"					\
	_REFCOUNT_EXCEPTION

/* Trigger refcount exception if refcount result is zero or negative. */
#define REFCOUNT_CHECK_LE_ZERO				\
	"jz 111f\n\t"					\
	REFCOUNT_CHECK_LT_ZERO

/* Trigger refcount exception unconditionally. */
#define REFCOUNT_ERROR					\
	"jmp 111f\n\t"					\
	_REFCOUNT_EXCEPTION

/* Add i to the refcount; traps if the result went negative (overflow). */
static __always_inline void refcount_add(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [var] "+m" (r->refs.counter)
		: "ir" (i)
		: "cc", "cx");
}

/* Increment the refcount; traps if the result went negative (overflow). */
static __always_inline void refcount_inc(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "incl %0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [var] "+m" (r->refs.counter)
		: : "cc", "cx");
}

/* Decrement the refcount; traps if the result is zero or negative. */
static __always_inline void refcount_dec(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "decl %0\n\t"
		REFCOUNT_CHECK_LE_ZERO
		: [var] "+m" (r->refs.counter)
		: : "cc", "cx");
}

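/*
 * The GEN_*_SUFFIXED_RMWcc() helpers (asm/rmwcc.h) emit the LOCK-prefixed
 * instruction with REFCOUNT_CHECK_LT_ZERO appended as the "suffix", and
 * evaluate to true when the "e" (ZF set) condition holds, i.e. when the
 * new counter value is zero.
 */
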
static __always_inline __must_check
bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
					 REFCOUNT_CHECK_LT_ZERO,
					 r->refs.counter, e, "er", i, "cx");
}

static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
					REFCOUNT_CHECK_LT_ZERO,
					r->refs.counter, e, "cx");
}

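/*
 * Open-coded saturation check: this path must refuse to increment a zero
 * or already-saturated counter, which cannot be expressed as a single
 * flag check after a LOCK-prefixed instruction. It therefore reads the
 * counter, validates the would-be result in C, and only then attempts the
 * cmpxchg; errors are reported through the same REFCOUNT_ERROR exception
 * path as above.
 */
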
static __always_inline __must_check
bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	int c, result;

	c = atomic_read(&(r->refs));
	do {
		if (unlikely(c == 0))
			return false;

		result = c + i;

		/* Did we try to increment from/to an undesirable state? */
		if (unlikely(c < 0 || c == INT_MAX || result < c)) {
			asm volatile(REFCOUNT_ERROR
				     : : [var] "m" (r->refs.counter)
				     : "cc", "cx");
			break;
		}

	} while (!atomic_try_cmpxchg(&(r->refs), &c, result));

	return c != 0;
}

static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return refcount_add_not_zero(1, r);
}

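/*
 * Illustrative usage (hypothetical struct and helpers, not part of this
 * header): the usual get/put pattern built on these primitives.
 *
 *	struct foo {
 *		refcount_t ref;
 *	};
 *
 *	static struct foo *foo_get(struct foo *f)
 *	{
 *		return refcount_inc_not_zero(&f->ref) ? f : NULL;
 *	}
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (refcount_dec_and_test(&f->ref))
 *			kfree(f);
 *	}
 */
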
#endif /* __ASM_X86_REFCOUNT_H */