#ifndef __ASM_X86_REFCOUNT_H
#define __ASM_X86_REFCOUNT_H
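/*
 * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
 * PaX/grsecurity. Overflow and underflow conditions trap via UD2 rather
 * than letting the counter wrap.
 */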
#include <linux/refcount.h>
#include <asm/bug.h>
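
/*
 * This is the first portion of the refcount error handling, which lives in
 * .text..refcount and is jumped to from the CPU flag checks in the macros
 * below. It saves the location of the refcount value into CX for the
 * exception handler to use (in mm/extable.c), and then triggers the
 * central refcount exception via UD2. The fixup address for the exception
 * (113) points back to the regular execution flow in .text.
 */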
#define _REFCOUNT_EXCEPTION \
	".pushsection .text..refcount\n" \
	"111:\tlea %[counter], %%" _ASM_CX "\n" \
	"112:\t" ASM_UD2 "\n" \
	ASM_UNREACHABLE \
	".popsection\n" \
	"113:\n" \
	_ASM_EXTABLE_REFCOUNT(112b, 113b)
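
/* Trigger refcount exception if refcount result is negative. */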
#define REFCOUNT_CHECK_LT_ZERO \
	"js 111f\n\t" \
	_REFCOUNT_EXCEPTION
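
/* Trigger refcount exception if refcount result is zero or negative. */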
#define REFCOUNT_CHECK_LE_ZERO \
	"jz 111f\n\t" \
	REFCOUNT_CHECK_LT_ZERO
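
/* Trigger refcount exception unconditionally. */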
#define REFCOUNT_ERROR \
	"jmp 111f\n\t" \
	_REFCOUNT_EXCEPTION
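
/* Add @i to @r; traps via the refcount exception if the result is negative. */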
static __always_inline void refcount_add(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: "ir" (i)
		: "cc", "cx");
}
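
/* Increment @r; traps if the result is negative (counter overflow). */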
static __always_inline void refcount_inc(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "incl %0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}
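
/* Decrement @r; traps if the result is zero or negative (underflow). */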
static __always_inline void refcount_dec(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "decl %0\n\t"
		REFCOUNT_CHECK_LE_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}
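
/*
 * Subtract @i from @r and return true if the new value is zero.
 * A negative result traps.
 */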
static __always_inline __must_check
bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,
				  r->refs.counter, "er", i, "%0", e, "cx");
}
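
/*
 * Decrement @r and return true if the new value is zero.
 * A negative result traps.
 */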
static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl", REFCOUNT_CHECK_LT_ZERO,
				 r->refs.counter, "%0", e, "cx");
}
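
/*
 * Add @i to @r unless @r is zero; returns false if @r was zero, true
 * otherwise. Overflow triggers the refcount exception instead of letting
 * the counter wrap.
 */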
static __always_inline __must_check
bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	int c, result;

	c = atomic_read(&(r->refs));
	do {
		if (unlikely(c == 0))
			return false;

		result = c + i;
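
		/* Did we try to increment from/to an undesirable state? */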
		if (unlikely(c < 0 || c == INT_MAX || result < c)) {
			asm volatile(REFCOUNT_ERROR
				     : : [counter] "m" (r->refs.counter)
				     : "cc", "cx");
			break;
		}

	} while (!atomic_try_cmpxchg(&(r->refs), &c, result));

	return c != 0;
}
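
/* Increment @r unless it is zero; returns true if the increment happened. */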
static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return refcount_add_not_zero(1, r);
}

#endif /* __ASM_X86_REFCOUNT_H */