1
2
3
4
5
6
7
8
9
10
11
12
13
14
15#ifndef _CHECKSUM_H
16#define _CHECKSUM_H
17
18#include <linux/errno.h>
19#include <asm/types.h>
20#include <asm/byteorder.h>
21#include <linux/uaccess.h>
22#include <asm/checksum.h>
23
24#ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
25static inline
26__wsum csum_and_copy_from_user (const void __user *src, void *dst,
27 int len)
28{
29 if (copy_from_user(dst, src, len))
30 return 0;
31 return csum_partial(dst, len, ~0U);
32}
33#endif
34
35#ifndef HAVE_CSUM_COPY_USER
36static __inline__ __wsum csum_and_copy_to_user
37(const void *src, void __user *dst, int len)
38{
39 __wsum sum = csum_partial(src, len, ~0U);
40
41 if (copy_to_user(dst, src, len) == 0)
42 return sum;
43 return 0;
44}
45#endif
46
47#ifndef _HAVE_ARCH_CSUM_AND_COPY
48static inline __wsum
49csum_partial_copy_nocheck(const void *src, void *dst, int len)
50{
51 memcpy(dst, src, len);
52 return csum_partial(dst, len, 0);
53}
54#endif
55
56#ifndef HAVE_ARCH_CSUM_ADD
57static inline __wsum csum_add(__wsum csum, __wsum addend)
58{
59 u32 res = (__force u32)csum;
60 res += (__force u32)addend;
61 return (__force __wsum)(res + (res < (__force u32)addend));
62}
63#endif
64
65static inline __wsum csum_sub(__wsum csum, __wsum addend)
66{
67 return csum_add(csum, ~addend);
68}
69
70static inline __sum16 csum16_add(__sum16 csum, __be16 addend)
71{
72 u16 res = (__force u16)csum;
73
74 res += (__force u16)addend;
75 return (__force __sum16)(res + (res < (__force u16)addend));
76}
77
78static inline __sum16 csum16_sub(__sum16 csum, __be16 addend)
79{
80 return csum16_add(csum, ~addend);
81}
82
83static inline __wsum
84csum_block_add(__wsum csum, __wsum csum2, int offset)
85{
86 u32 sum = (__force u32)csum2;
87
88
89 if (offset & 1)
90 sum = ror32(sum, 8);
91
92 return csum_add(csum, (__force __wsum)sum);
93}
94
/*
 * Extended variant of csum_block_add() that also receives the block
 * length. This generic version ignores 'len' and just delegates;
 * presumably the parameter exists so arch overrides can use it —
 * NOTE(review): confirm against arch implementations.
 */
static inline __wsum
csum_block_add_ext(__wsum csum, __wsum csum2, int offset, int len)
{
	return csum_block_add(csum, csum2, offset);
}
100
101static inline __wsum
102csum_block_sub(__wsum csum, __wsum csum2, int offset)
103{
104 return csum_block_add(csum, ~csum2, offset);
105}
106
/*
 * Widen a folded 16-bit checksum back into the 32-bit partial-checksum
 * domain so it can take part in csum_add()/csum_sub() arithmetic.
 * Pure type conversion; no bits change.
 */
static inline __wsum csum_unfold(__sum16 n)
{
	return (__force __wsum)n;
}
111
/*
 * Thin alias for csum_partial(); presumably kept as a separate symbol
 * so callers/arch code can hook it independently — NOTE(review):
 * confirm against callers.
 */
static inline __wsum csum_partial_ext(const void *buff, int len, __wsum sum)
{
	return csum_partial(buff, len, sum);
}
116
117#define CSUM_MANGLED_0 ((__force __sum16)0xffff)
118
119static inline void csum_replace_by_diff(__sum16 *sum, __wsum diff)
120{
121 *sum = csum_fold(csum_add(diff, ~csum_unfold(*sum)));
122}
123
124static inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
125{
126 __wsum tmp = csum_sub(~csum_unfold(*sum), (__force __wsum)from);
127
128 *sum = csum_fold(csum_add(tmp, (__force __wsum)to));
129}
130
131
132
133
134
135
136
137static inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
138{
139 *sum = ~csum16_add(csum16_sub(~(*sum), old), new);
140}
141
struct sk_buff;

/*
 * skb-aware checksum fixups, defined out of line (net/core/utils.c in
 * mainline trees). They update a transport checksum field for a 4-byte
 * value change, a 16-byte (IPv6 address) change, or an arbitrary
 * precomputed difference; 'pseudohdr' indicates the changed data is
 * also covered by the pseudo-header.
 */
void inet_proto_csum_replace4(__sum16 *sum, struct sk_buff *skb,
			      __be32 from, __be32 to, bool pseudohdr);
void inet_proto_csum_replace16(__sum16 *sum, struct sk_buff *skb,
			       const __be32 *from, const __be32 *to,
			       bool pseudohdr);
void inet_proto_csum_replace_by_diff(__sum16 *sum, struct sk_buff *skb,
				     __wsum diff, bool pseudohdr);
150
151static inline void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb,
152 __be16 from, __be16 to,
153 bool pseudohdr)
154{
155 inet_proto_csum_replace4(sum, skb, (__force __be32)from,
156 (__force __be32)to, pseudohdr);
157}
158
/*
 * Remote-checksum-offload adjustment: given the checksum of a packet
 * region, rewrite the 16-bit checksum field at ptr+offset so it covers
 * only the bytes from 'start' onward, and return the ones'-complement
 * delta between the new and old field values (for the caller to patch
 * into an outer checksum).
 */
static inline __wsum remcsum_adjust(void *ptr, __wsum csum,
				    int start, int offset)
{
	__sum16 *psum = (__sum16 *)(ptr + offset);
	__wsum delta;

	/* Drop the bytes before 'start' from the running checksum. */
	csum = csum_sub(csum, csum_partial(ptr, start, 0));

	/* Compute new-minus-old BEFORE overwriting the stored field —
	 * the old *psum value is consumed here. */
	delta = csum_sub((__force __wsum)csum_fold(csum),
			 (__force __wsum)*psum);
	*psum = csum_fold(csum);

	return delta;
}
175
176static inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
177{
178 *psum = csum_fold(csum_sub(delta, (__force __wsum)*psum));
179}
180
181#endif
182