1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19#ifndef _CHECKSUM_H
20#define _CHECKSUM_H
21
22#include <linux/errno.h>
23#include <asm/types.h>
24#include <asm/byteorder.h>
25#include <asm/uaccess.h>
26#include <asm/checksum.h>
27
28#ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
29static inline
30__wsum csum_and_copy_from_user (const void __user *src, void *dst,
31 int len, __wsum sum, int *err_ptr)
32{
33 if (access_ok(VERIFY_READ, src, len))
34 return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
35
36 if (len)
37 *err_ptr = -EFAULT;
38
39 return sum;
40}
41#endif
42
43#ifndef HAVE_CSUM_COPY_USER
44static __inline__ __wsum csum_and_copy_to_user
45(const void *src, void __user *dst, int len, __wsum sum, int *err_ptr)
46{
47 sum = csum_partial(src, len, sum);
48
49 if (access_ok(VERIFY_WRITE, dst, len)) {
50 if (copy_to_user(dst, src, len) == 0)
51 return sum;
52 }
53 if (len)
54 *err_ptr = -EFAULT;
55
56 return (__force __wsum)-1;
57}
58#endif
59
60#ifndef HAVE_ARCH_CSUM_ADD
61static inline __wsum csum_add(__wsum csum, __wsum addend)
62{
63 u32 res = (__force u32)csum;
64 res += (__force u32)addend;
65 return (__force __wsum)(res + (res < (__force u32)addend));
66}
67#endif
68
69static inline __wsum csum_sub(__wsum csum, __wsum addend)
70{
71 return csum_add(csum, ~addend);
72}
73
74static inline __sum16 csum16_add(__sum16 csum, __be16 addend)
75{
76 u16 res = (__force u16)csum;
77
78 res += (__force u16)addend;
79 return (__force __sum16)(res + (res < (__force u16)addend));
80}
81
82static inline __sum16 csum16_sub(__sum16 csum, __be16 addend)
83{
84 return csum16_add(csum, ~addend);
85}
86
87static inline __wsum
88csum_block_add(__wsum csum, __wsum csum2, int offset)
89{
90 u32 sum = (__force u32)csum2;
91 if (offset&1)
92 sum = ((sum&0xFF00FF)<<8)+((sum>>8)&0xFF00FF);
93 return csum_add(csum, (__force __wsum)sum);
94}
95
/* Same as csum_block_add(); the trailing 'len' parameter is accepted
 * only to satisfy callers that use the extended signature and is
 * intentionally unused here.
 */
static inline __wsum
csum_block_add_ext(__wsum csum, __wsum csum2, int offset, int len)
{
	return csum_block_add(csum, csum2, offset);
}
101
102static inline __wsum
103csum_block_sub(__wsum csum, __wsum csum2, int offset)
104{
105 u32 sum = (__force u32)csum2;
106 if (offset&1)
107 sum = ((sum&0xFF00FF)<<8)+((sum>>8)&0xFF00FF);
108 return csum_sub(csum, (__force __wsum)sum);
109}
110
/* Promote a folded 16-bit checksum back into the 32-bit __wsum domain.
 * The value is zero-extended, which leaves a ones-complement sum
 * unchanged, so further csum_add()/csum_sub() arithmetic is valid.
 */
static inline __wsum csum_unfold(__sum16 n)
{
	return (__force __wsum)n;
}
115
/* Thin alias for csum_partial(); apparently kept for naming symmetry
 * with csum_block_add_ext() — it adds no behavior of its own.
 */
static inline __wsum csum_partial_ext(const void *buff, int len, __wsum sum)
{
	return csum_partial(buff, len, sum);
}
120
121#define CSUM_MANGLED_0 ((__force __sum16)0xffff)
122
123static inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
124{
125 *sum = csum_fold(csum_add(csum_sub(~csum_unfold(*sum), from), to));
126}
127
128
129
130
131
132
133
134static inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
135{
136 *sum = ~csum16_add(csum16_sub(~(*sum), old), new);
137}
138
struct sk_buff;

/* Out-of-line helpers that patch a transport checksum inside an skb
 * after replacing a 4- or 16-byte field.  NOTE(review): 'pseudohdr'
 * presumably selects whether the pseudo-header contribution is adjusted
 * as well — confirm at the definitions.
 */
void inet_proto_csum_replace4(__sum16 *sum, struct sk_buff *skb,
			      __be32 from, __be32 to, int pseudohdr);
void inet_proto_csum_replace16(__sum16 *sum, struct sk_buff *skb,
			       const __be32 *from, const __be32 *to,
			       int pseudohdr);
145
/* 2-byte variant implemented on top of the 4-byte helper: a __be16
 * force-cast to __be32 carries the value with a zero halfword, and zero
 * halfwords are the identity element of a ones-complement sum, so the
 * result is the same as a native 16-bit replacement.
 */
static inline void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb,
					    __be16 from, __be16 to,
					    int pseudohdr)
{
	inet_proto_csum_replace4(sum, skb, (__force __be32)from,
				 (__force __be32)to, pseudohdr);
}
153
154static inline __wsum remcsum_adjust(void *ptr, __wsum csum,
155 int start, int offset)
156{
157 __sum16 *psum = (__sum16 *)(ptr + offset);
158 __wsum delta;
159
160
161 csum = csum_sub(csum, csum_partial(ptr, start, 0));
162
163
164 delta = csum_sub(csum_fold(csum), *psum);
165 *psum = csum_fold(csum);
166
167 return delta;
168}
169
170static inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
171{
172 *psum = csum_fold(csum_sub(delta, *psum));
173}
174
175#endif
176