19#ifndef _CHECKSUM_H
20#define _CHECKSUM_H
21
22#include <linux/errno.h>
23#include <asm/types.h>
24#include <asm/byteorder.h>
25#include <asm/uaccess.h>
26#include <asm/checksum.h>
27
28#ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
29static inline
30__wsum csum_and_copy_from_user (const void __user *src, void *dst,
31 int len, __wsum sum, int *err_ptr)
32{
33 if (access_ok(VERIFY_READ, src, len))
34 return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
35
36 if (len)
37 *err_ptr = -EFAULT;
38
39 return sum;
40}
41#endif
42
43#ifndef HAVE_CSUM_COPY_USER
44static __inline__ __wsum csum_and_copy_to_user
45(const void *src, void __user *dst, int len, __wsum sum, int *err_ptr)
46{
47 sum = csum_partial(src, len, sum);
48
49 if (access_ok(VERIFY_WRITE, dst, len)) {
50 if (copy_to_user(dst, src, len) == 0)
51 return sum;
52 }
53 if (len)
54 *err_ptr = -EFAULT;
55
56 return (__force __wsum)-1;
57}
58#endif
59
60static inline __wsum csum_add(__wsum csum, __wsum addend)
61{
62 u32 res = (__force u32)csum;
63 res += (__force u32)addend;
64 return (__force __wsum)(res + (res < (__force u32)addend));
65}
66
67static inline __wsum csum_sub(__wsum csum, __wsum addend)
68{
69 return csum_add(csum, ~addend);
70}
71
72static inline __wsum
73csum_block_add(__wsum csum, __wsum csum2, int offset)
74{
75 u32 sum = (__force u32)csum2;
76 if (offset&1)
77 sum = ((sum&0xFF00FF)<<8)+((sum>>8)&0xFF00FF);
78 return csum_add(csum, (__force __wsum)sum);
79}
80
81static inline __wsum
82csum_block_sub(__wsum csum, __wsum csum2, int offset)
83{
84 u32 sum = (__force u32)csum2;
85 if (offset&1)
86 sum = ((sum&0xFF00FF)<<8)+((sum>>8)&0xFF00FF);
87 return csum_sub(csum, (__force __wsum)sum);
88}
89
/*
 * Widen a folded 16-bit checksum back into a 32-bit partial sum.
 * A folded value is already a valid ones'-complement partial sum, so
 * this is a pure type conversion between the sparse checksum types.
 */
static inline __wsum csum_unfold(__sum16 n)
{
	return (__force __wsum)n;
}
94
/* NOTE(review): presumably the on-the-wire stand-in for a computed checksum
 * of zero (0x0000 and 0xffff are equal in ones'-complement; protocols such
 * as UDP reserve 0 to mean "no checksum") — confirm against callers. */
#define CSUM_MANGLED_0 ((__force __sum16)0xffff)
96#endif
97