1
2
3
4
5
6
7
8
9
10
11
12
13#include <linux/module.h>
14#include <linux/types.h>
15
16#include <net/checksum.h>
17#include <asm/byteorder.h>
18#include <asm/string.h>
19#include <linux/uaccess.h>
20
/*
 * addc(_t, _r) - accumulate _r into _t with end-around carry.
 *
 * PA-RISC inline asm: "add" produces a carry flag (PSW C/B bit), and the
 * following "addc %0, %%r0, %0" adds that carry (plus zero) back into the
 * low end of _t.  This is the ones-complement add primitive the checksum
 * loops below are built on.  "0"(_t) ties the input to the output register
 * so the accumulator is updated in place.
 *
 * NOTE(review): the expansion ends in ';', so call sites written as
 * "addc(a, b);" expand to a harmless double semicolon, but using the macro
 * as the body of an un-braced if/else would break.  All call sites in this
 * file are plain statements, so this is left as-is.
 */
#define addc(_t,_r) \
	__asm__ __volatile__ ( \
"	add		%0, %1, %0\n" \
"	addc		%0, %%r0, %0\n" \
	: "=r"(_t) \
	: "r"(_r), "0"(_t));
27
/*
 * from32to16 - fold a 32-bit ones-complement accumulator to 16 bits.
 *
 * Adds the high halfword into the low halfword twice: the first fold can
 * itself produce a carry into bit 16 (e.g. 0x0001ffff -> 0x10000), which
 * the second fold brings back down.  After two folds the upper halfword
 * is guaranteed zero, so the truncating cast is exact.
 */
static inline unsigned short from32to16(unsigned int x)
{
	unsigned int folded = (x >> 16) + (x & 0xffff);

	folded = (folded >> 16) + (folded & 0xffff);
	return (unsigned short)folded;
}
36
/*
 * do_csum - compute the ones-complement sum of the len bytes at buff,
 * folded to 16 bits (not inverted).
 *
 * Structure mirrors lib/checksum.c's generic do_csum: consume a leading
 * byte if buff is odd, then a leading halfword if still not 4-byte
 * aligned, run the main loop over aligned 32-bit words using the addc()
 * carry macro, then mop up a trailing halfword and byte.  If the start
 * was odd, every byte landed one position rotated inside its 16-bit
 * lane, and the final swab16() undoes that rotation.
 *
 * NOTE(review): be16_to_cpu()/le16_to_cpu() are applied to a single
 * *byte* here; on big-endian PA-RISC this presumably keeps the leading
 * byte in the low octet and shifts the trailing byte into the high
 * octet of its halfword — confirm against the generic implementation.
 */
static inline unsigned int do_csum(const unsigned char * buff, int len)
{
	int odd, count;
	unsigned int result = 0;

	if (len <= 0)
		goto out;
	odd = 1 & (unsigned long) buff;
	if (odd) {
		/* leading unaligned byte; compensated by swab16() below */
		result = be16_to_cpu(*buff);
		len--;
		buff++;
	}
	count = len >> 1;		/* nr of 16-bit words.. */
	if (count) {
		if (2 & (unsigned long) buff) {
			/* halfword step up to a 4-byte boundary */
			result += *(unsigned short *) buff;
			count--;
			len -= 2;
			buff += 2;
		}
		count >>= 1;		/* nr of 32-bit words.. */
		if (count) {
			/* unrolled main loop: 4 words per iteration, each
			   carry folded immediately by addc() */
			while (count >= 4) {
				unsigned int r1, r2, r3, r4;
				r1 = *(unsigned int *)(buff + 0);
				r2 = *(unsigned int *)(buff + 4);
				r3 = *(unsigned int *)(buff + 8);
				r4 = *(unsigned int *)(buff + 12);
				addc(result, r1);
				addc(result, r2);
				addc(result, r3);
				addc(result, r4);
				count -= 4;
				buff += 16;
			}
			/* remaining 0..3 whole words */
			while (count) {
				unsigned int w = *(unsigned int *) buff;
				count--;
				buff += 4;
				addc(result, w);
			}
			/* partial fold so the halfword adds below
			   cannot overflow 32 bits */
			result = (result & 0xffff) + (result >> 16);
		}
		if (len & 2) {
			/* trailing halfword */
			result += *(unsigned short *) buff;
			buff += 2;
		}
	}
	if (len & 1)
		/* trailing odd byte */
		result += le16_to_cpu(*buff);
	result = from32to16(result);
	if (odd)
		/* undo the byte rotation introduced by the odd start */
		result = swab16(result);
out:
	return result;
}
94
95
96
97
98
99
100
101__wsum csum_partial(const void *buff, int len, __wsum sum)
102{
103 unsigned int result = do_csum(buff, len);
104 addc(result, sum);
105 return (__force __wsum)from32to16(result);
106}
107
108EXPORT_SYMBOL(csum_partial);
109