1
2
3
4
5
6
7
8
9
10
11#ifndef _S390_CHECKSUM_H
12#define _S390_CHECKSUM_H
13
14#include <asm/uaccess.h>
15
16
17
18
19
20
21
22
23
24
25
26
27
/*
 * Compute a (not yet folded) 32-bit internet checksum over a buffer,
 * accumulating into @sum.
 *
 * Implemented with the s390 CKSM instruction, whose second operand is
 * an even/odd general-register pair holding (address, length) — hence
 * reg2/reg3 are pinned to r2/r3 below.  CKSM may return before the
 * whole operand is consumed (condition code 3 when interrupted), so
 * the "jo 0b" branch re-issues it until the full length is processed;
 * CKSM updates the address/length pair in place as it goes.
 *
 * @buff: start of the data to checksum
 * @len:  number of bytes
 * @sum:  initial checksum to accumulate into
 *
 * Returns the 32-bit running checksum (fold with csum_fold() for the
 * 16-bit wire form).
 */
static inline __wsum
csum_partial(const void *buff, int len, __wsum sum)
{
 /* CKSM needs (address, length) in an even/odd register pair: r2/r3. */
 register unsigned long reg2 asm("2") = (unsigned long) buff;
 register unsigned long reg3 asm("3") = (unsigned long) len;

 asm volatile(
 "0: cksm %0,%1\n" /* accumulate checksum; may stop early */
 " jo 0b\n" /* resume while CKSM reports cc 3 */
 : "+d" (sum), "+d" (reg2), "+d" (reg3) : : "cc", "memory");
 return sum;
}
40
41
42
43
44
45
46
47
48
49
50static inline __wsum
51csum_partial_copy_from_user(const void __user *src, void *dst,
52 int len, __wsum sum,
53 int *err_ptr)
54{
55 int missing;
56
57 missing = copy_from_user(dst, src, len);
58 if (missing) {
59 memset(dst + len - missing, 0, missing);
60 *err_ptr = -EFAULT;
61 }
62
63 return csum_partial(dst, len, sum);
64}
65
66
67static inline __wsum
68csum_partial_copy_nocheck (const void *src, void *dst, int len, __wsum sum)
69{
70 memcpy(dst,src,len);
71 return csum_partial(dst, len, sum);
72}
73
74
75
76
77static inline __sum16 csum_fold(__wsum sum)
78{
79 u32 csum = (__force u32) sum;
80
81 csum += (csum >> 16) + (csum << 16);
82 csum >>= 16;
83 return (__force __sum16) ~csum;
84}
85
86
87
88
89
90
91static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
92{
93 return csum_fold(csum_partial(iph, ihl*4, 0));
94}
95
96
97
98
99
100static inline __wsum
101csum_tcpudp_nofold(__be32 saddr, __be32 daddr, __u32 len, __u8 proto,
102 __wsum sum)
103{
104 __u32 csum = (__force __u32)sum;
105
106 csum += (__force __u32)saddr;
107 if (csum < (__force __u32)saddr)
108 csum++;
109
110 csum += (__force __u32)daddr;
111 if (csum < (__force __u32)daddr)
112 csum++;
113
114 csum += len + proto;
115 if (csum < len + proto)
116 csum++;
117
118 return (__force __wsum)csum;
119}
120
121
122
123
124
125
126static inline __sum16
127csum_tcpudp_magic(__be32 saddr, __be32 daddr, __u32 len, __u8 proto,
128 __wsum sum)
129{
130 return csum_fold(csum_tcpudp_nofold(saddr,daddr,len,proto,sum));
131}
132
133
134
135
136
137
138static inline __sum16 ip_compute_csum(const void *buff, int len)
139{
140 return csum_fold(csum_partial(buff, len, 0));
141}
142
143#endif
144
145
146