#ifndef _CHECKSUM_H
#define _CHECKSUM_H
#include <linux/errno.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <asm/checksum.h>
#if !defined(_HAVE_ARCH_COPY_AND_CSUM_FROM_USER) || !defined(HAVE_CSUM_COPY_USER)
#include <linux/uaccess.h>
#endif
#ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
/*
 * Copy @len bytes in from user space at @src to kernel buffer @dst and
 * checksum the copied data.
 *
 * Returns 0 if the user copy faulted, otherwise the ones'-complement
 * partial checksum of @dst seeded with ~0U.  The all-ones seed keeps a
 * successful result from being 0, so 0 is unambiguous as the error
 * indication.
 */
static __always_inline
__wsum csum_and_copy_from_user (const void __user *src, void *dst,
				int len)
{
	return copy_from_user(dst, src, len) ? 0 : csum_partial(dst, len, ~0U);
}
#endif
#ifndef HAVE_CSUM_COPY_USER
/*
 * Checksum @len bytes of kernel buffer @src, then copy them out to
 * user space at @dst.
 *
 * The checksum (seeded with ~0U) is computed before the copy; 0 is
 * returned if the copy out faults.
 */
static __always_inline __wsum csum_and_copy_to_user
(const void *src, void __user *dst, int len)
{
	__wsum csum = csum_partial(src, len, ~0U);

	if (copy_to_user(dst, src, len))
		return 0;
	return csum;
}
#endif
#ifndef _HAVE_ARCH_CSUM_AND_COPY
/*
 * Copy @len bytes from @src to @dst and return the ones'-complement
 * partial checksum (zero seed) of the copied data.  No fault handling
 * is done — unlike the *_user variants above, neither pointer is a
 * __user pointer.
 */
static __always_inline __wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
	/* memcpy() returns @dst, so checksum the freshly written copy. */
	return csum_partial(memcpy(dst, src, len), len, 0);
}
#endif
#ifndef HAVE_ARCH_CSUM_ADD
/*
 * Add two 32-bit ones'-complement accumulators: plain unsigned add,
 * with the carry out of bit 31 folded back into bit 0 (end-around
 * carry).
 */
static __always_inline __wsum csum_add(__wsum csum, __wsum addend)
{
	u32 sum = (__force u32)csum + (__force u32)addend;

	/* Unsigned add wrapped iff the result is below either operand. */
	if (sum < (__force u32)addend)
		sum++;
	return (__force __wsum)sum;
}
#endif
/* Remove @addend from ones'-complement accumulator @csum — counterpart
 * of csum_add() above.  (Body elided in this view.) */
static __always_inline __wsum csum_sub(__wsum csum, __wsum addend)
{ … }
/* 16-bit analogue of csum_add(): fold big-endian @addend into the
 * folded checksum @csum.  (Body elided in this view.) */
static __always_inline __sum16 csum16_add(__sum16 csum, __be16 addend)
{ … }
/* 16-bit analogue of csum_sub(): remove big-endian @addend from the
 * folded checksum @csum.  (Body elided in this view.) */
static __always_inline __sum16 csum16_sub(__sum16 csum, __be16 addend)
{ … }
#ifndef HAVE_ARCH_CSUM_SHIFT
/* Adjust checksum @sum for data that lands at byte position @offset —
 * NOTE(review): body elided here; presumably swaps bytes when @offset
 * is odd, as ones'-complement sums are byte-position sensitive.
 * Confirm against the full definition. */
static __always_inline __wsum csum_shift(__wsum sum, int offset)
{ … }
#endif
/* Combine @csum2, the checksum of a block starting @offset bytes into
 * the stream, into running checksum @csum.  (Body elided in this view.) */
static __always_inline __wsum
csum_block_add(__wsum csum, __wsum csum2, int offset)
{ … }
/* As csum_block_add(), with the block length @len also supplied —
 * NOTE(review): body elided; whether @len affects the result must be
 * confirmed against the full definition. */
static __always_inline __wsum
csum_block_add_ext(__wsum csum, __wsum csum2, int offset, int len)
{ … }
/* Remove @csum2, the checksum of a block that started @offset bytes
 * into the stream, from running checksum @csum.  (Body elided.) */
static __always_inline __wsum
csum_block_sub(__wsum csum, __wsum csum2, int offset)
{ … }
/* Widen a folded 16-bit checksum @n back into a 32-bit __wsum
 * accumulator.  (Body elided in this view.) */
static __always_inline __wsum csum_unfold(__sum16 n)
{ … }
/* Checksum @len bytes at @buff starting from seed @sum — same shape as
 * csum_partial(); NOTE(review): body elided, presumably a thin wrapper
 * around csum_partial().  Confirm at the full definition. */
static __always_inline
__wsum csum_partial_ext(const void *buff, int len, __wsum sum)
{ … }
/* Special value substituted when a transmitted checksum must not be 0
 * — definition elided in this view; confirm its value there. */
#define CSUM_MANGLED_0 …
/* Fold precomputed difference @diff into the folded 16-bit checksum
 * stored at @sum, in place.  (Body elided in this view.) */
static __always_inline void csum_replace_by_diff(__sum16 *sum, __wsum diff)
{ … }
/* Update the folded checksum at @sum, in place, for a 32-bit
 * big-endian field changing from @from to @to.  (Body elided.) */
static __always_inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
{ … }
/* 16-bit variant of csum_replace4(): update the folded checksum at
 * @sum for a field changing from @old to @new.  (Body elided.) */
static __always_inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
{ … }
/* Update the 32-bit accumulator at @csum, in place, when a covered
 * 32-bit word changes from @old to @new.  (Body elided.) */
static inline void csum_replace(__wsum *csum, __wsum old, __wsum new)
{ … }
struct sk_buff;
/*
 * skb-aware checksum fixups, defined out of line elsewhere: update the
 * folded transport checksum at @sum for a field in @skb changing from
 * @from to @to (4-byte, 16-byte, and precomputed-diff variants).
 * NOTE(review): @pseudohdr presumably indicates the field is also
 * covered by the pseudo-header checksum — confirm at the definitions.
 */
void inet_proto_csum_replace4(__sum16 *sum, struct sk_buff *skb,
__be32 from, __be32 to, bool pseudohdr);
void inet_proto_csum_replace16(__sum16 *sum, struct sk_buff *skb,
const __be32 *from, const __be32 *to,
bool pseudohdr);
void inet_proto_csum_replace_by_diff(__sum16 *sum, struct sk_buff *skb,
__wsum diff, bool pseudohdr);
/* 16-bit variant of inet_proto_csum_replace4(): fix up the checksum at
 * @sum in @skb for a field changing from @from to @to.  (Body elided
 * in this view.) */
static __always_inline
void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb,
__be16 from, __be16 to, bool pseudohdr)
{ … }
/* Remote-checksum-offload helper: adjust @csum for the packet data at
 * @ptr using checksum @start/@offset positions — NOTE(review): body
 * elided; exact semantics of @start and @offset must be confirmed at
 * the full definition. */
static __always_inline __wsum remcsum_adjust(void *ptr, __wsum csum,
int start, int offset)
{ … }
/* Undo a remcsum_adjust() by applying @delta back onto the folded
 * checksum stored at @psum, in place.  (Body elided in this view.) */
static __always_inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
{ … }
/* Return the negation of accumulator @val, usable as a subtractive
 * term in checksum arithmetic.  (Body elided in this view.) */
static __always_inline __wsum wsum_negate(__wsum val)
{ … }
#endif