Commit 5904122c authored by Al Viro

take the dummy csum_and_copy_from_user() into net/checksum.h

now that this can be done conveniently - all non-trivial cases have
_HAVE_ARCH_COPY_AND_CSUM_FROM_USER defined, so the fallback in
net/checksum.h is used only for the dummy (copy_from_user, then
csum_partial) implementation.  That allows us to get rid of all the
dummy instances of both csum_and_copy_from_user() and
csum_partial_copy_from_user().
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent 24f9aa92
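For context, the generic fallback that this commit leaves in include/net/checksum.h boils down to copy_from_user() followed by csum_partial(). The sketch below simply restates the new code from the net/checksum.h hunk further down; the #ifndef guard line itself is inferred from the #endif visible in that hunk.

/* Sketch of the fallback kept in include/net/checksum.h after this commit;
 * the #ifndef line is inferred from the #endif shown in the hunk below.
 */
#ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
static inline
__wsum csum_and_copy_from_user(const void __user *src, void *dst,
                               int len, __wsum sum, int *err_ptr)
{
        /* dummy implementation: bulk copy, then checksum the kernel copy */
        if (copy_from_user(dst, src, len))
                *err_ptr = -EFAULT;
        return csum_partial(dst, len, sum);
}
#endif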
@@ -4,28 +4,6 @@
 #include <linux/module.h>
 #include <net/checksum.h>
-#include <asm/byteorder.h>
-
-/*
- * copy from fs while checksumming, otherwise like csum_partial
- */
-__wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
-                            __wsum sum, int *csum_err)
-{
-        int missing;
-
-        missing = __copy_from_user(dst, src, len);
-        if (missing) {
-                memset(dst + len - missing, 0, missing);
-                *csum_err = -EFAULT;
-        } else
-                *csum_err = 0;
-
-        return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_partial_copy_from_user);
 
 /* These are from csum_64plus.S */
 EXPORT_SYMBOL(csum_partial);
 EXPORT_SYMBOL(csum_partial_copy);
...
@@ -37,17 +37,6 @@ extern __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
  */
 extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 
-#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
-/*
- * Same as csum_partial, but copies from src while it checksums.
- *
- * Here it is even more important to align src and dst on a 32-bit (or
- * even better 64-bit) boundary.
- */
-extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
-                                      int len, __wsum sum,
-                                      int *errp);
-
 extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
                                         int len, __wsum sum);
...
@@ -103,24 +103,6 @@ unsigned long do_csum_c(const unsigned char * buff, int len, unsigned int psum)
  * This is very ugly but temporary. THIS NEEDS SERIOUS ENHANCEMENTS.
  * But it's very tricky to get right even in C.
  */
-__wsum
-csum_and_copy_from_user(const void __user *src, void *dst,
-                        int len, __wsum psum, int *errp)
-{
-        /* XXX Fixme
-         * for now we separate the copy from checksum for obvious
-         * alignment difficulties. Look at the Alpha code and you'll be
-         * scared.
-         */
-
-        if (copy_from_user(dst, src, len))
-                *errp = -EFAULT;
-
-        return csum_partial(dst, len, psum);
-}
-EXPORT_SYMBOL(csum_and_copy_from_user);
-
 __wsum
 csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
 {
...
@@ -14,8 +14,6 @@
 extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 extern __wsum csum_partial_copy(const void *src, void *dst, int len,
                                 __wsum sum);
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-                                          int len, __wsum sum, int *csum_err);
 
 #define csum_partial_copy_nocheck(src, dst, len, sum)   \
         csum_partial_copy((src), (dst), (len), (sum))
...
@@ -26,14 +26,6 @@ extern __wsum csum_partial(const void *, int, __wsum);
  */
 extern __wsum csum_partial_copy_nocheck(const void *, void *, int, __wsum);
 
-#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
-/*
- * this is a new version of the above that records errors it finds in *errp,
- * but continues and zeros the rest of the buffer.
- */
-extern __wsum csum_and_copy_from_user(const void __user *src,
-                                      void *dst, int len, __wsum sum, int *errp);
-
 /*
  * Optimized for IP headers, which always checksum on 4 octet boundaries.
  *
...
@@ -123,17 +123,3 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
         return sum;
 }
 EXPORT_SYMBOL(csum_partial_copy_nocheck);
-
-/*
- * Copy from userspace and compute checksum.  If we catch an exception
- * then zero the rest of the buffer.
- */
-__wsum csum_and_copy_from_user(const void __user *src,
-                               void *dst, int len,
-                               __wsum sum, int *err_ptr)
-{
-        if (copy_from_user(dst, src, len))
-                *err_ptr = -EFAULT;
-        return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_and_copy_from_user);
@@ -39,25 +39,6 @@ csum_partial(const void *buff, int len, __wsum sum)
         return sum;
 }
 
-/*
- * the same as csum_partial_copy, but copies from user space.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- *
- * Copy from userspace and compute checksum.
- */
-static inline __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst,
-                            int len, __wsum sum,
-                            int *err_ptr)
-{
-        if (unlikely(copy_from_user(dst, src, len)))
-                *err_ptr = -EFAULT;
-        return csum_partial(dst, len, sum);
-}
-
 static inline __wsum
 csum_partial_copy_nocheck (const void *src, void *dst, int len, __wsum sum)
 {
...
@@ -36,26 +36,6 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
         return csum_partial(dst, len, sum);
 }
 
-/*
- * the same as csum_partial, but copies from src while it
- * checksums, and handles user-space pointer exceptions correctly, when needed.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- */
-static __inline__
-__wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-                                   int len, __wsum sum, int *err_ptr)
-{
-        if (copy_from_user(dst, src, len)) {
-                *err_ptr = -EFAULT;
-                return (__force __wsum)-1;
-        }
-
-        return csum_partial(dst, len, sum);
-}
-
 /**
  * csum_fold - Fold and invert a 32bit checksum.
  * sum: 32bit unfolded sum
...
@@ -25,15 +25,6 @@ extern __wsum csum_partial(const void *buff, int len, __wsum sum);
  */
 extern __wsum csum_partial_copy(const void *src, void *dst, int len, __wsum sum);
 
-/*
- * the same as csum_partial_copy, but copies from user space.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- */
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-                                          int len, __wsum sum, int *csum_err);
-
 #ifndef csum_partial_copy_nocheck
 #define csum_partial_copy_nocheck(src, dst, len, sum)   \
         csum_partial_copy((src), (dst), (len), (sum))
...
@@ -26,13 +26,9 @@ static inline
 __wsum csum_and_copy_from_user (const void __user *src, void *dst,
                                 int len, __wsum sum, int *err_ptr)
 {
-        if (access_ok(src, len))
-                return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
-
-        if (len)
+        if (copy_from_user(dst, src, len))
                 *err_ptr = -EFAULT;
-
-        return sum;
+        return csum_partial(dst, len, sum);
 }
 #endif
...
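Architectures that keep a non-trivial csum_and_copy_from_user() opt out of the fallback above by defining _HAVE_ARCH_COPY_AND_CSUM_FROM_USER in their asm/checksum.h and declaring their own routine. A minimal sketch of that opt-out pattern, not taken from any particular header in this commit:

/* Hypothetical <asm/checksum.h> excerpt for an architecture with its own
 * optimized copy-and-checksum routine (names mirror the generic prototype).
 */
#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
                                      int len, __wsum sum, int *errp);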
@@ -145,26 +145,6 @@ __sum16 ip_compute_csum(const void *buff, int len)
 }
 EXPORT_SYMBOL(ip_compute_csum);
 
-/*
- * copy from fs while checksumming, otherwise like csum_partial
- */
-__wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
-                            __wsum sum, int *csum_err)
-{
-        int missing;
-
-        missing = __copy_from_user(dst, src, len);
-        if (missing) {
-                memset(dst + len - missing, 0, missing);
-                *csum_err = -EFAULT;
-        } else
-                *csum_err = 0;
-
-        return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_partial_copy_from_user);
-
 /*
  * copy from ds while checksumming, otherwise like csum_partial
  */
...