Merge branch 'uaccess.csum' of git://git.kernel.org/pub/scm/linux/kernel/git/viro/vfs
Pull uaccess/csum updates from Al Viro:
 "Regularize the situation with uaccess checksum primitives:

   - fold csum_partial_... into csum_and_copy_..._user()

   - on x86 collapse several access_ok()/stac()/clac() into
     user_access_begin()/user_access_end()"

* 'uaccess.csum' of git://git.kernel.org/pub/scm/linux/kernel/git/viro/vfs:
  default csum_and_copy_to_user(): don't bother with access_ok()
  take the dummy csum_and_copy_from_user() into net/checksum.h
  arm: switch to csum_and_copy_from_user()
  sh32: convert to csum_and_copy_from_user()
  m68k: convert to csum_and_copy_from_user()
  xtensa: switch to providing csum_and_copy_from_user()
  sparc: switch to providing csum_and_copy_from_user()
  parisc: turn csum_partial_copy_from_user() into csum_and_copy_from_user()
  alpha: turn csum_partial_copy_from_user() into csum_and_copy_from_user()
  ia64: turn csum_partial_copy_from_user() into csum_and_copy_from_user()
  ia64: csum_partial_copy_nocheck(): don't abuse csum_partial_copy_from_user()
  x86: switch 32bit csum_and_copy_to_user() to user_access_{begin,end}()
  x86: switch both 32bit and 64bit to providing csum_and_copy_from_user()
  x86_64: csum_..._copy_..._user(): switch to unsafe_..._user()
  get rid of csum_partial_copy_to_user()
This commit is contained in:
commit 4b01285e16
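To make the two points in the pull message concrete, below is a minimal, self-contained userspace sketch of the control flow the series converges on: a single csum_and_copy_from_user()-style helper that brackets the whole copy-and-checksum in one user_access_begin()/user_access_end() pair and reports -EFAULT only when there was actually something to copy. The mock_* helpers and sketch_csum_and_copy_from_user() are stand-ins invented for this illustration only; the real kernel code uses user_access_begin()/user_access_end(), csum_partial_copy_generic() and friends, as the hunks below show.

/*
 * Illustrative sketch only -- not the kernel implementation.  The mock_*
 * helpers below are invented stand-ins for user_access_begin(),
 * user_access_end() and csum_partial(); they exist so the control flow
 * can be compiled and run as a normal userspace program.
 */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for user_access_begin(): validate the range, "open" user access. */
static int mock_user_access_begin(const void *uaddr, size_t len)
{
	return uaddr != NULL || len == 0;
}

/* Stand-in for user_access_end(): "close" user access again. */
static void mock_user_access_end(void)
{
}

/* Toy stand-in for csum_partial(): sum 16-bit big-endian words, fold carries. */
static uint32_t mock_csum_partial(const void *buf, size_t len, uint32_t sum)
{
	const uint8_t *p = buf;
	uint64_t acc = sum;
	size_t i;

	for (i = 0; i + 1 < len; i += 2)
		acc += (uint32_t)((p[i] << 8) | p[i + 1]);
	if (len & 1)
		acc += (uint32_t)(p[len - 1] << 8);
	while (acc >> 16)
		acc = (acc & 0xffff) + (acc >> 16);
	return (uint32_t)acc;
}

/*
 * Shape of the consolidated helper: one begin/end pair brackets the whole
 * copy-and-checksum, a failed range check sets -EFAULT only when len != 0,
 * and the running sum is returned either way.
 */
static uint32_t sketch_csum_and_copy_from_user(const void *src, void *dst,
					       size_t len, uint32_t sum,
					       int *errp)
{
	if (!mock_user_access_begin(src, len)) {
		if (len)
			*errp = -EFAULT;
		return sum;
	}
	memcpy(dst, src, len);		/* kernel: csum_partial_copy_generic() */
	sum = mock_csum_partial(dst, len, sum);
	mock_user_access_end();
	return sum;
}

int main(void)
{
	uint8_t src[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	uint8_t dst[8];
	int err = 0;
	uint32_t sum;

	sum = sketch_csum_and_copy_from_user(src, dst, sizeof(src), 0, &err);
	printf("err=%d sum=0x%04x\n", err, sum);
	return 0;
}

In the kernel itself the same shape is what the x86 hunks below switch to, with unsafe_get_user()/unsafe_put_user() handling the odd-alignment head and tail bytes inside the begin/end window.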
@@ -41,7 +41,8 @@ extern __wsum csum_partial(const void *buff, int len, __wsum sum);
  * here even more important to align src and dst on a 32-bit (or even
  * better 64-bit) boundary
  */
-__wsum csum_partial_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *errp);
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *errp);
 
 __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum);
 
@@ -325,7 +325,7 @@ csum_partial_cfu_unaligned(const unsigned long __user * src,
 }
 
 __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
+csum_and_copy_from_user(const void __user *src, void *dst, int len,
 			       __wsum sum, int *errp)
 {
 	unsigned long checksum = (__force u32) sum;
@@ -369,7 +369,7 @@ csum_partial_copy_from_user(const void __user *src, void *dst, int len,
 	}
 	return (__force __wsum)checksum;
 }
-EXPORT_SYMBOL(csum_partial_copy_from_user);
+EXPORT_SYMBOL(csum_and_copy_from_user);
 
 __wsum
 csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
@@ -377,7 +377,7 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
 	__wsum checksum;
 	mm_segment_t oldfs = get_fs();
 	set_fs(KERNEL_DS);
-	checksum = csum_partial_copy_from_user((__force const void __user *)src,
+	checksum = csum_and_copy_from_user((__force const void __user *)src,
 						dst, len, sum, NULL);
 	set_fs(oldfs);
 	return checksum;
 
@@ -40,6 +40,20 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum);
 __wsum
 csum_partial_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *err_ptr);
 
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
+static inline
+__wsum csum_and_copy_from_user (const void __user *src, void *dst,
+				      int len, __wsum sum, int *err_ptr)
+{
+	if (access_ok(src, len))
+		return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
+
+	if (len)
+		*err_ptr = -EFAULT;
+
+	return sum;
+}
+
 /*
  * Fold a partial checksum without adding pseudo headers
  */
 
@@ -4,28 +4,6 @@
 #include <linux/module.h>
 #include <net/checksum.h>
 
-#include <asm/byteorder.h>
-
-/*
- * copy from fs while checksumming, otherwise like csum_partial
- */
-__wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
-			    __wsum sum, int *csum_err)
-{
-	int missing;
-
-	missing = __copy_from_user(dst, src, len);
-	if (missing) {
-		memset(dst + len - missing, 0, missing);
-		*csum_err = -EFAULT;
-	} else
-		*csum_err = 0;
-
-	return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_partial_copy_from_user);
-
 /* These are from csum_64plus.S */
 EXPORT_SYMBOL(csum_partial);
 EXPORT_SYMBOL(csum_partial_copy);
 
@@ -37,16 +37,6 @@ extern __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
  */
 extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 
-/*
- * Same as csum_partial, but copies from src while it checksums.
- *
- * Here it is even more important to align src and dst on a 32-bit (or
- * even better 64-bit) boundary.
- */
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-					   int len, __wsum sum,
-					   int *errp);
-
 extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 					 int len, __wsum sum);
 
@@ -103,39 +103,11 @@ out:
  * This is very ugly but temporary. THIS NEEDS SERIOUS ENHANCEMENTS.
  * But it's very tricky to get right even in C.
  */
-extern unsigned long do_csum(const unsigned char *, long);
-
-__wsum
-csum_partial_copy_from_user(const void __user *src, void *dst,
-				int len, __wsum psum, int *errp)
-{
-	unsigned long result;
-
-	/* XXX Fixme
-	 * for now we separate the copy from checksum for obvious
-	 * alignment difficulties. Look at the Alpha code and you'll be
-	 * scared.
-	 */
-
-	if (__copy_from_user(dst, src, len) != 0 && errp)
-		*errp = -EFAULT;
-
-	result = do_csum(dst, len);
-
-	/* add in old sum, and carry.. */
-	result += (__force u32)psum;
-	/* 32+c bits -> 32 bits */
-	result = (result & 0xffffffff) + (result >> 32);
-	return (__force __wsum)result;
-}
-
-EXPORT_SYMBOL(csum_partial_copy_from_user);
-
 __wsum
 csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
 {
-	return csum_partial_copy_from_user((__force const void __user *)src,
-						dst, len, sum, NULL);
+	memcpy(dst, src, len);
+	return csum_partial(dst, len, sum);
 }
 
 EXPORT_SYMBOL(csum_partial_copy_nocheck);
 
@@ -30,7 +30,8 @@ __wsum csum_partial(const void *buff, int len, __wsum sum);
  * better 64-bit) boundary
  */
 
-extern __wsum csum_partial_copy_from_user(const void __user *src,
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
+extern __wsum csum_and_copy_from_user(const void __user *src,
 						void *dst,
 						int len, __wsum sum,
 						int *csum_err);
 
@@ -129,7 +129,7 @@ EXPORT_SYMBOL(csum_partial);
  */
 
 __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst,
+csum_and_copy_from_user(const void __user *src, void *dst,
 			    int len, __wsum sum, int *csum_err)
 {
 	/*
@@ -316,7 +316,7 @@ csum_partial_copy_from_user(const void __user *src, void *dst,
 	return(sum);
 }
 
-EXPORT_SYMBOL(csum_partial_copy_from_user);
+EXPORT_SYMBOL(csum_and_copy_from_user);
 
 
 /*
 
@@ -14,8 +14,6 @@
 extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 extern __wsum csum_partial_copy(const void *src, void *dst, int len,
 				__wsum sum);
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-					   int len, __wsum sum, int *csum_err);
 #define csum_partial_copy_nocheck(src, dst, len, sum) \
 	csum_partial_copy((src), (dst), (len), (sum))
 
@@ -26,13 +26,6 @@ extern __wsum csum_partial(const void *, int, __wsum);
  */
 extern __wsum csum_partial_copy_nocheck(const void *, void *, int, __wsum);
 
-/*
- * this is a new version of the above that records errors it finds in *errp,
- * but continues and zeros the rest of the buffer.
- */
-extern __wsum csum_partial_copy_from_user(const void __user *src,
-					   void *dst, int len, __wsum sum, int *errp);
-
 /*
  * Optimized for IP headers, which always checksum on 4 octet boundaries.
  *
 
@@ -123,23 +123,3 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 	return sum;
 }
 EXPORT_SYMBOL(csum_partial_copy_nocheck);
-
-/*
- * Copy from userspace and compute checksum.  If we catch an exception
- * then zero the rest of the buffer.
- */
-__wsum csum_partial_copy_from_user(const void __user *src,
-			void *dst, int len,
-			__wsum sum, int *err_ptr)
-{
-	int missing;
-
-	missing = copy_from_user(dst, src, len);
-	if (missing) {
-		memset(dst + len - missing, 0, missing);
-		*err_ptr = -EFAULT;
-	}
-
-	return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_partial_copy_from_user);
 
@@ -39,25 +39,6 @@ csum_partial(const void *buff, int len, __wsum sum)
 	return sum;
 }
 
-/*
- * the same as csum_partial_copy, but copies from user space.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- *
- * Copy from userspace and compute checksum.
- */
-static inline __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst,
-					  int len, __wsum sum,
-					  int *err_ptr)
-{
-	if (unlikely(copy_from_user(dst, src, len)))
-		*err_ptr = -EFAULT;
-	return csum_partial(dst, len, sum);
-}
-
-
 static inline __wsum
 csum_partial_copy_nocheck (const void *src, void *dst, int len, __wsum sum)
 {
 
@@ -48,12 +48,17 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 	return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL);
 }
 
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
-__wsum csum_partial_copy_from_user(const void __user *src, void *dst,
+__wsum csum_and_copy_from_user(const void __user *src, void *dst,
 				   int len, __wsum sum, int *err_ptr)
 {
-	return csum_partial_copy_generic((__force const void *)src, dst,
+	if (access_ok(src, len))
+		return csum_partial_copy_generic((__force const void *)src, dst,
 					len, sum, err_ptr, NULL);
+	if (len)
+		*err_ptr = -EFAULT;
+	return sum;
 }
 
 /*
 
@@ -1,6 +1,7 @@
 /* SPDX-License-Identifier: GPL-2.0 */
 #ifndef ___ASM_SPARC_CHECKSUM_H
 #define ___ASM_SPARC_CHECKSUM_H
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 #if defined(__sparc__) && defined(__arch64__)
 #include <asm/checksum_64.h>
 #else
 
@@ -60,7 +60,7 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
 }
 
 static inline __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
+csum_and_copy_from_user(const void __user *src, void *dst, int len,
 			    __wsum sum, int *err)
 {
 	register unsigned long ret asm("o0") = (unsigned long)src;
@@ -68,6 +68,12 @@ csum_partial_copy_from_user(const void __user *src, void *dst, int len,
 	register int l asm("g1") = len;
 	register __wsum s asm("g7") = sum;
 
+	if (unlikely(!access_ok(src, len))) {
+		if (len)
+			*err = -EFAULT;
+		return sum;
+	}
+
 	__asm__ __volatile__ (
 	".section __ex_table,#alloc\n\t"
 	".align 4\n\t"
@@ -83,8 +89,10 @@ csum_partial_copy_from_user(const void __user *src, void *dst, int len,
 	return (__force __wsum)ret;
 }
 
+#define HAVE_CSUM_COPY_USER
+
 static inline __wsum
-csum_partial_copy_to_user(const void *src, void __user *dst, int len,
+csum_and_copy_to_user(const void *src, void __user *dst, int len,
 			  __wsum sum, int *err)
 {
 	if (!access_ok(dst, len)) {
@@ -113,9 +121,6 @@ csum_partial_copy_to_user(const void *src, void __user *dst, int len,
 	}
 }
 
-#define HAVE_CSUM_COPY_USER
-#define csum_and_copy_to_user csum_partial_copy_to_user
-
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
  * the majority of the time.
  */
 
@@ -46,7 +46,7 @@ long __csum_partial_copy_from_user(const void __user *src,
 				   __wsum sum);
 
 static inline __wsum
-csum_partial_copy_from_user(const void __user *src,
+csum_and_copy_from_user(const void __user *src,
 			    void *dst, int len,
 			    __wsum sum, int *err)
 {
 
@@ -1,4 +1,6 @@
 /* SPDX-License-Identifier: GPL-2.0 */
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 1
+#define HAVE_CSUM_COPY_USER
 #ifdef CONFIG_X86_32
 # include <asm/checksum_32.h>
 #else
 
@@ -44,18 +44,21 @@ static inline __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 	return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL);
 }
 
-static inline __wsum csum_partial_copy_from_user(const void __user *src,
-						  void *dst,
-						  int len, __wsum sum,
-						  int *err_ptr)
+static inline __wsum csum_and_copy_from_user(const void __user *src,
+					     void *dst, int len,
+					     __wsum sum, int *err_ptr)
 {
 	__wsum ret;
 
 	might_sleep();
-	stac();
+	if (!user_access_begin(src, len)) {
+		if (len)
+			*err_ptr = -EFAULT;
+		return sum;
+	}
 	ret = csum_partial_copy_generic((__force void *)src, dst,
 					len, sum, err_ptr, NULL);
-	clac();
+	user_access_end();
 
 	return ret;
 }
@@ -173,7 +176,6 @@ static inline __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
 /*
  *	Copy and checksum to user
  */
-#define HAVE_CSUM_COPY_USER
 static inline __wsum csum_and_copy_to_user(const void *src,
 					   void __user *dst,
 					   int len, __wsum sum,
@@ -182,11 +184,10 @@ static inline __wsum csum_and_copy_to_user(const void *src,
 	__wsum ret;
 
 	might_sleep();
-	if (access_ok(dst, len)) {
-		stac();
+	if (user_access_begin(dst, len)) {
 		ret = csum_partial_copy_generic(src, (__force void *)dst,
 						len, sum, NULL, err_ptr);
-		clac();
+		user_access_end();
 		return ret;
 	}
 
@@ -129,27 +129,19 @@ static inline __sum16 csum_tcpudp_magic(__be32 saddr, __be32 daddr,
  */
 extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 
-#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER 1
-#define HAVE_CSUM_COPY_USER 1
-
-
 /* Do not call this directly. Use the wrappers below */
 extern __visible __wsum csum_partial_copy_generic(const void *src, const void *dst,
 					int len, __wsum sum,
 					int *src_err_ptr, int *dst_err_ptr);
 
 
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
+extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
 					  int len, __wsum isum, int *errp);
-extern __wsum csum_partial_copy_to_user(const void *src, void __user *dst,
+extern __wsum csum_and_copy_to_user(const void *src, void __user *dst,
 				    int len, __wsum isum, int *errp);
 extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 					 int len, __wsum sum);
 
-/* Old names. To be removed. */
-#define csum_and_copy_to_user csum_partial_copy_to_user
-#define csum_and_copy_from_user csum_partial_copy_from_user
-
 /**
  * ip_compute_csum - Compute an 16bit IP checksum.
  * @buff: buffer address.
 
@@ -10,7 +10,7 @@
 #include <asm/smap.h>
 
 /**
- * csum_partial_copy_from_user - Copy and checksum from user space.
+ * csum_and_copy_from_user - Copy and checksum from user space.
  * @src: source address (user space)
  * @dst: destination address
  * @len: number of bytes to be copied.
@@ -21,13 +21,13 @@
  * src and dst are best aligned to 64bits.
  */
 __wsum
-csum_partial_copy_from_user(const void __user *src, void *dst,
+csum_and_copy_from_user(const void __user *src, void *dst,
 			    int len, __wsum isum, int *errp)
 {
 	might_sleep();
 	*errp = 0;
 
-	if (!likely(access_ok(src, len)))
+	if (!user_access_begin(src, len))
 		goto out_err;
 
 	/*
@@ -42,8 +42,7 @@ csum_partial_copy_from_user(const void __user *src, void *dst,
 		while (((unsigned long)src & 6) && len >= 2) {
 			__u16 val16;
 
-			if (__get_user(val16, (const __u16 __user *)src))
-				goto out_err;
+			unsafe_get_user(val16, (const __u16 __user *)src, out);
 
 			*(__u16 *)dst = val16;
 			isum = (__force __wsum)add32_with_carry(
@@ -53,25 +52,26 @@ csum_partial_copy_from_user(const void __user *src, void *dst,
 			len -= 2;
 		}
 	}
-	stac();
 	isum = csum_partial_copy_generic((__force const void *)src,
 				dst, len, isum, errp, NULL);
-	clac();
+	user_access_end();
 	if (unlikely(*errp))
 		goto out_err;
 
 	return isum;
 
+out:
+	user_access_end();
 out_err:
 	*errp = -EFAULT;
 	memset(dst, 0, len);
 
 	return isum;
 }
-EXPORT_SYMBOL(csum_partial_copy_from_user);
+EXPORT_SYMBOL(csum_and_copy_from_user);
 
 /**
- * csum_partial_copy_to_user - Copy and checksum to user space.
+ * csum_and_copy_to_user - Copy and checksum to user space.
  * @src: source address
  * @dst: destination address (user space)
  * @len: number of bytes to be copied.
@@ -82,14 +82,14 @@ EXPORT_SYMBOL(csum_partial_copy_from_user);
  * src and dst are best aligned to 64bits.
  */
 __wsum
-csum_partial_copy_to_user(const void *src, void __user *dst,
+csum_and_copy_to_user(const void *src, void __user *dst,
 			  int len, __wsum isum, int *errp)
 {
 	__wsum ret;
 
 	might_sleep();
 
-	if (unlikely(!access_ok(dst, len))) {
+	if (!user_access_begin(dst, len)) {
 		*errp = -EFAULT;
 		return 0;
 	}
@@ -100,9 +100,7 @@ csum_partial_copy_to_user(const void *src, void __user *dst,
 
 			isum = (__force __wsum)add32_with_carry(
 					(__force unsigned)isum, val16);
-			*errp = __put_user(val16, (__u16 __user *)dst);
-			if (*errp)
-				return isum;
+			unsafe_put_user(val16, (__u16 __user *)dst, out);
 			src += 2;
 			dst += 2;
 			len -= 2;
@@ -110,13 +108,16 @@ csum_partial_copy_to_user(const void *src, void __user *dst,
 	}
 
 	*errp = 0;
-	stac();
 	ret = csum_partial_copy_generic(src, (void __force *)dst,
 					len, isum, NULL, errp);
-	clac();
+	user_access_end();
 	return ret;
+out:
+	user_access_end();
+	*errp = -EFAULT;
+	return isum;
 }
-EXPORT_SYMBOL(csum_partial_copy_to_user);
+EXPORT_SYMBOL(csum_and_copy_to_user);
 
 /**
  * csum_partial_copy_nocheck - Copy and checksum.
 
@@ -36,26 +36,6 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 	return csum_partial(dst, len, sum);
 }
 
-/*
- * the same as csum_partial, but copies from src while it
- * checksums, and handles user-space pointer exceptions correctly, when needed.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- */
-
-static __inline__
-__wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-				     int len, __wsum sum, int *err_ptr)
-{
-	if (copy_from_user(dst, src, len)) {
-		*err_ptr = -EFAULT;
-		return (__force __wsum)-1;
-	}
-
-	return csum_partial(dst, len, sum);
-}
-
 /**
  * csum_fold - Fold and invert a 32bit checksum.
  * sum: 32bit unfolded sum
 
@@ -44,8 +44,6 @@ asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst,
 /*
  * Note: when you get a NULL pointer exception here this means someone
  * passed in an incorrect kernel address to one of these functions.
- *
- * If you use these functions directly please don't forget the access_ok().
  */
 static inline
 __wsum csum_partial_copy_nocheck(const void *src, void *dst,
@@ -54,12 +52,17 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst,
 	return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL);
 }
 
+#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
-__wsum csum_partial_copy_from_user(const void __user *src, void *dst,
+__wsum csum_and_copy_from_user(const void __user *src, void *dst,
 				   int len, __wsum sum, int *err_ptr)
 {
-	return csum_partial_copy_generic((__force const void *)src, dst,
+	if (access_ok(dst, len))
+		return csum_partial_copy_generic((__force const void *)src, dst,
 					len, sum, err_ptr, NULL);
+	if (len)
+		*err_ptr = -EFAULT;
+	return sum;
 }
 
 /*
 
@@ -25,15 +25,6 @@ extern __wsum csum_partial(const void *buff, int len, __wsum sum);
  */
 extern __wsum csum_partial_copy(const void *src, void *dst, int len, __wsum sum);
 
-/*
- * the same as csum_partial_copy, but copies from user space.
- *
- * here even more important to align src and dst on a 32-bit (or even
- * better 64-bit) boundary
- */
-extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
-					int len, __wsum sum, int *csum_err);
-
 #ifndef csum_partial_copy_nocheck
 #define csum_partial_copy_nocheck(src, dst, len, sum) \
 	csum_partial_copy((src), (dst), (len), (sum))
 
@@ -26,13 +26,9 @@ static inline
 __wsum csum_and_copy_from_user (const void __user *src, void *dst,
 				      int len, __wsum sum, int *err_ptr)
 {
-	if (access_ok(src, len))
-		return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
-
-	if (len)
+	if (copy_from_user(dst, src, len))
 		*err_ptr = -EFAULT;
-
-	return sum;
+	return csum_partial(dst, len, sum);
 }
 #endif
 
@@ -42,10 +38,8 @@ static __inline__ __wsum csum_and_copy_to_user
 {
 	sum = csum_partial(src, len, sum);
 
-	if (access_ok(dst, len)) {
-		if (copy_to_user(dst, src, len) == 0)
-			return sum;
-	}
+	if (copy_to_user(dst, src, len) == 0)
+		return sum;
 	if (len)
 		*err_ptr = -EFAULT;
 
@@ -145,26 +145,6 @@ __sum16 ip_compute_csum(const void *buff, int len)
 }
 EXPORT_SYMBOL(ip_compute_csum);
 
-/*
- * copy from fs while checksumming, otherwise like csum_partial
- */
-__wsum
-csum_partial_copy_from_user(const void __user *src, void *dst, int len,
-						__wsum sum, int *csum_err)
-{
-	int missing;
-
-	missing = __copy_from_user(dst, src, len);
-	if (missing) {
-		memset(dst + len - missing, 0, missing);
-		*csum_err = -EFAULT;
-	} else
-		*csum_err = 0;
-
-	return csum_partial(dst, len, sum);
-}
-EXPORT_SYMBOL(csum_partial_copy_from_user);
-
 /*
  * copy from ds while checksumming, otherwise like csum_partial
  */