Contributors: 9
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---:|---:|---:|---:|
| Hiro Yoshioka | 60 | 35.93% | 1 | 7.14% |
| Neil Brown | 37 | 22.16% | 1 | 7.14% |
| Linus Torvalds (pre-git) | 29 | 17.37% | 3 | 21.43% |
| Andrew Morton | 17 | 10.18% | 3 | 21.43% |
| Al Viro | 15 | 8.98% | 1 | 7.14% |
| H. Peter Anvin | 6 | 3.59% | 2 | 14.29% |
| Greg Kroah-Hartman | 1 | 0.60% | 1 | 7.14% |
| Ingo Molnar | 1 | 0.60% | 1 | 7.14% |
| Linus Torvalds | 1 | 0.60% | 1 | 7.14% |
| **Total** | **167** | | **14** | |
```c
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_UACCESS_32_H
#define _ASM_X86_UACCESS_32_H

/*
 * User space memory access functions
 */
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>

unsigned long __must_check __copy_user_ll
		(void *to, const void *from, unsigned long n);
unsigned long __must_check __copy_from_user_ll_nocache_nozero
		(void *to, const void __user *from, unsigned long n);

static __always_inline unsigned long __must_check
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	return __copy_user_ll((__force void *)to, from, n);
}

static __always_inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	return __copy_user_ll(to, (__force const void *)from, n);
}

static __always_inline unsigned long
__copy_from_user_inatomic_nocache(void *to, const void __user *from,
				  unsigned long n)
{
	return __copy_from_user_ll_nocache_nozero(to, from, n);
}

#endif /* _ASM_X86_UACCESS_32_H */
```
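Per its include guard, this is `arch/x86/include/asm/uaccess_32.h`, the 32-bit x86 backend for the kernel's user-space copy routines. Drivers do not call `raw_copy_to_user()`/`raw_copy_from_user()` directly; they use `copy_to_user()`/`copy_from_user()` from `<linux/uaccess.h>`, which validate the user pointer with `access_ok()` and then dispatch to these per-architecture hooks (here, thin wrappers around `__copy_user_ll()`). Below is a minimal, hypothetical sketch of a character-device `read()` handler whose `copy_to_user()` call would land in `raw_copy_to_user()` on a 32-bit x86 build; the `demo_msg` buffer and `demo_read()` handler are illustrative and not part of this header.

```c
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/uaccess.h>

/* Hypothetical backing buffer for an illustrative character device. */
static const char demo_msg[] = "hello from kernel space\n";

/*
 * Sketch of a read() handler.  copy_to_user() checks the destination
 * with access_ok() and then calls raw_copy_to_user() -- on 32-bit x86,
 * the inline above that forwards to __copy_user_ll().  Like the raw
 * routine, it returns the number of bytes it could NOT copy.
 */
static ssize_t demo_read(struct file *file, char __user *buf,
			 size_t count, loff_t *ppos)
{
	size_t avail;

	if (*ppos >= sizeof(demo_msg))
		return 0;			/* EOF */

	avail = sizeof(demo_msg) - *ppos;
	if (count > avail)
		count = avail;

	/* Nonzero return means part of the user buffer was unwritable. */
	if (copy_to_user(buf, demo_msg + *ppos, count))
		return -EFAULT;

	*ppos += count;
	return count;
}
```

Note the return-value convention the wrappers inherit from `__copy_user_ll()`: they return the number of bytes left uncopied, so callers treat any nonzero result as a fault. The `__force` casts in the header exist only to strip the `__user` address-space annotation for the `sparse` static checker; they generate no code.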