Contributors: 9
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---|---|---|---|
| Linus Torvalds (pre-git) | 94 | 56.29% | 7 | 35.00% |
| Andrew Morton | 21 | 12.57% | 4 | 20.00% |
| Al Viro | 15 | 8.98% | 1 | 5.00% |
| Neil Brown | 10 | 5.99% | 1 | 5.00% |
| Ingo Molnar | 9 | 5.39% | 2 | 10.00% |
| Hiro Yoshioka | 7 | 4.19% | 1 | 5.00% |
| H. Peter Anvin | 6 | 3.59% | 2 | 10.00% |
| Linus Torvalds | 4 | 2.40% | 1 | 5.00% |
| Greg Kroah-Hartman | 1 | 0.60% | 1 | 5.00% |
| Total | 167 | | 20 | |
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_UACCESS_32_H
#define _ASM_X86_UACCESS_32_H

/*
 * User space memory access functions
 */
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>

unsigned long __must_check __copy_user_ll
		(void *to, const void *from, unsigned long n);
unsigned long __must_check __copy_from_user_ll_nocache_nozero
		(void *to, const void __user *from, unsigned long n);

static __always_inline unsigned long __must_check
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	return __copy_user_ll((__force void *)to, from, n);
}

static __always_inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	return __copy_user_ll(to, (__force const void *)from, n);
}

static __always_inline unsigned long
__copy_from_user_inatomic_nocache(void *to, const void __user *from,
				  unsigned long n)
{
	return __copy_from_user_ll_nocache_nozero(to, from, n);
}

#endif /* _ASM_X86_UACCESS_32_H */
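
The raw_copy_to_user() and raw_copy_from_user() helpers above are not normally called directly by drivers; they sit underneath the generic copy_to_user()/copy_from_user() wrappers in <linux/uaccess.h>, which return the number of bytes that could not be copied (0 on complete success). The following is a minimal sketch of how a driver typically consumes that contract; the names demo_write and demo_buf are hypothetical, and module registration boilerplate is omitted.

#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

/* Hypothetical backing buffer for the sketch. */
static char demo_buf[128];

/*
 * Hypothetical .write handler: copy at most sizeof(demo_buf) bytes in
 * from user space.  copy_from_user() returns the number of bytes it
 * could NOT copy, so any non-zero result means the user pointer was
 * at least partially invalid and the handler reports -EFAULT.
 */
static ssize_t demo_write(struct file *file, const char __user *ubuf,
			  size_t count, loff_t *ppos)
{
	size_t len = min_t(size_t, count, sizeof(demo_buf));

	if (copy_from_user(demo_buf, ubuf, len))
		return -EFAULT;

	return len;
}

On a 32-bit x86 build, copy_from_user() bottoms out in raw_copy_from_user() from this header, which forwards to __copy_user_ll() after a __force cast that strips the __user address-space annotation for sparse.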