Contributors: 10
| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|--------|--------|------------------|---------|-------------------|
| Linus Torvalds (pre-git) | 98 | 49.75% | 7 | 31.82% |
| Andrew Morton | 23 | 11.68% | 4 | 18.18% |
| Andi Kleen | 20 | 10.15% | 1 | 4.55% |
| Al Viro | 17 | 8.63% | 2 | 9.09% |
| Neil Brown | 12 | 6.09% | 1 | 4.55% |
| Ingo Molnar | 9 | 4.57% | 2 | 9.09% |
| Hiro Yoshioka | 7 | 3.55% | 1 | 4.55% |
| H. Peter Anvin | 6 | 3.05% | 2 | 9.09% |
| Linus Torvalds | 4 | 2.03% | 1 | 4.55% |
| Greg Kroah-Hartman | 1 | 0.51% | 1 | 4.55% |
| Total | 197 | | 22 | |
```c
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_UACCESS_32_H
#define _ASM_X86_UACCESS_32_H

/*
 * User space memory access functions
 */
#include <linux/string.h>

#include <asm/asm.h>
#include <asm/page.h>

unsigned long __must_check __copy_user_ll
		(void *to, const void *from, unsigned long n);
unsigned long __must_check __copy_from_user_ll_nocache_nozero
		(void *to, const void __user *from, unsigned long n);

static __always_inline unsigned long __must_check
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	return __copy_user_ll((__force void *)to, from, n);
}

static __always_inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	return __copy_user_ll(to, (__force const void *)from, n);
}

static __always_inline unsigned long
__copy_from_user_inatomic_nocache(void *to, const void __user *from,
				  unsigned long n)
{
	return __copy_from_user_ll_nocache_nozero(to, from, n);
}

unsigned long __must_check clear_user(void __user *mem, unsigned long len);
unsigned long __must_check __clear_user(void __user *mem, unsigned long len);

#endif /* _ASM_X86_UACCESS_32_H */
```
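
For context on how these entry points are reached: the generic `copy_from_user()`/`copy_to_user()` helpers validate the user-space range and then call the architecture's `raw_copy_from_user()`/`raw_copy_to_user()`, which on 32-bit x86 are the inline wrappers around `__copy_user_ll()` declared above. The sketch below is a minimal, hypothetical character-driver `write()` handler showing the usual calling pattern; the names `example_write` and `example_buf` are illustrative and not part of this file.

```c
/*
 * Hypothetical usage sketch (not part of uaccess_32.h): a character
 * driver's write() handler. copy_from_user() verifies the user-space
 * range and then dispatches to raw_copy_from_user(), which on 32-bit
 * x86 ends up in __copy_user_ll() declared in the header above.
 */
#include <linux/fs.h>
#include <linux/uaccess.h>

static char example_buf[64];	/* illustrative destination buffer */

static ssize_t example_write(struct file *file, const char __user *ubuf,
			     size_t count, loff_t *ppos)
{
	if (count > sizeof(example_buf))
		count = sizeof(example_buf);

	/* Non-zero return value = number of bytes that could not be copied. */
	if (copy_from_user(example_buf, ubuf, count))
		return -EFAULT;

	return count;
}
```

`clear_user()` and `__clear_user()`, declared at the end of the header, are used in the same way when a user-space buffer needs to be zero-filled rather than copied into.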