Contributors: 17

| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---|---|---|---|
| Dan J Williams | 154 | 54.23% | 6 | 26.09% |
| Arnaldo Carvalho de Melo | 64 | 22.54% | 1 | 4.35% |
| Aleksa Sarai | 15 | 5.28% | 1 | 4.35% |
| Andi Kleen | 15 | 5.28% | 2 | 8.70% |
| Al Viro | 8 | 2.82% | 1 | 4.35% |
| Linus Torvalds | 6 | 2.11% | 1 | 4.35% |
| Tony Luck | 3 | 1.06% | 1 | 4.35% |
| Shaohua Li | 3 | 1.06% | 1 | 4.35% |
| Matt Domsch | 3 | 1.06% | 1 | 4.35% |
| Andrew Morton | 3 | 1.06% | 1 | 4.35% |
| René Herman | 2 | 0.70% | 1 | 4.35% |
| Dave Jones | 2 | 0.70% | 1 | 4.35% |
| H. Peter Anvin | 2 | 0.70% | 1 | 4.35% |
| Mikulas Patocka | 1 | 0.35% | 1 | 4.35% |
| Ingo Molnar | 1 | 0.35% | 1 | 4.35% |
| Josh Poimboeuf | 1 | 0.35% | 1 | 4.35% |
| Andrew Lutomirski | 1 | 0.35% | 1 | 4.35% |
| Total | 284 | | 23 | |
// SPDX-License-Identifier: GPL-2.0
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/jump_label.h>
#include <linux/uaccess.h>
#include <linux/export.h>
#include <linux/string.h>
#include <linux/types.h>

#include <asm/mce.h>
#ifdef CONFIG_X86_MCE
static DEFINE_STATIC_KEY_FALSE(copy_mc_fragile_key);

void enable_copy_mc_fragile(void)
{
        static_branch_inc(&copy_mc_fragile_key);
}
#define copy_mc_fragile_enabled (static_branch_unlikely(&copy_mc_fragile_key))

/*
 * Similar to copy_user_handle_tail(), probe for the write fault point, or
 * source exception point.
 */
__visible notrace unsigned long
copy_mc_fragile_handle_tail(char *to, char *from, unsigned len)
{
        for (; len; --len, to++, from++)
                if (copy_mc_fragile(to, from, 1))
                        break;
        return len;
}
#else
/*
 * No point in doing careful copying, or consulting a static key when
 * there is no #MC handler in the CONFIG_X86_MCE=n case.
 */
void enable_copy_mc_fragile(void)
{
}
#define copy_mc_fragile_enabled (0)
#endif
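The static key above keeps the common path branch-free: `static_branch_unlikely()` compiles to a patchable no-op jump until `static_branch_inc()` flips it at runtime. A minimal sketch of the same pattern, under invented names (`my_slow_path_key`, `enable_my_slow_path()`, and the two stub helpers are hypothetical, not part of this file):

#include <linux/jump_label.h>

static DEFINE_STATIC_KEY_FALSE(my_slow_path_key);

/* Hypothetical stand-ins for the careful and the fast code paths. */
static void careful_work(void) { }
static void fast_work(void) { }

/* Called once, e.g. at boot, when a hardware quirk is detected. */
void enable_my_slow_path(void)
{
        static_branch_inc(&my_slow_path_key);
}

void my_hot_path(void)
{
        /* Patched at runtime: a no-op jump until the key is incremented. */
        if (static_branch_unlikely(&my_slow_path_key))
                careful_work();
        else
                fast_work();
}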
unsigned long copy_mc_enhanced_fast_string(void *dst, const void *src, unsigned len);
/**
 * copy_mc_to_kernel - memory copy that handles source exceptions
 *
 * @dst: destination address
 * @src: source address
 * @len: number of bytes to copy
 *
 * Call into the 'fragile' version on systems that benefit from avoiding
 * corner case poison consumption scenarios. For example, accessing
 * poison across 2 cachelines with a single instruction. Almost all
 * other use cases can use copy_mc_enhanced_fast_string() for a fast
 * recoverable copy, or fall back to plain memcpy.
 *
 * Return 0 for success, or number of bytes not copied if there was an
 * exception.
 */
unsigned long __must_check copy_mc_to_kernel(void *dst, const void *src, unsigned len)
{
        if (copy_mc_fragile_enabled)
                return copy_mc_fragile(dst, src, len);

        if (static_cpu_has(X86_FEATURE_ERMS))
                return copy_mc_enhanced_fast_string(dst, src, len);

        memcpy(dst, src, len);
        return 0;
}
EXPORT_SYMBOL_GPL(copy_mc_to_kernel);
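The return contract documented above (0 on success, residual byte count on a machine-check abort) is easy to misread as a boolean. A minimal caller sketch, loosely modeled on how persistent-memory drivers consume this API (`read_pmem_page`, `buf`, and `pmem_addr` are invented names, not part of this file):

/*
 * Hypothetical caller sketch: copy a page out of persistent memory and
 * translate a partial, machine-check-aborted copy into -EIO.
 */
static int read_pmem_page(void *buf, const void *pmem_addr)
{
        unsigned long rem;

        rem = copy_mc_to_kernel(buf, pmem_addr, PAGE_SIZE);
        if (rem)        /* the trailing rem bytes were never written */
                return -EIO;
        return 0;
}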
unsigned long __must_check copy_mc_to_user(void *dst, const void *src, unsigned len)
{
        unsigned long ret;

        if (copy_mc_fragile_enabled) {
                __uaccess_begin();
                ret = copy_mc_fragile(dst, src, len);
                __uaccess_end();
                return ret;
        }

        if (static_cpu_has(X86_FEATURE_ERMS)) {
                __uaccess_begin();
                ret = copy_mc_enhanced_fast_string(dst, src, len);
                __uaccess_end();
                return ret;
        }

        return copy_user_generic(dst, src, len);
}
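Note the `__uaccess_begin()`/`__uaccess_end()` bracketing: the low-level copy routines carry no uaccess wrapping of their own, so the function opens and closes the user-access window (SMAP) around each call. A hedged sketch of a copyout-style consumer, in the spirit of how iov_iter-level callers invoke this function (`copyout_mc_sketch` is an invented name; the `__force` cast reflects that this version of copy_mc_to_user() takes a plain void pointer):

/*
 * Hypothetical caller sketch, not part of this file: returns the number
 * of bytes actually copied. Callers must validate the user pointer with
 * access_ok() before handing it down.
 */
static size_t copyout_mc_sketch(void __user *to, const void *from, size_t n)
{
        unsigned long rem = n;

        if (access_ok(to, n))
                rem = copy_mc_to_user((__force void *)to, from, n);
        return n - rem;        /* bytes successfully copied */
}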