// SPDX-License-Identifier: GPL-2.0
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/jump_label.h>
#include <linux/uaccess.h>
#include <linux/export.h>
#include <linux/string.h>
#include <linux/types.h>

#include <asm/mce.h>

#ifdef CONFIG_X86_MCE
/*
 * See COPY_MC_TEST for self-test of the copy_mc_fragile()
 * implementation.
 */
static DEFINE_STATIC_KEY_FALSE(copy_mc_fragile_key);

void enable_copy_mc_fragile(void)
{
	static_branch_inc(&copy_mc_fragile_key);
}
#define copy_mc_fragile_enabled (static_branch_unlikely(&copy_mc_fragile_key))

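/*
 * Illustrative sketch (not part of this file): enable_copy_mc_fragile() is
 * expected to be called once by machine-check setup code on CPUs that need
 * the cache-line-aware copy. The names below are hypothetical and only stand
 * in for whatever "recoverable #MC is available" check the caller uses.
 */
#if 0
static bool example_mce_recovery_available;	/* hypothetical predicate */

static void __init example_mce_late_setup(void)
{
	if (example_mce_recovery_available)
		enable_copy_mc_fragile();
}
#endif
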
/*
 * Similar to copy_user_handle_tail, probe for the write fault point, or
 * source exception point.
 */
__visible notrace unsigned long
copy_mc_fragile_handle_tail(char *to, char *from, unsigned len)
{
	for (; len; --len, to++, from++)
		if (copy_mc_fragile(to, from, 1))
			break;
	return len;
}
#else
/*
 * No point in doing careful copying, or consulting a static key when
 * there is no #MC handler in the CONFIG_X86_MCE=n case.
 */
void enable_copy_mc_fragile(void)
{
}
#define copy_mc_fragile_enabled (0)
#endif

/*
 * Implemented in assembly (copy_mc_64.S): an ERMS "rep movsb" copy with
 * machine check exception handling.
 */
unsigned long copy_mc_enhanced_fast_string(void *dst, const void *src, unsigned len);

/**
 * copy_mc_to_kernel - memory copy that handles source exceptions
 *
 * @dst: destination address
 * @src: source address
 * @len: number of bytes to copy
 *
 * Call into the 'fragile' version on systems that benefit from avoiding
 * corner case poison consumption scenarios. For example, accessing
 * poison across 2 cachelines with a single instruction. Almost all
 * other use cases can use copy_mc_enhanced_fast_string() for a fast
 * recoverable copy, or fall back to plain memcpy.
 *
 * Return 0 for success, or number of bytes not copied if there was an
 * exception.
 */
unsigned long __must_check copy_mc_to_kernel(void *dst, const void *src, unsigned len)
{
	if (copy_mc_fragile_enabled)
		return copy_mc_fragile(dst, src, len);
	if (static_cpu_has(X86_FEATURE_ERMS))
		return copy_mc_enhanced_fast_string(dst, src, len);
	memcpy(dst, src, len);
	return 0;
}
EXPORT_SYMBOL_GPL(copy_mc_to_kernel);

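/*
 * Illustrative sketch (not part of this file): a hypothetical driver read
 * path that copies from possibly-poisoned, media-backed memory. Any
 * non-zero return from copy_mc_to_kernel() means an exception was taken
 * before all 'len' bytes were copied.
 */
#if 0
static int example_media_read(void *dst, const void *media_addr, size_t len)
{
	unsigned long rem = copy_mc_to_kernel(dst, media_addr, len);

	return rem ? -EIO : 0;	/* report consumed poison as an I/O error */
}
#endif
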
/*
 * Same dispatch as copy_mc_to_kernel(), but the destination is a user
 * pointer, so the copies run inside a __uaccess_begin()/__uaccess_end()
 * (STAC/CLAC) window.
 */
unsigned long __must_check copy_mc_to_user(void __user *dst, const void *src, unsigned len)
{
	unsigned long ret;

	if (copy_mc_fragile_enabled) {
		__uaccess_begin();
		ret = copy_mc_fragile((__force void *)dst, src, len);
		__uaccess_end();
		return ret;
	}

	if (static_cpu_has(X86_FEATURE_ERMS)) {
		__uaccess_begin();
		ret = copy_mc_enhanced_fast_string((__force void *)dst, src, len);
		__uaccess_end();
		return ret;
	}

	return copy_user_generic((__force void *)dst, src, len);
}
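
/*
 * Illustrative sketch (not part of this file): a hypothetical read()-style
 * helper that copies from possibly-poisoned kernel memory to userspace.
 * copy_mc_to_user() returns the number of bytes NOT copied, so a partial
 * failure can be reported as a short transfer.
 */
#if 0
static ssize_t example_read_to_user(void __user *ubuf, const void *kaddr,
				    size_t len)
{
	unsigned long rem = copy_mc_to_user(ubuf, kaddr, len);

	if (rem == len)
		return -EFAULT;		/* nothing was copied */
	return len - rem;		/* short read up to the failing byte */
}
#endif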