x86: move stac/clac from user copy routines into callers
This is preparatory work for inlining the 'rep movs' case, but also a cleanup.

The __copy_user_nocache() function was mis-used by the rdma code to do uncached kernel copies that don't actually want user copies at all, and as a result doesn't want the stac/clac either.

Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent d2c95f9d68
commit 3639a53558
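The whole change follows one pattern: the low-level copy routines stop toggling EFLAGS.AC themselves, callers that really touch user memory bracket the call with stac()/clac(), and kernel-only callers (the rdma-style use of __copy_user_nocache()) call the raw routine with no bracketing at all. Below is a minimal user-space C sketch of that split; stac(), clac(), raw_copy_nocache(), copy_from_user_nocache_demo() and copy_kernel_nocache() are illustrative stubs and stand-ins, not kernel API beyond what the diff itself shows.

/*
 * Minimal user-space sketch of the pattern introduced by this commit
 * (illustrative only -- the names below are stand-ins, not kernel API).
 */
#include <stdio.h>
#include <string.h>
#include <stddef.h>

/* Stubs for the real stac()/clac(), which set/clear EFLAGS.AC. */
static void stac(void) { }
static void clac(void) { }

/* Stand-in for __copy_user_nocache(): no stac/clac of its own anymore. */
static long raw_copy_nocache(void *dst, const void *src, size_t size)
{
        memcpy(dst, src, size); /* the real routine uses non-temporal stores */
        return 0;               /* 0 on success, uncopied bytes on fault */
}

/* User-copy caller: opens and closes the user-access window itself. */
static long copy_from_user_nocache_demo(void *dst, const void *usrc, size_t size)
{
        long ret;

        stac();
        ret = raw_copy_nocache(dst, usrc, size);
        clac();
        return ret;
}

/* Kernel-to-kernel caller (the rdma-style use): no stac/clac needed. */
static long copy_kernel_nocache(void *dst, const void *ksrc, size_t size)
{
        return raw_copy_nocache(dst, ksrc, size);
}

int main(void)
{
        char src[16] = "hello", user_dst[16], kernel_dst[16];

        printf("user copy rc=%ld\n", copy_from_user_nocache_demo(user_dst, src, sizeof(src)));
        printf("kernel copy rc=%ld\n", copy_kernel_nocache(kernel_dst, src, sizeof(src)));
        return 0;
}

The diff applies exactly this split: stac()/clac() show up in copy_user_generic(), __copy_from_user_inatomic_nocache() and __copy_user_flushcache(), the ASM_STAC/ASM_CLAC pairs disappear from the assembly routines, and objtool's uaccess_safe_builtin[] list gains the raw routines so they may be called with AC set.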
arch/x86/include/asm/uaccess_64.h
@@ -27,6 +27,7 @@ copy_user_generic(void *to, const void *from, unsigned len)
 {
         unsigned ret;
 
+        stac();
         /*
          * If CPU has FSRM feature, use 'rep movs'.
          * Otherwise, use copy_user_generic_unrolled.
@@ -38,6 +39,7 @@ copy_user_generic(void *to, const void *from, unsigned len)
                              "=d" (len)),
                          "1" (to), "2" (from), "3" (len)
                          : "memory", "rcx", "r8", "r9", "r10", "r11");
+        clac();
         return ret;
 }
 
@@ -64,8 +66,12 @@ static inline int
 __copy_from_user_inatomic_nocache(void *dst, const void __user *src,
                                   unsigned size)
 {
+        long ret;
         kasan_check_write(dst, size);
-        return __copy_user_nocache(dst, src, size, 0);
+        stac();
+        ret = __copy_user_nocache(dst, src, size, 0);
+        clac();
+        return ret;
 }
 
 static inline int
arch/x86/lib/copy_user_64.S
@@ -51,7 +51,6 @@
  * eax uncopied bytes or 0 if successful.
  */
 SYM_FUNC_START(copy_user_generic_unrolled)
-        ASM_STAC
         cmpl $8,%edx
         jb .Lcopy_user_short_string_bytes
         ALIGN_DESTINATION
@@ -123,15 +122,12 @@ EXPORT_SYMBOL(copy_user_generic_unrolled)
  * eax uncopied bytes or 0 if successful.
  */
 SYM_FUNC_START(copy_user_fast_string)
-        ASM_STAC
         movl %edx,%ecx
 1:      rep movsb
         xorl %eax,%eax
-        ASM_CLAC
         RET
 
 12:     movl %ecx,%eax          /* ecx is zerorest also */
-        ASM_CLAC
         RET
 
         _ASM_EXTABLE_CPY(1b, 12b)
@@ -160,12 +156,10 @@ SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
         movl %edx,%ecx
 1:      rep movsb
 2:      mov %ecx,%eax
-        ASM_CLAC
         RET
 
 3:
         movl %edx,%eax
-        ASM_CLAC
         RET
 
         _ASM_EXTABLE_CPY(1b, 2b)
@@ -209,7 +203,6 @@ SYM_CODE_START_LOCAL(copy_user_short_string)
         decl %ecx
         jnz 21b
 23:     xor %eax,%eax
-        ASM_CLAC
         RET
 
 40:     leal (%rdx,%rcx,8),%edx
@@ -233,8 +226,6 @@ SYM_CODE_END(copy_user_short_string)
  * - Require 4-byte alignment when size is 4 bytes.
  */
 SYM_FUNC_START(__copy_user_nocache)
-        ASM_STAC
-
         /* If size is less than 8 bytes, go to 4-byte copy */
         cmpl $8,%edx
         jb .L_4b_nocache_copy_entry
@@ -327,7 +318,6 @@ SYM_FUNC_START(__copy_user_nocache)
         /* Finished copying; fence the prior stores */
 .L_finish_copy:
         xorl %eax,%eax
-        ASM_CLAC
         sfence
         RET
 
arch/x86/lib/usercopy_64.c
@@ -45,7 +45,11 @@ EXPORT_SYMBOL_GPL(arch_wb_cache_pmem);
 long __copy_user_flushcache(void *dst, const void __user *src, unsigned size)
 {
         unsigned long flushed, dest = (unsigned long) dst;
-        long rc = __copy_user_nocache(dst, src, size, 0);
+        long rc;
+
+        stac();
+        rc = __copy_user_nocache(dst, src, size, 0);
+        clac();
 
         /*
          * __copy_user_nocache() uses non-temporal stores for the bulk
tools/objtool/check.c
@@ -1285,6 +1285,9 @@ static const char *uaccess_safe_builtin[] = {
         "copy_mc_enhanced_fast_string",
         "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
         "clear_user_original",
+        "copy_user_generic_unrolled",
+        "copy_user_fast_string",
+        "__copy_user_nocache",
         NULL
 };