commit b1c03f1ef4
The __clear_user function is defined to return the number of bytes that
could not be cleared. From the underlying memset / bzero implementation
this means setting register a2 to that number on return. Currently, if a
page fault is triggered within the MIPSr6 version of the initial
unaligned bytes block, the value loaded into a2 on return is
meaningless.

During the MIPSr6 version of the initial unaligned bytes block, register
a2 contains the number of bytes to be set beyond the initial unaligned
bytes. The t0 register is initially set to the number of unaligned bytes
- STORSIZE, effectively a negative version of the number of unaligned
bytes. This is then incremented before each byte is saved.
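
For example (illustrative numbers, assuming STORSIZE == 8): for a
destination 3 bytes past an aligned boundary, 5 unaligned bytes must be
set first, so t0 starts at 3 - 8 = -5 and steps towards zero as the
bytes are stored.
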
The label .Lbyte_fixup\@ is jumped to on page fault. Currently the value
in a2 is incorrectly replaced by 0 - t0 + 1, effectively the number of
unaligned bytes remaining. This leads to the failures being reported by
the following test code:
static int __init test_clear_user(void)
{
	int j, k;

	pr_info("\n\n\nTesting clear_user\n");
	for (j = 0; j < 512; j++) {
		if ((k = clear_user(NULL+3, j)) != j) {
			pr_err("clear_user (NULL %d) returned %d\n", j, k);
		}
	}
	return 0;
}
late_initcall(test_clear_user);

Which reports:
[ 3.965439] Testing clear_user
[ 3.973169] clear_user (NULL 8) returned 6
[ 3.976782] clear_user (NULL 9) returned 6
[ 3.980390] clear_user (NULL 10) returned 6
[ 3.984052] clear_user (NULL 11) returned 6
[ 3.987524] clear_user (NULL 12) returned 6
Fix this by subtracting t0 from a2 (rather than from $0), effectively
giving:

unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
     a2     =             a2                -              t0                   + 1
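
As a sanity check, here is a minimal C model of the fixed fixup
arithmetic (illustrative only, not kernel code; byte_fixup is a made-up
name). While byte i of a u-byte unaligned prefix is being stored,
a2 = n - u and t0 = -u + 1 + i, so the fixup yields n - i, the number
of bytes left unset:

#include <assert.h>

/* Models ".Lbyte_fixup: PTR_SUBU a2, t0; PTR_ADDIU a2, 1" */
static long byte_fixup(long a2, long t0)
{
	return a2 - t0 + 1;
}

int main(void)
{
	long n, u, i;

	for (n = 8; n <= 16; n++)		/* total bytes requested */
		for (u = 1; u < 8; u++)		/* unaligned prefix length */
			for (i = 0; i < u; i++)	/* index of the faulting byte */
				/* n - i bytes remain unset when byte i faults */
				assert(byte_fixup(n - u, -u + 1 + i) == n - i);
	return 0;
}
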
This fixes the value returned from __clear_user when the number of bytes
to set is > LONGSIZE and the address is invalid and unaligned.

Unfortunately, this breaks the fixup handling for unaligned bytes after
the final long, where register a2 still contains the number of bytes
remaining to be set and the t0 register is set to 0 minus the number of
unaligned bytes remaining.
Because t0 is now subtracted from a2 rather than from 0, the number of
bytes unset is reported incorrectly:
static int __init test_clear_user(void)
{
	char *test;
	int j, k;

	pr_info("\n\n\nTesting clear_user\n");
	test = vmalloc(PAGE_SIZE);
	for (j = 256; j < 512; j++) {
		if ((k = clear_user(test + PAGE_SIZE - 254, j)) != j - 254) {
			pr_err("clear_user (%px %d) returned %d\n",
				test + PAGE_SIZE - 254, j, k);
		}
	}
	return 0;
}
late_initcall(test_clear_user);

[ 3.976775] clear_user (c00000000000df02 256) returned 4
[ 3.981957] clear_user (c00000000000df02 257) returned 6
[ 3.986425] clear_user (c00000000000df02 258) returned 8
[ 3.990850] clear_user (c00000000000df02 259) returned 10
[ 3.995332] clear_user (c00000000000df02 260) returned 12
[ 3.999815] clear_user (c00000000000df02 261) returned 14

Fix this by ensuring that a2 is set to 0 while the final unaligned bytes
are set.
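
A similar model (again illustrative, not kernel code) shows why zeroing
a2 first makes the shared .Lbyte_fixup arithmetic correct here: while
trailing byte i of r is being stored, t0 = -r + 1 + i, so with a2 = 0
the fixup reports r - i bytes unset:

#include <assert.h>

static long byte_fixup(long a2, long t0)
{
	return a2 - t0 + 1;	/* as in .Lbyte_fixup */
}

int main(void)
{
	long r, i;

	for (r = 1; r < 8; r++)		/* trailing bytes after the last long */
		for (i = 0; i < r; i++)	/* index of the faulting byte */
			/* with a2 zeroed, r - i bytes are reported unset */
			assert(byte_fixup(0, -r + 1 + i) == r - i);
	return 0;
}
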
Signed-off-by: Matt Redfearn <matt.redfearn@mips.com>
Signed-off-by: Paul Burton <paul.burton@mips.com>
Fixes: 8c56208aff ("MIPS: lib: memset: Add MIPS R6 support")
Patchwork: https://patchwork.linux-mips.org/patch/19338/
Cc: James Hogan <jhogan@kernel.org>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: linux-mips@linux-mips.org
Cc: linux-kernel@vger.kernel.org
Cc: stable@vger.kernel.org # v4.0+
309 lines
6.7 KiB
ArmAsm
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

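/*
 * EX() emits a store (its EVA counterpart when \mode == EVA_MODE) and
 * records the instruction address together with its fault handler in
 * the __ex_table section, so a page fault during the store is
 * redirected to the handler label.
 */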
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

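/*
 * Fill 64 bytes at \offset(\dst) with \val, STORSIZE bytes per store,
 * each store covered by the \fixup fault handler.
 */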
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

	.set	noreorder
	.align	5

/*
 * Macro to generate the __bzero{,_user} symbol
 * Arguments:
 * mode: LEGACY_MODE or EVA_MODE
 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset\@
	andi		t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU t0, 1;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial\@	/* no block to fill */
	andi		t0, a2, 0x40-STORSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
	.set		noreorder

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set		pop
	andi		a2, STORMASK		/* At most one long to go */

	beqz		a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU	t0, $0, a2
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
1:	jr		ra
	move		a2, zero

.Lsmall_memset\@:
	beqz		a2, 2f
	PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr		ra			/* done */
	move		a2, zero
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_MIPSR6
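	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */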
.Lbyte_fixup\@:
	PTR_SUBU	a2, t0
	jr		ra
	PTR_ADDIU	a2, 1
#endif /* CONFIG_CPU_MIPSR6 */

.Lfirst_fixup\@:
	jr	ra
	nop

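	/*
	 * A store in the 64-byte block loop faulted:
	 * unset_bytes = block_end_addr + (#bytes beyond blocks) - fault_addr
	 *      a2     =       t1       +      (a2 & 0x3f)       - $28->task->BUADDR
	 */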
.Lfwd_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	LONG_SUBU	a2, t0

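	/*
	 * A store in the partial block faulted:
	 * unset_bytes = partial_end_addr + (#trailing bytes) - fault_addr
	 *      a2     =        a0        + (a2 & STORMASK)   - $28->task->BUADDR
	 */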
.Lpartial_fixup\@:
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	jr		ra
	LONG_SUBU	a2, t0

.Llast_fixup\@:
	jr		ra
	nop

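	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */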
.Lsmall_fixup\@:
	PTR_SUBU	a2, t1, a0
	jr		ra
	PTR_ADDIU	a2, 1

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	beqz		a1, 1f
	move		v0, a0			/* result */

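	/* Replicate the fill byte across the register, e.g. 0xab -> 0xab..ab */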
	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif