00f554fade
Unaligned stores take alignment exceptions on POWER7 running in little-endian. This is a dumb little-endian base memcpy that prevents unaligned stores. Once booted, the feature fixup code switches over to the VMX copy loops (which are already endian safe).

The question is what we do before that switch over. The base 64-bit memcpy takes alignment exceptions on POWER7, so we can't use it as is. Fixing the causes of the alignment exceptions would slow it down, because we'd need to ensure all loads and stores are aligned, either through rotate tricks or byte-wise loads and stores. Either would be bad for all other 64-bit platforms.

[ I simplified the loop a bit - Anton ]

Signed-off-by: Philippe Bergheaud <felix@linux.vnet.ibm.com>
Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
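The idea behind the interim copy can be sketched in C: a byte-at-a-time loop only ever issues single-byte loads and stores, which are always naturally aligned, so it can never raise an alignment exception, and its slowness does not matter because the feature fixup pass later patches in the VMX copy loops. This is only an illustrative sketch under those assumptions; the code added by the commit itself is assembly (memcpy_64.o in the Makefile below), and the function name boot_memcpy is hypothetical.

    #include <stddef.h>

    /*
     * Illustrative sketch only, not kernel code: a "dumb" byte-wise copy
     * in the spirit of the pre-fixup little-endian path. Byte accesses are
     * always aligned, so no alignment exception can occur on POWER7 in LE
     * mode regardless of how dest, src, or n are aligned.
     */
    static void *boot_memcpy(void *dest, const void *src, size_t n)
    {
            unsigned char *d = dest;
            const unsigned char *s = src;

            while (n--)
                    *d++ = *s++;    /* one byte at a time: never unaligned */

            return dest;
    }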
43 lines
1.1 KiB
Makefile
#
# Makefile for ppc-specific library files..
#

subdir-ccflags-$(CONFIG_PPC_WERROR) := -Werror

ccflags-$(CONFIG_PPC64)	:= $(NO_MINIMAL_TOC)

CFLAGS_REMOVE_code-patching.o = -pg
CFLAGS_REMOVE_feature-fixups.o = -pg

obj-y			:= string.o alloc.o \
			   crtsavres.o
obj-$(CONFIG_PPC32)	+= div64.o copy_32.o
obj-$(CONFIG_HAS_IOMEM)	+= devres.o

obj-$(CONFIG_PPC64)	+= copypage_64.o copyuser_64.o \
			   usercopy_64.o mem_64.o string.o \
			   hweight_64.o \
			   copyuser_power7.o string_64.o copypage_power7.o
ifeq ($(CONFIG_GENERIC_CSUM),)
obj-y			+= checksum_$(CONFIG_WORD_SIZE).o
obj-$(CONFIG_PPC64)	+= checksum_wrappers_64.o
endif

obj-$(CONFIG_PPC64)	+= memcpy_power7.o memcpy_64.o

obj-$(CONFIG_PPC_EMULATE_SSTEP)	+= sstep.o ldstfp.o

ifeq ($(CONFIG_PPC64),y)
obj-$(CONFIG_SMP)	+= locks.o
obj-$(CONFIG_ALTIVEC)	+= vmx-helper.o
endif

obj-$(CONFIG_PPC_LIB_RHEAP) += rheap.o

obj-y			+= code-patching.o
obj-y			+= feature-fixups.o
obj-$(CONFIG_FTR_FIXUP_SELFTEST) += feature-fixups-test.o

obj-$(CONFIG_ALTIVEC)	+= xor_vmx.o
CFLAGS_xor_vmx.o += -maltivec -mabi=altivec