linux-next/arch/arm64/kernel/entry-fpsimd.S
Last commit touching this file: caab277b1d ("treewide: Replace GPLv2 boilerplate/reference with SPDX - rule 234") by Thomas Gleixner, 2019-06-19:
Based on 1 normalized pattern(s):

  this program is free software you can redistribute it and or modify
  it under the terms of the gnu general public license version 2 as
  published by the free software foundation this program is
  distributed in the hope that it will be useful but without any
  warranty without even the implied warranty of merchantability or
  fitness for a particular purpose see the gnu general public license
  for more details you should have received a copy of the gnu general
  public license along with this program if not see http www gnu org
  licenses

extracted by the scancode license scanner the SPDX license identifier

  GPL-2.0-only

has been chosen to replace the boilerplate/reference in 503 file(s).

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Reviewed-by: Alexios Zavras <alexios.zavras@intel.com>
Reviewed-by: Allison Randal <allison@lohutok.net>
Reviewed-by: Enrico Weigelt <info@metux.net>
Cc: linux-spdx@vger.kernel.org
Link: https://lkml.kernel.org/r/20190602204653.811534538@linutronix.de
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>

50 lines, 833 B, ArmAsm

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>

/*
 * Save the FP registers.
 *
 * x0 - pointer to struct fpsimd_state
 */
ENTRY(fpsimd_save_state)
	fpsimd_save x0, 8
	ret
ENDPROC(fpsimd_save_state)

/*
 * Load the FP registers.
 *
 * x0 - pointer to struct fpsimd_state
 */
ENTRY(fpsimd_load_state)
	fpsimd_restore x0, 8
	ret
ENDPROC(fpsimd_load_state)

#ifdef CONFIG_ARM64_SVE
ENTRY(sve_save_state)
	sve_save 0, x1, 2
	ret
ENDPROC(sve_save_state)

ENTRY(sve_load_state)
	sve_load 0, x1, x2, 3, x4
	ret
ENDPROC(sve_load_state)

ENTRY(sve_get_vl)
	_sve_rdvl 0, 1
	ret
ENDPROC(sve_get_vl)
#endif /* CONFIG_ARM64_SVE */
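
For context, the following is a rough C-side view of how a kernel caller might declare and invoke these entry points. It is a minimal sketch, not part of the file above: the prototypes, the struct user_fpsimd_state forward declaration and the example_fpsimd_switch_out() helper are assumptions inferred from the register usage in the assembly (x0 = state pointer, x1 = FPSR pointer, x2 = a vector-length-dependent value); the authoritative declarations live elsewhere in the arm64 tree (asm/fpsimd.h).

/*
 * Illustrative C-side declarations for the assembly entry points above.
 * A sketch only: prototypes and struct layout are assumed, not taken
 * from this file.
 */
#include <linux/types.h>

struct user_fpsimd_state;	/* V0-V31 plus FPSR/FPCR; assumed layout */

/* Plain FP/SIMD state: x0 carries the state pointer. */
extern void fpsimd_save_state(struct user_fpsimd_state *state);
extern void fpsimd_load_state(struct user_fpsimd_state *state);

#ifdef CONFIG_ARM64_SVE
/* SVE state: x0 = Z/P/FFR buffer, x1 = &fpsr, x2 = VQ - 1 on load. */
extern void sve_save_state(void *state, u32 *pfpsr);
extern void sve_load_state(void const *state, u32 const *pfpsr,
			   unsigned long vq_minus_1);
extern unsigned int sve_get_vl(void);	/* current vector length, in bytes */
#endif

/* Hypothetical caller, e.g. saving the outgoing task's FP state. */
static inline void example_fpsimd_switch_out(struct user_fpsimd_state *st)
{
	fpsimd_save_state(st);
}

Note that the bare numeric operands in the assembly (e.g. the 0 in "sve_save 0, x1, 2") appear to name register numbers that the macros in asm/fpsimdmacros.h expand into x-register references, which is why only some operands are written with an explicit x prefix.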