/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AMD Memory Encryption Support
 *
 * Copyright (C) 2016 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */
#ifndef __X86_MEM_ENCRYPT_H__
#define __X86_MEM_ENCRYPT_H__

#ifndef __ASSEMBLY__

#include <linux/init.h>

#include <asm/bootparam.h>

#ifdef CONFIG_AMD_MEM_ENCRYPT
|
2017-09-07 17:38:37 +08:00
|
|
|
extern u64 sme_me_mask;
|
2018-01-31 21:54:02 +08:00
|
|
|
extern bool sev_enabled;
|
2017-07-18 05:10:03 +08:00
|
|
|
|
2017-07-18 05:10:32 +08:00
|
|
|
void sme_encrypt_execute(unsigned long encrypted_kernel_vaddr,
|
|
|
|
unsigned long decrypted_kernel_vaddr,
|
|
|
|
unsigned long kernel_len,
|
|
|
|
unsigned long encryption_wa,
|
|
|
|
unsigned long encryption_pgd);
|
|
|
|
|
2017-07-18 05:10:10 +08:00
|
|
|
void __init sme_early_encrypt(resource_size_t paddr,
|
|
|
|
unsigned long size);
|
|
|
|
void __init sme_early_decrypt(resource_size_t paddr,
|
|
|
|
unsigned long size);
|
|
|
|
|
2017-07-18 05:10:11 +08:00
|
|
|
void __init sme_map_bootdata(char *real_mode_data);
|
|
|
|
void __init sme_unmap_bootdata(char *real_mode_data);
|
|
|
|
|
2017-07-18 05:10:07 +08:00
|
|
|
void __init sme_early_init(void);
|
|
|
|
|
2018-01-11 03:26:34 +08:00
|
|
|
void __init sme_encrypt_kernel(struct boot_params *bp);
|
2017-07-18 05:10:35 +08:00
|
|
|
void __init sme_enable(struct boot_params *bp);
|
2017-07-18 05:10:05 +08:00
|
|
|
|
2017-10-20 22:30:56 +08:00
|
|
|
int __init early_set_memory_decrypted(unsigned long vaddr, unsigned long size);
|
|
|
|
int __init early_set_memory_encrypted(unsigned long vaddr, unsigned long size);
|
|
|
|
|
2017-07-18 05:10:21 +08:00
|
|
|
/* Architecture __weak replacement functions */
|
|
|
|
void __init mem_encrypt_init(void);
|
2018-09-14 21:45:58 +08:00
|
|
|
void __init mem_encrypt_free_decrypted_mem(void);
|
2017-07-18 05:10:21 +08:00
|
|
|
|
2017-10-20 22:30:44 +08:00
|
|
|
bool sme_active(void);
|
|
|
|
bool sev_active(void);
|
|
|
|
|
2018-09-14 21:45:58 +08:00
|
|
|
#define __bss_decrypted __attribute__((__section__(".bss..decrypted")))
|
|
|
|
|
2017-07-18 05:10:03 +08:00
|
|
|
#else	/* !CONFIG_AMD_MEM_ENCRYPT */

/* With SME/SEV compiled out there is no encryption mask. */
#define sme_me_mask	0ULL
|
|
static inline void __init sme_early_encrypt(resource_size_t paddr,
|
|
|
|
unsigned long size) { }
|
|
|
|
static inline void __init sme_early_decrypt(resource_size_t paddr,
|
|
|
|
unsigned long size) { }
|
|
|
|
|
2017-07-18 05:10:11 +08:00
|
|
|
static inline void __init sme_map_bootdata(char *real_mode_data) { }
|
|
|
|
static inline void __init sme_unmap_bootdata(char *real_mode_data) { }
|
|
|
|
|
2017-07-18 05:10:07 +08:00
|
|
|
static inline void __init sme_early_init(void) { }
|
|
|
|
|
2018-01-11 03:26:34 +08:00
|
|
|
static inline void __init sme_encrypt_kernel(struct boot_params *bp) { }
|
2017-07-18 05:10:35 +08:00
|
|
|
static inline void __init sme_enable(struct boot_params *bp) { }
|
2017-07-18 05:10:05 +08:00
|
|
|
|
2017-10-20 22:30:44 +08:00
|
|
|
static inline bool sme_active(void) { return false; }
|
|
|
|
static inline bool sev_active(void) { return false; }
|
|
|
|
|
2017-10-20 22:30:56 +08:00
|
|
|
static inline int __init
|
|
|
|
early_set_memory_decrypted(unsigned long vaddr, unsigned long size) { return 0; }
|
|
|
|
static inline int __init
|
|
|
|
early_set_memory_encrypted(unsigned long vaddr, unsigned long size) { return 0; }
/* No special section handling when memory encryption is compiled out. */
#define __bss_decrypted

#endif	/* CONFIG_AMD_MEM_ENCRYPT */
/*
 * The __sme_pa() and __sme_pa_nodebug() macros are meant for use when
 * writing to or comparing values from the cr3 register.  Having the
 * encryption mask set in cr3 enables the PGD entry to be encrypted and
 * avoid special case handling of PGD allocations.
 */
#define __sme_pa(x)		(__pa(x) | sme_me_mask)
#define __sme_pa_nodebug(x)	(__pa_nodebug(x) | sme_me_mask)

extern char __start_bss_decrypted[], __end_bss_decrypted[], __start_bss_decrypted_unused[];
|
|
|
|
|
2019-08-06 12:49:17 +08:00
|
|
|
static inline bool mem_encrypt_active(void)
|
|
|
|
{
|
|
|
|
return sme_me_mask;
|
|
|
|
}
|
|
|
|
|
|
|
|
static inline u64 sme_get_me_mask(void)
|
|
|
|
{
|
|
|
|
return sme_me_mask;
|
|
|
|
}
#endif	/* __ASSEMBLY__ */

#endif	/* __X86_MEM_ENCRYPT_H__ */