Arm64: simplify Crypto arch extension handling

This, at the assembler level, is just an umbrella feature covering both
AES and SHA2. Hence there's no need for it to have a separate feature
flag, which frees up a bit for future re-use. Along these lines, also
drop a number of dead definitions/variables from the opcode table file.
Author: Jan Beulich
Date:   2019-12-05 08:44:22 +01:00
Parent: d4340f89ec
Commit: 2dc4b12fcd

6 changed files with 20 additions and 16 deletions
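
Seen in isolation, the point of the change is that "crypto" no longer owns a
feature bit of its own: enabling it simply sets the AES and SHA2 bits and
nothing else. A minimal standalone C sketch of that idea (the FEAT_* masks
below are illustrative stand-ins, not the real AARCH64_FEATURE_* values):

#include <stdint.h>
#include <stdio.h>

/* Illustrative bit assignments; the real AARCH64_FEATURE_* masks differ.  */
#define FEAT_AES    0x1u
#define FEAT_SHA2   0x2u
/* The umbrella feature owns no bit of its own; it is just the union.  */
#define FEAT_CRYPTO (FEAT_AES | FEAT_SHA2)

int main (void)
{
  uint32_t cpu = 0;

  cpu |= FEAT_CRYPTO;  /* enabling "crypto" enables AES and SHA2 */
  printf ("aes=%d sha2=%d\n",
          (cpu & FEAT_AES) != 0, (cpu & FEAT_SHA2) != 0);
  return 0;
}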

gas/ChangeLog

@@ -1,3 +1,8 @@
+2019-12-05  Jan Beulich  <jbeulich@suse.com>
+
+	* config/tc-aarch64.c (aarch64_features): Drop redundant AES and
+	SHA2 flags from "crypto" entry.
+
 2019-12-05  Jan Beulich  <jbeulich@suse.com>
 
 	* config/tc-aarch64.c (aarch64_features): Make SHA2 a prereq of

gas/config/tc-aarch64.c

@@ -8990,9 +8990,7 @@ struct aarch64_option_cpu_value_table
 static const struct aarch64_option_cpu_value_table aarch64_features[] = {
   {"crc",               AARCH64_FEATURE (AARCH64_FEATURE_CRC, 0),
                         AARCH64_ARCH_NONE},
-  {"crypto",            AARCH64_FEATURE (AARCH64_FEATURE_CRYPTO
-                                         | AARCH64_FEATURE_AES
-                                         | AARCH64_FEATURE_SHA2, 0),
+  {"crypto",            AARCH64_FEATURE (AARCH64_FEATURE_CRYPTO, 0),
                         AARCH64_FEATURE (AARCH64_FEATURE_SIMD, 0)},
   {"fp",                AARCH64_FEATURE (AARCH64_FEATURE_FP, 0),
                         AARCH64_ARCH_NONE},
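
Each aarch64_features[] entry pairs the bits an extension name enables
(second field) with the features it depends on (third field; SIMD for
"crypto"). A hedged sketch of how such a name/value/require table can be
consumed; enable_extension below is a hypothetical helper with made-up
F_* masks, not gas's actual parsing code:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative masks, not the real AARCH64_FEATURE_* values.  */
#define F_SIMD   0x1u
#define F_AES    0x2u
#define F_SHA2   0x4u
#define F_CRYPTO (F_AES | F_SHA2)

struct feature_entry
{
  const char *name;
  uint32_t value;    /* bits the extension turns on */
  uint32_t require;  /* bits it depends on */
};

static const struct feature_entry features[] =
{
  { "simd",   F_SIMD,   0 },
  { "crypto", F_CRYPTO, F_SIMD },
};

/* Hypothetical helper: enable NAME, pulling in its dependencies too.  */
static int
enable_extension (uint32_t *cpu, const char *name)
{
  for (size_t i = 0; i < sizeof features / sizeof features[0]; i++)
    if (strcmp (features[i].name, name) == 0)
      {
        *cpu |= features[i].value | features[i].require;
        return 1;
      }
  return 0;  /* unknown extension name */
}

int main (void)
{
  uint32_t cpu = 0;

  enable_extension (&cpu, "crypto");
  printf ("simd=%d aes=%d sha2=%d\n",
          (cpu & F_SIMD) != 0, (cpu & F_AES) != 0, (cpu & F_SHA2) != 0);
  return 0;
}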

include/ChangeLog

@@ -1,3 +1,8 @@
+2019-12-05  Jan Beulich  <jbeulich@suse.com>
+
+	* opcode/aarch64.h (AARCH64_FEATURE_CRYPTO): Expand to the
+	combination of AES and SHA2.
+
 2019-11-25  Alan Modra  <amodra@gmail.com>
 
 	* coff/ti.h (GET_SCNHDR_SIZE, PUT_SCNHDR_SIZE, GET_SCN_SCNLEN),

include/opcode/aarch64.h

@@ -45,7 +45,6 @@ typedef uint32_t aarch64_insn;
 #define AARCH64_FEATURE_V8      0x00000001  /* All processors.  */
 #define AARCH64_FEATURE_V8_2    0x00000020  /* ARMv8.2 processors.  */
 #define AARCH64_FEATURE_V8_3    0x00000040  /* ARMv8.3 processors.  */
-#define AARCH64_FEATURE_CRYPTO  0x00010000  /* Crypto instructions.  */
 #define AARCH64_FEATURE_FP      0x00020000  /* FP instructions.  */
 #define AARCH64_FEATURE_SIMD    0x00040000  /* SIMD instructions.  */
 #define AARCH64_FEATURE_CRC     0x00080000  /* CRC instructions.  */
@@ -103,6 +102,9 @@ typedef uint32_t aarch64_insn;
 #define AARCH64_FEATURE_SVE2_SM4        0x000000200
 #define AARCH64_FEATURE_SVE2_SHA3       0x000000400
 
+/* Crypto instructions are the combination of AES and SHA2.  */
+#define AARCH64_FEATURE_CRYPTO  (AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_AES)
+
 /* Architectures are the sum of the base and extensions.  */
 #define AARCH64_ARCH_V8         AARCH64_FEATURE (AARCH64_FEATURE_V8, \
                                                  AARCH64_FEATURE_FP \
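
One consequence of CRYPTO becoming a two-bit mask is that any "does this
CPU have crypto?" check must require all bits of the mask: a plain
non-zero AND would match a SHA2-only (or AES-only) configuration. A small
illustration, again with stand-in FEAT_* values rather than the real ones:

#include <assert.h>
#include <stdint.h>

/* Stand-in masks; the real AARCH64_FEATURE_* values differ.  */
#define FEAT_SHA2   0x1u
#define FEAT_AES    0x2u
#define FEAT_CRYPTO (FEAT_SHA2 | FEAT_AES)

int main (void)
{
  uint32_t cpu = FEAT_SHA2;  /* SHA2 present, AES absent */

  /* An "any bit" test would wrongly report crypto support here...  */
  assert ((cpu & FEAT_CRYPTO) != 0);
  /* ...so a multi-bit umbrella needs an "all bits present" test.  */
  assert ((cpu & FEAT_CRYPTO) != FEAT_CRYPTO);
  return 0;
}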

opcodes/ChangeLog

@@ -1,3 +1,9 @@
+2019-12-05  Jan Beulich  <jbeulich@suse.com>
+
+	* aarch64-tbl.h (aarch64_feature_crypto,
+	aarch64_feature_crypto_v8_2, CRYPTO, CRYPTO_V8_2, CRYP_INSN,
+	CRYPTO_V8_2_INSN): Delete.
+
 2019-12-05  Alan Modra  <amodra@gmail.com>
 
 	PR 25249

opcodes/aarch64-tbl.h

@@ -2324,9 +2324,6 @@ static const aarch64_feature_set aarch64_feature_fp =
   AARCH64_FEATURE (AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_simd =
   AARCH64_FEATURE (AARCH64_FEATURE_SIMD, 0);
-static const aarch64_feature_set aarch64_feature_crypto =
-  AARCH64_FEATURE (AARCH64_FEATURE_CRYPTO | AARCH64_FEATURE_AES
-                   | AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_crc =
   AARCH64_FEATURE (AARCH64_FEATURE_CRC, 0);
 static const aarch64_feature_set aarch64_feature_lse =
@@ -2363,9 +2360,6 @@ static const aarch64_feature_set aarch64_feature_aes =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_AES, 0);
 static const aarch64_feature_set aarch64_feature_v8_4 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8_4, 0);
-static const aarch64_feature_set aarch64_feature_crypto_v8_2 =
-  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_CRYPTO
-                   | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_sm4 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SM4
                    | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
@@ -2423,7 +2417,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
 #define CORE        &aarch64_feature_v8
 #define FP          &aarch64_feature_fp
 #define SIMD        &aarch64_feature_simd
-#define CRYPTO      &aarch64_feature_crypto
 #define CRC         &aarch64_feature_crc
 #define LSE         &aarch64_feature_lse
 #define LOR         &aarch64_feature_lor
@@ -2443,7 +2436,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
 #define ARMV8_4     &aarch64_feature_v8_4
 #define SHA3        &aarch64_feature_sha3
 #define SM4         &aarch64_feature_sm4
-#define CRYPTO_V8_2 &aarch64_feature_crypto_v8_2
 #define FP_F16_V8_2 &aarch64_feature_fp_16_v8_2
 #define DOTPROD     &aarch64_feature_dotprod
 #define ARMV8_5     &aarch64_feature_v8_5
@@ -2476,8 +2468,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
   { NAME, OPCODE, MASK, CLASS, OP, SIMD, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _SIMD_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS,VERIFIER) \
   { NAME, OPCODE, MASK, CLASS, OP, SIMD, OPS, QUALS, FLAGS, 0, 0, VERIFIER }
-#define CRYP_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
-  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _CRC_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, CRC, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _LSE_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
@@ -2510,8 +2500,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
   { NAME, OPCODE, MASK, CLASS, 0, AES, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define V8_4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define CRYPTO_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
-  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO_V8_2, OPS, QUALS, FLAGS, 0, NULL }
 #define SHA3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, SHA3, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define SM4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \