[PATCH 12/35] target/arm: Use aesdec_ISB_ISR
From: Richard Henderson
Subject: [PATCH 12/35] target/arm: Use aesdec_ISB_ISR
Date: Fri, 2 Jun 2023 19:34:03 -0700
This implements the AESD instruction on top of the new aesdec_ISB_ISR helper, replacing the open-coded do_crypto_aese for the decrypt path: the AddRoundKey xor stays in the target helper and the combined InvSubBytes/InvShiftRows step moves to the shared routine.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
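For reviewers who want the transformation spelled out byte-wise: AESD is AddRoundKey followed by the combined InvShiftRows/InvSubBytes step, and the shared helper is expected to cover the latter while the xor stays in the caller. Below is a minimal reference sketch, not part of the patch; it assumes the AES_isbox[] and AES_ishifts[] tables that the removed do_crypto_aese() received are still reachable via "crypto/aes.h" at this point in the series.

    /*
     * Reference sketch only (not QEMU code).  Computes
     * InvSubBytes(InvShiftRows(st ^ rk)) on a 16-byte column-major state,
     * assuming "crypto/aes.h" still exports AES_isbox[] and AES_ishifts[].
     */
    #include <stdint.h>
    #include "crypto/aes.h"

    static void aesd_isb_isr_ref(uint8_t out[16],
                                 const uint8_t st[16], const uint8_t rk[16])
    {
        uint8_t t[16];
        int i;

        /* AddRoundKey: xor the state with the round key. */
        for (i = 0; i < 16; i++) {
            t[i] = st[i] ^ rk[i];
        }
        /* InvShiftRows (via the inverse shift table), then InvSubBytes. */
        for (i = 0; i < 16; i++) {
            out[i] = AES_isbox[t[AES_ishifts[i]]];
        }
    }

This mirrors what the removed do_crypto_aese(..., AES_isbox, AES_ishifts) computed, minus the CRYPTO_STATE packing.
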
target/arm/tcg/crypto_helper.c | 37 +++++++++++++++-------------------
1 file changed, 16 insertions(+), 21 deletions(-)
diff --git a/target/arm/tcg/crypto_helper.c b/target/arm/tcg/crypto_helper.c
index 5cebc88f5f..d7b644851f 100644
--- a/target/arm/tcg/crypto_helper.c
+++ b/target/arm/tcg/crypto_helper.c
@@ -46,26 +46,6 @@ static void clear_tail_16(void *vd, uint32_t desc)
     clear_tail(vd, opr_sz, max_sz);
 }
 
-static void do_crypto_aese(uint64_t *rd, uint64_t *rn, uint64_t *rm,
-                           const uint8_t *sbox, const uint8_t *shift)
-{
-    union CRYPTO_STATE rk = { .l = { rm[0], rm[1] } };
-    union CRYPTO_STATE st = { .l = { rn[0], rn[1] } };
-    int i;
-
-    /* xor state vector with round key */
-    rk.l[0] ^= st.l[0];
-    rk.l[1] ^= st.l[1];
-
-    /* combine ShiftRows operation and sbox substitution */
-    for (i = 0; i < 16; i++) {
-        CR_ST_BYTE(st, i) = sbox[CR_ST_BYTE(rk, shift[i])];
-    }
-
-    rd[0] = st.l[0];
-    rd[1] = st.l[1];
-}
-
 void HELPER(crypto_aese)(void *vd, void *vn, void *vm, uint32_t desc)
 {
     intptr_t i, opr_sz = simd_oprsz(desc);
@@ -96,7 +76,22 @@ void HELPER(crypto_aesd)(void *vd, void *vn, void *vm, uint32_t desc)
     intptr_t i, opr_sz = simd_oprsz(desc);
 
     for (i = 0; i < opr_sz; i += 16) {
-        do_crypto_aese(vd + i, vn + i, vm + i, AES_isbox, AES_ishifts);
+        AESState *ad = (AESState *)(vd + i);
+        AESState *st = (AESState *)(vn + i);
+        AESState *rk = (AESState *)(vm + i);
+        AESState t;
+
+        /* Our uint64_t are in the wrong order for big-endian. */
+        if (HOST_BIG_ENDIAN) {
+            t.d[0] = st->d[1] ^ rk->d[1];
+            t.d[1] = st->d[0] ^ rk->d[0];
+            aesdec_ISB_ISR(&t, &t, false);
+            ad->d[0] = t.d[1];
+            ad->d[1] = t.d[0];
+        } else {
+            t.v = st->v ^ rk->v;
+            aesdec_ISB_ISR(ad, &t, false);
+        }
     }
     clear_tail(vd, opr_sz, simd_maxsz(desc));
 }
--
2.34.1
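
A note on the big-endian branch in the hunk above: the code relies on AESState aliasing the same 16 bytes several ways, and on a host where the two uint64_t lanes land in the wrong order the xor and the result are swapped around the helper call instead of going through the single 128-bit ^ used on little-endian hosts. Inferring only from how this hunk uses the type (.d[] for the lanes, .v for the whole-state xor), the union looks roughly like the sketch below; the real definition comes from the crypto headers added earlier in this series and is authoritative.

    /*
     * Assumed shape of AESState, reconstructed from usage in this hunk;
     * the .b member and the exact type of .v are guesses.  __int128 is
     * used here (a GCC/Clang extension) only so the whole-state ^ from
     * the little-endian path type-checks in a standalone sketch.
     */
    #include <stdint.h>

    typedef union {
        uint8_t           b[16];   /* state bytes (assumed) */
        uint64_t          d[2];    /* two 64-bit lanes, host-endian */
        unsigned __int128 v;       /* whole state, usable for a single ^ */
    } AESStateSketch;

    _Static_assert(sizeof(AESStateSketch) == 16, "AES state is 16 bytes");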