Skip to content

Commit

Permalink
Provide additional AES-GCM test patterns to enhance test coverage.
Browse files Browse the repository at this point in the history
To enhance test coverage for AES-GCM mode, we added longer test
patterns for AES-GCM testing.

Signed-off-by: Phoebe Chen <phoebe.chen@sifive.com>
Signed-off-by: Jerry Shih <jerry.shih@sifive.com>

Reviewed-by: Tomas Mraz <tomas@openssl.org>
Reviewed-by: Paul Dale <pauli@openssl.org>
Reviewed-by: Hugo Landau <hlandau@openssl.org>
(Merged from #21923)
  • Loading branch information
phoebesv authored and hlandau committed Oct 26, 2023
1 parent d056e90 commit ebecf32
Show file tree
Hide file tree
Showing 5 changed files with 985 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@

# - RV64I
# - RISC-V Vector ('V') with VLEN >= 128
# - RISC-V Vector Basic Bit-manipulation extension ('Zvbb')
# - RISC-V Vector Cryptography Bit-manipulation extension ('Zvkb')
# - RISC-V Vector GCM/GMAC extension ('Zvkg')
# - RISC-V Vector AES Block Cipher extension ('Zvkned')
# - RISC-V Zicclsm(Main memory supports misaligned loads/stores)
Expand Down Expand Up @@ -601,16 +601,16 @@ sub compute_final_tag {
}

################################################################################
# size_t rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt(const unsigned char *in,
# size_t rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt(const unsigned char *in,
# unsigned char *out, size_t len,
# const void *key,
# unsigned char ivec[16], u64 *Xi);
{
$code .= <<___;
.p2align 3
.globl rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt
.type rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt,\@function
rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt:
.globl rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt
.type rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt,\@function
rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt:
srli $T0, $LEN, 4
beqz $T0, .Lenc_end
slli $LEN32, $T0, 2
Expand All @@ -633,7 +633,7 @@ sub compute_final_tag {
li $PROCESSED_LEN, 0
ret
.size rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt,.-rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt
.size rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt,.-rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt
___

$code .= <<___;
Expand Down Expand Up @@ -786,16 +786,16 @@ sub compute_final_tag {
}

################################################################################
# size_t rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt(const unsigned char *in,
# size_t rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt(const unsigned char *in,
# unsigned char *out, size_t len,
# const void *key,
# unsigned char ivec[16], u64 *Xi);
{
$code .= <<___;
.p2align 3
.globl rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt
.type rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt,\@function
rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt:
.globl rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt
.type rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt,\@function
rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt:
srli $T0, $LEN, 4
beqz $T0, .Ldec_end
slli $LEN32, $T0, 2
Expand All @@ -817,7 +817,7 @@ sub compute_final_tag {
.Ldec_end:
li $PROCESSED_LEN, 0
ret
.size rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt,.-rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt
.size rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt,.-rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt
___

$code .= <<___;
Expand Down
2 changes: 1 addition & 1 deletion crypto/modes/build.info
Original file line number Diff line number Diff line change
Expand Up @@ -93,4 +93,4 @@ GENERATE[ghash-c64xplus.S]=asm/ghash-c64xplus.pl
GENERATE[ghash-riscv64.s]=asm/ghash-riscv64.pl
GENERATE[ghash-riscv64-zvkb-zvbc.s]=asm/ghash-riscv64-zvkb-zvbc.pl
GENERATE[ghash-riscv64-zvkg.s]=asm/ghash-riscv64-zvkg.pl
GENERATE[aes-gcm-riscv64-zvbb-zvkg-zvkned.s]=asm/aes-gcm-riscv64-zvbb-zvkg-zvkned.pl
GENERATE[aes-gcm-riscv64-zvkb-zvkg-zvkned.s]=asm/aes-gcm-riscv64-zvkb-zvkg-zvkned.pl
17 changes: 9 additions & 8 deletions include/crypto/aes_platform.h
Original file line number Diff line number Diff line change
Expand Up @@ -475,25 +475,26 @@ void rv64i_zvkb_zvkned_ctr32_encrypt_blocks(const unsigned char *in,
const void *key,
const unsigned char ivec[16]);

size_t rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt(const unsigned char *in,
size_t rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt(const unsigned char *in,
unsigned char *out, size_t len,
const void *key,
unsigned char ivec[16], u64 *Xi);

size_t rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt(const unsigned char *in,
size_t rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt(const unsigned char *in,
unsigned char *out, size_t len,
const void *key,
unsigned char ivec[16], u64 *Xi);

void gcm_ghash_rv64i_zvkg(u64 Xi[2], const u128 Htable[16], const u8 *inp,
size_t len);

# define AES_GCM_ENC_BYTES 64
# define AES_GCM_DEC_BYTES 64
# define AES_gcm_encrypt rv64i_zvbb_zvkg_zvkned_aes_gcm_encrypt
# define AES_gcm_decrypt rv64i_zvbb_zvkg_zvkned_aes_gcm_decrypt
# define AES_GCM_ASM(ctx) (ctx->ctr == rv64i_zvbb_zvkned_ctr32_encrypt_blocks && \
ctx->gcm.funcs.ghash == gcm_ghash_rv64i_zvkg)
#define AES_GCM_ENC_BYTES 64
#define AES_GCM_DEC_BYTES 64
#define AES_gcm_encrypt rv64i_zvkb_zvkg_zvkned_aes_gcm_encrypt
#define AES_gcm_decrypt rv64i_zvkb_zvkg_zvkned_aes_gcm_decrypt
#define AES_GCM_ASM(ctx) \
(ctx->ctr == rv64i_zvkb_zvkned_ctr32_encrypt_blocks && \
ctx->gcm.funcs.ghash == gcm_ghash_rv64i_zvkg)

# elif defined(OPENSSL_CPUID_OBJ) && defined(__riscv) && __riscv_xlen == 32
/* RISC-V 32 support */
Expand Down
16 changes: 8 additions & 8 deletions providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,9 @@ static const PROV_GCM_HW rv64i_zvkned_gcm = {
};

/*-
* RISC-V RV64 ZVBB, ZVKG and ZVKNED support for AES GCM.
* RISC-V RV64 ZVKB, ZVKG and ZVKNED support for AES GCM.
*/
static int rv64i_zvbb_zvkg_zvkned_gcm_initkey(PROV_GCM_CTX *ctx,
static int rv64i_zvkb_zvkg_zvkned_gcm_initkey(PROV_GCM_CTX *ctx,
const unsigned char *key,
size_t keylen) {
PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
Expand All @@ -83,18 +83,18 @@ static int rv64i_zvbb_zvkg_zvkned_gcm_initkey(PROV_GCM_CTX *ctx,
if (keylen * 8 == 128 || keylen * 8 == 256) {
GCM_HW_SET_KEY_CTR_FN(ks, rv64i_zvkned_set_encrypt_key,
rv64i_zvkned_encrypt,
rv64i_zvbb_zvkned_ctr32_encrypt_blocks);
rv64i_zvkb_zvkned_ctr32_encrypt_blocks);
} else {
GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key,
rv64i_zvkned_encrypt,
rv64i_zvbb_zvkned_ctr32_encrypt_blocks);
rv64i_zvkb_zvkned_ctr32_encrypt_blocks);
}

return 1;
}

static const PROV_GCM_HW rv64i_zvbb_zvkg_zvkned_gcm = {
rv64i_zvbb_zvkg_zvkned_gcm_initkey,
static const PROV_GCM_HW rv64i_zvkb_zvkg_zvkned_gcm = {
rv64i_zvkb_zvkg_zvkned_gcm_initkey,
ossl_gcm_setiv,
ossl_gcm_aad_update,
generic_aes_gcm_cipher_update,
Expand All @@ -104,8 +104,8 @@ static const PROV_GCM_HW rv64i_zvbb_zvkg_zvkned_gcm = {

const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits) {
if (RISCV_HAS_ZVKNED()) {
if (RISCV_HAS_ZVBB() && RISCV_HAS_ZVKG() && riscv_vlen() >= 128) {
return &rv64i_zvbb_zvkg_zvkned_gcm;
if (RISCV_HAS_ZVKB() && RISCV_HAS_ZVKG() && riscv_vlen() >= 128) {
return &rv64i_zvkb_zvkg_zvkned_gcm;
}
return &rv64i_zvkned_gcm;
}
Expand Down

0 comments on commit ebecf32

Please sign in to comment.