Skip to content

Commit

Permalink
providers: Add SM4 XTS implementation
Browse files Browse the repository at this point in the history
Signed-off-by: Xu Yizhou <xuyizhou1@huawei.com>

Reviewed-by: Hugo Landau <hlandau@openssl.org>
Reviewed-by: Tomas Mraz <tomas@openssl.org>
(Merged from #19619)
  • Loading branch information
xu-yi-zhou authored and t8m committed Nov 29, 2022
1 parent de8f6a3 commit 2788b56
Show file tree
Hide file tree
Showing 11 changed files with 629 additions and 2 deletions.
2 changes: 1 addition & 1 deletion crypto/modes/build.info
Expand Up @@ -52,7 +52,7 @@ IF[{- !$disabled{asm} -}]
ENDIF

$COMMON=cbc128.c ctr128.c cfb128.c ofb128.c gcm128.c ccm128.c xts128.c \
wrap128.c $MODESASM
wrap128.c xts128gb.c $MODESASM
SOURCE[../../libcrypto]=$COMMON \
cts128.c ocb128.c siv128.c
SOURCE[../../providers/libfips.a]=$COMMON
Expand Down
199 changes: 199 additions & 0 deletions crypto/modes/xts128gb.c
@@ -0,0 +1,199 @@
/*
* Copyright 2022 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/

#include <string.h>
#include <openssl/crypto.h>
#include "internal/endian.h"
#include "crypto/modes.h"

#ifndef STRICT_ALIGNMENT
/*
 * On platforms that tolerate unaligned loads/stores, u64_a1 lets the
 * 16-byte blocks be accessed as two 64-bit words even when inp/out are
 * not 8-byte aligned; GCC's aligned(1) attribute stops the compiler
 * from assuming natural alignment for such accesses.
 */
# ifdef __GNUC__
typedef u64 u64_a1 __attribute((__aligned__(1)));
# else
typedef u64 u64_a1;
# endif
#endif

/*
 * XTS mode as specified by GB/T 17964-2021 (used with SM4).  Same
 * overall shape as IEEE P1619 XTS (xts128.c): each 16-byte block is
 * XORed with a per-block tweak before and after the block cipher, with
 * ciphertext stealing for a trailing partial block.  The difference is
 * the tweak-update direction: here the 128-bit tweak is multiplied by x
 * in the *reversed* bit order - shifted right by one bit, reducing with
 * 0xe1 when a bit falls off the low end - instead of the usual
 * left-shift/0x87 reduction.
 *
 * ctx->block1/ctx->key1 process the data (encrypt or decrypt per the
 * caller's key schedule); ctx->block2/ctx->key2 encrypt the IV to form
 * the initial tweak.
 *
 * iv:  16-byte tweak input (e.g. sector number), encrypted under key2.
 * enc: non-zero for encryption, zero for decryption (affects only the
 *      ciphertext-stealing tail; the main loop is direction-agnostic).
 *
 * Returns 0 on success, -1 if len < 16 (XTS requires at least one full
 * block).
 */
int ossl_crypto_xts128gb_encrypt(const XTS128_CONTEXT *ctx,
                                 const unsigned char iv[16],
                                 const unsigned char *inp, unsigned char *out,
                                 size_t len, int enc)
{
    DECLARE_IS_ENDIAN;
    union {
        u64 u[2];
        u32 d[4];
        u8 c[16];
    } tweak, scratch;
    unsigned int i;

    if (len < 16)
        return -1;

    memcpy(tweak.c, iv, 16);

    /* Initial tweak = E_{key2}(iv). */
    (*ctx->block2) (tweak.c, tweak.c, ctx->key2);

    /*
     * When decrypting with a partial final block, stop the main loop one
     * full block early: that block must be processed with the *next*
     * tweak value (ciphertext stealing, handled after the loop).
     */
    if (!enc && (len % 16))
        len -= 16;

    while (len >= 16) {
        /* out = E/D_{key1}(inp XOR tweak) XOR tweak */
#if defined(STRICT_ALIGNMENT)
        memcpy(scratch.c, inp, 16);
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
#else
        scratch.u[0] = ((u64_a1 *)inp)[0] ^ tweak.u[0];
        scratch.u[1] = ((u64_a1 *)inp)[1] ^ tweak.u[1];
#endif
        (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
#if defined(STRICT_ALIGNMENT)
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
        memcpy(out, scratch.c, 16);
#else
        /*
         * Note: scratch keeps the post-XOR value as well; the CTS tail
         * below relies on scratch holding the last emitted block.
         */
        ((u64_a1 *)out)[0] = scratch.u[0] ^= tweak.u[0];
        ((u64_a1 *)out)[1] = scratch.u[1] ^= tweak.u[1];
#endif
        inp += 16;
        out += 16;
        len -= 16;

        if (len == 0)
            return 0;

        /*
         * Advance the tweak for the next block: shift the 128-bit value
         * right by one bit; if a 1 bit is shifted out, XOR in the
         * reduction constant 0xe1 (GB/T 17964-2021 bit order).
         */
        if (IS_LITTLE_ENDIAN) {
            u8 res;
            u64 hi, lo;
#ifdef BSWAP8
            hi = BSWAP8(tweak.u[0]);
            lo = BSWAP8(tweak.u[1]);
#else
            u8 *p = tweak.c;

            /* Assemble big-endian 64-bit halves without BSWAP8. */
            hi = (u64)GETU32(p) << 32 | GETU32(p + 4);
            lo = (u64)GETU32(p + 8) << 32 | GETU32(p + 12);
#endif
            res = (u8)lo & 1;       /* bit that falls off the low end */
            tweak.u[0] = (lo >> 1) | (hi << 63);
            tweak.u[1] = hi >> 1;
            if (res)
                tweak.c[15] ^= 0xe1; /* reduction while in host word order */
#ifdef BSWAP8
            hi = BSWAP8(tweak.u[0]);
            lo = BSWAP8(tweak.u[1]);
#else
            p = tweak.c;

            hi = (u64)GETU32(p) << 32 | GETU32(p + 4);
            lo = (u64)GETU32(p + 8) << 32 | GETU32(p + 12);
#endif
            /* Swap halves back so tweak.c is in byte order again. */
            tweak.u[0] = lo;
            tweak.u[1] = hi;
        } else {
            u8 carry, res;
            carry = 0;
            /*
             * Big-endian host: byte-wise right shift, propagating each
             * byte's dropped bit into the next byte's MSB.
             */
            for (i = 0; i < 16; ++i) {
                res = (tweak.c[i] << 7) & 0x80;
                tweak.c[i] = ((tweak.c[i] >> 1) + carry) & 0xff;
                carry = res;
            }
            /* res is the bit shifted out of the last byte -> reduce. */
            if (res)
                tweak.c[0] ^= 0xe1;
        }
    }
    /*
     * Ciphertext stealing: reached only when 0 < len < 16 remained after
     * the loop, i.e. the total length was not a multiple of 16.
     */
    if (enc) {
        /*
         * scratch still holds the last full ciphertext block (see the
         * loop).  Emit its first `len` bytes as the short final block,
         * replace them with the plaintext tail, then encrypt the merged
         * block with the current tweak and store it one block back.
         */
        for (i = 0; i < len; ++i) {
            u8 c = inp[i];
            out[i] = scratch.c[i];
            scratch.c[i] = c;
        }
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
        (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
        memcpy(out - 16, scratch.c, 16);
    } else {
        /* tweak1 = next tweak value (same right-shift/0xe1 update). */
        union {
            u64 u[2];
            u8 c[16];
        } tweak1;

        if (IS_LITTLE_ENDIAN) {
            u8 res;
            u64 hi, lo;
#ifdef BSWAP8
            hi = BSWAP8(tweak.u[0]);
            lo = BSWAP8(tweak.u[1]);
#else
            u8 *p = tweak.c;

            hi = (u64)GETU32(p) << 32 | GETU32(p + 4);
            lo = (u64)GETU32(p + 8) << 32 | GETU32(p + 12);
#endif
            res = (u8)lo & 1;
            tweak1.u[0] = (lo >> 1) | (hi << 63);
            tweak1.u[1] = hi >> 1;
            if (res)
                tweak1.c[15] ^= 0xe1;
#ifdef BSWAP8
            hi = BSWAP8(tweak1.u[0]);
            lo = BSWAP8(tweak1.u[1]);
#else
            p = tweak1.c;

            hi = (u64)GETU32(p) << 32 | GETU32(p + 4);
            lo = (u64)GETU32(p + 8) << 32 | GETU32(p + 12);
#endif
            tweak1.u[0] = lo;
            tweak1.u[1] = hi;
        } else {
            u8 carry, res;
            carry = 0;
            for (i = 0; i < 16; ++i) {
                res = (tweak.c[i] << 7) & 0x80;
                tweak1.c[i] = ((tweak.c[i] >> 1) + carry) & 0xff;
                carry = res;
            }
            if (res)
                tweak1.c[0] ^= 0xe1;
        }
        /*
         * Decrypt the last *full* ciphertext block under tweak1; its
         * plaintext supplies both the final partial output and the
         * stolen bytes.
         */
#if defined(STRICT_ALIGNMENT)
        memcpy(scratch.c, inp, 16);
        scratch.u[0] ^= tweak1.u[0];
        scratch.u[1] ^= tweak1.u[1];
#else
        scratch.u[0] = ((u64_a1 *)inp)[0] ^ tweak1.u[0];
        scratch.u[1] = ((u64_a1 *)inp)[1] ^ tweak1.u[1];
#endif
        (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
        scratch.u[0] ^= tweak1.u[0];
        scratch.u[1] ^= tweak1.u[1];

        /*
         * Emit the partial plaintext tail and splice the short
         * ciphertext block into scratch in its place.
         */
        for (i = 0; i < len; ++i) {
            u8 c = inp[16 + i];
            out[16 + i] = scratch.c[i];
            scratch.c[i] = c;
        }
        /* Decrypt the reassembled block under the current tweak. */
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
        (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
#if defined(STRICT_ALIGNMENT)
        scratch.u[0] ^= tweak.u[0];
        scratch.u[1] ^= tweak.u[1];
        memcpy(out, scratch.c, 16);
#else
        ((u64_a1 *)out)[0] = scratch.u[0] ^ tweak.u[0];
        ((u64_a1 *)out)[1] = scratch.u[1] ^ tweak.u[1];
#endif
    }

    return 0;
}
6 changes: 6 additions & 0 deletions include/crypto/modes.h
Expand Up @@ -155,6 +155,12 @@ struct xts128_context {
block128_f block1, block2;
};

/* XTS mode for SM4 algorithm specified by GB/T 17964-2021 */
int ossl_crypto_xts128gb_encrypt(const XTS128_CONTEXT *ctx,
const unsigned char iv[16],
const unsigned char *inp, unsigned char *out,
size_t len, int enc);

struct ccm128_context {
union {
u64 u[2];
Expand Down
1 change: 1 addition & 0 deletions include/openssl/core_names.h
Expand Up @@ -97,6 +97,7 @@ extern "C" {
#define OSSL_CIPHER_PARAM_CTS_MODE "cts_mode" /* utf8_string */
/* For passing the AlgorithmIdentifier parameter in DER form */
#define OSSL_CIPHER_PARAM_ALGORITHM_ID_PARAMS "alg_id_param" /* octet_string */
#define OSSL_CIPHER_PARAM_XTS_STANDARD "xts_standard" /* utf8_string */

#define OSSL_CIPHER_PARAM_TLS1_MULTIBLOCK_MAX_SEND_FRAGMENT \
"tls1multi_maxsndfrag" /* uint */
Expand Down
1 change: 1 addition & 0 deletions providers/defltprov.c
Expand Up @@ -304,6 +304,7 @@ static const OSSL_ALGORITHM_CAPABLE deflt_ciphers[] = {
ALG(PROV_NAMES_SM4_CTR, ossl_sm4128ctr_functions),
ALG(PROV_NAMES_SM4_OFB, ossl_sm4128ofb128_functions),
ALG(PROV_NAMES_SM4_CFB, ossl_sm4128cfb128_functions),
ALG(PROV_NAMES_SM4_XTS, ossl_sm4128xts_functions),
#endif /* OPENSSL_NO_SM4 */
#ifndef OPENSSL_NO_CHACHA
ALG(PROV_NAMES_ChaCha20, ossl_chacha20_functions),
Expand Down
4 changes: 3 additions & 1 deletion providers/implementations/ciphers/build.info
Expand Up @@ -153,7 +153,9 @@ IF[{- !$disabled{sm4} -}]
SOURCE[$SM4_GOAL]=\
cipher_sm4.c cipher_sm4_hw.c \
cipher_sm4_gcm.c cipher_sm4_gcm_hw.c \
cipher_sm4_ccm.c cipher_sm4_ccm_hw.c
cipher_sm4_ccm.c cipher_sm4_ccm_hw.c \
cipher_sm4_xts.c cipher_sm4_xts_hw.c

ENDIF

IF[{- !$disabled{ocb} -}]
Expand Down

0 comments on commit 2788b56

Please sign in to comment.