crypto: x86 - Regularize glue function prototypes
commit 9c1e883 upstream.

The crypto glue performed function prototype casting via macros to make
indirect calls to assembly routines. Instead of performing casts at the
call sites (which trips Control Flow Integrity prototype checking), switch
each prototype to a common standard set of arguments which allows the
removal of the existing macros. In order to keep pointer math unchanged,
internal casting between u128 pointers and u8 pointers is added.
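As a minimal user-space sketch of the pattern (illustrative only: glue_func_t,
toy_enc and toy_dec are invented names for this example, not kernel APIs),
every routine is defined with the common prototype and casts its context
pointer internally, so the dispatch table needs no function-pointer casts and
each indirect call matches the callee's real prototype, which is exactly the
property CFI verifies:

#include <stdio.h>

/* Common prototype shared by every implementation. */
typedef void (*glue_func_t)(const void *ctx, unsigned char *dst,
			    const unsigned char *src);

/* Defined with the common prototype; the context cast happens inside
 * the function body, not at the call site. */
static void toy_enc(const void *ctx, unsigned char *dst,
		    const unsigned char *src)
{
	const unsigned char *key = ctx;	/* internal cast, CFI-safe */

	dst[0] = src[0] ^ key[0];
}

static void toy_dec(const void *ctx, unsigned char *dst,
		    const unsigned char *src)
{
	const unsigned char *key = ctx;

	dst[0] = src[0] ^ key[0];	/* XOR is its own inverse */
}

int main(void)
{
	unsigned char key = 0x55, in = 'A', mid, out;
	/* No GLUE_FUNC_CAST()-style casts needed: the types already match. */
	glue_func_t funcs[2] = { toy_enc, toy_dec };

	funcs[0](&key, &mid, &in);	/* "encrypt" one byte */
	funcs[1](&key, &out, &mid);	/* "decrypt" it back */
	printf("0x%02x -> 0x%02x -> 0x%02x\n", (unsigned)in, (unsigned)mid,
	       (unsigned)out);
	return 0;
}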

Co-developed-by: João Moreira <joao.moreira@intel.com>
Signed-off-by: João Moreira <joao.moreira@intel.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
Reviewed-by: Eric Biggers <ebiggers@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Cc: Ard Biesheuvel <ardb@google.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
kees authored and gregkh committed Mar 20, 2021
1 parent 187ae04 commit eeb0899
Showing 22 changed files with 374 additions and 413 deletions.
8 changes: 4 additions & 4 deletions arch/x86/crypto/aesni-intel_asm.S
@@ -1946,7 +1946,7 @@ ENTRY(aesni_set_key)
ENDPROC(aesni_set_key)

/*
- * void aesni_enc(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
+ * void aesni_enc(const void *ctx, u8 *dst, const u8 *src)
*/
ENTRY(aesni_enc)
FRAME_BEGIN
@@ -2137,7 +2137,7 @@ _aesni_enc4:
ENDPROC(_aesni_enc4)

/*
- * void aesni_dec (struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
+ * void aesni_dec (const void *ctx, u8 *dst, const u8 *src)
*/
ENTRY(aesni_dec)
FRAME_BEGIN
@@ -2726,8 +2726,8 @@ ENDPROC(aesni_ctr_enc)
pxor CTR, IV;

/*
- * void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, const u8 *dst, u8 *src,
- *                       bool enc, u8 *iv)
+ * void aesni_xts_crypt8(const struct crypto_aes_ctx *ctx, u8 *dst,
+ *                       const u8 *src, bool enc, le128 *iv)
*/
ENTRY(aesni_xts_crypt8)
FRAME_BEGIN
45 changes: 18 additions & 27 deletions arch/x86/crypto/aesni-intel_glue.c
@@ -83,10 +83,8 @@ struct gcm_context_data {

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
unsigned int key_len);
- asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
-                           const u8 *in);
- asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
-                           const u8 *in);
+ asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
+ asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
@@ -106,8 +104,8 @@ static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
const u8 *in, unsigned int len, u8 *iv);

- asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
-                                  const u8 *in, bool enc, u8 *iv);
+ asmlinkage void aesni_xts_crypt8(const struct crypto_aes_ctx *ctx, u8 *out,
+                                  const u8 *in, bool enc, le128 *iv);

/* asmlinkage void aesni_gcm_enc()
* void *ctx, AES Key schedule. Starts on a 16 byte boundary.
@@ -550,29 +548,24 @@ static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
}


- static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
+ static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
- aesni_enc(ctx, out, in);
+ glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
}

- static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+ static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
- glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
+ glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
}

- static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+ static void aesni_xts_enc8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
- glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
+ aesni_xts_crypt8(ctx, dst, src, true, iv);
}

- static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
+ static void aesni_xts_dec8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
- aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
- }
-
- static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
- {
- aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
+ aesni_xts_crypt8(ctx, dst, src, false, iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
@@ -581,10 +574,10 @@ static const struct common_glue_ctx aesni_enc_xts = {

.funcs = { {
.num_blocks = 8,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
+ .fn_u = { .xts = aesni_xts_enc8 }
}, {
.num_blocks = 1,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
+ .fn_u = { .xts = aesni_xts_enc }
} }
};

@@ -594,10 +587,10 @@ static const struct common_glue_ctx aesni_dec_xts = {

.funcs = { {
.num_blocks = 8,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
+ .fn_u = { .xts = aesni_xts_dec8 }
}, {
.num_blocks = 1,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
+ .fn_u = { .xts = aesni_xts_dec }
} }
};

@@ -606,8 +599,7 @@ static int xts_encrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

- return glue_xts_req_128bit(&aesni_enc_xts, req,
-                            XTS_TWEAK_CAST(aesni_xts_tweak),
+ return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
aes_ctx(ctx->raw_tweak_ctx),
aes_ctx(ctx->raw_crypt_ctx),
false);
@@ -618,8 +610,7 @@ static int xts_decrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

- return glue_xts_req_128bit(&aesni_dec_xts, req,
-                            XTS_TWEAK_CAST(aesni_xts_tweak),
+ return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
aes_ctx(ctx->raw_tweak_ctx),
aes_ctx(ctx->raw_crypt_ctx),
true);
74 changes: 34 additions & 40 deletions arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -19,37 +19,34 @@
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32

/* 32-way AVX2/AES-NI parallel cipher functions */
- asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
-                                        const u8 *src);
- asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-                                        const u8 *src);
+ asmlinkage void camellia_ecb_enc_32way(const void *ctx, u8 *dst, const u8 *src);
+ asmlinkage void camellia_ecb_dec_32way(const void *ctx, u8 *dst, const u8 *src);

- asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-                                        const u8 *src);
- asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
-                                    const u8 *src, le128 *iv);
+ asmlinkage void camellia_cbc_dec_32way(const void *ctx, u8 *dst, const u8 *src);
+ asmlinkage void camellia_ctr_32way(const void *ctx, u8 *dst, const u8 *src,
+                                    le128 *iv);

- asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
-                                        const u8 *src, le128 *iv);
- asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
-                                        const u8 *src, le128 *iv);
+ asmlinkage void camellia_xts_enc_32way(const void *ctx, u8 *dst, const u8 *src,
+                                        le128 *iv);
+ asmlinkage void camellia_xts_dec_32way(const void *ctx, u8 *dst, const u8 *src,
+                                        le128 *iv);

static const struct common_glue_ctx camellia_enc = {
.num_funcs = 4,
.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
+ .fn_u = { .ecb = camellia_ecb_enc_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
+ .fn_u = { .ecb = camellia_ecb_enc_16way }
}, {
.num_blocks = 2,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
+ .fn_u = { .ecb = camellia_enc_blk_2way }
}, {
.num_blocks = 1,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
+ .fn_u = { .ecb = camellia_enc_blk }
} }
};

@@ -59,16 +56,16 @@ static const struct common_glue_ctx camellia_ctr = {

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
+ .fn_u = { .ctr = camellia_ctr_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
+ .fn_u = { .ctr = camellia_ctr_16way }
}, {
.num_blocks = 2,
- .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
+ .fn_u = { .ctr = camellia_crypt_ctr_2way }
}, {
.num_blocks = 1,
- .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
+ .fn_u = { .ctr = camellia_crypt_ctr }
} }
};

@@ -78,13 +75,13 @@ static const struct common_glue_ctx camellia_enc_xts = {

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
+ .fn_u = { .xts = camellia_xts_enc_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
+ .fn_u = { .xts = camellia_xts_enc_16way }
}, {
.num_blocks = 1,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
+ .fn_u = { .xts = camellia_xts_enc }
} }
};

@@ -94,16 +91,16 @@ static const struct common_glue_ctx camellia_dec = {

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
+ .fn_u = { .ecb = camellia_ecb_dec_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
+ .fn_u = { .ecb = camellia_ecb_dec_16way }
}, {
.num_blocks = 2,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
+ .fn_u = { .ecb = camellia_dec_blk_2way }
}, {
.num_blocks = 1,
- .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
+ .fn_u = { .ecb = camellia_dec_blk }
} }
};

@@ -113,16 +110,16 @@ static const struct common_glue_ctx camellia_dec_cbc = {

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
+ .fn_u = { .cbc = camellia_cbc_dec_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
+ .fn_u = { .cbc = camellia_cbc_dec_16way }
}, {
.num_blocks = 2,
- .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
+ .fn_u = { .cbc = camellia_decrypt_cbc_2way }
}, {
.num_blocks = 1,
- .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
+ .fn_u = { .cbc = camellia_dec_blk }
} }
};

@@ -132,13 +129,13 @@ static const struct common_glue_ctx camellia_dec_xts = {

.funcs = { {
.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
+ .fn_u = { .xts = camellia_xts_dec_32way }
}, {
.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
+ .fn_u = { .xts = camellia_xts_dec_16way }
}, {
.num_blocks = 1,
- .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
+ .fn_u = { .xts = camellia_xts_dec }
} }
};

@@ -161,8 +158,7 @@ static int ecb_decrypt(struct skcipher_request *req)

static int cbc_encrypt(struct skcipher_request *req)
{
- return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
-                                    req);
+ return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
}

static int cbc_decrypt(struct skcipher_request *req)
@@ -180,8 +176,7 @@ static int xts_encrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

- return glue_xts_req_128bit(&camellia_enc_xts, req,
-                            XTS_TWEAK_CAST(camellia_enc_blk),
+ return glue_xts_req_128bit(&camellia_enc_xts, req, camellia_enc_blk,
&ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

@@ -190,8 +185,7 @@ static int xts_decrypt(struct skcipher_request *req)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

- return glue_xts_req_128bit(&camellia_dec_xts, req,
-                            XTS_TWEAK_CAST(camellia_enc_blk),
+ return glue_xts_req_128bit(&camellia_dec_xts, req, camellia_enc_blk,
&ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

