Skip to content

Commit

Permalink
crypto: caam/qi2 - add fallback for XTS with more than 8B IV
Browse files Browse the repository at this point in the history
commit 36e2d7c upstream.

A hardware limitation exists for CAAM until Era 9 which restricts
the accelerator to IVs of only 8 bytes. When CAAM has a lower era,
a fallback is necessary to process 16-byte IVs.

Fixes: 226853a ("crypto: caam/qi2 - add skcipher algorithms")
Cc: <stable@vger.kernel.org> # v4.20+
Signed-off-by: Andrei Botila <andrei.botila@nxp.com>
Reviewed-by: Horia Geantă <horia.geanta@nxp.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
  • Loading branch information
andreibotila authored and gregkh committed Oct 29, 2020
1 parent e9a14a2 commit f61a67c
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 5 deletions.
1 change: 1 addition & 0 deletions drivers/crypto/caam/Kconfig
Expand Up @@ -167,6 +167,7 @@ config CRYPTO_DEV_FSL_DPAA2_CAAM
select CRYPTO_AEAD
select CRYPTO_HASH
select CRYPTO_DES
select CRYPTO_XTS
help
CAAM driver for QorIQ Data Path Acceleration Architecture 2.
It handles DPSECI DPAA2 objects that sit on the Management Complex
Expand Down
80 changes: 75 additions & 5 deletions drivers/crypto/caam/caamalg_qi2.c
Expand Up @@ -19,6 +19,7 @@
#include <linux/fsl/mc.h>
#include <soc/fsl/dpaa2-io.h>
#include <soc/fsl/dpaa2-fd.h>
#include <asm/unaligned.h>

#define CAAM_CRA_PRIORITY 2000

Expand Down Expand Up @@ -80,6 +81,7 @@ struct caam_ctx {
struct alginfo adata;
struct alginfo cdata;
unsigned int authsize;
struct crypto_skcipher *fallback;
};

static void *dpaa2_caam_iova_to_virt(struct dpaa2_caam_priv *priv,
Expand Down Expand Up @@ -1056,12 +1058,17 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
struct device *dev = ctx->dev;
struct caam_flc *flc;
u32 *desc;
int err;

if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
dev_dbg(dev, "key size mismatch\n");
return -EINVAL;
}

err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
if (err)
return err;

ctx->cdata.keylen = keylen;
ctx->cdata.key_virt = key;
ctx->cdata.key_inline = true;
Expand Down Expand Up @@ -1443,6 +1450,14 @@ static void skcipher_decrypt_done(void *cbk_ctx, u32 status)
skcipher_request_complete(req, ecode);
}

static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
{
struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
}

static int skcipher_encrypt(struct skcipher_request *req)
{
struct skcipher_edesc *edesc;
Expand All @@ -1459,6 +1474,18 @@ static int skcipher_encrypt(struct skcipher_request *req)
if (!req->cryptlen && !ctx->fallback)
return 0;

if (ctx->fallback && xts_skcipher_ivsize(req)) {
skcipher_request_set_tfm(&caam_req->fallback_req, ctx->fallback);
skcipher_request_set_callback(&caam_req->fallback_req,
req->base.flags,
req->base.complete,
req->base.data);
skcipher_request_set_crypt(&caam_req->fallback_req, req->src,
req->dst, req->cryptlen, req->iv);

return crypto_skcipher_encrypt(&caam_req->fallback_req);
}

/* allocate extended descriptor */
edesc = skcipher_edesc_alloc(req);
if (IS_ERR(edesc))
Expand Down Expand Up @@ -1494,6 +1521,19 @@ static int skcipher_decrypt(struct skcipher_request *req)
*/
if (!req->cryptlen && !ctx->fallback)
return 0;

if (ctx->fallback && xts_skcipher_ivsize(req)) {
skcipher_request_set_tfm(&caam_req->fallback_req, ctx->fallback);
skcipher_request_set_callback(&caam_req->fallback_req,
req->base.flags,
req->base.complete,
req->base.data);
skcipher_request_set_crypt(&caam_req->fallback_req, req->src,
req->dst, req->cryptlen, req->iv);

return crypto_skcipher_decrypt(&caam_req->fallback_req);
}

/* allocate extended descriptor */
edesc = skcipher_edesc_alloc(req);
if (IS_ERR(edesc))
Expand Down Expand Up @@ -1547,9 +1587,34 @@ static int caam_cra_init_skcipher(struct crypto_skcipher *tfm)
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
struct caam_skcipher_alg *caam_alg =
container_of(alg, typeof(*caam_alg), skcipher);
struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
int ret = 0;

if (alg_aai == OP_ALG_AAI_XTS) {
const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
struct crypto_skcipher *fallback;

fallback = crypto_alloc_skcipher(tfm_name, 0,
CRYPTO_ALG_NEED_FALLBACK);
if (IS_ERR(fallback)) {
dev_err(ctx->dev, "Failed to allocate %s fallback: %ld\n",
tfm_name, PTR_ERR(fallback));
return PTR_ERR(fallback);
}

ctx->fallback = fallback;
crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request) +
crypto_skcipher_reqsize(fallback));
} else {
crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request));
}

crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request));
return caam_cra_init(crypto_skcipher_ctx(tfm), &caam_alg->caam, false);
ret = caam_cra_init(ctx, &caam_alg->caam, false);
if (ret && ctx->fallback)
crypto_free_skcipher(ctx->fallback);

return ret;
}

static int caam_cra_init_aead(struct crypto_aead *tfm)
Expand All @@ -1572,7 +1637,11 @@ static void caam_exit_common(struct caam_ctx *ctx)

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
caam_exit_common(crypto_skcipher_ctx(tfm));
struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

if (ctx->fallback)
crypto_free_skcipher(ctx->fallback);
caam_exit_common(ctx);
}

static void caam_cra_exit_aead(struct crypto_aead *tfm)
Expand Down Expand Up @@ -1675,6 +1744,7 @@ static struct caam_skcipher_alg driver_algs[] = {
.base = {
.cra_name = "xts(aes)",
.cra_driver_name = "xts-aes-caam-qi2",
.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
},
.setkey = xts_skcipher_setkey,
Expand Down Expand Up @@ -2922,8 +2992,8 @@ static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
alg->base.cra_module = THIS_MODULE;
alg->base.cra_priority = CAAM_CRA_PRIORITY;
alg->base.cra_ctxsize = sizeof(struct caam_ctx);
alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY;
alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY);

alg->init = caam_cra_init_skcipher;
alg->exit = caam_cra_exit;
Expand Down
2 changes: 2 additions & 0 deletions drivers/crypto/caam/caamalg_qi2.h
Expand Up @@ -13,6 +13,7 @@
#include <linux/netdevice.h>
#include "dpseci.h"
#include "desc_constr.h"
#include <crypto/skcipher.h>

#define DPAA2_CAAM_STORE_SIZE 16
/* NAPI weight *must* be a multiple of the store size. */
Expand Down Expand Up @@ -186,6 +187,7 @@ struct caam_request {
void (*cbk)(void *ctx, u32 err);
void *ctx;
void *edesc;
struct skcipher_request fallback_req;
};

/**
Expand Down

0 comments on commit f61a67c

Please sign in to comment.