 /*
  * Bit sliced AES using NEON instructions
  *
- * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
+ * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License version 2 as
  * published by the Free Software Foundation.
  */

 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <crypto/aes.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
 #include <linux/module.h>

+#include "aes-ctr-fallback.h"
+
 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
 MODULE_LICENSE("GPL v2");

@@ -58,6 +61,11 @@ struct aesbs_cbc_ctx {
 	u32			enc[AES_MAX_KEYLENGTH_U32];
 };

+struct aesbs_ctr_ctx {
+	struct aesbs_ctx	key;		/* must be first member */
+	struct crypto_aes_ctx	fallback;
+};
+
 struct aesbs_xts_ctx {
 	struct aesbs_ctx	key;
 	u32			twkey[AES_MAX_KEYLENGTH_U32];
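A side note on the "must be first member" constraint: the rest of the driver keeps looking the transform context up as a plain struct aesbs_ctx, so the wrapper only works because a pointer to struct aesbs_ctr_ctx is also a valid pointer to its first member (C guarantees no padding before the initial member). Below is a minimal userspace sketch of that layout idiom, using hypothetical type names rather than the kernel ones:

	#include <assert.h>
	#include <stdio.h>

	/* Hypothetical stand-ins for the kernel context types, illustration only. */
	struct inner_key {
		int rounds;
	};

	struct wrapper_ctx {
		struct inner_key key;	/* must be first member */
		int fallback_state;
	};

	int main(void)
	{
		struct wrapper_ctx ctx = { .key = { .rounds = 14 } };

		/* A pointer to the wrapper is a valid pointer to its first member,
		 * so code written against struct inner_key keeps working unchanged. */
		struct inner_key *k = (struct inner_key *)&ctx;

		assert(k->rounds == 14);
		printf("rounds = %d\n", k->rounds);
		return 0;
	}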
@@ -196,6 +204,25 @@ static int cbc_decrypt(struct skcipher_request *req)
 	return err;
 }

+static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
+				 unsigned int key_len)
+{
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err;
+
+	err = crypto_aes_expand_key(&ctx->fallback, in_key, key_len);
+	if (err)
+		return err;
+
+	ctx->key.rounds = 6 + key_len / 4;
+
+	kernel_neon_begin();
+	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
+	kernel_neon_end();
+
+	return 0;
+}
+
 static int ctr_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
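For reference, the 6 + key_len / 4 expression is the standard AES round count: a 16-byte key gives 16 / 4 + 6 = 10 rounds, a 24-byte key 12, and a 32-byte key 14. The sync setkey expands the key once with the generic crypto_aes_expand_key(), so the scalar fallback context is always populated, and only the conversion to the bit-sliced representation runs between kernel_neon_begin()/kernel_neon_end().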
@@ -259,6 +286,17 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 	return aesbs_setkey(tfm, in_key, key_len);
 }

+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	if (!may_use_simd())
+		return aes_ctr_encrypt_fallback(&ctx->fallback, req);
+
+	return ctr_encrypt(req);
+}
+
 static int __xts_crypt(struct skcipher_request *req,
 		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
 				  int rounds, int blocks, u8 iv[]))
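The new ctr_encrypt_sync() entry point is what makes the synchronous "ctr(aes)" registration safe: when may_use_simd() reports that the NEON register file cannot be used in the current context, the request is handled by the scalar fallback on the expanded key instead of touching NEON state. The fallback itself lives in aes-ctr-fallback.h and is not part of this diff; purely as an illustration of what a CTR keystream walk does, here is a small standalone sketch with a hypothetical single-block encrypt callback and a toy cipher (not the kernel code):

	#include <stdint.h>
	#include <stdio.h>

	/* Hypothetical single-block cipher callback; the real fallback drives the
	 * generic AES routines on the previously expanded key instead. */
	typedef void (*block_fn)(const void *key, uint8_t dst[16], const uint8_t src[16]);

	/* Big-endian increment of the 16-byte counter block, as CTR mode requires. */
	static void ctr_inc(uint8_t ctr[16])
	{
		for (int i = 15; i >= 0; i--)
			if (++ctr[i] != 0)
				break;
	}

	/* Minimal CTR walk: encrypt the counter, XOR the keystream into the data,
	 * bump the counter.  Encryption and decryption are the same operation,
	 * which is why the patch wires .encrypt and .decrypt to the same handler. */
	static void ctr_crypt(const void *key, block_fn encrypt_block,
			      uint8_t iv[16], uint8_t *buf, size_t len)
	{
		uint8_t keystream[16];

		while (len) {
			size_t n = len < 16 ? len : 16;

			encrypt_block(key, keystream, iv);
			for (size_t i = 0; i < n; i++)
				buf[i] ^= keystream[i];
			ctr_inc(iv);
			buf += n;
			len -= n;
		}
	}

	/* Toy block "cipher" so the sketch runs on its own; not a real cipher. */
	static void toy_block(const void *key, uint8_t dst[16], const uint8_t src[16])
	{
		for (int i = 0; i < 16; i++)
			dst[i] = src[i] ^ ((const uint8_t *)key)[0] ^ (uint8_t)(i * 37);
	}

	int main(void)
	{
		uint8_t key = 0x5a, iv[16] = { 0 }, iv2[16] = { 0 };
		uint8_t msg[20] = "counter mode demo";

		ctr_crypt(&key, toy_block, iv, msg, sizeof(msg));	/* encrypt */
		ctr_crypt(&key, toy_block, iv2, msg, sizeof(msg));	/* decrypt */
		printf("%s\n", msg);					/* round-trips */
		return 0;
	}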
@@ -355,17 +393,17 @@ static struct skcipher_alg aes_algs[] = { {
 	.base.cra_driver_name	= "ctr-aes-neonbs",
 	.base.cra_priority	= 250 - 1,
 	.base.cra_blocksize	= 1,
-	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
+	.base.cra_ctxsize	= sizeof(struct aesbs_ctr_ctx),
 	.base.cra_module	= THIS_MODULE,

 	.min_keysize		= AES_MIN_KEY_SIZE,
 	.max_keysize		= AES_MAX_KEY_SIZE,
 	.chunksize		= AES_BLOCK_SIZE,
 	.walksize		= 8 * AES_BLOCK_SIZE,
 	.ivsize			= AES_BLOCK_SIZE,
-	.setkey			= aesbs_setkey,
-	.encrypt		= ctr_encrypt,
-	.decrypt		= ctr_encrypt,
+	.setkey			= aesbs_ctr_setkey_sync,
+	.encrypt		= ctr_encrypt_sync,
+	.decrypt		= ctr_encrypt_sync,
 }, {
 	.base.cra_name		= "__xts(aes)",
 	.base.cra_driver_name	= "__xts-aes-neonbs",