 /*
  * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
  *
- * Copyright (C) 2013 - 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
+ * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License version 2 as
  * published by the Free Software Foundation.
  */
 
 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <asm/unaligned.h>
 #include <crypto/aes.h>
 #include <crypto/scatterwalk.h>
@@ -44,6 +45,8 @@ asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
 asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                                  u32 rounds);
 
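+/*
+ * Scalar AES cipher routine provided by the generic arm64 AES driver,
+ * used as a fallback when the NEON unit may not be used.
+ */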
+asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
+
 static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
                       unsigned int key_len)
 {
@@ -103,7 +106,45 @@ static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
         return 0;
 }
 
-static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
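+/*
+ * Update the CBC-MAC with @abytes bytes of input, using the Crypto
+ * Extensions asm when the NEON unit is usable and a scalar
+ * block-at-a-time implementation otherwise.
+ */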
+static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
+                           u32 abytes, u32 *macp, bool use_neon)
+{
+        if (likely(use_neon)) {
+                ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
+                                     num_rounds(key));
+        } else {
+                if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
+                        int added = min(abytes, AES_BLOCK_SIZE - *macp);
+
+                        crypto_xor(&mac[*macp], in, added);
+
+                        *macp += added;
+                        in += added;
+                        abytes -= added;
+                }
+
+                while (abytes > AES_BLOCK_SIZE) {
+                        __aes_arm64_encrypt(key->key_enc, mac, mac,
+                                            num_rounds(key));
+                        crypto_xor(mac, in, AES_BLOCK_SIZE);
+
+                        in += AES_BLOCK_SIZE;
+                        abytes -= AES_BLOCK_SIZE;
+                }
+
+                if (abytes > 0) {
+                        __aes_arm64_encrypt(key->key_enc, mac, mac,
+                                            num_rounds(key));
+                        crypto_xor(mac, in, abytes);
+                        *macp = abytes;
+                } else {
+                        *macp = 0;
+                }
+        }
+}
+
+static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
+                                   bool use_neon)
 {
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
         struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
@@ -122,8 +163,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
                 ltag.len = 6;
         }
 
-        ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, &macp, ctx->key_enc,
-                             num_rounds(ctx));
+        ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
         scatterwalk_start(&walk, req->src);
 
         do {
@@ -135,8 +175,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
                         n = scatterwalk_clamp(&walk, len);
                 }
                 p = scatterwalk_map(&walk);
-                ce_aes_ccm_auth_data(mac, p, n, &macp, ctx->key_enc,
-                                     num_rounds(ctx));
+                ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
                 len -= n;
 
                 scatterwalk_unmap(p);
@@ -145,6 +184,56 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
         } while (len);
 }
 
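+/*
+ * Scalar fallback for the bulk CCM transform: CTR en/decryption and the
+ * CBC-MAC update are interleaved, one block at a time.
+ */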
+static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
+                              struct crypto_aes_ctx *ctx, bool enc)
+{
+        u8 buf[AES_BLOCK_SIZE];
+        int err = 0;
+
+        while (walk->nbytes) {
+                int blocks = walk->nbytes / AES_BLOCK_SIZE;
+                u32 tail = walk->nbytes % AES_BLOCK_SIZE;
+                u8 *dst = walk->dst.virt.addr;
+                u8 *src = walk->src.virt.addr;
+                u32 nbytes = walk->nbytes;
+
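+                /*
+                 * On the final walk step, fold a trailing partial block
+                 * into this pass instead of leaving it as a tail.
+                 */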
+                if (nbytes == walk->total && tail > 0) {
+                        blocks++;
+                        tail = 0;
+                }
+
+                do {
+                        u32 bsize = AES_BLOCK_SIZE;
+
+                        if (nbytes < AES_BLOCK_SIZE)
+                                bsize = nbytes;
+
+                        crypto_inc(walk->iv, AES_BLOCK_SIZE);
+                        __aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
+                                            num_rounds(ctx));
+                        __aes_arm64_encrypt(ctx->key_enc, mac, mac,
+                                            num_rounds(ctx));
+                        if (enc)
+                                crypto_xor(mac, src, bsize);
+                        crypto_xor_cpy(dst, src, buf, bsize);
+                        if (!enc)
+                                crypto_xor(mac, dst, bsize);
+                        dst += bsize;
+                        src += bsize;
+                        nbytes -= bsize;
+                } while (--blocks);
+
+                err = skcipher_walk_done(walk, tail);
+        }
+
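+        /*
+         * Finalize the tag: encrypt the preserved initial counter block
+         * and xor the result into the CBC-MAC.
+         */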
+        if (!err) {
+                __aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
+                __aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
+                crypto_xor(mac, buf, AES_BLOCK_SIZE);
+        }
+        return err;
+}
+
 static int ccm_encrypt(struct aead_request *req)
 {
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
@@ -153,39 +242,46 @@ static int ccm_encrypt(struct aead_request *req)
         u8 __aligned(8) mac[AES_BLOCK_SIZE];
         u8 buf[AES_BLOCK_SIZE];
         u32 len = req->cryptlen;
+        bool use_neon = may_use_simd();
         int err;
 
         err = ccm_init_mac(req, mac, len);
         if (err)
                 return err;
 
-        kernel_neon_begin_partial(6);
+        if (likely(use_neon))
+                kernel_neon_begin();
 
         if (req->assoclen)
-                ccm_calculate_auth_mac(req, mac);
+                ccm_calculate_auth_mac(req, mac, use_neon);
 
         /* preserve the original iv for the final round */
         memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
         err = skcipher_walk_aead_encrypt(&walk, req, true);
 
-        while (walk.nbytes) {
-                u32 tail = walk.nbytes % AES_BLOCK_SIZE;
-
-                if (walk.nbytes == walk.total)
-                        tail = 0;
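+        /* use the Crypto Extensions asm only when the NEON unit is usable */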
+        if (likely(use_neon)) {
+                while (walk.nbytes) {
+                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
-                ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-                                   walk.nbytes - tail, ctx->key_enc,
-                                   num_rounds(ctx), mac, walk.iv);
+                        if (walk.nbytes == walk.total)
+                                tail = 0;
 
-                err = skcipher_walk_done(&walk, tail);
-        }
-        if (!err)
-                ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
+                        ce_aes_ccm_encrypt(walk.dst.virt.addr,
+                                           walk.src.virt.addr,
+                                           walk.nbytes - tail, ctx->key_enc,
+                                           num_rounds(ctx), mac, walk.iv);
 
-        kernel_neon_end();
+                        err = skcipher_walk_done(&walk, tail);
+                }
+                if (!err)
+                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
+                                         num_rounds(ctx));
 
+                kernel_neon_end();
+        } else {
+                err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
+        }
         if (err)
                 return err;
 
@@ -205,38 +301,46 @@ static int ccm_decrypt(struct aead_request *req)
         u8 __aligned(8) mac[AES_BLOCK_SIZE];
         u8 buf[AES_BLOCK_SIZE];
         u32 len = req->cryptlen - authsize;
+        bool use_neon = may_use_simd();
         int err;
 
         err = ccm_init_mac(req, mac, len);
         if (err)
                 return err;
 
-        kernel_neon_begin_partial(6);
+        if (likely(use_neon))
+                kernel_neon_begin();
 
         if (req->assoclen)
-                ccm_calculate_auth_mac(req, mac);
+                ccm_calculate_auth_mac(req, mac, use_neon);
 
         /* preserve the original iv for the final round */
         memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
         err = skcipher_walk_aead_decrypt(&walk, req, true);
 
-        while (walk.nbytes) {
-                u32 tail = walk.nbytes % AES_BLOCK_SIZE;
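+        /* as in ccm_encrypt(): NEON path when usable, scalar fallback otherwise */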
+        if (likely(use_neon)) {
+                while (walk.nbytes) {
+                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
-                if (walk.nbytes == walk.total)
-                        tail = 0;
+                        if (walk.nbytes == walk.total)
+                                tail = 0;
 
-                ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-                                   walk.nbytes - tail, ctx->key_enc,
-                                   num_rounds(ctx), mac, walk.iv);
+                        ce_aes_ccm_decrypt(walk.dst.virt.addr,
+                                           walk.src.virt.addr,
+                                           walk.nbytes - tail, ctx->key_enc,
+                                           num_rounds(ctx), mac, walk.iv);
 
-                err = skcipher_walk_done(&walk, tail);
-        }
-        if (!err)
-                ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
+                        err = skcipher_walk_done(&walk, tail);
+                }
+                if (!err)
+                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
+                                         num_rounds(ctx));
 
-        kernel_neon_end();
+                kernel_neon_end();
+        } else {
+                err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
+        }
 
         if (err)
                 return err;