// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

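/*
 * The two AEGIS initialization constants from the specification; their
 * byte sequence is the Fibonacci sequence mod 256, written here as
 * little-endian 64-bit words.
 */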
static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

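/*
 * The SIMD code path is taken only when an accelerated implementation
 * was detected at module init (the static key) and the SIMD unit is
 * usable in the current context (crypto_simd_usable()).
 */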
static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

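/*
 * The AEGIS-128 state update: each state block is replaced by one AES
 * round of its predecessor, using the block's previous value as the
 * round key, i.e. S[i] = AESRound(S[i-1], S[i]), with S[0] fed from the
 * old S[4].  The callers below then XOR the message block into S[0].
 */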
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

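/*
 * Initialize the state from key and nonce as per the AEGIS spec:
 * S0 = K ^ IV, S1 = const1, S2 = const0, S3 = K ^ const0,
 * S4 = K ^ const1, followed by ten update rounds alternately absorbing
 * the key and key ^ nonce.
 */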
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}

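/*
 * Absorb full blocks of associated data into the state, taking the
 * aligned fast path when the source buffer is block aligned.  Any
 * trailing partial block is left for the caller to buffer.
 */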
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

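/*
 * Write zeroes over the output chunk instead of plaintext; used to
 * withhold the unauthenticated plaintext when tag verification fails.
 */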
static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

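/*
 * Encrypt a chunk of data.  For each block the keystream is
 * S1 ^ S4 ^ (S2 & S3); the state then absorbs the plaintext block.
 * A trailing partial block is zero-padded before being absorbed and
 * only 'size' output bytes are written.
 */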
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

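/*
 * Decrypt a chunk of data.  The keystream is the same as on the
 * encryption side, but the state absorbs the recovered plaintext; for a
 * trailing partial block the plaintext is zero-padded before the state
 * update so both directions absorb identical blocks.
 */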
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

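/*
 * Feed the associated data from the scatterlist to the state one
 * AEGIS_BLOCK_SIZE block at a time, buffering data that straddles
 * scatterlist entries and zero-padding the final partial block.
 */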
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

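/*
 * Walk the plaintext/ciphertext and hand each mapped span to the chunk
 * handler.  All spans except the last are rounded down to a multiple of
 * the walk stride, so only the final call may see a partial block.
 */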
static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}

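/*
 * Finalization: the bit lengths of the associated data and the message
 * (as two little-endian 64-bit words) are XORed with S3 to form a block
 * that is absorbed in seven more update rounds; all five state blocks
 * are then XORed into *tag_xor.  On decryption tag_xor holds the
 * received tag, so a correct tag yields all-zero bytes for the
 * crypto_memneq() check in the caller.
 */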
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 * wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}

static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

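/*
 * A minimal usage sketch (not part of this file) of driving this
 * transform through the generic AEAD API; names like buf/assoclen/ptlen
 * are placeholders and error handling is omitted.  The buffer follows
 * the usual AEAD layout: associated data, then plaintext, with room for
 * the tag appended on encryption.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *
 *	sg_init_one(&sg, buf, assoclen + ptlen + AEGIS128_MAX_AUTH_SIZE);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 *	crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */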

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");