// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017, 2023
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *              Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/mutex.h>
#include <linux/spinlock.h>
#include <linux/delay.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller/bigger than these defines are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE	16
#define PAES_MAX_KEYSIZE	MAXEP11AESKEYBLOBSIZE

static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
	/*
	 * Small keys will be stored in the keybuf. Larger keys are
	 * stored in extra allocated memory. In both cases, key points
	 * to the memory where the key is stored.
	 * The code distinguishes by checking keylen against
	 * sizeof(keybuf). See the two following helper functions.
	 */
	u8 *key;
	u8 keybuf[128];
	unsigned int keylen;
};
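
/*
 * Illustrative sketch only (assumption, not part of the upstream driver):
 * the inline-vs-allocated decision described in the comment above is a
 * single length check, shown here as a hypothetical helper. The driver
 * performs the same comparison open-coded in _key_to_kb() and
 * _free_kb_keybuf() below.
 */
static inline int _kb_key_is_inline(const struct key_blob *kb)
{
	/* keys up to sizeof(keybuf) bytes live in the embedded buffer */
	return kb->keylen <= sizeof(kb->keybuf);
}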

static inline int _key_to_kb(struct key_blob *kb,
			     const u8 *key,
			     unsigned int keylen)
{
	struct clearkey_header {
		u8 type;
		u8 res0[3];
		u8 version;
		u8 res1[3];
		u32 keytype;
		u32 len;
	} __packed * h;

	switch (keylen) {
	case 16:
	case 24:
	case 32:
		/* clear key value, prepare pkey clear key token in keybuf */
		memset(kb->keybuf, 0, sizeof(kb->keybuf));
		h = (struct clearkey_header *) kb->keybuf;
		h->version = 0x02; /* TOKVER_CLEAR_KEY */
		h->keytype = (keylen - 8) >> 3;
		h->len = keylen;
		memcpy(kb->keybuf + sizeof(*h), key, keylen);
		kb->keylen = sizeof(*h) + keylen;
		kb->key = kb->keybuf;
		break;
	default:
		/* other key material, let pkey handle this */
		if (keylen <= sizeof(kb->keybuf))
			kb->key = kb->keybuf;
		else {
			kb->key = kmalloc(keylen, GFP_KERNEL);
			if (!kb->key)
				return -ENOMEM;
		}
		memcpy(kb->key, key, keylen);
		kb->keylen = keylen;
		break;
	}

	return 0;
}
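
/*
 * Worked example (illustration only): for a 16 byte clear AES key the
 * 16 byte clearkey_header above is filled with version = 0x02
 * (TOKVER_CLEAR_KEY), keytype = (16 - 8) >> 3 = 1 (AES-128) and
 * len = 16, followed by the 16 key bytes. The resulting token is
 * 32 bytes long, kb->keylen = 32 and kb->key points at kb->keybuf.
 */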

static inline void _free_kb_keybuf(struct key_blob *kb)
{
	if (kb->key && kb->key != kb->keybuf
	    && kb->keylen > sizeof(kb->keybuf)) {
		kfree_sensitive(kb->key);
		kb->key = NULL;
	}
}

struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	spinlock_t pk_lock;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	spinlock_t pk_lock;
	unsigned long fc;
};

static inline int __paes_keyblob2pkey(struct key_blob *kb,
				      struct pkey_protkey *pk)
{
	return pkey_keyblob2pkey(kb->key, kb->keylen,
				 pk->protkey, &pk->len, &pk->type);
}

static inline int __paes_convert_key(struct s390_paes_ctx *ctx)
{
	int ret;
	struct pkey_protkey pkey;

	pkey.len = sizeof(pkey.protkey);
	ret = __paes_keyblob2pkey(&ctx->kb, &pkey);
	if (ret)
		return ret;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(&ctx->pk, &pkey, sizeof(pkey));
	spin_unlock_bh(&ctx->pk_lock);

	return 0;
}

static int ecb_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void ecb_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __ecb_paes_set_key(struct s390_paes_ctx *ctx)
{
	int rc;
	unsigned long fc;

	rc = __paes_convert_key(ctx);
	if (rc)
		return rc;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __ecb_paes_set_key(ctx);
}

static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, &param,
			     walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k)
			ret = skcipher_walk_done(&walk, nbytes - k);
		if (k < n) {
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}
	return ret;
}
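
/*
 * Note on the "k < n" handling above (the same pattern is used by all
 * paes modes in this file): the CPACF instruction stops early when the
 * protected key is no longer usable, e.g. because the wrapping key has
 * changed. The protected key is then re-derived from the stored key
 * blob via __paes_convert_key() and the remaining blocks are processed
 * with the fresh key material on the next loop iteration.
 */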

static int ecb_paes_encrypt(struct skcipher_request *req)
{
	return ecb_paes_crypt(req, 0);
}

static int ecb_paes_decrypt(struct skcipher_request *req)
{
	return ecb_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg ecb_paes_alg = {
	.base.cra_name = "ecb(paes)",
	.base.cra_driver_name = "ecb-paes-s390",
	.base.cra_priority = 401,	/* combo: aes + ecb + 1 */
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct s390_paes_ctx),
	.base.cra_module = THIS_MODULE,
	.base.cra_list = LIST_HEAD_INIT(ecb_paes_alg.base.cra_list),
	.init = ecb_paes_init,
	.exit = ecb_paes_exit,
	.min_keysize = PAES_MIN_KEYSIZE,
	.max_keysize = PAES_MAX_KEYSIZE,
	.setkey = ecb_paes_set_key,
	.encrypt = ecb_paes_encrypt,
	.decrypt = ecb_paes_decrypt,
};

static int cbc_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void cbc_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	int rc;
	unsigned long fc;

	rc = __paes_convert_key(ctx);
	if (rc)
		return rc;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __cbc_paes_set_key(ctx);
}

static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k) {
			memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
			ret = skcipher_walk_done(&walk, nbytes - k);
		}
		if (k < n) {
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}
	return ret;
}

static int cbc_paes_encrypt(struct skcipher_request *req)
{
	return cbc_paes_crypt(req, 0);
}

static int cbc_paes_decrypt(struct skcipher_request *req)
{
	return cbc_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg cbc_paes_alg = {
	.base.cra_name = "cbc(paes)",
	.base.cra_driver_name = "cbc-paes-s390",
	.base.cra_priority = 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct s390_paes_ctx),
	.base.cra_module = THIS_MODULE,
	.base.cra_list = LIST_HEAD_INIT(cbc_paes_alg.base.cra_list),
	.init = cbc_paes_init,
	.exit = cbc_paes_exit,
	.min_keysize = PAES_MIN_KEYSIZE,
	.max_keysize = PAES_MAX_KEYSIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = cbc_paes_set_key,
	.encrypt = cbc_paes_encrypt,
	.decrypt = cbc_paes_decrypt,
};

static int xts_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb[0].key = NULL;
	ctx->kb[1].key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void xts_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
}

static inline int __xts_paes_convert_key(struct s390_pxts_ctx *ctx)
{
	struct pkey_protkey pkey0, pkey1;

	pkey0.len = sizeof(pkey0.protkey);
	pkey1.len = sizeof(pkey1.protkey);

	if (__paes_keyblob2pkey(&ctx->kb[0], &pkey0) ||
	    __paes_keyblob2pkey(&ctx->kb[1], &pkey1))
		return -EINVAL;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(&ctx->pk[0], &pkey0, sizeof(pkey0));
	memcpy(&ctx->pk[1], &pkey1, sizeof(pkey1));
	spin_unlock_bh(&ctx->pk_lock);

	return 0;
}

static inline int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__xts_paes_convert_key(ctx))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}
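
/*
 * Note: only the AES-128 and AES-256 variants are selected above
 * because XTS, as specified and as provided by the CPACF KM-PXTS
 * function codes, has no AES-192 mode.
 */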

static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int xts_key_len)
{
	int rc;
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, key_len;

	if (xts_key_len % 2)
		return -EINVAL;

	key_len = xts_key_len / 2;

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
	rc = _key_to_kb(&ctx->kb[0], in_key, key_len);
	if (rc)
		return rc;
	rc = _key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
	if (rc)
		return rc;

	rc = __xts_paes_set_key(ctx);
	if (rc)
		return rc;

	/*
	 * xts_verify_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_verify_key(tfm, ckey, 2 * ckey_len);
}

static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;

	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
	spin_lock_bh(&ctx->pk_lock);
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	spin_unlock_bh(&ctx->pk_lock);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk.dst.virt.addr, walk.src.virt.addr, n);
		if (k)
			ret = skcipher_walk_done(&walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(xts_param.key + offset,
			       ctx->pk[0].protkey, keylen);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}

	return ret;
}

static int xts_paes_encrypt(struct skcipher_request *req)
{
	return xts_paes_crypt(req, 0);
}

static int xts_paes_decrypt(struct skcipher_request *req)
{
	return xts_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg xts_paes_alg = {
	.base.cra_name = "xts(paes)",
	.base.cra_driver_name = "xts-paes-s390",
	.base.cra_priority = 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct s390_pxts_ctx),
	.base.cra_module = THIS_MODULE,
	.base.cra_list = LIST_HEAD_INIT(xts_paes_alg.base.cra_list),
	.init = xts_paes_init,
	.exit = xts_paes_exit,
	.min_keysize = 2 * PAES_MIN_KEYSIZE,
	.max_keysize = 2 * PAES_MAX_KEYSIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = xts_paes_set_key,
	.encrypt = xts_paes_encrypt,
	.decrypt = xts_paes_decrypt,
};

static int ctr_paes_init(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->kb.key = NULL;
	spin_lock_init(&ctx->pk_lock);

	return 0;
}

static void ctr_paes_exit(struct crypto_skcipher *tfm)
{
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static inline int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	int rc;
	unsigned long fc;

	rc = __paes_convert_key(ctx);
	if (rc)
		return rc;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	return __ctr_paes_set_key(ctx);
}

static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}
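
/*
 * Worked example (illustration only): with AES_BLOCK_SIZE = 16 and
 * nbytes = 70, __ctrblk_init() rounds down to n = 64 and fills ctrblk
 * with the four counter blocks iv, iv+1, iv+2 and iv+3, so a single
 * cpacf_kmctr() call can process four blocks at once.
 */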

static int ctr_paes_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	struct skcipher_walk walk;
	unsigned int nbytes, n, k;
	int ret, locked;
	struct {
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	spin_lock_bh(&ctx->pk_lock);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	spin_unlock_bh(&ctx->pk_lock);

	locked = mutex_trylock(&ctrblk_lock);

	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2 * AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
		k = cpacf_kmctr(ctx->fc, &param, walk.dst.virt.addr,
				walk.src.virt.addr, n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
			ret = skcipher_walk_done(&walk, nbytes - k);
		}
		if (k < n) {
			if (__paes_convert_key(ctx)) {
				if (locked)
					mutex_unlock(&ctrblk_lock);
				return skcipher_walk_done(&walk, -EIO);
			}
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
	}
	if (locked)
		mutex_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		memset(buf, 0, AES_BLOCK_SIZE);
		memcpy(buf, walk.src.virt.addr, nbytes);
		while (1) {
			if (cpacf_kmctr(ctx->fc, &param, buf,
					buf, AES_BLOCK_SIZE,
					walk.iv) == AES_BLOCK_SIZE)
				break;
			if (__paes_convert_key(ctx))
				return skcipher_walk_done(&walk, -EIO);
			spin_lock_bh(&ctx->pk_lock);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
			spin_unlock_bh(&ctx->pk_lock);
		}
		memcpy(walk.dst.virt.addr, buf, nbytes);
		crypto_inc(walk.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, nbytes);
	}

	return ret;
}

static struct skcipher_alg ctr_paes_alg = {
	.base.cra_name = "ctr(paes)",
	.base.cra_driver_name = "ctr-paes-s390",
	.base.cra_priority = 402,	/* ecb-paes-s390 + 1 */
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct s390_paes_ctx),
	.base.cra_module = THIS_MODULE,
	.base.cra_list = LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
	.init = ctr_paes_init,
	.exit = ctr_paes_exit,
	.min_keysize = PAES_MIN_KEYSIZE,
	.max_keysize = PAES_MAX_KEYSIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = ctr_paes_set_key,
	.encrypt = ctr_paes_crypt,
	.decrypt = ctr_paes_crypt,
	.chunksize = AES_BLOCK_SIZE,
};

static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg)
{
	if (!list_empty(&alg->base.cra_list))
		crypto_unregister_skcipher(alg);
}

static void paes_s390_fini(void)
{
	__crypto_unregister_skcipher(&ctr_paes_alg);
	__crypto_unregister_skcipher(&xts_paes_alg);
	__crypto_unregister_skcipher(&cbc_paes_alg);
	__crypto_unregister_skcipher(&ecb_paes_alg);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_skcipher(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_skcipher(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_skcipher(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = crypto_register_skcipher(&ctr_paes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");
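
/*
 * Minimal usage sketch (assumption, not part of this file): a kernel
 * consumer obtains one of the paes skciphers through the regular
 * crypto API and passes a secure/protected key blob to setkey just
 * like a clear key. The function and variable names below are only
 * for illustration; the block is guarded so it is never built.
 */
#if 0
static int paes_cbc_usage_example(const u8 *keyblob, unsigned int bloblen)
{
	struct crypto_skcipher *tfm;
	int ret;

	tfm = crypto_alloc_skcipher("cbc(paes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, keyblob, bloblen);

	/*
	 * ... allocate a struct skcipher_request, set the src/dst
	 * scatterlists and the IV, then call crypto_skcipher_encrypt()
	 * or crypto_skcipher_decrypt() ...
	 */

	crypto_free_skcipher(tfm);
	return ret;
}
#endif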