// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

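/* Per-key-size dispatch table.  Each entry points at one of the
 * hand-written sparc64 assembler routines declared below, so the
 * mode handlers can stay key-size agnostic.
 */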
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

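/* Per-tfm context: the selected dispatch table, the expanded key
 * schedule (stored as 64-bit words for the assembler routines), and
 * the original and expanded key lengths in bytes.
 */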
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

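/* Bind the generic ops to the 128/192/256-bit assembler implementations. */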
static struct aes_ops aes128_ops = {
	.encrypt = aes_sparc64_encrypt_128,
	.decrypt = aes_sparc64_decrypt_128,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_128,
	.ctr_crypt = aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt = aes_sparc64_encrypt_192,
	.decrypt = aes_sparc64_decrypt_192,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_192,
	.ctr_crypt = aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt = aes_sparc64_encrypt_256,
	.decrypt = aes_sparc64_decrypt_256,
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_256,
	.ctr_crypt = aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

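/* The expanded key schedule sizes below are the standard AES values:
 * 0xb0 = 176 bytes (11 round keys) for AES-128, 0xd0 = 208 bytes
 * (13 round keys) for AES-192 and 0xf0 = 240 bytes (15 round keys)
 * for AES-256.  aes_sparc64_key_expand() fills ctx->key accordingly.
 */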
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

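/* Each mode handler below follows the same pattern: walk the request
 * with the skcipher walk API, load the expanded key once, process the
 * whole blocks of every chunk with the assembler loop, and clear FPRS
 * at the end (the assembler key-loading routines use the floating-point
 * register file, hence the fprs_write(0) afterwards).
 */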
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

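/* For decryption the assembler routines take a pointer just past the
 * end of the expanded key schedule (key_end below), since the round
 * keys are applied in reverse order relative to encryption.
 */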
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

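/* CTR mode: a trailing partial block is handled by encrypting the
 * counter block into a keystream buffer and XORing only the remaining
 * bytes into the destination.
 */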
static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}

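/* Single-block "aes" cipher.  cra_alignmask = 3 asks the API for
 * 32-bit aligned buffers, matching the u32 in/out pointers of the
 * assembler encrypt/decrypt primitives.
 */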
static struct crypto_alg cipher_alg = {
	.cra_name = "aes",
	.cra_driver_name = "aes-sparc64",
	.cra_priority = SPARC_CR_OPCODE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask = 3,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = crypto_aes_encrypt,
			.cia_decrypt = crypto_aes_decrypt
		}
	}
};

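/* ecb/cbc/ctr skcipher templates.  These use an alignmask of 7 since
 * the bulk assembler routines operate on 64-bit words; ctr(aes) is
 * registered as a stream cipher (blocksize 1) with a chunksize of one
 * AES block.
 */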
static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "ecb-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "cbc-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	}, {
		.base.cra_name = "ctr(aes)",
		.base.cra_driver_name = "ctr-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
		.chunksize = AES_BLOCK_SIZE,
	}
};

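/* %asr26 is the Compatibility Feature Register; CFR_AES is set when
 * the CPU implements the AES crypto opcodes.
 */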
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

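/* Register the single-block cipher first and then the skcipher modes,
 * unregistering the cipher again if skcipher registration fails.
 */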
static int __init aes_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"