/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX2/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

asmlinkage void aria_aesni_avx2_encrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_encrypt_32way);
asmlinkage void aria_aesni_avx2_decrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_decrypt_32way);
asmlinkage void aria_aesni_avx2_ctr_crypt_32way(const void *ctx, u8 *dst,
						const u8 *src,
						u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_ctr_crypt_32way);
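/*
 * GFNI (Galois Field New Instructions) variants of the 32-way routines.
 * They are only assembled when the toolchain supports GFNI
 * (CONFIG_AS_GFNI); the fastest available set is selected into aria_ops
 * at module init.
 */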
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx2_gfni_encrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_encrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_decrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_decrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_ctr_crypt_32way(const void *ctx, u8 *dst,
						     const u8 *src,
						     u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_ctr_crypt_32way);
#endif /* CONFIG_AS_GFNI */

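/*
 * Function-pointer table filled in at module init: either the plain
 * AES-NI/AVX2 routines or their GFNI variants, depending on CPU features.
 */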
static struct aria_avx_ops aria_ops;

struct aria_avx2_request_ctx {
	u8 keystream[ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE];
};
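/*
 * The per-request keystream buffer must hold one full 32-block parallel
 * chunk (32 * ARIA_BLOCK_SIZE = 512 bytes), which is why the CTR
 * algorithm below is flagged CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE.
 */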
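/*
 * The ECB helpers from ecb_cbc_helpers.h walk the request and greedily
 * dispatch the largest parallel width first: 32 blocks via AVX2, then
 * 16 blocks via AVX, then one block at a time with the generic C code.
 */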
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_encrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_decrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx2_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx2_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx2_set_key(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

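/*
 * CTR mode: generate a keystream by encrypting successive counter values
 * (the IV), then XOR it into the data. Like ECB, chunks are consumed
 * largest-first; a final partial block, if any, is handled by XORing
 * only the remaining bytes of one keystream block.
 */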
static int aria_avx2_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

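	/*
	 * Each SIMD chunk is bracketed by kernel_fpu_begin()/end() so that
	 * preemption is not held off across the whole walk, only around
	 * the actual vectorized work.
	 */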
	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_32way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
		}

		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

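		/*
		 * A trailing partial block can only occur on the last step
		 * of the walk (walk.nbytes == walk.total); XOR just the
		 * remaining bytes of one freshly generated keystream block.
		 */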
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx2_request_ctx));

	return 0;
}

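/*
 * Both algorithms are registered with CRYPTO_ALG_INTERNAL under "__"
 * driver names: they touch the FPU and are only reachable through the
 * simd_skcipher wrappers created at init, which defer to a cryptd thread
 * when the FPU is not usable in the calling context.
 */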
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name = "__ecb(aria)",
		.base.cra_driver_name = "__ecb-aria-avx2",
		.base.cra_priority = 500,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = ARIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct aria_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = ARIA_MIN_KEY_SIZE,
		.max_keysize = ARIA_MAX_KEY_SIZE,
		.setkey = aria_avx2_set_key,
		.encrypt = aria_avx2_ecb_encrypt,
		.decrypt = aria_avx2_ecb_decrypt,
	}, {
		.base.cra_name = "__ctr(aria)",
		.base.cra_driver_name = "__ctr-aria-avx2",
		.base.cra_priority = 500,
		.base.cra_flags = CRYPTO_ALG_INTERNAL |
				  CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct aria_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = ARIA_MIN_KEY_SIZE,
		.max_keysize = ARIA_MAX_KEY_SIZE,
		.ivsize = ARIA_BLOCK_SIZE,
		.chunksize = ARIA_BLOCK_SIZE,
		.setkey = aria_avx2_set_key,
		.encrypt = aria_avx2_ctr_encrypt,
		.decrypt = aria_avx2_ctr_encrypt,
		.init = aria_avx2_init_tfm,
	}
};

static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

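/*
 * Require AVX2 plus AES-NI and OS-managed YMM state before registering;
 * GFNI is optional and merely selects the faster S-box implementation.
 */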
static int __init aria_avx2_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_gfni_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_gfni_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
	}

	return simd_register_skciphers_compat(aria_algs,
					      ARRAY_SIZE(aria_algs),
					      aria_simd_algs);
}

static void __exit aria_avx2_exit(void)
{
	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
				  aria_simd_algs);
}
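
/*
 * Example (hypothetical, not part of this file): kernel users reach this
 * implementation through the generic skcipher API rather than by calling
 * it directly, e.g.:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(aria)", 0, 0);
 *
 * The crypto core then picks the highest-priority provider, which is this
 * driver (priority 500) via its simd wrapper when the CPU qualifies.
 */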

module_init(aria_avx2_init);
module_exit(aria_avx2_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX2/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx2");

