/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

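/*
 * 16-way parallel ARIA primitives implemented in assembly
 * (aria-aesni-avx-asm_64.S); each call transforms 16 blocks at once.
 * The GFNI variants implement the S-box layer with the Galois Field
 * New Instructions instead of AES-NI.  The symbols are exported so
 * that related ARIA glue modules (e.g. the AVX2 one) can reuse them.
 */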
asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
					       const u8 *src,
					       u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way);
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
						    const u8 *src,
						    u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way);
#endif /* CONFIG_AS_GFNI */

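/*
 * Function pointers into the assembly code, filled in at module init
 * with either the plain AES-NI or the GFNI implementations depending
 * on the CPU features that are available.
 */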
static struct aria_avx_ops aria_ops;

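/*
 * Per-request scratch space: one parallel chunk's worth of keystream
 * (16 * ARIA_BLOCK_SIZE bytes) for the CTR paths below.
 */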
struct aria_avx_request_ctx {
	u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];
};

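/*
 * ECB helpers: process 16 blocks per call with the SIMD code, then fall
 * back to the generic C implementation one block at a time for any
 * remainder.  The ECB_* macros from ecb_cbc_helpers.h drive the
 * skcipher walk and the kernel_fpu_begin()/kernel_fpu_end() sections.
 */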
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

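/*
 * CTR mode: encrypt successive counter values to build the keystream
 * and XOR it into the data.  Full 16-block chunks go through the
 * assembly routine; leftover blocks, including a final partial one,
 * are handled with the generic aria_encrypt() using the per-request
 * keystream buffer.
 */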
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

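		/*
		 * Bulk path: the assembly routine generates the keystream
		 * for 16 counter values, XORs it in and advances walk.iv.
		 */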
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

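		/*
		 * Leftover full blocks: build the keystream block by block
		 * with the generic cipher and XOR it in.
		 */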
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

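		/*
		 * Final partial block, which the walk only hands out on the
		 * last chunk (walk.nbytes == walk.total): XOR just the
		 * remaining bytes of keystream.
		 */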
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

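/* Reserve per-request space for the CTR keystream scratch buffer. */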
static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx_request_ctx));

	return 0;
}

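/*
 * The algorithms are marked CRYPTO_ALG_INTERNAL (and "__"-prefixed):
 * they use the FPU and may only be reached through the simd_skcipher
 * wrappers registered below, which defer to cryptd whenever the FPU
 * is not directly usable.
 */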
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "__ecb(aria)",
		.base.cra_driver_name	= "__ecb-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ecb_encrypt,
		.decrypt		= aria_avx_ecb_decrypt,
	}, {
		.base.cra_name		= "__ctr(aria)",
		.base.cra_driver_name	= "__ctr-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.walksize		= 16 * ARIA_BLOCK_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ctr_encrypt,
		.decrypt		= aria_avx_ctr_encrypt,
		.init			= aria_avx_init_tfm,
	}
};

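/* Handles for the simd wrapper algorithms, one per entry above. */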
static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

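/*
 * Module init: bail out unless the CPU supports AVX, AES-NI and
 * OSXSAVE and the kernel can save/restore the SSE and YMM register
 * state; then pick the GFNI or plain AES-NI assembly and register
 * the simd-wrapped algorithms.
 */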
static int __init aria_avx_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
	}

	return simd_register_skciphers_compat(aria_algs,
					      ARRAY_SIZE(aria_algs),
					      aria_simd_algs);
}

static void __exit aria_avx_exit(void)
{
	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
				  aria_simd_algs);
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");