/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-CCM AEAD Algorithm using ARMv8 Crypto Extensions
 * as specified in RFC 8998
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"

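/*
 * Low-level primitives implemented in assembly (sm4-ce-ccm-core.S in the
 * same directory): sm4_ce_cbcmac_update() folds full blocks into the
 * running CBC-MAC, sm4_ce_ccm_enc()/sm4_ce_ccm_dec() perform the CTR
 * keystream pass while folding the plaintext into the CBC-MAC, and
 * sm4_ce_ccm_final() finishes the MAC and masks it with the encrypted
 * initial counter block A_0 to produce the tag. All of them must be
 * called between kernel_neon_begin() and kernel_neon_end().
 */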
asmlinkage void sm4_ce_cbcmac_update(const u32 *rkey_enc, u8 *mac,
				     const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_ccm_enc(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_dec(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_final(const u32 *rkey_enc, u8 *iv, u8 *mac);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_aead_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

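/*
 * CCM allows tag lengths of 4, 6, 8, 10, 12, 14 or 16 bytes (RFC 3610).
 * Odd or too-small values are rejected here; the upper bound is enforced
 * by .maxauthsize (SM4_BLOCK_SIZE) in the algorithm definition below.
 */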
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

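/*
 * Build the B_0 block (RFC 3610, section 2.2) in @info and turn @req->iv
 * into the initial counter block A_0. The caller passes the IV with
 * iv[0] = L - 1, followed by the (15 - L)-byte nonce; the flags byte
 * encodes the tag length M and the presence of associated data, and the
 * trailing L bytes carry the message length.
 */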
static int ccm_format_input(u8 info[], struct aead_request *req,
			    unsigned int msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int l = req->iv[0] + 1;
	unsigned int m;
	__be32 len;

	/* verify that CCM dimension 'L': 2 <= L <= 8 */
	if (l < 2 || l > 8)
		return -EINVAL;
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

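	/*
	 * Zero the trailing L counter bytes: req->iv now holds the initial
	 * counter block A_0, which ccm_crypt() saves for the final tag.
	 */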
	memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l);

	memcpy(info, req->iv, SM4_BLOCK_SIZE);

	m = crypto_aead_authsize(aead);

	/* format flags field per RFC 3610/NIST 800-38C */
	*info |= ((m - 2) / 2) << 3;
	if (req->assoclen)
		*info |= (1 << 6);

	/*
	 * format the message length field; Linux represents msglen as a
	 * u32, so at most the low 4 bytes are ever non-zero
	 */
	if (l >= 4)
		l = 4;

	len = cpu_to_be32(msglen);
	memcpy(&info[SM4_BLOCK_SIZE - l], (u8 *)&len + 4 - l, l);

	return 0;
}

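/*
 * Fold the associated data into the running CBC-MAC, walking the source
 * scatterlist and batching full blocks through the assembly helper.
 */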
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; } aadlen;
	u32 assoclen = req->assoclen;
	struct scatter_walk walk;
	unsigned int len;

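	/*
	 * The AAD is prefixed with its encoded length (RFC 3610, section
	 * 2.2): a 2-byte big-endian value if it is shorter than 0xff00
	 * bytes, otherwise the marker 0xfffe followed by a 4-byte
	 * big-endian length.
	 */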
	if (assoclen < 0xff00) {
		aadlen.l = cpu_to_be16(assoclen);
		len = 2;
	} else {
		aadlen.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(assoclen, &aadlen.h);
		len = 6;
	}

	sm4_ce_crypt_block(ctx->rkey_enc, mac, mac);
	crypto_xor(mac, (const u8 *)&aadlen, len);

	scatterwalk_start(&walk, req->src);

	do {
		unsigned int n, orig_n;
		const u8 *p;

		orig_n = scatterwalk_next(&walk, assoclen);
		p = walk.addr;
		n = orig_n;

		while (n > 0) {
			unsigned int l, nblocks;

			if (len == SM4_BLOCK_SIZE) {
				if (n < SM4_BLOCK_SIZE) {
					sm4_ce_crypt_block(ctx->rkey_enc,
							   mac, mac);

					len = 0;
				} else {
					nblocks = n / SM4_BLOCK_SIZE;
					sm4_ce_cbcmac_update(ctx->rkey_enc,
							     mac, p, nblocks);

					p += nblocks * SM4_BLOCK_SIZE;
					n %= SM4_BLOCK_SIZE;

					continue;
				}
			}

			l = min(n, SM4_BLOCK_SIZE - len);
			if (l) {
				crypto_xor(mac + len, p, l);
				len += l;
				p += l;
				n -= l;
			}
		}

		scatterwalk_done_src(&walk, orig_n);
		assoclen -= orig_n;
	} while (assoclen);
}

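/*
 * Common CTR + CBC-MAC walk shared by encryption and decryption. The
 * initial counter block A_0 is saved up front so that the tag can be
 * computed once the payload has been processed.
 */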
static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
		     u32 *rkey_enc, u8 mac[],
		     void (*sm4_ce_ccm_crypt)(const u32 *rkey_enc, u8 *dst,
					      const u8 *src, u8 *iv,
					      unsigned int nbytes, u8 *mac))
{
	u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
	int err = 0;

	/* preserve the initial ctr0 for the TAG */
	memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
	crypto_inc(walk->iv, SM4_BLOCK_SIZE);

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

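	/*
	 * Process every chunk except the last in full blocks only; any
	 * partial tail is pushed back to the walk. The final chunk, which
	 * may end in a partial block, is handled after the loop.
	 */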
	while (walk->nbytes && walk->nbytes != walk->total) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes - tail, mac);

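		/*
		 * Drop the NEON unit around skcipher_walk_done(): with
		 * atomic == false the walk may sleep while mapping the
		 * next scatterlist entry, which is not allowed inside a
		 * kernel_neon_begin()/kernel_neon_end() section.
		 */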
		kernel_neon_end();

		err = skcipher_walk_done(walk, tail);

		kernel_neon_begin();
	}

	if (walk->nbytes) {
		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes, mac);

		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();

		err = skcipher_walk_done(walk, 0);
	} else {
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();
	}

	return err;
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen);
	if (err)
		return err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_enc);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	u8 authtag[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen - authsize);
	if (err)
		return err;

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_dec);
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(authtag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(authtag, mac, authsize))
		return -EBADMSG;

	return 0;
}

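/*
 * cra_blocksize is 1 because CCM is built on CTR mode and therefore acts
 * as a stream cipher; the priority of 400 ranks this driver above the
 * generic ccm(sm4) template, so it is normally preferred when the CPU
 * supports the SM4 instructions.
 */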
static struct aead_alg sm4_ccm_alg = {
	.base = {
		.cra_name		= "ccm(sm4)",
		.cra_driver_name	= "ccm-sm4-ce",
		.cra_priority		= 400,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct sm4_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= SM4_BLOCK_SIZE,
	.chunksize	= SM4_BLOCK_SIZE,
	.maxauthsize	= SM4_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

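/*
 * In-kernel usage sketch (hypothetical caller; variable names are
 * illustrative and error handling is omitted):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(sm4)", 0, 0);
 *	struct aead_request *req;
 *	u8 iv[SM4_BLOCK_SIZE];
 *
 *	crypto_aead_setkey(tfm, key, SM4_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, 16);
 *
 *	iv[0] = 3;			// L - 1, i.e. L = 4
 *	memcpy(&iv[1], nonce, 11);	// 15 - L nonce bytes
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	crypto_aead_encrypt(req);	// appends the 16-byte tag to dst
 */
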
static int __init sm4_ce_ccm_init(void)
{
	return crypto_register_aead(&sm4_ccm_alg);
}

static void __exit sm4_ce_ccm_exit(void)
{
	crypto_unregister_aead(&sm4_ccm_alg);
}

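/*
 * module_cpu_feature_match() registers the init routine and makes the
 * module auto-load only on CPUs that expose the SM4 instructions in
 * their ID registers.
 */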
module_cpu_feature_match(SM4, sm4_ce_ccm_init);
module_exit(sm4_ce_ccm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in CCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("ccm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");