1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
2 | /* |
3 | * linux/arch/arm64/crypto/aes-ce.S - AES cipher for ARMv8 with |
4 | * Crypto Extensions |
5 | * |
6 | * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org> |
7 | */ |
8 | |
9 | #include <linux/linkage.h> |
10 | #include <asm/assembler.h> |
11 | |
/*
 * Prefix the generic AES mode entry points from aes-modes.S with "ce_",
 * so this file instantiates them as the Crypto Extensions based variants.
 */
#define AES_FUNC_START(func)		SYM_FUNC_START(ce_ ## func)
#define AES_FUNC_END(func)		SYM_FUNC_END(ce_ ## func)
14 | |
	.arch		armv8-a+crypto

	/*
	 * v16 holds the mode-specific IV/tweak/counter material for the
	 * code in aes-modes.S.  All three aliases map to the same register;
	 * presumably only one is live per mode implementation — the round
	 * keys below occupy v17-v31, leaving v16 free.
	 */
	xtsmask		.req	v16
	cbciv		.req	v16
	vctr		.req	v16
20 | |
	/*
	 * Hook used by the XTS code in aes-modes.S to reload the tweak
	 * mask into \tmp.  Deliberately empty here: this implementation
	 * keeps the mask resident in xtsmask (v16), which the CE round
	 * macros below never touch, so no reload is needed.
	 */
	.macro		xts_reload_mask, tmp
	.endm
23 | |
	/*
	 * Hook used by the XTS ciphertext-stealing code in aes-modes.S to
	 * conditionally branch to \lbl based on \reg.  Deliberately a no-op
	 * for the CE implementation — presumably the tweak is always
	 * applied here; other instantiations may take the branch.
	 */
	.macro		xts_cts_skip_tw, reg, lbl
	.endm
26 | |
27 | /* preload all round keys */ |
	/*
	 * Preload the entire expanded key schedule into v17-v31.
	 *
	 *   \rounds : number of rounds — 10, 12 or 14 for AES-128/192/256
	 *   \rk     : pointer to the round key array (advanced, i.e.
	 *             clobbered, by the post-indexed loads)
	 *
	 * The schedule is right-aligned in the register file: the final
	 * 11 round keys always land in v21-v31, so AES-192 additionally
	 * fills v19-v20 and AES-256 fills v17-v20.  Shorter key sizes
	 * simply branch past the extra leading loads.
	 */
	.macro		load_round_keys, rounds, rk
	cmp		\rounds, #12
	blo		2222f		/* 128 bits */
	beq		1111f		/* 192 bits */
	ld1		{v17.4s-v18.4s}, [\rk], #32
1111:	ld1		{v19.4s-v20.4s}, [\rk], #32
2222:	ld1		{v21.4s-v24.4s}, [\rk], #64
	ld1		{v25.4s-v28.4s}, [\rk], #64
	ld1		{v29.4s-v31.4s}, [\rk]
	.endm
38 | |
39 | /* prepare for encryption with key in rk[] */ |
	/*
	 * Prepare for encryption: load the round keys at \rk into v17-v31.
	 * \temp is used as the walking pointer so \rk itself is preserved
	 * for the caller.
	 */
	.macro		enc_prepare, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm
44 | |
45 | /* prepare for encryption (again) but with new key in rk[] */ |
	/*
	 * Re-prepare for encryption with a new key in \rk.  Identical to
	 * enc_prepare here: with the full schedule held in registers there
	 * is no cheaper way to switch keys than a full reload.
	 */
	.macro		enc_switch_key, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm
50 | |
51 | /* prepare for decryption with key in rk[] */ |
	/*
	 * Prepare for decryption: load the round keys at \rk into v17-v31.
	 * Same loading as enc_prepare — \rk is presumably the inverse
	 * (decryption) key schedule, prepared by the caller, as the aesd/
	 * aesimc path below consumes the keys in the same register layout.
	 */
	.macro		dec_prepare, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm
56 | |
	/*
	 * Apply one full AES round to up to five states in parallel with a
	 * single shared round key.
	 *
	 *   \de       : instruction suffix — 'e' (aese) or 'd' (aesd)
	 *   \mc       : MixColumns suffix — 'mc' (aesmc) or 'imc' (aesimc)
	 *   \k        : round key register
	 *   \i0-\i4   : state registers; trailing arguments may be blank
	 *
	 * Note the .ifnb ladder tests \i1, then \i3 (covering \i2 and \i3
	 * together), then \i4 — so only 1, 2, 4 or 5 blocks are supported,
	 * matching the encrypt/decrypt_block{,4x,5x} wrappers below.
	 */
	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	aes\mc		\i0\().16b, \i0\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	aes\mc		\i1\().16b, \i1\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\mc		\i2\().16b, \i2\().16b
	aes\de		\i3\().16b, \k\().16b
	aes\mc		\i3\().16b, \i3\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	aes\mc		\i4\().16b, \i4\().16b
	.endif
	.endif
	.endif
	.endm
75 | |
76 | /* up to 5 interleaved encryption rounds with the same round key */ |
	/*
	 * One interleaved round for up to 5 blocks, selecting the forward
	 * (aese/aesmc) or inverse (aesd/aesimc) instruction pair based on
	 * whether \enc is the literal character 'e'.
	 */
	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
	.ifc		\enc, e
	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
	.else
	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
	.endif
	.endm
84 | |
85 | /* up to 5 interleaved final rounds */ |
	/*
	 * Final AES round for up to 5 interleaved blocks: aese/aesd with
	 * the penultimate key \k (no MixColumns in the last round), then
	 * XOR in the last round key \k2.
	 *
	 * Supports the same 1/2/4/5-block argument combinations as
	 * do_enc_Nx (the .ifnb ladders test \i1, \i3, \i4 only).
	 */
	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\de		\i3\().16b, \k\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	.endif
	.endif
	.endif
	eor		\i0\().16b, \i0\().16b, \k2\().16b
	.ifnb		\i1
	eor		\i1\().16b, \i1\().16b, \k2\().16b
	.ifnb		\i3
	eor		\i2\().16b, \i2\().16b, \k2\().16b
	eor		\i3\().16b, \i3\().16b, \k2\().16b
	.ifnb		\i4
	eor		\i4\().16b, \i4\().16b, \k2\().16b
	.endif
	.endif
	.endif
	.endm
110 | |
111 | /* up to 5 interleaved blocks */ |
	/*
	 * Full AES transform of up to 5 interleaved blocks.  The round key
	 * registers mirror the layout established by load_round_keys: the
	 * key-size dispatch skips v17-v18 (and v19-v20) for shorter keys,
	 * the 9 common rounds use v21-v29, and the final round uses v30
	 * with last-key XOR from v31.
	 *
	 *   \enc    : 'e' to encrypt, anything else to decrypt
	 *   \rounds : 10, 12 or 14
	 */
	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
	cmp		\rounds, #12
	blo		2222f		/* 128 bits */
	beq		1111f		/* 192 bits */
	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
1111:	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
2222:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
	.endr
	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
	.endm
125 | |
	/*
	 * Encrypt a single block in \in.  \t0-\t2 are scratch registers
	 * required by the aes-modes.S interface but unused here.
	 */
	.macro		encrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \in
	.endm
129 | |
	/*
	 * Encrypt 4 blocks in parallel.  \t0-\t2 are scratch registers
	 * required by the aes-modes.S interface but unused here.
	 */
	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
	.endm
133 | |
	/*
	 * Encrypt 5 blocks in parallel (MAX_STRIDE == 5).  \t0-\t2 are
	 * scratch registers required by the aes-modes.S interface but
	 * unused here.
	 */
	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
137 | |
	/*
	 * Decrypt a single block in \in.  \t0-\t2 are scratch registers
	 * required by the aes-modes.S interface but unused here.
	 */
	.macro		decrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \in
	.endm
141 | |
	/*
	 * Decrypt 4 blocks in parallel.  \t0-\t2 are scratch registers
	 * required by the aes-modes.S interface but unused here.
	 */
	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
	.endm
145 | |
	/*
	 * Decrypt 5 blocks in parallel (MAX_STRIDE == 5).  \t0-\t2 are
	 * scratch registers required by the aes-modes.S interface but
	 * unused here.
	 */
	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
149 | |
/*
 * Instantiate the generic AES mode implementations with up to 5-way
 * interleaving, matching the *_block5x macros defined above.
 */
#define MAX_STRIDE	5

#include "aes-modes.S"
153 | |