/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Scalar AES core transform
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/cache.h>

	.text

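	/*
	 * Register aliases for the function arguments: the round key
	 * pointer, the output and input blocks, and the round count.
	 * tt (the table pointer) deliberately shares x2 with in: it is
	 * only assigned after the input block has been fully loaded.
	 */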
	rk		.req	x0
	out		.req	x1
	in		.req	x2
	rounds		.req	x3
	tt		.req	x2

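	/*
	 * __pair1 - look up a pair of table entries for the encryption
	 * path. Byte #\shift of \in0 and \in1e is extracted and used as an
	 * index into the table at tt. For the final round (\op == b), the
	 * byte loads index a table of 32-bit entries, so the index must be
	 * scaled by 4 up front.
	 */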
	.macro		__pair1, sz, op, reg0, reg1, in0, in1e, in1d, shift
	.ifc		\op\shift, b0
	ubfiz		\reg0, \in0, #2, #8
	ubfiz		\reg1, \in1e, #2, #8
	.else
	ubfx		\reg0, \in0, #\shift, #8
	ubfx		\reg1, \in1e, #\shift, #8
	.endif

	/*
	 * AArch64 cannot do byte size indexed loads from a table containing
	 * 32-bit quantities, i.e., 'ldrb w12, [tt, w12, uxtw #2]' is not a
	 * valid instruction. So perform the shift explicitly first for the
	 * high bytes (the low byte is shifted implicitly by using ubfiz
	 * rather than ubfx above).
	 */
	.ifnc		\op, b
	ldr		\reg0, [tt, \reg0, uxtw #2]
	ldr		\reg1, [tt, \reg1, uxtw #2]
	.else
	.if		\shift > 0
	lsl		\reg0, \reg0, #2
	lsl		\reg1, \reg1, #2
	.endif
	ldrb		\reg0, [tt, \reg0, uxtw]
	ldrb		\reg1, [tt, \reg1, uxtw]
	.endif
	.endm

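	/*
	 * __pair0 - look up a pair of table entries for the decryption
	 * path. The second index comes from \in1d rather than \in1e,
	 * reflecting the opposite row rotation of InvShiftRows. \op and
	 * \sz are passed straight through, since the inverse S-box used in
	 * the final round is a separate byte-sized table.
	 */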
	.macro		__pair0, sz, op, reg0, reg1, in0, in1e, in1d, shift
	ubfx		\reg0, \in0, #\shift, #8
	ubfx		\reg1, \in1d, #\shift, #8
	ldr\op		\reg0, [tt, \reg0, uxtw #\sz]
	ldr\op		\reg1, [tt, \reg1, uxtw #\sz]
	.endm

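	/*
	 * __hround - compute half a round, i.e., two of the four output
	 * columns. Loads two round key words from rk (post-incrementing the
	 * pointer), then XORs in four pairs of table lookups, each rotated
	 * (ror) so its contribution lands at the right byte position of the
	 * output word.
	 */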
	.macro		__hround, out0, out1, in0, in1, in2, in3, t0, t1, enc, sz, op
	ldp		\out0, \out1, [rk], #8

	__pair\enc	\sz, \op, w12, w13, \in0, \in1, \in3, 0
	__pair\enc	\sz, \op, w14, w15, \in1, \in2, \in0, 8
	__pair\enc	\sz, \op, w16, w17, \in2, \in3, \in1, 16
	__pair\enc	\sz, \op, \t0, \t1, \in3, \in0, \in2, 24

	eor		\out0, \out0, w12
	eor		\out1, \out1, w13
	eor		\out0, \out0, w14, ror #24
	eor		\out1, \out1, w15, ror #24
	eor		\out0, \out0, w16, ror #16
	eor		\out1, \out1, w17, ror #16
	eor		\out0, \out0, \t0, ror #8
	eor		\out1, \out1, \t1, ror #8
	.endm

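	/*
	 * fround - one full forward round producing all four output
	 * columns; the order in which the input columns are presented to
	 * __hround encodes ShiftRows, and enc=1 selects __pair1.
	 */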
	.macro		fround, out0, out1, out2, out3, in0, in1, in2, in3, sz=2, op
	__hround	\out0, \out1, \in0, \in1, \in2, \in3, \out2, \out3, 1, \sz, \op
	__hround	\out2, \out3, \in2, \in3, \in0, \in1, \in1, \in2, 1, \sz, \op
	.endm

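	/*
	 * iround - one full inverse round; the reversed input column order
	 * encodes InvShiftRows, and enc=0 selects __pair0.
	 */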
	.macro		iround, out0, out1, out2, out3, in0, in1, in2, in3, sz=2, op
	__hround	\out0, \out1, \in0, \in3, \in2, \in1, \out2, \out3, 0, \sz, \op
	__hround	\out2, \out3, \in2, \in1, \in0, \in3, \in1, \in0, 0, \sz, \op
	.endm

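	/*
	 * do_crypt - shared body of the encryption and decryption routines.
	 * \round is fround or iround, \ttab is the main lookup table, \ltab
	 * the table used for the final round (which omits MixColumns), and
	 * \bsz the index shift for \ltab lookups.
	 */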
	.macro		do_crypt, round, ttab, ltab, bsz
	ldp		w4, w5, [in]
	ldp		w6, w7, [in, #8]
	ldp		w8, w9, [rk], #16
	ldp		w10, w11, [rk, #-8]

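	/* swap the input block to little-endian byte order on BE kernels */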
CPU_BE(	rev		w4, w4		)
CPU_BE(	rev		w5, w5		)
CPU_BE(	rev		w6, w6		)
CPU_BE(	rev		w7, w7		)

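	/* initial AddRoundKey: XOR in the first four round key words */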
	eor		w4, w4, w8
	eor		w5, w5, w9
	eor		w6, w6, w10
	eor		w7, w7, w11

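	/* load the table address; this clobbers in, which is no longer needed */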
	adr_l		tt, \ttab

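	/*
	 * The round loop below is unrolled four times. rounds is 10, 12 or
	 * 14 for AES; if bit 1 is set (10 or 14), enter the loop in the
	 * middle so that the total number of rounds executed comes out
	 * right.
	 */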
	tbnz		rounds, #1, 1f

0:	\round		w8, w9, w10, w11, w4, w5, w6, w7
	\round		w4, w5, w6, w7, w8, w9, w10, w11

1:	subs		rounds, rounds, #4
	\round		w8, w9, w10, w11, w4, w5, w6, w7
	b.ls		3f
2:	\round		w4, w5, w6, w7, w8, w9, w10, w11
	b		0b
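	/* final round: switch to \ltab and do byte-wide lookups (\op == b) */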
3:	adr_l		tt, \ltab
	\round		w4, w5, w6, w7, w8, w9, w10, w11, \bsz, b

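	/* swap the output back to big-endian byte order on BE kernels */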
CPU_BE(	rev		w4, w4		)
CPU_BE(	rev		w5, w5		)
CPU_BE(	rev		w6, w6		)
CPU_BE(	rev		w7, w7		)

	stp		w4, w5, [out]
	stp		w6, w7, [out, #8]
	ret
	.endm

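	/*
	 * void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in,
	 *			    int rounds)
	 *
	 * The final round needs only the S-box output, which happens to
	 * live at byte offset 1 of each 32-bit crypto_ft_tab entry, so no
	 * separate table is needed: pass crypto_ft_tab + 1 with an index
	 * shift of 2.
	 */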
SYM_FUNC_START(__aes_arm64_encrypt)
	do_crypt	fround, crypto_ft_tab, crypto_ft_tab + 1, 2
SYM_FUNC_END(__aes_arm64_encrypt)

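	/*
	 * void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in,
	 *			    int rounds)
	 *
	 * No byte of a crypto_it_tab entry equals the plain inverse S-box
	 * value, so the final round indexes the separate
	 * crypto_aes_inv_sbox table, whose entries are single bytes (index
	 * shift 0).
	 */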
	.align		5
SYM_FUNC_START(__aes_arm64_decrypt)
	do_crypt	iround, crypto_it_tab, crypto_aes_inv_sbox, 0
SYM_FUNC_END(__aes_arm64_decrypt)