//! Pure Rust implementation of the [Advanced Encryption Standard][AES]
//! (AES, a.k.a. Rijndael).
//!
//! # ⚠️ Security Warning: Hazmat!
//!
//! This crate implements only the low-level block cipher function, and is intended
//! for use in implementing higher-level constructions *only*. It is NOT
//! intended for direct use in applications.
//!
//! USE AT YOUR OWN RISK!
//!
//! # Supported backends
//! This crate provides multiple backends, including a portable pure Rust
//! backend as well as ones based on CPU intrinsics.
//!
//! By default, it performs runtime detection of CPU intrinsics and uses them
//! if they are available.
//!
//! ## "soft" portable backend
//! As a baseline implementation, this crate provides a constant-time pure Rust
//! implementation based on [fixslicing], a more advanced form of bitslicing
//! implemented entirely in terms of bitwise arithmetic with no use of any
//! lookup tables or data-dependent branches.
//!
//! Enabling the `aes_compact` configuration flag will reduce the code size of this
//! backend at the cost of decreased performance (using a modified form of
//! the fixslicing technique called "semi-fixslicing").
//!
//! ## ARMv8 intrinsics (Rust 1.61+)
//! On `aarch64` targets including `aarch64-apple-darwin` (Apple M1) and Linux
//! targets such as `aarch64-unknown-linux-gnu` and `aarch64-unknown-linux-musl`,
//! support for using AES intrinsics provided by the ARMv8 Cryptography Extensions
//! is available when using Rust 1.61 or above, and can be enabled using the
//! `aes_armv8` configuration flag.
//!
//! On Linux and macOS, when the `aes_armv8` flag is enabled, support for AES
//! intrinsics is autodetected at runtime. On other platforms, the `aes`
//! target feature must be enabled via RUSTFLAGS.
//!
//! ## `x86`/`x86_64` intrinsics (AES-NI)
//! By default this crate uses runtime detection on `i686`/`x86_64` targets
//! in order to determine if AES-NI is available, and if it is not, it will
//! fall back to a constant-time software implementation.
//!
//! Passing `RUSTFLAGS=-C target-feature=+aes,+ssse3` explicitly at compile-time
//! will override runtime detection and ensure that AES-NI is always used.
//! Programs built in this manner will crash with an illegal instruction on
//! CPUs which do not have AES-NI enabled.
//!
//! Note: runtime detection is not possible on SGX targets. Please use the
//! aforementioned `RUSTFLAGS` to leverage AES-NI on these targets.
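//!
//! As an illustrative sketch (using only the standard library, not an API of
//! this crate), downstream code can perform the same runtime check itself:
//!
//! ```
//! #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
//! {
//!     // `true` means the hardware AES-NI backend will be selected;
//!     // `false` means the constant-time "soft" fallback is used instead
//!     // (unless AES-NI was forced at compile time as described above).
//!     let has_aesni = std::is_x86_feature_detected!("aes");
//!     println!("AES-NI available: {}", has_aesni);
//! }
//! ```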
//!
//! # Examples
//! ```
//! use aes::Aes128;
//! use aes::cipher::{
//!     BlockCipher, BlockEncrypt, BlockDecrypt, KeyInit,
//!     generic_array::GenericArray,
//! };
//!
//! let key = GenericArray::from([0u8; 16]);
//! let mut block = GenericArray::from([42u8; 16]);
//!
//! // Initialize cipher
//! let cipher = Aes128::new(&key);
//!
//! let block_copy = block.clone();
//!
//! // Encrypt block in-place
//! cipher.encrypt_block(&mut block);
//!
//! // And decrypt it back
//! cipher.decrypt_block(&mut block);
//! assert_eq!(block, block_copy);
//!
//! // The implementation supports parallel block processing. The number of
//! // blocks processed in parallel generally depends on hardware capabilities.
//! // This is achieved via instruction-level parallelism (ILP) on a single
//! // CPU core, which is different from multi-threaded parallelism.
//! let mut blocks = [block; 100];
//! cipher.encrypt_blocks(&mut blocks);
//!
//! for block in blocks.iter_mut() {
//!     cipher.decrypt_block(block);
//!     assert_eq!(block, &block_copy);
//! }
//!
//! // `decrypt_blocks` also supports parallel block processing.
//! cipher.decrypt_blocks(&mut blocks);
//!
//! for block in blocks.iter_mut() {
//!     cipher.encrypt_block(block);
//!     assert_eq!(block, &block_copy);
//! }
//! ```
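//!
//! If only one direction is needed, the encryption-only (`Aes128Enc`) and
//! decryption-only (`Aes128Dec`) types re-exported by this crate can be used
//! instead of `Aes128`. A short sketch:
//!
//! ```
//! use aes::{Aes128Dec, Aes128Enc};
//! use aes::cipher::{BlockDecrypt, BlockEncrypt, KeyInit, generic_array::GenericArray};
//!
//! let key = GenericArray::from([0u8; 16]);
//! let mut block = GenericArray::from([42u8; 16]);
//!
//! // Encrypt with the encryption-only type...
//! let enc_cipher = Aes128Enc::new(&key);
//! enc_cipher.encrypt_block(&mut block);
//!
//! // ...and decrypt with the decryption-only type.
//! let dec_cipher = Aes128Dec::new(&key);
//! dec_cipher.decrypt_block(&mut block);
//! assert_eq!(block, GenericArray::from([42u8; 16]));
//! ```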
//!
//! For implementations of block cipher modes of operation, see the
//! [`block-modes`] repository.
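//!
//! For instance, CBC mode with PKCS#7 padding via the `cbc` crate from that
//! repository might look roughly like the following sketch (not compiled here,
//! since `cbc` is a separate dependency):
//!
//! ```ignore
//! use aes::cipher::{block_padding::Pkcs7, BlockDecryptMut, BlockEncryptMut, KeyIvInit};
//!
//! type Aes128CbcEnc = cbc::Encryptor<aes::Aes128>;
//! type Aes128CbcDec = cbc::Decryptor<aes::Aes128>;
//!
//! let key = [0x42; 16];
//! let iv = [0x24; 16];
//! let plaintext = *b"hello world! this is my plaintext.";
//!
//! // Encrypt in-place: the buffer must be large enough for the padded plaintext.
//! let mut buf = [0u8; 48];
//! let pt_len = plaintext.len();
//! buf[..pt_len].copy_from_slice(&plaintext);
//! let ct = Aes128CbcEnc::new(&key.into(), &iv.into())
//!     .encrypt_padded_mut::<Pkcs7>(&mut buf, pt_len)
//!     .unwrap();
//! assert_eq!(ct.len(), 48); // 34 plaintext bytes padded to three 16-byte blocks
//!
//! // Decrypt it back in-place.
//! let pt = Aes128CbcDec::new(&key.into(), &iv.into())
//!     .decrypt_padded_mut::<Pkcs7>(&mut buf)
//!     .unwrap();
//! assert_eq!(pt, &plaintext);
//! ```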
//!
//! # Configuration Flags
//!
//! You can modify the crate using the following configuration flags:
//!
//! - `aes_armv8`: enable ARMv8 AES intrinsics (Rust 1.61+).
//! - `aes_force_soft`: force the software implementation.
//! - `aes_compact`: reduce code size at the cost of slower performance
//!   (affects only the software backend).
//!
//! These flags can be enabled using the `RUSTFLAGS` environment variable
//! (e.g. `RUSTFLAGS="--cfg aes_compact"`) or by modifying `.cargo/config`.
//!
//! [AES]: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard
//! [fixslicing]: https://eprint.iacr.org/2020/1123.pdf
//! [AES-NI]: https://en.wikipedia.org/wiki/AES_instruction_set
//! [`block-modes`]: https://github.com/RustCrypto/block-modes/

#![no_std]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/26acc39f/logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/26acc39f/logo.svg"
)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![warn(missing_docs, rust_2018_idioms)]

#[cfg(feature = "hazmat")]
#[cfg_attr(docsrs, doc(cfg(feature = "hazmat")))]
pub mod hazmat;

mod soft;

use cfg_if::cfg_if;

// Backend selection: ARMv8 AES intrinsics (opt-in via the `aes_armv8` cfg),
// AES-NI with runtime detection on x86/x86_64, or the portable constant-time
// "soft" fallback (always used when `aes_force_soft` is set).
cfg_if! {
    if #[cfg(all(target_arch = "aarch64", aes_armv8, not(aes_force_soft)))] {
        mod armv8;
        mod autodetect;
        pub use autodetect::*;
    } else if #[cfg(all(
        any(target_arch = "x86", target_arch = "x86_64"),
        not(aes_force_soft)
    ))] {
        mod autodetect;
        mod ni;
        pub use autodetect::*;
    } else {
        pub use soft::*;
    }
}

pub use cipher;
use cipher::{
    consts::{U16, U8},
    generic_array::GenericArray,
};

/// 128-bit AES block
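///
/// A `Block` is simply a 16-byte `GenericArray`. For example (an illustrative
/// sketch, not required usage):
///
/// ```
/// let block = aes::Block::from([0u8; 16]);
/// assert_eq!(block.len(), 16);
/// ```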
pub type Block = GenericArray<u8, U16>;
/// Eight 128-bit AES blocks
pub type Block8 = GenericArray<Block, U8>;

#[cfg(test)]
mod tests {
    #[cfg(feature = "zeroize")]
    #[test]
    fn zeroize_works() {
        use super::soft;

        // Check that the value's memory is fully zeroed after it is dropped
        // (i.e. that `ZeroizeOnDrop` wipes the key schedule).
        fn test_for<T: zeroize::ZeroizeOnDrop>(val: T) {
            use core::mem::{size_of, ManuallyDrop};

            let mut val = ManuallyDrop::new(val);
            let ptr = &val as *const _ as *const u8;
            let len = size_of::<ManuallyDrop<T>>();

            unsafe { ManuallyDrop::drop(&mut val) };

            let slice = unsafe { core::slice::from_raw_parts(ptr, len) };

            assert!(slice.iter().all(|&byte| byte == 0));
        }

        let key_128 = [42; 16].into();
        let key_192 = [42; 24].into();
        let key_256 = [42; 32].into();

        use cipher::KeyInit as _;
        test_for(soft::Aes128::new(&key_128));
        test_for(soft::Aes128Enc::new(&key_128));
        test_for(soft::Aes128Dec::new(&key_128));
        test_for(soft::Aes192::new(&key_192));
        test_for(soft::Aes192Enc::new(&key_192));
        test_for(soft::Aes192Dec::new(&key_192));
        test_for(soft::Aes256::new(&key_256));
        test_for(soft::Aes256Enc::new(&key_256));
        test_for(soft::Aes256Dec::new(&key_256));

        #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), not(aes_force_soft)))]
        {
            use super::ni;

            cpufeatures::new!(aes_intrinsics, "aes");
            if aes_intrinsics::get() {
                test_for(ni::Aes128::new(&key_128));
                test_for(ni::Aes128Enc::new(&key_128));
                test_for(ni::Aes128Dec::new(&key_128));
                test_for(ni::Aes192::new(&key_192));
                test_for(ni::Aes192Enc::new(&key_192));
                test_for(ni::Aes192Dec::new(&key_192));
                test_for(ni::Aes256::new(&key_256));
                test_for(ni::Aes256Enc::new(&key_256));
                test_for(ni::Aes256Dec::new(&key_256));
            }
        }

        #[cfg(all(target_arch = "aarch64", aes_armv8, not(aes_force_soft)))]
        {
            use super::armv8;

            cpufeatures::new!(aes_intrinsics, "aes");
            if aes_intrinsics::get() {
                test_for(armv8::Aes128::new(&key_128));
                test_for(armv8::Aes128Enc::new(&key_128));
                test_for(armv8::Aes128Dec::new(&key_128));
                test_for(armv8::Aes192::new(&key_192));
                test_for(armv8::Aes192Enc::new(&key_192));
                test_for(armv8::Aes192Dec::new(&key_192));
                test_for(armv8::Aes256::new(&key_256));
                test_for(armv8::Aes256Enc::new(&key_256));
                test_for(armv8::Aes256Dec::new(&key_256));
            }
        }
    }
}