/* SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause) */
/* Copyright (C) 2016-2022 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */

#ifndef _LINUX_SIPHASH_H
#define _LINUX_SIPHASH_H

#include <linux/types.h>
#include <linux/kernel.h>

#define SIPHASH_ALIGNMENT __alignof__(u64)
typedef struct {
	u64 key[2];
} siphash_key_t;

#define siphash_aligned_key_t siphash_key_t __aligned(16)

static inline bool siphash_key_is_zero(const siphash_key_t *key)
{
	return !(key->key[0] | key->key[1]);
}
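
/*
 * Example (illustrative; the function and variable names below are not
 * part of this header): a SipHash key is just 16 random bytes, typically
 * drawn once from get_random_bytes(). siphash_key_is_zero() can then be
 * used to detect a key that was never initialized.
 *
 *	static siphash_key_t net_hash_key;
 *
 *	static void net_hash_init(void)
 *	{
 *		get_random_bytes(&net_hash_key, sizeof(net_hash_key));
 *	}
 */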

u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key);
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key);

u64 siphash_1u64(const u64 a, const siphash_key_t *key);
u64 siphash_2u64(const u64 a, const u64 b, const siphash_key_t *key);
u64 siphash_3u64(const u64 a, const u64 b, const u64 c,
		 const siphash_key_t *key);
u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
		 const siphash_key_t *key);
u64 siphash_1u32(const u32 a, const siphash_key_t *key);
u64 siphash_3u32(const u32 a, const u32 b, const u32 c,
		 const siphash_key_t *key);

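/*
 * The two helpers below pack each pair of u32 arguments into a u64 with
 * the first argument in the low 32 bits, so e.g. siphash_2u32(a, b, key)
 * computes siphash_1u64((u64)b << 32 | a, key). The packing is done on
 * integer values rather than memory, so the result depends only on the
 * argument values, not on the host byte order.
 */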
static inline u64 siphash_2u32(const u32 a, const u32 b,
			       const siphash_key_t *key)
{
	return siphash_1u64((u64)b << 32 | a, key);
}
static inline u64 siphash_4u32(const u32 a, const u32 b, const u32 c,
			       const u32 d, const siphash_key_t *key)
{
	return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
}
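/*
 * Dispatch helper: when the length is a compile-time constant of 4, 8,
 * 16, 24 or 32 bytes, __builtin_constant_p() lets the compiler fold the
 * chain of ifs away and call the fixed-width entry point directly; any
 * other length falls through to the byte-stream implementation. Note
 * that siphash() below only reaches this path when the input is suitably
 * aligned and CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is not set.
 */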
static inline u64 ___siphash_aligned(const __le64 *data, size_t len,
				     const siphash_key_t *key)
{
	if (__builtin_constant_p(len) && len == 4)
		return siphash_1u32(le32_to_cpup((const __le32 *)data), key);
	if (__builtin_constant_p(len) && len == 8)
		return siphash_1u64(le64_to_cpu(data[0]), key);
	if (__builtin_constant_p(len) && len == 16)
		return siphash_2u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    key);
	if (__builtin_constant_p(len) && len == 24)
		return siphash_3u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    le64_to_cpu(data[2]), key);
	if (__builtin_constant_p(len) && len == 32)
		return siphash_4u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    le64_to_cpu(data[2]), le64_to_cpu(data[3]),
				    key);
	return __siphash_aligned(data, len, key);
}

/**
 * siphash - compute 64-bit siphash PRF value
 * @data: buffer to hash
 * @len: size of @data
 * @key: the siphash key
 */
static inline u64 siphash(const void *data, size_t len,
			  const siphash_key_t *key)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    !IS_ALIGNED((unsigned long)data, SIPHASH_ALIGNMENT))
		return __siphash_unaligned(data, len, key);
	return ___siphash_aligned(data, len, key);
}
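
/*
 * Example (illustrative; the struct and function names are assumptions,
 * not part of this header): hashing a fixed-size, naturally aligned
 * struct. With a constant sizeof() of 16 and an aligned pointer, the
 * call can fold to siphash_2u64() via ___siphash_aligned() above.
 *
 *	struct flow_key {
 *		__be64 src;
 *		__be64 dst;
 *	};
 *
 *	static u64 flow_key_hash(const struct flow_key *k,
 *				 const siphash_key_t *key)
 *	{
 *		return siphash(k, sizeof(*k), key);
 *	}
 */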

#define HSIPHASH_ALIGNMENT __alignof__(unsigned long)
typedef struct {
	unsigned long key[2];
} hsiphash_key_t;

u32 __hsiphash_aligned(const void *data, size_t len,
		       const hsiphash_key_t *key);
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key);

u32 hsiphash_1u32(const u32 a, const hsiphash_key_t *key);
u32 hsiphash_2u32(const u32 a, const u32 b, const hsiphash_key_t *key);
u32 hsiphash_3u32(const u32 a, const u32 b, const u32 c,
		  const hsiphash_key_t *key);
u32 hsiphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
		  const hsiphash_key_t *key);

static inline u32 ___hsiphash_aligned(const __le32 *data, size_t len,
				      const hsiphash_key_t *key)
{
	if (__builtin_constant_p(len) && len == 4)
		return hsiphash_1u32(le32_to_cpu(data[0]), key);
	if (__builtin_constant_p(len) && len == 8)
		return hsiphash_2u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     key);
	if (__builtin_constant_p(len) && len == 12)
		return hsiphash_3u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     le32_to_cpu(data[2]), key);
	if (__builtin_constant_p(len) && len == 16)
		return hsiphash_4u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     le32_to_cpu(data[2]), le32_to_cpu(data[3]),
				     key);
	return __hsiphash_aligned(data, len, key);
}

/**
 * hsiphash - compute 32-bit hsiphash PRF value
 * @data: buffer to hash
 * @len: size of @data
 * @key: the hsiphash key
 */
static inline u32 hsiphash(const void *data, size_t len,
			   const hsiphash_key_t *key)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    !IS_ALIGNED((unsigned long)data, HSIPHASH_ALIGNMENT))
		return __hsiphash_unaligned(data, len, key);
	return ___hsiphash_aligned(data, len, key);
}
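
/*
 * Example (illustrative; CONN_TABLE_SIZE is an assumed power-of-two table
 * size and the names are not part of this header): deriving a hashtable
 * bucket index from three u32 fields. hsiphash is keyed, so an attacker
 * who does not know the key cannot deliberately collide buckets, but it
 * must not be used where a cryptographic PRF is required.
 *
 *	static u32 conn_bucket(u32 saddr, u32 daddr, u32 ports,
 *			       const hsiphash_key_t *key)
 *	{
 *		return hsiphash_3u32(saddr, daddr, ports, key) &
 *		       (CONN_TABLE_SIZE - 1);
 *	}
 */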

/*
 * These macros expose the raw SipHash and HalfSipHash permutations.
 * Do not use them directly! If you think you have a use for them,
 * be sure to CC the maintainer of this file explaining why.
 */

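/*
 * For reference: each macro is one "SipRound" of the respective design,
 * an ARX (add-rotate-xor) permutation of the four-word state (v0, v1,
 * v2, v3). SipHash-2-4 runs it twice per 8-byte message block and four
 * times during finalization; HalfSipHash-1-3 runs the 32-bit variant
 * once per 4-byte block and three times at the end.
 */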
#define SIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol64((b), 13), (b) ^= (a), (a) = rol64((a), 32), \
	(c) += (d), (d) = rol64((d), 16), (d) ^= (c), \
	(a) += (d), (d) = rol64((d), 21), (d) ^= (a), \
	(c) += (b), (b) = rol64((b), 17), (b) ^= (c), (c) = rol64((c), 32))

#define SIPHASH_CONST_0 0x736f6d6570736575ULL
#define SIPHASH_CONST_1 0x646f72616e646f6dULL
#define SIPHASH_CONST_2 0x6c7967656e657261ULL
#define SIPHASH_CONST_3 0x7465646279746573ULL
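
/*
 * As in the reference implementation, the four initialization constants
 * are the ASCII bytes of "somepseudorandomlygeneratedbytes", split into
 * 8-byte chunks ("somepseu", "dorandom", "lygenera", "tedbytes").
 */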

#define HSIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol32((b), 5), (b) ^= (a), (a) = rol32((a), 16), \
	(c) += (d), (d) = rol32((d), 8), (d) ^= (c), \
	(a) += (d), (d) = rol32((d), 7), (d) ^= (a), \
	(c) += (b), (b) = rol32((b), 13), (b) ^= (c), (c) = rol32((c), 16))

#define HSIPHASH_CONST_0 0U
#define HSIPHASH_CONST_1 0U
#define HSIPHASH_CONST_2 0x6c796765U
#define HSIPHASH_CONST_3 0x74656462U
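
/*
 * Per the HalfSipHash reference, the first two 32-bit constants are zero
 * and the last two are the ASCII bytes "lyge" and "tedb" (the upper
 * halves of SIPHASH_CONST_2 and SIPHASH_CONST_3 above).
 */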

#endif /* _LINUX_SIPHASH_H */