arch/arm64/crypto/speck-neon-glue.c
// SPDX-License-Identifier: GPL-2.0
/*
 * NEON-accelerated implementation of Speck128-XTS and Speck64-XTS
 * (64-bit version; based on the 32-bit version)
 *
 * Copyright (c) 2018 Google, Inc
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/speck.h>
#include <crypto/xts.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * The assembly functions only handle multiples of 128 bytes (i.e. eight
 * Speck128 blocks or sixteen Speck64 blocks at a time)
 */
#define SPECK_NEON_CHUNK_SIZE   128

/* Speck128 */

struct speck128_xts_tfm_ctx {
        struct speck128_tfm_ctx main_key;
        struct speck128_tfm_ctx tweak_key;
};

asmlinkage void speck128_xts_encrypt_neon(const u64 *round_keys, int nrounds,
                                          void *dst, const void *src,
                                          unsigned int nbytes, void *tweak);

asmlinkage void speck128_xts_decrypt_neon(const u64 *round_keys, int nrounds,
                                          void *dst, const void *src,
                                          unsigned int nbytes, void *tweak);

typedef void (*speck128_crypt_one_t)(const struct speck128_tfm_ctx *,
                                     u8 *, const u8 *);
typedef void (*speck128_xts_crypt_many_t)(const u64 *, int, void *,
                                          const void *, unsigned int, void *);

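/*
 * XTS driver: the IV is encrypted with the tweak key to produce the
 * initial tweak, then the data is walked.  Chunks of at least
 * SPECK_NEON_CHUNK_SIZE bytes go through the NEON assembly (which also
 * advances the tweak); any remaining full blocks are handled one at a
 * time by the generic code.  skcipher_walk_virt() is called with
 * atomic=true, so the walk never sleeps.
 */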
static __always_inline int
__speck128_xts_crypt(struct skcipher_request *req,
                     speck128_crypt_one_t crypt_one,
                     speck128_xts_crypt_many_t crypt_many)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct speck128_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        le128 tweak;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                const u8 *src = walk.src.virt.addr;

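                /*
                 * Take the NEON path only when a full chunk is available
                 * and SIMD may be used in the current context (e.g. not
                 * in interrupt context).
                 */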
                if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
                        unsigned int count;

                        count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
                        kernel_neon_begin();
                        (*crypt_many)(ctx->main_key.round_keys,
                                      ctx->main_key.nrounds,
                                      dst, src, count, &tweak);
                        kernel_neon_end();
                        dst += count;
                        src += count;
                        nbytes -= count;
                }

                /* Handle any remainder with generic code */
                while (nbytes >= sizeof(tweak)) {
                        le128_xor((le128 *)dst, (const le128 *)src, &tweak);
                        (*crypt_one)(&ctx->main_key, dst, dst);
                        le128_xor((le128 *)dst, (const le128 *)dst, &tweak);
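                        /* Advance the tweak: multiply by x in GF(2^128) */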
                        gf128mul_x_ble(&tweak, &tweak);

                        dst += sizeof(tweak);
                        src += sizeof(tweak);
                        nbytes -= sizeof(tweak);
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int speck128_xts_encrypt(struct skcipher_request *req)
{
        return __speck128_xts_crypt(req, crypto_speck128_encrypt,
                                    speck128_xts_encrypt_neon);
}

static int speck128_xts_decrypt(struct skcipher_request *req)
{
        return __speck128_xts_crypt(req, crypto_speck128_decrypt,
                                    speck128_xts_decrypt_neon);
}

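/*
 * An XTS key is the concatenation of two same-length keys: the first
 * half keys the main (data) cipher, the second half keys the tweak
 * cipher.  xts_verify_key() rejects invalid lengths (and, in FIPS mode,
 * identical halves) before the key is split.
 */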
static int speck128_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct speck128_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        err = crypto_speck128_setkey(&ctx->main_key, key, keylen);
        if (err)
                return err;

        return crypto_speck128_setkey(&ctx->tweak_key, key + keylen, keylen);
}

/* Speck64 */

struct speck64_xts_tfm_ctx {
        struct speck64_tfm_ctx main_key;
        struct speck64_tfm_ctx tweak_key;
};

asmlinkage void speck64_xts_encrypt_neon(const u32 *round_keys, int nrounds,
                                         void *dst, const void *src,
                                         unsigned int nbytes, void *tweak);

asmlinkage void speck64_xts_decrypt_neon(const u32 *round_keys, int nrounds,
                                         void *dst, const void *src,
                                         unsigned int nbytes, void *tweak);

typedef void (*speck64_crypt_one_t)(const struct speck64_tfm_ctx *,
                                    u8 *, const u8 *);
typedef void (*speck64_xts_crypt_many_t)(const u32 *, int, void *,
                                         const void *, unsigned int, void *);

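/*
 * Identical in structure to __speck128_xts_crypt() above, but with a
 * 64-bit block size and an open-coded GF(2^64) tweak update in place of
 * gf128mul_x_ble().
 */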
static __always_inline int
__speck64_xts_crypt(struct skcipher_request *req, speck64_crypt_one_t crypt_one,
                    speck64_xts_crypt_many_t crypt_many)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct speck64_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        __le64 tweak;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        crypto_speck64_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                const u8 *src = walk.src.virt.addr;

                if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
                        unsigned int count;

                        count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
                        kernel_neon_begin();
                        (*crypt_many)(ctx->main_key.round_keys,
                                      ctx->main_key.nrounds,
                                      dst, src, count, &tweak);
                        kernel_neon_end();
                        dst += count;
                        src += count;
                        nbytes -= count;
                }

                /* Handle any remainder with generic code */
                while (nbytes >= sizeof(tweak)) {
                        *(__le64 *)dst = *(__le64 *)src ^ tweak;
                        (*crypt_one)(&ctx->main_key, dst, dst);
                        *(__le64 *)dst ^= tweak;
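                        /*
                         * Advance the tweak: multiply by x in GF(2^64).
                         * Shift left by one; if the top bit was set,
                         * reduce by x^64 + x^4 + x^3 + x + 1 (0x1B).
                         */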
                        tweak = cpu_to_le64((le64_to_cpu(tweak) << 1) ^
                                            ((tweak & cpu_to_le64(1ULL << 63)) ?
                                             0x1B : 0));
                        dst += sizeof(tweak);
                        src += sizeof(tweak);
                        nbytes -= sizeof(tweak);
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int speck64_xts_encrypt(struct skcipher_request *req)
{
        return __speck64_xts_crypt(req, crypto_speck64_encrypt,
                                   speck64_xts_encrypt_neon);
}

static int speck64_xts_decrypt(struct skcipher_request *req)
{
        return __speck64_xts_crypt(req, crypto_speck64_decrypt,
                                   speck64_xts_decrypt_neon);
}

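/* As with Speck128: split the double-length XTS key into its two halves */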
static int speck64_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct speck64_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        err = crypto_speck64_setkey(&ctx->main_key, key, keylen);
        if (err)
                return err;

        return crypto_speck64_setkey(&ctx->tweak_key, key + keylen, keylen);
}

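/*
 * .walksize = SPECK_NEON_CHUNK_SIZE asks the skcipher walk to provide
 * at least one full NEON chunk per step while more data remains, so the
 * accelerated path actually gets used.  The key sizes are doubled
 * because an XTS key holds both a main key and a tweak key.
 */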
static struct skcipher_alg speck_algs[] = {
        {
                .base.cra_name          = "xts(speck128)",
                .base.cra_driver_name   = "xts-speck128-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = SPECK128_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct speck128_xts_tfm_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * SPECK128_128_KEY_SIZE,
                .max_keysize            = 2 * SPECK128_256_KEY_SIZE,
                .ivsize                 = SPECK128_BLOCK_SIZE,
                .walksize               = SPECK_NEON_CHUNK_SIZE,
                .setkey                 = speck128_xts_setkey,
                .encrypt                = speck128_xts_encrypt,
                .decrypt                = speck128_xts_decrypt,
        }, {
                .base.cra_name          = "xts(speck64)",
                .base.cra_driver_name   = "xts-speck64-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = SPECK64_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct speck64_xts_tfm_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * SPECK64_96_KEY_SIZE,
                .max_keysize            = 2 * SPECK64_128_KEY_SIZE,
                .ivsize                 = SPECK64_BLOCK_SIZE,
                .walksize               = SPECK_NEON_CHUNK_SIZE,
                .setkey                 = speck64_xts_setkey,
                .encrypt                = speck64_xts_encrypt,
                .decrypt                = speck64_xts_decrypt,
        }
};

static int __init speck_neon_module_init(void)
{
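        /* The NEON code requires Advanced SIMD; don't register without it */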
        if (!(elf_hwcap & HWCAP_ASIMD))
                return -ENODEV;
        return crypto_register_skciphers(speck_algs, ARRAY_SIZE(speck_algs));
}

static void __exit speck_neon_module_exit(void)
{
        crypto_unregister_skciphers(speck_algs, ARRAY_SIZE(speck_algs));
}

module_init(speck_neon_module_init);
module_exit(speck_neon_module_exit);

MODULE_DESCRIPTION("Speck block cipher (NEON-accelerated)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("xts(speck128)");
MODULE_ALIAS_CRYPTO("xts-speck128-neon");
MODULE_ALIAS_CRYPTO("xts(speck64)");
MODULE_ALIAS_CRYPTO("xts-speck64-neon");