treewide: Replace GPLv2 boilerplate/reference with SPDX - rule 500
arch/arm64/crypto/aes-ctr-fallback.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

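/* Scalar AES block encryption, implemented in assembly; safe to call without NEON */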
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
                                           struct skcipher_request *req)
{
        struct skcipher_walk walk;
        u8 buf[AES_BLOCK_SIZE];
        int err;

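        /*
         * Walk the request's scatterlists by virtual address; the final
         * 'true' requests an atomic (non-sleeping) walk.
         */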
        err = skcipher_walk_virt(&walk, req, true);

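        /* Process each contiguous chunk the walker yields */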
        while (walk.nbytes > 0) {
                u8 *dst = walk.dst.virt.addr;
                u8 *src = walk.src.virt.addr;
                int nbytes = walk.nbytes;
                int tail = 0;

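                /*
                 * Unless this is the final chunk, process only whole
                 * blocks here and hand the partial remainder back to
                 * the walker via 'tail'.
                 */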
                if (nbytes < walk.total) {
                        nbytes = round_down(nbytes, AES_BLOCK_SIZE);
                        tail = walk.nbytes % AES_BLOCK_SIZE;
                }

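                /* Keystream: encrypt the counter, then XOR it into the data */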
                do {
                        int bsize = min(nbytes, AES_BLOCK_SIZE);

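                        /* rounds = 6 + key_length / 4: 10/12/14 for AES-128/192/256 */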
                        __aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
                                            6 + ctx->key_length / 4);
                        crypto_xor_cpy(dst, src, buf, bsize);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);

                        dst += AES_BLOCK_SIZE;
                        src += AES_BLOCK_SIZE;
                        nbytes -= AES_BLOCK_SIZE;
                } while (nbytes > 0);

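                /* Report any unprocessed tail bytes back to the walker */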
                err = skcipher_walk_done(&walk, tail);
        }
        return err;
}
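
/*
 * Example caller, for context only; a sketch, not part of this header.
 * It is modeled on the way arch/arm64/crypto/aes-glue.c picks between
 * its NEON path and this fallback. ctr_encrypt() is an assumed name
 * for the driver's NEON-accelerated routine; crypto_simd_usable()
 * comes from <crypto/internal/simd.h>.
 */
#include <crypto/internal/simd.h>

static int ctr_encrypt_sync(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

        /* NEON may be unusable here, e.g. when called from interrupt context */
        if (!crypto_simd_usable())
                return aes_ctr_encrypt_fallback(ctx, req);

        return ctr_encrypt(req);        /* NEON fast path (assumed name) */
}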