diff --git a/arch/arm64/crypto/ghash-ce-glue.c b/arch/arm64/crypto/ghash-ce-glue.c
index 067d8937d5af1e74a69ae7b14b1b0306a53fe087..791ad422c427dedc8eb022c609f86a7429b92c3d 100644
--- a/arch/arm64/crypto/ghash-ce-glue.c
+++ b/arch/arm64/crypto/ghash-ce-glue.c
@@ -60,10 +60,6 @@ asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
                                      struct ghash_key const *k,
                                      const char *head);
 
-static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
-                                 struct ghash_key const *k,
-                                 const char *head);
-
 asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
                                  const u8 src[], struct ghash_key const *k,
                                  u8 ctr[], u32 const rk[], int rounds,
@@ -87,11 +83,15 @@ static int ghash_init(struct shash_desc *desc)
 }
 
 static void ghash_do_update(int blocks, u64 dg[], const char *src,
-                           struct ghash_key *key, const char *head)
+                           struct ghash_key *key, const char *head,
+                           void (*simd_update)(int blocks, u64 dg[],
+                                               const char *src,
+                                               struct ghash_key const *k,
+                                               const char *head))
 {
        if (likely(may_use_simd())) {
                kernel_neon_begin();
-               pmull_ghash_update(blocks, dg, src, key, head);
+               simd_update(blocks, dg, src, key, head);
                kernel_neon_end();
        } else {
                be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };
@@ -119,8 +119,12 @@ static void ghash_do_update(int blocks, u64 dg[], const char *src,
 /* avoid hogging the CPU for too long */
 #define MAX_BLOCKS     (SZ_64K / GHASH_BLOCK_SIZE)
 
-static int ghash_update(struct shash_desc *desc, const u8 *src,
-                       unsigned int len)
+static int __ghash_update(struct shash_desc *desc, const u8 *src,
+                         unsigned int len,
+                         void (*simd_update)(int blocks, u64 dg[],
+                                             const char *src,
+                                             struct ghash_key const *k,
+                                             const char *head))
 {
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
@@ -146,7 +150,8 @@ static int ghash_update(struct shash_desc *desc, const u8 *src,
                        int chunk = min(blocks, MAX_BLOCKS);
 
                        ghash_do_update(chunk, ctx->digest, src, key,
-                                       partial ? ctx->buf : NULL);
+                                       partial ? ctx->buf : NULL,
+                                       simd_update);
 
                        blocks -= chunk;
                        src += chunk * GHASH_BLOCK_SIZE;
@@ -158,7 +163,19 @@ static int ghash_update(struct shash_desc *desc, const u8 *src,
        return 0;
 }
 
-static int ghash_final(struct shash_desc *desc, u8 *dst)
+static int ghash_update_p8(struct shash_desc *desc, const u8 *src,
+                          unsigned int len)
+{
+       return __ghash_update(desc, src, len, pmull_ghash_update_p8);
+}
+
+static int ghash_update_p64(struct shash_desc *desc, const u8 *src,
+                           unsigned int len)
+{
+       return __ghash_update(desc, src, len, pmull_ghash_update_p64);
+}
+
+static int ghash_final_p8(struct shash_desc *desc, u8 *dst)
 {
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
@@ -168,7 +185,28 @@ static int ghash_final(struct shash_desc *desc, u8 *dst)
 
                memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
 
-               ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
+               ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
+                               pmull_ghash_update_p8);
+       }
+       put_unaligned_be64(ctx->digest[1], dst);
+       put_unaligned_be64(ctx->digest[0], dst + 8);
+
+       *ctx = (struct ghash_desc_ctx){};
+       return 0;
+}
+
+static int ghash_final_p64(struct shash_desc *desc, u8 *dst)
+{
+       struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
+       unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
+
+       if (partial) {
+               struct ghash_key *key = crypto_shash_ctx(desc->tfm);
+
+               memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
+
+               ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
+                               pmull_ghash_update_p64);
        }
        put_unaligned_be64(ctx->digest[1], dst);
        put_unaligned_be64(ctx->digest[0], dst + 8);
@@ -224,7 +262,21 @@ static int ghash_setkey(struct crypto_shash *tfm,
        return __ghash_setkey(key, inkey, keylen);
 }
 
-static struct shash_alg ghash_alg = {
+static struct shash_alg ghash_alg[] = {{
+       .base.cra_name          = "ghash",
+       .base.cra_driver_name   = "ghash-neon",
+       .base.cra_priority      = 100,
+       .base.cra_blocksize     = GHASH_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct ghash_key),
+       .base.cra_module        = THIS_MODULE,
+
+       .digestsize             = GHASH_DIGEST_SIZE,
+       .init                   = ghash_init,
+       .update                 = ghash_update_p8,
+       .final                  = ghash_final_p8,
+       .setkey                 = ghash_setkey,
+       .descsize               = sizeof(struct ghash_desc_ctx),
+}, {
        .base.cra_name          = "ghash",
        .base.cra_driver_name   = "ghash-ce",
        .base.cra_priority      = 200,
@@ -234,11 +286,11 @@ static struct shash_alg ghash_alg = {
 
        .digestsize             = GHASH_DIGEST_SIZE,
        .init                   = ghash_init,
-       .update                 = ghash_update,
-       .final                  = ghash_final,
+       .update                 = ghash_update_p64,
+       .final                  = ghash_final_p64,
        .setkey                 = ghash_setkey,
        .descsize               = sizeof(struct ghash_desc_ctx),
-};
+}};
 
 static int num_rounds(struct crypto_aes_ctx *ctx)
 {
@@ -301,7 +353,8 @@ static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
                int blocks = count / GHASH_BLOCK_SIZE;
 
                ghash_do_update(blocks, dg, src, &ctx->ghash_key,
-                               *buf_count ? buf : NULL);
+                               *buf_count ? buf : NULL,
+                               pmull_ghash_update_p64);
 
                src += blocks * GHASH_BLOCK_SIZE;
                count %= GHASH_BLOCK_SIZE;
@@ -345,7 +398,8 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
 
        if (buf_count) {
                memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
-               ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);
+               ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL,
+                               pmull_ghash_update_p64);
        }
 }
 
 
@@ -358,7 +412,8 @@ static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
        lengths.a = cpu_to_be64(req->assoclen * 8);
        lengths.b = cpu_to_be64(cryptlen * 8);
 
-       ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL);
+       ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL,
+                       pmull_ghash_update_p64);
 
        put_unaligned_be64(dg[1], mac);
        put_unaligned_be64(dg[0], mac + 8);
@@ -434,7 +489,7 @@ static int gcm_encrypt(struct aead_request *req)
 
                        ghash_do_update(walk.nbytes / AES_BLOCK_SIZE, dg,
                                        walk.dst.virt.addr, &ctx->ghash_key,
-                                       NULL);
+                                       NULL, pmull_ghash_update_p64);
 
                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % (2 * AES_BLOCK_SIZE));
@@ -469,7 +524,8 @@ static int gcm_encrypt(struct aead_request *req)
 
                memcpy(buf, dst, nbytes);
                memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
-               ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head);
+               ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
+                               pmull_ghash_update_p64);
 
                err = skcipher_walk_done(&walk, 0);
        }
@@ -558,7 +614,8 @@ static int gcm_decrypt(struct aead_request *req)
                        u8 *src = walk.src.virt.addr;
 
                        ghash_do_update(blocks, dg, walk.src.virt.addr,
-                                       &ctx->ghash_key, NULL);
+                                       &ctx->ghash_key, NULL,
+                                       pmull_ghash_update_p64);
 
                        do {
                                __aes_arm64_encrypt(ctx->aes_key.key_enc,
@@ -602,7 +659,8 @@ static int gcm_decrypt(struct aead_request *req)
 
                memcpy(buf, src, nbytes);
                memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
-               ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head);
+               ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
+                               pmull_ghash_update_p64);
 
                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
                               walk.nbytes);
@@ -650,26 +708,30 @@ static int __init ghash_ce_mod_init(void)
                return -ENODEV;
 
        if (elf_hwcap & HWCAP_PMULL)
-               pmull_ghash_update = pmull_ghash_update_p64;
-
+               ret = crypto_register_shashes(ghash_alg,
+                                             ARRAY_SIZE(ghash_alg));
        else
-               pmull_ghash_update = pmull_ghash_update_p8;
+               /* only register the first array element */
+               ret = crypto_register_shash(ghash_alg);
 
-       ret = crypto_register_shash(&ghash_alg);
        if (ret)
                return ret;
 
        if (elf_hwcap & HWCAP_PMULL) {
                ret = crypto_register_aead(&gcm_aes_alg);
                if (ret)
-                       crypto_unregister_shash(&ghash_alg);
+                       crypto_unregister_shashes(ghash_alg,
+                                                 ARRAY_SIZE(ghash_alg));
        }
        return ret;
 }
 
 static void __exit ghash_ce_mod_exit(void)
 {
-       crypto_unregister_shash(&ghash_alg);
+       if (elf_hwcap & HWCAP_PMULL)
+               crypto_unregister_shashes(ghash_alg, ARRAY_SIZE(ghash_alg));
+       else
+               crypto_unregister_shash(ghash_alg);
        crypto_unregister_aead(&gcm_aes_alg);
 }