diff --git a/arch/arm64/crypto/sha256-glue.c b/arch/arm64/crypto/sha256-glue.c
index 999da59f03a9d092b5b7adf1d2308b13efffa91b..ddf4a0d85c1c2083a3fff1fe485a70a4bae2a79b 100644
--- a/arch/arm64/crypto/sha256-glue.c
+++ b/arch/arm64/crypto/sha256-glue.c
@@ -27,14 +27,26 @@ asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
                                        unsigned int num_blks);
 EXPORT_SYMBOL(sha256_block_data_order);
 
+static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
+                                     int blocks)
+{
+       sha256_block_data_order(sst->state, src, blocks);
+}
+
 asmlinkage void sha256_block_neon(u32 *digest, const void *data,
                                  unsigned int num_blks);
 
+static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
+                               int blocks)
+{
+       sha256_block_neon(sst->state, src, blocks);
+}
+
 static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
                                      unsigned int len)
 {
        return sha256_base_do_update(desc, data, len,
-                               (sha256_block_fn *)sha256_block_data_order);
+                                    __sha256_block_data_order);
 }
 
 static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
@@ -42,9 +54,8 @@ static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
 {
        if (len)
                sha256_base_do_update(desc, data, len,
-                               (sha256_block_fn *)sha256_block_data_order);
-       sha256_base_do_finalize(desc,
-                               (sha256_block_fn *)sha256_block_data_order);
+                                     __sha256_block_data_order);
+       sha256_base_do_finalize(desc, __sha256_block_data_order);
 
        return sha256_base_finish(desc, out);
 }
@@ -87,7 +98,7 @@ static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
 
        if (!crypto_simd_usable())
                return sha256_base_do_update(desc, data, len,
-                               (sha256_block_fn *)sha256_block_data_order);
+                               __sha256_block_data_order);
 
        while (len > 0) {
                unsigned int chunk = len;
@@ -103,8 +114,7 @@ static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
                                sctx->count % SHA256_BLOCK_SIZE;
 
                kernel_neon_begin();
-               sha256_base_do_update(desc, data, chunk,
-                                     (sha256_block_fn *)sha256_block_neon);
+               sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
                kernel_neon_end();
                data += chunk;
                len -= chunk;
@@ -118,15 +128,13 @@ static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
        if (!crypto_simd_usable()) {
                if (len)
                        sha256_base_do_update(desc, data, len,
-                               (sha256_block_fn *)sha256_block_data_order);
-               sha256_base_do_finalize(desc,
-                               (sha256_block_fn *)sha256_block_data_order);
+                               __sha256_block_data_order);
+               sha256_base_do_finalize(desc, __sha256_block_data_order);
        } else {
                if (len)
                        sha256_update_neon(desc, data, len);
                kernel_neon_begin();
-               sha256_base_do_finalize(desc,
-                               (sha256_block_fn *)sha256_block_neon);
+               sha256_base_do_finalize(desc, __sha256_block_neon);
                kernel_neon_end();
        }
        return sha256_base_finish(desc, out);
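
The hunks above all apply one pattern: instead of casting the assembly routines sha256_block_data_order() and sha256_block_neon() to sha256_block_fn, the patch adds static wrappers whose prototypes match sha256_block_fn exactly and passes those to the sha256_base_* helpers. Calling a function through a pointer of a mismatched type is undefined behaviour in C and is rejected by indirect-call checks such as Clang's Control Flow Integrity, which is presumably the motivation for the change. Below is a minimal userspace sketch of the same pattern; the struct layout, the do_update() helper and the stub block function are simplified stand-ins for illustration, not the real kernel definitions.

/*
 * Sketch only: shows why a prototype-matching wrapper is preferable to
 * casting a function pointer.  do_update() stands in for
 * sha256_base_do_update(), which invokes the block function indirectly
 * through the sha256_block_fn type.
 */
#include <stdint.h>
#include <stdio.h>

struct sha256_state {
	uint32_t state[8];
	uint64_t count;
};

typedef void (sha256_block_fn)(struct sha256_state *sst, const uint8_t *src,
			       int blocks);

/* Stand-in for the assembly routine, which has its own prototype. */
static void sha256_block_data_order(uint32_t *digest, const void *data,
				    unsigned int num_blks)
{
	(void)digest;
	(void)data;
	printf("processing %u block(s)\n", num_blks);
}

/* Thin wrapper with the exact sha256_block_fn prototype, as in the patch. */
static void __sha256_block_data_order(struct sha256_state *sst,
				      const uint8_t *src, int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

/* Stand-in for sha256_base_do_update(): calls indirectly via the typedef. */
static void do_update(struct sha256_state *sst, const uint8_t *src,
		      int blocks, sha256_block_fn *block_fn)
{
	block_fn(sst, src, blocks);	/* caller and callee types now match */
}

int main(void)
{
	struct sha256_state sst = { { 0 } };
	uint8_t buf[64] = { 0 };

	do_update(&sst, buf, 1, __sha256_block_data_order);
	return 0;
}

With the old cast-based code, an indirect-call checker (for example Clang's CFI with LTO) would flag the call because the pointer's declared type and the target's actual type differ; the wrapper keeps the indirect call type-correct at the cost of one trivial extra function.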