Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
author		Linus Torvalds <torvalds@linux-foundation.org>
		Tue, 23 Jun 2015 04:04:48 +0000 (21:04 -0700)
committer	Linus Torvalds <torvalds@linux-foundation.org>
		Tue, 23 Jun 2015 04:04:48 +0000 (21:04 -0700)
Pull crypto update from Herbert Xu:
 "Here is the crypto update for 4.2:

  API:

   - Convert the RNG interface to the new style (a consumer-side sketch
     follows after this message).

   - New AEAD interface with a single SG list covering both the AD and
     the plain/cipher text (see the usage sketch after this message).
     All external AEAD users have been converted.

   - New asymmetric key interface (akcipher).

  Algorithms:

   - Chacha20, Poly1305 and RFC7539 support.

   - New RSA implementation.

   - Jitter RNG.

   - The DRBG is now seeded with both /dev/random and the Jitter RNG.
     If the kernel entropy pool isn't ready at init time, the DRBG will
     be reseeded once it is.

   - DRBG is now the default crypto API RNG, replacing krng.

   - 842 compression (previously part of powerpc nx driver).

  Drivers:

   - Accelerated SHA-512 for arm64.

   - New Marvell CESA driver that supports DMA and more algorithms.

   - Updated powerpc nx 842 support.

   - Added support for SEC1 hardware to talitos"

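For reference, a minimal consumer-side sketch of the converted RNG
interface described above. This is an illustration only, not code from
this merge: it assumes the 4.2 crypto_rng API (crypto_alloc_rng(),
crypto_rng_reset(), crypto_rng_get_bytes()), and demo_get_random() is a
hypothetical helper name. "stdrng" resolves to the highest-priority
registered RNG, which after this update is the DRBG rather than krng.

#include <crypto/rng.h>
#include <linux/err.h>

static int demo_get_random(u8 *out, unsigned int len)
{
        struct crypto_rng *rng;
        int err;

        rng = crypto_alloc_rng("stdrng", 0, 0);
        if (IS_ERR(rng))
                return PTR_ERR(rng);

        /*
         * NULL/0 means no caller-supplied seed: the DRBG then seeds
         * itself, per the log above, from both /dev/random and the
         * Jitter RNG.
         */
        err = crypto_rng_reset(rng, NULL, 0);
        if (!err)
                err = crypto_rng_get_bytes(rng, out, len);

        crypto_free_rng(rng);
        return err;
}
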
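Likewise, a sketch of the new single-SG AEAD interface, exercising the
new rfc7539(chacha20,poly1305) template as well. Again an illustration
under the 4.2 API, not code from this merge: demo_aead_encrypt() and
its buffer layout are hypothetical, and async completion handling
(-EINPROGRESS/-EBUSY) is elided for brevity.

#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int demo_aead_encrypt(const u8 *key,   /* 32-byte ChaCha20 key */
                             u8 *buf,         /* assoclen + ptlen + 16 bytes */
                             unsigned int assoclen,
                             unsigned int ptlen,
                             u8 *nonce)       /* 12-byte RFC 7539 nonce */
{
        struct crypto_aead *tfm;
        struct aead_request *req;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_aead("rfc7539(chacha20,poly1305)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_aead_setkey(tfm, key, 32);
        if (!err)
                err = crypto_aead_setauthsize(tfm, 16);
        if (err)
                goto out_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_tfm;
        }

        /*
         * The new-style layout: ONE scatterlist covering AD || text.
         * On encryption the ciphertext and tag are written in place
         * after the (untouched) AD.
         */
        sg_init_one(&sg, buf, assoclen + ptlen + 16);
        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                  NULL, NULL);
        aead_request_set_ad(req, assoclen);
        aead_request_set_crypt(req, &sg, &sg, ptlen, nonce);

        err = crypto_aead_encrypt(req);  /* may be -EINPROGRESS if async */

        aead_request_free(req);
out_tfm:
        crypto_free_aead(tfm);
        return err;
}
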
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (292 commits)
  crypto: marvell/cesa - remove COMPILE_TEST dependency
  crypto: algif_aead - Temporarily disable all AEAD algorithms
  crypto: af_alg - Forbid the use of internal algorithms
  crypto: echainiv - Only hold RNG during initialisation
  crypto: seqiv - Add compatibility support without RNG
  crypto: eseqiv - Offer normal cipher functionality without RNG
  crypto: chainiv - Offer normal cipher functionality without RNG
  crypto: user - Add CRYPTO_MSG_DELRNG
  crypto: user - Move cryptouser.h to uapi
  crypto: rng - Do not free default RNG when it becomes unused
  crypto: skcipher - Allow givencrypt to be NULL
  crypto: sahara - propagate the error on clk_disable_unprepare() failure
  crypto: rsa - fix invalid select for AKCIPHER
  crypto: picoxcell - Update to the current clk API
  crypto: nx - Check for bogus firmware properties
  crypto: marvell/cesa - add DT bindings documentation
  crypto: marvell/cesa - add support for Kirkwood and Dove SoCs
  crypto: marvell/cesa - add support for Orion SoCs
  crypto: marvell/cesa - add allhwsupport module parameter
  crypto: marvell/cesa - add support for all armada SoCs
  ...

MAINTAINERS
arch/x86/crypto/aesni-intel_glue.c
arch/x86/crypto/fpu.c
arch/x86/crypto/sha-mb/sha1_mb.c
drivers/crypto/caam/caamhash.c
drivers/crypto/vmx/aes.c
drivers/crypto/vmx/aes_cbc.c
drivers/crypto/vmx/ghash.c
net/mac802154/llsec.c

diff --cc MAINTAINERS
Simple merge
diff --cc arch/x86/crypto/aesni-intel_glue.c
Simple merge
diff --cc arch/x86/crypto/fpu.c
Simple merge
diff --cc arch/x86/crypto/sha-mb/sha1_mb.c
Simple merge
diff --cc drivers/crypto/caam/caamhash.c
Simple merge
diff --cc drivers/crypto/vmx/aes.c
index a9064e36e7b5488c4f47f628bb317c48b7efa485,023e5f01478324884af2df0206d7e90ff135134a..e79e567e43aacae4584b32c2d7fc9ae1e6c1e300
@@@ -73,53 -76,47 +76,53 @@@ static void p8_aes_exit(struct crypto_t
  }
  
  static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
-     unsigned int keylen)
+                        unsigned int keylen)
  {
-     int ret;
-     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-     preempt_disable();
-     pagefault_disable();
-     enable_kernel_altivec();
-     ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
-     ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
-     pagefault_enable();
-     preempt_enable();
-     ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
-     return ret;
+       int ret;
+       struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
++      preempt_disable();
+       pagefault_disable();
+       enable_kernel_altivec();
+       ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
+       ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
+       pagefault_enable();
++      preempt_enable();
+       ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
+       return ret;
  }
  
  static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  {
-     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-     if (in_interrupt()) {
-         crypto_cipher_encrypt_one(ctx->fallback, dst, src);
-     } else {
-       preempt_disable();
-         pagefault_disable();
-         enable_kernel_altivec();
-         aes_p8_encrypt(src, dst, &ctx->enc_key);
-         pagefault_enable();
-       preempt_enable();
-     }
+       struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       if (in_interrupt()) {
+               crypto_cipher_encrypt_one(ctx->fallback, dst, src);
+       } else {
++              preempt_disable();
+               pagefault_disable();
+               enable_kernel_altivec();
+               aes_p8_encrypt(src, dst, &ctx->enc_key);
+               pagefault_enable();
++              preempt_enable();
+       }
  }
  
  static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  {
-     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
-     if (in_interrupt()) {
-         crypto_cipher_decrypt_one(ctx->fallback, dst, src);
-     } else {
-       preempt_disable();
-         pagefault_disable();
-         enable_kernel_altivec();
-         aes_p8_decrypt(src, dst, &ctx->dec_key);
-         pagefault_enable();
-       preempt_enable();
-     }
+       struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       if (in_interrupt()) {
+               crypto_cipher_decrypt_one(ctx->fallback, dst, src);
+       } else {
++              preempt_disable();
+               pagefault_disable();
+               enable_kernel_altivec();
+               aes_p8_decrypt(src, dst, &ctx->dec_key);
+               pagefault_enable();
++              preempt_enable();
+       }
  }
  
  struct crypto_alg p8_aes_alg = {
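
The lines marked "++" in this diff, and in the aes_cbc.c and ghash.c
diffs below, are the merge resolution itself: every AltiVec section
gains a preempt_disable()/preempt_enable() bracket. The likely
rationale (an inference, not stated in this log) is that
enable_kernel_altivec() claims the vector unit on the current CPU, so
the task must not be preempted and migrated while vector state is
live. A minimal sketch of the resulting bracket; vmx_crypto_section()
is a hypothetical helper for illustration, not code from this merge:

#include <linux/preempt.h>
#include <linux/uaccess.h>      /* pagefault_disable()/pagefault_enable() */
#include <asm/switch_to.h>      /* enable_kernel_altivec(), powerpc only */

static void vmx_crypto_section(void (*vec_fn)(void *arg), void *arg)
{
        preempt_disable();        /* the added bracket: stay on this CPU */
        pagefault_disable();      /* no sleeping page faults meanwhile */
        enable_kernel_altivec();  /* claim the vector unit for the kernel */

        vec_fn(arg);              /* the aes_p8_* / gcm_*_p8 calls above */

        pagefault_enable();
        preempt_enable();         /* end of the added bracket */
}
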
diff --cc drivers/crypto/vmx/aes_cbc.c
index 477284abdd11dc9d738fe08858d7cbe48a65fe32,7120ab24d8c6e05ad8b872a2b28ea29e084fc193..7299995c78ec3b34ea76e289cf84dc877f1175ef
@@@ -74,95 -77,95 +77,101 @@@ static void p8_aes_cbc_exit(struct cryp
  }
  
  static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
-     unsigned int keylen)
+                            unsigned int keylen)
  {
-     int ret;
-     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
-     preempt_disable();
-     pagefault_disable();
-     enable_kernel_altivec();
-     ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
-     ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
-     pagefault_enable();
-     preempt_enable();
-     ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
-     return ret;
+       int ret;
+       struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
++      preempt_disable();
+       pagefault_disable();
+       enable_kernel_altivec();
+       ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
+       ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
+       pagefault_enable();
++      preempt_enable();
+       ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
+       return ret;
  }
  
  static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
-     struct scatterlist *dst, struct scatterlist *src,
-     unsigned int nbytes)
+                             struct scatterlist *dst,
+                             struct scatterlist *src, unsigned int nbytes)
  {
-     int ret;
-     struct blkcipher_walk walk;
-     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-             crypto_blkcipher_tfm(desc->tfm));
-     struct blkcipher_desc fallback_desc = {
-         .tfm = ctx->fallback,
-         .info = desc->info,
-         .flags = desc->flags
-     };
-     if (in_interrupt()) {
-         ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
-     } else {
-       preempt_disable();
-         pagefault_disable();
-         enable_kernel_altivec();
-       blkcipher_walk_init(&walk, dst, src, nbytes);
-         ret = blkcipher_walk_virt(desc, &walk);
-         while ((nbytes = walk.nbytes)) {
-                       aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                               nbytes & AES_BLOCK_MASK, &ctx->enc_key, walk.iv, 1);
+       int ret;
+       struct blkcipher_walk walk;
+       struct p8_aes_cbc_ctx *ctx =
+               crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
+       struct blkcipher_desc fallback_desc = {
+               .tfm = ctx->fallback,
+               .info = desc->info,
+               .flags = desc->flags
+       };
+       if (in_interrupt()) {
+               ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+                                              nbytes);
+       } else {
++              preempt_disable();
+               pagefault_disable();
+               enable_kernel_altivec();
+               blkcipher_walk_init(&walk, dst, src, nbytes);
+               ret = blkcipher_walk_virt(desc, &walk);
+               while ((nbytes = walk.nbytes)) {
+                       aes_p8_cbc_encrypt(walk.src.virt.addr,
+                                          walk.dst.virt.addr,
+                                          nbytes & AES_BLOCK_MASK,
+                                          &ctx->enc_key, walk.iv, 1);
                        nbytes &= AES_BLOCK_SIZE - 1;
                        ret = blkcipher_walk_done(desc, &walk, nbytes);
-       }
+               }
  
-         pagefault_enable();
-       preempt_enable();
-     }
+               pagefault_enable();
++              preempt_enable();
+       }
  
-     return ret;
+       return ret;
  }
  
  static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
-     struct scatterlist *dst, struct scatterlist *src,
-     unsigned int nbytes)
+                             struct scatterlist *dst,
+                             struct scatterlist *src, unsigned int nbytes)
  {
-     int ret;
-     struct blkcipher_walk walk;
-     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-             crypto_blkcipher_tfm(desc->tfm));
-     struct blkcipher_desc fallback_desc = {
-         .tfm = ctx->fallback,
-         .info = desc->info,
-         .flags = desc->flags
-     };
-     if (in_interrupt()) {
-         ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes);
-     } else {
-       preempt_disable();
-         pagefault_disable();
-         enable_kernel_altivec();
-       blkcipher_walk_init(&walk, dst, src, nbytes);
-         ret = blkcipher_walk_virt(desc, &walk);
-         while ((nbytes = walk.nbytes)) {
-                       aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                               nbytes & AES_BLOCK_MASK, &ctx->dec_key, walk.iv, 0);
+       int ret;
+       struct blkcipher_walk walk;
+       struct p8_aes_cbc_ctx *ctx =
+               crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
+       struct blkcipher_desc fallback_desc = {
+               .tfm = ctx->fallback,
+               .info = desc->info,
+               .flags = desc->flags
+       };
+       if (in_interrupt()) {
+               ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src,
+                                              nbytes);
+       } else {
++              preempt_disable();
+               pagefault_disable();
+               enable_kernel_altivec();
+               blkcipher_walk_init(&walk, dst, src, nbytes);
+               ret = blkcipher_walk_virt(desc, &walk);
+               while ((nbytes = walk.nbytes)) {
+                       aes_p8_cbc_encrypt(walk.src.virt.addr,
+                                          walk.dst.virt.addr,
+                                          nbytes & AES_BLOCK_MASK,
+                                          &ctx->dec_key, walk.iv, 0);
                        nbytes &= AES_BLOCK_SIZE - 1;
                        ret = blkcipher_walk_done(desc, &walk, nbytes);
                }
  
-         pagefault_enable();
-       preempt_enable();
-     }
+               pagefault_enable();
++              preempt_enable();
+       }
  
-     return ret;
+       return ret;
  }
  
  
diff --cc drivers/crypto/vmx/ghash.c
index f255ec4a04d48d60a28e1025b3e2473cdcb7d8fc,4c3a8f7e5059978a8ec62ac127460fb184cd44cc..b5e29002b66678337c54ec7858634d43285c4213
@@@ -107,98 -109,92 +109,100 @@@ static int p8_ghash_init(struct shash_d
  }
  
  static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
-     unsigned int keylen)
+                          unsigned int keylen)
  {
-     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));
-     if (keylen != GHASH_KEY_LEN)
-         return -EINVAL;
-     preempt_disable();
-     pagefault_disable();
-     enable_kernel_altivec();
-     enable_kernel_fp();
-     gcm_init_p8(ctx->htable, (const u64 *) key);
-     pagefault_enable();
-     preempt_enable();
-     return crypto_shash_setkey(ctx->fallback, key, keylen);
+       struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));
+       if (keylen != GHASH_KEY_LEN)
+               return -EINVAL;
++      preempt_disable();
+       pagefault_disable();
+       enable_kernel_altivec();
+       enable_kernel_fp();
+       gcm_init_p8(ctx->htable, (const u64 *) key);
+       pagefault_enable();
++      preempt_enable();
+       return crypto_shash_setkey(ctx->fallback, key, keylen);
  }
  
  static int p8_ghash_update(struct shash_desc *desc,
-         const u8 *src, unsigned int srclen)
+                          const u8 *src, unsigned int srclen)
  {
-     unsigned int len;
-     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
-     struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
-     if (IN_INTERRUPT) {
-         return crypto_shash_update(&dctx->fallback_desc, src, srclen);
-     } else {
-         if (dctx->bytes) {
-             if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
-                 memcpy(dctx->buffer + dctx->bytes, src, srclen);
-                 dctx->bytes += srclen;
-                 return 0;
-             }
-             memcpy(dctx->buffer + dctx->bytes, src,
-                     GHASH_DIGEST_SIZE - dctx->bytes);
-           preempt_disable();
-             pagefault_disable();
-             enable_kernel_altivec();
-             enable_kernel_fp();
-             gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-                     GHASH_DIGEST_SIZE);
-             pagefault_enable();
-           preempt_enable();
-             src += GHASH_DIGEST_SIZE - dctx->bytes;
-             srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
-             dctx->bytes = 0;
-         }
-         len = srclen & ~(GHASH_DIGEST_SIZE - 1);
-         if (len) {
-           preempt_disable();
-             pagefault_disable();
-             enable_kernel_altivec();
-             enable_kernel_fp();
-             gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
-             pagefault_enable();
-           preempt_enable();
-             src += len;
-             srclen -= len;
-         }
-         if (srclen) {
-             memcpy(dctx->buffer, src, srclen);
-             dctx->bytes = srclen;
-         }
-         return 0;
-     }
+       unsigned int len;
+       struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
+       struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+       if (IN_INTERRUPT) {
+               return crypto_shash_update(&dctx->fallback_desc, src,
+                                          srclen);
+       } else {
+               if (dctx->bytes) {
+                       if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
+                               memcpy(dctx->buffer + dctx->bytes, src,
+                                      srclen);
+                               dctx->bytes += srclen;
+                               return 0;
+                       }
+                       memcpy(dctx->buffer + dctx->bytes, src,
+                              GHASH_DIGEST_SIZE - dctx->bytes);
++                      preempt_disable();
+                       pagefault_disable();
+                       enable_kernel_altivec();
+                       enable_kernel_fp();
+                       gcm_ghash_p8(dctx->shash, ctx->htable,
+                                    dctx->buffer, GHASH_DIGEST_SIZE);
+                       pagefault_enable();
++                      preempt_enable();
+                       src += GHASH_DIGEST_SIZE - dctx->bytes;
+                       srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
+                       dctx->bytes = 0;
+               }
+               len = srclen & ~(GHASH_DIGEST_SIZE - 1);
+               if (len) {
++                      preempt_disable();
+                       pagefault_disable();
+                       enable_kernel_altivec();
+                       enable_kernel_fp();
+                       gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
+                       pagefault_enable();
++                      preempt_enable();
+                       src += len;
+                       srclen -= len;
+               }
+               if (srclen) {
+                       memcpy(dctx->buffer, src, srclen);
+                       dctx->bytes = srclen;
+               }
+               return 0;
+       }
  }
  
  static int p8_ghash_final(struct shash_desc *desc, u8 *out)
  {
-     int i;
-     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
-     struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
-     if (IN_INTERRUPT) {
-         return crypto_shash_final(&dctx->fallback_desc, out);
-     } else {
-         if (dctx->bytes) {
-             for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
-                 dctx->buffer[i] = 0;
-           preempt_disable();
-             pagefault_disable();
-             enable_kernel_altivec();
-             enable_kernel_fp();
-             gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-                     GHASH_DIGEST_SIZE);
-             pagefault_enable();
-           preempt_enable();
-             dctx->bytes = 0;
-         }
-         memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
-         return 0;
-     }
+       int i;
+       struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
+       struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+       if (IN_INTERRUPT) {
+               return crypto_shash_final(&dctx->fallback_desc, out);
+       } else {
+               if (dctx->bytes) {
+                       for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
+                               dctx->buffer[i] = 0;
++                      preempt_disable();
+                       pagefault_disable();
+                       enable_kernel_altivec();
+                       enable_kernel_fp();
+                       gcm_ghash_p8(dctx->shash, ctx->htable,
+                                    dctx->buffer, GHASH_DIGEST_SIZE);
+                       pagefault_enable();
++                      preempt_enable();
+                       dctx->bytes = 0;
+               }
+               memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
+               return 0;
+       }
  }
  
  struct shash_alg p8_ghash_alg = {
diff --cc net/mac802154/llsec.c
Simple merge