1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2019 NXP
8 * Based on talitos crypto API driver.
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
25 * | JobDesc #3 |------------
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
35 * So, a job desc looks like:
37 * ---------------------
39 * | ShareDesc Pointer |
46 * ---------------------
53 #include "desc_constr.h"
56 #include "sg_sw_sec4.h"
58 #include "caamalg_desc.h"
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/* Job-descriptor I/O overhead on top of the shared descriptor */
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

/* Room left for the shared descriptor in the 64-word descriptor buffer */
#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif
87 struct caam_alg_entry {
94 struct caam_aead_alg {
96 struct caam_alg_entry caam;
100 struct caam_skcipher_alg {
101 struct skcipher_alg skcipher;
102 struct caam_alg_entry caam;
107 * per-session context
110 u32 sh_desc_enc[DESC_MAX_USED_LEN];
111 u32 sh_desc_dec[DESC_MAX_USED_LEN];
112 u8 key[CAAM_MAX_KEY_SIZE];
113 dma_addr_t sh_desc_enc_dma;
114 dma_addr_t sh_desc_dec_dma;
116 enum dma_data_direction dir;
117 struct device *jrdev;
118 struct alginfo adata;
119 struct alginfo cdata;
120 unsigned int authsize;
123 static int aead_null_set_sh_desc(struct crypto_aead *aead)
125 struct caam_ctx *ctx = crypto_aead_ctx(aead);
126 struct device *jrdev = ctx->jrdev;
127 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
129 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
130 ctx->adata.keylen_pad;
133 * Job Descriptor and Shared Descriptors
134 * must all fit into the 64-word Descriptor h/w Buffer
136 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
137 ctx->adata.key_inline = true;
138 ctx->adata.key_virt = ctx->key;
140 ctx->adata.key_inline = false;
141 ctx->adata.key_dma = ctx->key_dma;
144 /* aead_encrypt shared descriptor */
145 desc = ctx->sh_desc_enc;
146 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
148 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
149 desc_bytes(desc), ctx->dir);
152 * Job Descriptor and Shared Descriptors
153 * must all fit into the 64-word Descriptor h/w Buffer
155 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
156 ctx->adata.key_inline = true;
157 ctx->adata.key_virt = ctx->key;
159 ctx->adata.key_inline = false;
160 ctx->adata.key_dma = ctx->key_dma;
163 /* aead_decrypt shared descriptor */
164 desc = ctx->sh_desc_dec;
165 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
167 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
168 desc_bytes(desc), ctx->dir);
173 static int aead_set_sh_desc(struct crypto_aead *aead)
175 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
176 struct caam_aead_alg, aead);
177 unsigned int ivsize = crypto_aead_ivsize(aead);
178 struct caam_ctx *ctx = crypto_aead_ctx(aead);
179 struct device *jrdev = ctx->jrdev;
180 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
182 u32 *desc, *nonce = NULL;
184 unsigned int data_len[2];
185 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
186 OP_ALG_AAI_CTR_MOD128);
187 const bool is_rfc3686 = alg->caam.rfc3686;
192 /* NULL encryption / decryption */
193 if (!ctx->cdata.keylen)
194 return aead_null_set_sh_desc(aead);
197 * AES-CTR needs to load IV in CONTEXT1 reg
198 * at an offset of 128bits (16bytes)
199 * CONTEXT1[255:128] = IV
206 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
209 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
210 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
211 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
214 data_len[0] = ctx->adata.keylen_pad;
215 data_len[1] = ctx->cdata.keylen;
221 * Job Descriptor and Shared Descriptors
222 * must all fit into the 64-word Descriptor h/w Buffer
224 if (desc_inline_query(DESC_AEAD_ENC_LEN +
225 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
226 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
227 ARRAY_SIZE(data_len)) < 0)
231 ctx->adata.key_virt = ctx->key;
233 ctx->adata.key_dma = ctx->key_dma;
236 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
238 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
240 ctx->adata.key_inline = !!(inl_mask & 1);
241 ctx->cdata.key_inline = !!(inl_mask & 2);
243 /* aead_encrypt shared descriptor */
244 desc = ctx->sh_desc_enc;
245 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
246 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
247 false, ctrlpriv->era);
248 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
249 desc_bytes(desc), ctx->dir);
253 * Job Descriptor and Shared Descriptors
254 * must all fit into the 64-word Descriptor h/w Buffer
256 if (desc_inline_query(DESC_AEAD_DEC_LEN +
257 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
258 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
259 ARRAY_SIZE(data_len)) < 0)
263 ctx->adata.key_virt = ctx->key;
265 ctx->adata.key_dma = ctx->key_dma;
268 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
270 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
272 ctx->adata.key_inline = !!(inl_mask & 1);
273 ctx->cdata.key_inline = !!(inl_mask & 2);
275 /* aead_decrypt shared descriptor */
276 desc = ctx->sh_desc_dec;
277 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
278 ctx->authsize, alg->caam.geniv, is_rfc3686,
279 nonce, ctx1_iv_off, false, ctrlpriv->era);
280 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
281 desc_bytes(desc), ctx->dir);
283 if (!alg->caam.geniv)
287 * Job Descriptor and Shared Descriptors
288 * must all fit into the 64-word Descriptor h/w Buffer
290 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
291 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
292 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
293 ARRAY_SIZE(data_len)) < 0)
297 ctx->adata.key_virt = ctx->key;
299 ctx->adata.key_dma = ctx->key_dma;
302 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
304 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
306 ctx->adata.key_inline = !!(inl_mask & 1);
307 ctx->cdata.key_inline = !!(inl_mask & 2);
309 /* aead_givencrypt shared descriptor */
310 desc = ctx->sh_desc_enc;
311 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
312 ctx->authsize, is_rfc3686, nonce,
313 ctx1_iv_off, false, ctrlpriv->era);
314 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
315 desc_bytes(desc), ctx->dir);
321 static int aead_setauthsize(struct crypto_aead *authenc,
322 unsigned int authsize)
324 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
326 ctx->authsize = authsize;
327 aead_set_sh_desc(authenc);
332 static int gcm_set_sh_desc(struct crypto_aead *aead)
334 struct caam_ctx *ctx = crypto_aead_ctx(aead);
335 struct device *jrdev = ctx->jrdev;
336 unsigned int ivsize = crypto_aead_ivsize(aead);
338 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
341 if (!ctx->cdata.keylen || !ctx->authsize)
345 * AES GCM encrypt shared descriptor
346 * Job Descriptor and Shared Descriptor
347 * must fit into the 64-word Descriptor h/w Buffer
349 if (rem_bytes >= DESC_GCM_ENC_LEN) {
350 ctx->cdata.key_inline = true;
351 ctx->cdata.key_virt = ctx->key;
353 ctx->cdata.key_inline = false;
354 ctx->cdata.key_dma = ctx->key_dma;
357 desc = ctx->sh_desc_enc;
358 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
359 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
360 desc_bytes(desc), ctx->dir);
363 * Job Descriptor and Shared Descriptors
364 * must all fit into the 64-word Descriptor h/w Buffer
366 if (rem_bytes >= DESC_GCM_DEC_LEN) {
367 ctx->cdata.key_inline = true;
368 ctx->cdata.key_virt = ctx->key;
370 ctx->cdata.key_inline = false;
371 ctx->cdata.key_dma = ctx->key_dma;
374 desc = ctx->sh_desc_dec;
375 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
376 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
377 desc_bytes(desc), ctx->dir);
382 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
384 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
386 ctx->authsize = authsize;
387 gcm_set_sh_desc(authenc);
392 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
394 struct caam_ctx *ctx = crypto_aead_ctx(aead);
395 struct device *jrdev = ctx->jrdev;
396 unsigned int ivsize = crypto_aead_ivsize(aead);
398 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
401 if (!ctx->cdata.keylen || !ctx->authsize)
405 * RFC4106 encrypt shared descriptor
406 * Job Descriptor and Shared Descriptor
407 * must fit into the 64-word Descriptor h/w Buffer
409 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
410 ctx->cdata.key_inline = true;
411 ctx->cdata.key_virt = ctx->key;
413 ctx->cdata.key_inline = false;
414 ctx->cdata.key_dma = ctx->key_dma;
417 desc = ctx->sh_desc_enc;
418 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
420 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
421 desc_bytes(desc), ctx->dir);
424 * Job Descriptor and Shared Descriptors
425 * must all fit into the 64-word Descriptor h/w Buffer
427 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
428 ctx->cdata.key_inline = true;
429 ctx->cdata.key_virt = ctx->key;
431 ctx->cdata.key_inline = false;
432 ctx->cdata.key_dma = ctx->key_dma;
435 desc = ctx->sh_desc_dec;
436 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
438 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
439 desc_bytes(desc), ctx->dir);
444 static int rfc4106_setauthsize(struct crypto_aead *authenc,
445 unsigned int authsize)
447 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
449 ctx->authsize = authsize;
450 rfc4106_set_sh_desc(authenc);
455 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
457 struct caam_ctx *ctx = crypto_aead_ctx(aead);
458 struct device *jrdev = ctx->jrdev;
459 unsigned int ivsize = crypto_aead_ivsize(aead);
461 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
464 if (!ctx->cdata.keylen || !ctx->authsize)
468 * RFC4543 encrypt shared descriptor
469 * Job Descriptor and Shared Descriptor
470 * must fit into the 64-word Descriptor h/w Buffer
472 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
473 ctx->cdata.key_inline = true;
474 ctx->cdata.key_virt = ctx->key;
476 ctx->cdata.key_inline = false;
477 ctx->cdata.key_dma = ctx->key_dma;
480 desc = ctx->sh_desc_enc;
481 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
483 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
484 desc_bytes(desc), ctx->dir);
487 * Job Descriptor and Shared Descriptors
488 * must all fit into the 64-word Descriptor h/w Buffer
490 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
491 ctx->cdata.key_inline = true;
492 ctx->cdata.key_virt = ctx->key;
494 ctx->cdata.key_inline = false;
495 ctx->cdata.key_dma = ctx->key_dma;
498 desc = ctx->sh_desc_dec;
499 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
501 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
502 desc_bytes(desc), ctx->dir);
507 static int rfc4543_setauthsize(struct crypto_aead *authenc,
508 unsigned int authsize)
510 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
512 ctx->authsize = authsize;
513 rfc4543_set_sh_desc(authenc);
518 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
520 struct caam_ctx *ctx = crypto_aead_ctx(aead);
521 struct device *jrdev = ctx->jrdev;
522 unsigned int ivsize = crypto_aead_ivsize(aead);
525 if (!ctx->cdata.keylen || !ctx->authsize)
528 desc = ctx->sh_desc_enc;
529 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
530 ctx->authsize, true, false);
531 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
532 desc_bytes(desc), ctx->dir);
534 desc = ctx->sh_desc_dec;
535 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
536 ctx->authsize, false, false);
537 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
538 desc_bytes(desc), ctx->dir);
543 static int chachapoly_setauthsize(struct crypto_aead *aead,
544 unsigned int authsize)
546 struct caam_ctx *ctx = crypto_aead_ctx(aead);
548 if (authsize != POLY1305_DIGEST_SIZE)
551 ctx->authsize = authsize;
552 return chachapoly_set_sh_desc(aead);
555 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
558 struct caam_ctx *ctx = crypto_aead_ctx(aead);
559 unsigned int ivsize = crypto_aead_ivsize(aead);
560 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
562 if (keylen != CHACHA_KEY_SIZE + saltlen) {
563 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
567 ctx->cdata.key_virt = key;
568 ctx->cdata.keylen = keylen - saltlen;
570 return chachapoly_set_sh_desc(aead);
573 static int aead_setkey(struct crypto_aead *aead,
574 const u8 *key, unsigned int keylen)
576 struct caam_ctx *ctx = crypto_aead_ctx(aead);
577 struct device *jrdev = ctx->jrdev;
578 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
579 struct crypto_authenc_keys keys;
582 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
586 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
587 keys.authkeylen + keys.enckeylen, keys.enckeylen,
589 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
590 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
594 * If DKP is supported, use it in the shared descriptor to generate
597 if (ctrlpriv->era >= 6) {
598 ctx->adata.keylen = keys.authkeylen;
599 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
602 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
605 memcpy(ctx->key, keys.authkey, keys.authkeylen);
606 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
608 dma_sync_single_for_device(jrdev, ctx->key_dma,
609 ctx->adata.keylen_pad +
610 keys.enckeylen, ctx->dir);
614 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
615 keys.authkeylen, CAAM_MAX_KEY_SIZE -
621 /* postpend encryption key to auth split key */
622 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
623 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
624 keys.enckeylen, ctx->dir);
626 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
627 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
628 ctx->adata.keylen_pad + keys.enckeylen, 1);
632 ctx->cdata.keylen = keys.enckeylen;
633 memzero_explicit(&keys, sizeof(keys));
634 return aead_set_sh_desc(aead);
636 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
637 memzero_explicit(&keys, sizeof(keys));
641 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
644 struct crypto_authenc_keys keys;
648 err = crypto_authenc_extractkeys(&keys, key, keylen);
653 if (keys.enckeylen != DES3_EDE_KEY_SIZE)
656 flags = crypto_aead_get_flags(aead);
657 err = __des3_verify_key(&flags, keys.enckey);
659 crypto_aead_set_flags(aead, flags);
663 err = aead_setkey(aead, key, keylen);
666 memzero_explicit(&keys, sizeof(keys));
670 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
674 static int gcm_setkey(struct crypto_aead *aead,
675 const u8 *key, unsigned int keylen)
677 struct caam_ctx *ctx = crypto_aead_ctx(aead);
678 struct device *jrdev = ctx->jrdev;
681 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
682 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
685 memcpy(ctx->key, key, keylen);
686 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
687 ctx->cdata.keylen = keylen;
689 return gcm_set_sh_desc(aead);
692 static int rfc4106_setkey(struct crypto_aead *aead,
693 const u8 *key, unsigned int keylen)
695 struct caam_ctx *ctx = crypto_aead_ctx(aead);
696 struct device *jrdev = ctx->jrdev;
702 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
703 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
706 memcpy(ctx->key, key, keylen);
709 * The last four bytes of the key material are used as the salt value
710 * in the nonce. Update the AES key length.
712 ctx->cdata.keylen = keylen - 4;
713 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
715 return rfc4106_set_sh_desc(aead);
718 static int rfc4543_setkey(struct crypto_aead *aead,
719 const u8 *key, unsigned int keylen)
721 struct caam_ctx *ctx = crypto_aead_ctx(aead);
722 struct device *jrdev = ctx->jrdev;
728 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
729 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
732 memcpy(ctx->key, key, keylen);
735 * The last four bytes of the key material are used as the salt value
736 * in the nonce. Update the AES key length.
738 ctx->cdata.keylen = keylen - 4;
739 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
741 return rfc4543_set_sh_desc(aead);
744 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
747 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
748 struct caam_skcipher_alg *alg =
749 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
751 struct device *jrdev = ctx->jrdev;
752 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
755 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
756 OP_ALG_AAI_CTR_MOD128);
757 const bool is_rfc3686 = alg->caam.rfc3686;
760 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
761 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
764 * AES-CTR needs to load IV in CONTEXT1 reg
765 * at an offset of 128bits (16bytes)
766 * CONTEXT1[255:128] = IV
773 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
774 * | *key = {KEY, NONCE}
777 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
778 keylen -= CTR_RFC3686_NONCE_SIZE;
781 ctx->cdata.keylen = keylen;
782 ctx->cdata.key_virt = key;
783 ctx->cdata.key_inline = true;
785 /* skcipher_encrypt shared descriptor */
786 desc = ctx->sh_desc_enc;
787 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
789 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
790 desc_bytes(desc), ctx->dir);
792 /* skcipher_decrypt shared descriptor */
793 desc = ctx->sh_desc_dec;
794 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
796 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
797 desc_bytes(desc), ctx->dir);
802 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
803 const u8 *key, unsigned int keylen)
805 u32 tmp[DES3_EDE_EXPKEY_WORDS];
806 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
808 if (keylen == DES3_EDE_KEY_SIZE &&
809 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
813 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
814 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
815 crypto_skcipher_set_flags(skcipher,
816 CRYPTO_TFM_RES_WEAK_KEY);
820 return skcipher_setkey(skcipher, key, keylen);
823 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
826 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
827 struct device *jrdev = ctx->jrdev;
830 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
831 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
832 dev_err(jrdev, "key size mismatch\n");
836 ctx->cdata.keylen = keylen;
837 ctx->cdata.key_virt = key;
838 ctx->cdata.key_inline = true;
840 /* xts_skcipher_encrypt shared descriptor */
841 desc = ctx->sh_desc_enc;
842 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
843 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
844 desc_bytes(desc), ctx->dir);
846 /* xts_skcipher_decrypt shared descriptor */
847 desc = ctx->sh_desc_dec;
848 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
849 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
850 desc_bytes(desc), ctx->dir);
856 * aead_edesc - s/w-extended aead descriptor
857 * @src_nents: number of segments in input s/w scatterlist
858 * @dst_nents: number of segments in output s/w scatterlist
859 * @mapped_src_nents: number of segments in input h/w link table
860 * @mapped_dst_nents: number of segments in output h/w link table
861 * @sec4_sg_bytes: length of dma mapped sec4_sg space
862 * @sec4_sg_dma: bus physical mapped address of h/w link table
863 * @sec4_sg: pointer to h/w link table
864 * @hw_desc: the h/w job descriptor followed by any referenced link tables
869 int mapped_src_nents;
870 int mapped_dst_nents;
872 dma_addr_t sec4_sg_dma;
873 struct sec4_sg_entry *sec4_sg;
878 * skcipher_edesc - s/w-extended skcipher descriptor
879 * @src_nents: number of segments in input s/w scatterlist
880 * @dst_nents: number of segments in output s/w scatterlist
881 * @mapped_src_nents: number of segments in input h/w link table
882 * @mapped_dst_nents: number of segments in output h/w link table
883 * @iv_dma: dma address of iv for checking continuity and link table
884 * @sec4_sg_bytes: length of dma mapped sec4_sg space
885 * @sec4_sg_dma: bus physical mapped address of h/w link table
886 * @sec4_sg: pointer to h/w link table
887 * @hw_desc: the h/w job descriptor followed by any referenced link tables
890 struct skcipher_edesc {
893 int mapped_src_nents;
894 int mapped_dst_nents;
897 dma_addr_t sec4_sg_dma;
898 struct sec4_sg_entry *sec4_sg;
902 static void caam_unmap(struct device *dev, struct scatterlist *src,
903 struct scatterlist *dst, int src_nents,
905 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
910 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
912 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
914 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
918 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
920 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
924 static void aead_unmap(struct device *dev,
925 struct aead_edesc *edesc,
926 struct aead_request *req)
928 caam_unmap(dev, req->src, req->dst,
929 edesc->src_nents, edesc->dst_nents, 0, 0,
930 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
933 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
934 struct skcipher_request *req)
936 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
937 int ivsize = crypto_skcipher_ivsize(skcipher);
939 caam_unmap(dev, req->src, req->dst,
940 edesc->src_nents, edesc->dst_nents,
941 edesc->iv_dma, ivsize,
942 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
945 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
948 struct aead_request *req = context;
949 struct aead_edesc *edesc;
952 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
955 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
958 caam_jr_strstatus(jrdev, err);
960 aead_unmap(jrdev, edesc, req);
964 aead_request_complete(req, err);
967 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
970 struct aead_request *req = context;
971 struct aead_edesc *edesc;
974 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
977 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
980 caam_jr_strstatus(jrdev, err);
982 aead_unmap(jrdev, edesc, req);
985 * verify hw auth check passed else return -EBADMSG
987 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
992 aead_request_complete(req, err);
995 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
998 struct skcipher_request *req = context;
999 struct skcipher_edesc *edesc;
1000 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1001 int ivsize = crypto_skcipher_ivsize(skcipher);
1004 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1007 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
1010 caam_jr_strstatus(jrdev, err);
1013 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
1014 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1015 edesc->src_nents > 1 ? 100 : ivsize, 1);
1017 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
1018 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1019 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1021 skcipher_unmap(jrdev, edesc, req);
1024 * The crypto API expects us to set the IV (req->iv) to the last
1025 * ciphertext block. This is used e.g. by the CTS mode.
1028 scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
1033 skcipher_request_complete(req, err);
1036 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1039 struct skcipher_request *req = context;
1040 struct skcipher_edesc *edesc;
1042 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1043 int ivsize = crypto_skcipher_ivsize(skcipher);
1045 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1048 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
1050 caam_jr_strstatus(jrdev, err);
1053 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
1054 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1056 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
1057 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1058 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1060 skcipher_unmap(jrdev, edesc, req);
1063 skcipher_request_complete(req, err);
1067 * Fill in aead job descriptor
1069 static void init_aead_job(struct aead_request *req,
1070 struct aead_edesc *edesc,
1071 bool all_contig, bool encrypt)
1073 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1074 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1075 int authsize = ctx->authsize;
1076 u32 *desc = edesc->hw_desc;
1077 u32 out_options, in_options;
1078 dma_addr_t dst_dma, src_dma;
1079 int len, sec4_sg_index = 0;
1083 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1084 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1086 len = desc_len(sh_desc);
1087 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1090 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1094 src_dma = edesc->sec4_sg_dma;
1095 sec4_sg_index += edesc->mapped_src_nents;
1096 in_options = LDST_SGF;
1099 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1103 out_options = in_options;
1105 if (unlikely(req->src != req->dst)) {
1106 if (!edesc->mapped_dst_nents) {
1108 } else if (edesc->mapped_dst_nents == 1) {
1109 dst_dma = sg_dma_address(req->dst);
1112 dst_dma = edesc->sec4_sg_dma +
1114 sizeof(struct sec4_sg_entry);
1115 out_options = LDST_SGF;
1120 append_seq_out_ptr(desc, dst_dma,
1121 req->assoclen + req->cryptlen + authsize,
1124 append_seq_out_ptr(desc, dst_dma,
1125 req->assoclen + req->cryptlen - authsize,
1129 static void init_gcm_job(struct aead_request *req,
1130 struct aead_edesc *edesc,
1131 bool all_contig, bool encrypt)
1133 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1134 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1135 unsigned int ivsize = crypto_aead_ivsize(aead);
1136 u32 *desc = edesc->hw_desc;
1137 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1140 init_aead_job(req, edesc, all_contig, encrypt);
1141 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1143 /* BUG This should not be specific to generic GCM. */
1145 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1146 last = FIFOLD_TYPE_LAST1;
1149 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1150 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1153 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1155 append_data(desc, req->iv, ivsize);
1156 /* End of blank commands */
1159 static void init_chachapoly_job(struct aead_request *req,
1160 struct aead_edesc *edesc, bool all_contig,
1163 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1164 unsigned int ivsize = crypto_aead_ivsize(aead);
1165 unsigned int assoclen = req->assoclen;
1166 u32 *desc = edesc->hw_desc;
1169 init_aead_job(req, edesc, all_contig, encrypt);
1171 if (ivsize != CHACHAPOLY_IV_SIZE) {
1172 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1176 * The associated data comes already with the IV but we need
1177 * to skip it when we authenticate or encrypt...
1182 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1185 * For IPsec load the IV further in the same register.
1186 * For RFC7539 simply load the 12 bytes nonce in a single operation
1188 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1189 LDST_SRCDST_BYTE_CONTEXT |
1190 ctx_iv_off << LDST_OFFSET_SHIFT);
1193 static void init_authenc_job(struct aead_request *req,
1194 struct aead_edesc *edesc,
1195 bool all_contig, bool encrypt)
1197 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1198 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1199 struct caam_aead_alg, aead);
1200 unsigned int ivsize = crypto_aead_ivsize(aead);
1201 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1202 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1203 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1204 OP_ALG_AAI_CTR_MOD128);
1205 const bool is_rfc3686 = alg->caam.rfc3686;
1206 u32 *desc = edesc->hw_desc;
1210 * AES-CTR needs to load IV in CONTEXT1 reg
1211 * at an offset of 128bits (16bytes)
1212 * CONTEXT1[255:128] = IV
1219 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1222 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1224 init_aead_job(req, edesc, all_contig, encrypt);
1227 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1228 * having DPOVRD as destination.
1230 if (ctrlpriv->era < 3)
1231 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1233 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1235 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1236 append_load_as_imm(desc, req->iv, ivsize,
1238 LDST_SRCDST_BYTE_CONTEXT |
1239 (ivoffset << LDST_OFFSET_SHIFT));
1243  * Fill in skcipher job descriptor
/*
 * NOTE(review): lines are elided from this extraction (the leading numbers
 * are the original file's line numbers); comments below describe only the
 * visible code.
 *
 * Builds the hardware job descriptor for an skcipher request: links the job
 * to the per-context shared descriptor, then appends SEQ IN/OUT pointers
 * covering the IV (when present) plus the payload, switching to an S/G
 * (sec4_sg) table whenever the data is not a single contiguous segment.
 */
1245 static void init_skcipher_job(struct skcipher_request *req,
1246 			       struct skcipher_edesc *edesc,
1249 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1250 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1251 	int ivsize = crypto_skcipher_ivsize(skcipher);
1252 	u32 *desc = edesc->hw_desc;
1254 	u32 in_options = 0, out_options = 0;
1255 	dma_addr_t src_dma, dst_dma, ptr;
1256 	int len, sec4_sg_index = 0;
/* Debug-only dumps of the pre-op IV and the source scatterlist
 * (presumably guarded by an elided #ifdef DEBUG — confirm upstream). */
1259 	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1260 		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1261 	pr_err("asked=%d, cryptlen%d\n",
1262 	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1264 	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
1265 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1266 		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
/* Select the encrypt or decrypt shared descriptor and chain the job to it. */
1268 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1269 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1271 	len = desc_len(sh_desc);
1272 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
/* Input: the S/G table is needed if an IV must be prepended or the mapped
 * source spans more than one segment; otherwise point directly at src. */
1274 	if (ivsize || edesc->mapped_src_nents > 1) {
1275 		src_dma = edesc->sec4_sg_dma;
1276 		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1277 		in_options = LDST_SGF;
1279 		src_dma = sg_dma_address(req->src);
/* SEQ IN covers IV + ciphertext/plaintext. */
1282 	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
/* Output: in-place ops reuse the input table, skipping the IV entry;
 * otherwise use a direct pointer or the dst half of the S/G table. */
1284 	if (likely(req->src == req->dst)) {
1285 		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1286 		out_options = in_options;
1287 	} else if (edesc->mapped_dst_nents == 1) {
1288 		dst_dma = sg_dma_address(req->dst);
1290 		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1291 			  sizeof(struct sec4_sg_entry);
1292 		out_options = LDST_SGF;
1295 	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
1299  * allocate and map the aead extended descriptor
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * Counts and DMA-maps the request's src/dst scatterlists, allocates one
 * buffer holding the extended descriptor (struct aead_edesc), the HW job
 * descriptor commands and the sec4 S/G link table, fills the table, and
 * DMA-maps it. Returns the edesc or an ERR_PTR on failure; on every error
 * path the mappings taken so far are undone before returning.
 */
1301 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1302 					   int desc_bytes, bool *all_contig_ptr,
1305 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1306 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1307 	struct device *jrdev = ctx->jrdev;
/* Allocation context depends on whether the caller may sleep. */
1308 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1309 		       GFP_KERNEL : GFP_ATOMIC;
1310 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1311 	struct aead_edesc *edesc;
1312 	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1313 	unsigned int authsize = ctx->authsize;
/* Out-of-place: src holds assoclen + cryptlen; dst additionally gains
 * (encrypt) or loses (decrypt) the auth tag of 'authsize' bytes. */
1315 	if (unlikely(req->dst != req->src)) {
1316 		src_nents = sg_nents_for_len(req->src, req->assoclen +
1318 		if (unlikely(src_nents < 0)) {
1319 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1320 				req->assoclen + req->cryptlen);
1321 			return ERR_PTR(src_nents);
1324 		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1326 					      (encrypt ? authsize :
1328 		if (unlikely(dst_nents < 0)) {
1329 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1330 				req->assoclen + req->cryptlen +
1331 				(encrypt ? authsize : (-authsize)));
1332 			return ERR_PTR(dst_nents);
/* In-place: a single scatterlist must also cover the tag on encrypt. */
1335 		src_nents = sg_nents_for_len(req->src, req->assoclen +
1337 					     (encrypt ? authsize : 0));
1338 		if (unlikely(src_nents < 0)) {
1339 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1340 				req->assoclen + req->cryptlen +
1341 				(encrypt ? authsize : 0));
1342 			return ERR_PTR(src_nents);
/* In-place maps src bidirectionally; out-of-place maps src TO_DEVICE and
 * dst (presumably FROM_DEVICE — direction arg elided here). */
1346 	if (likely(req->src == req->dst)) {
1347 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1349 		if (unlikely(!mapped_src_nents)) {
1350 			dev_err(jrdev, "unable to map source\n");
1351 			return ERR_PTR(-ENOMEM);
1354 		/* Cover also the case of null (zero length) input data */
1356 			mapped_src_nents = dma_map_sg(jrdev, req->src,
1357 						      src_nents, DMA_TO_DEVICE);
1358 			if (unlikely(!mapped_src_nents)) {
1359 				dev_err(jrdev, "unable to map source\n");
1360 				return ERR_PTR(-ENOMEM);
1363 			mapped_src_nents = 0;
1366 		/* Cover also the case of null (zero length) output data */
1368 			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1371 			if (unlikely(!mapped_dst_nents)) {
1372 				dev_err(jrdev, "unable to map destination\n");
/* Unwind the source mapping before bailing out. */
1373 				dma_unmap_sg(jrdev, req->src, src_nents,
1375 				return ERR_PTR(-ENOMEM);
1378 			mapped_dst_nents = 0;
/* S/G table entries are only needed for multi-segment sides. */
1382 	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1383 	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1384 	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1386 	/* allocate space for base edesc and hw desc commands, link tables */
1387 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1390 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1392 		return ERR_PTR(-ENOMEM);
1395 	edesc->src_nents = src_nents;
1396 	edesc->dst_nents = dst_nents;
1397 	edesc->mapped_src_nents = mapped_src_nents;
1398 	edesc->mapped_dst_nents = mapped_dst_nents;
/* The S/G table lives right after the edesc (+ descriptor bytes, per the
 * elided offset expression). */
1399 	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
/* "All contiguous" means the source needs no S/G table. */
1401 	*all_contig_ptr = !(mapped_src_nents > 1);
1404 	if (mapped_src_nents > 1) {
1405 		sg_to_sec4_sg_last(req->src, mapped_src_nents,
1406 				   edesc->sec4_sg + sec4_sg_index, 0);
1407 		sec4_sg_index += mapped_src_nents;
1409 	if (mapped_dst_nents > 1) {
1410 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1411 				   edesc->sec4_sg + sec4_sg_index, 0);
1417 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1418 					    sec4_sg_bytes, DMA_TO_DEVICE);
1419 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1420 		dev_err(jrdev, "unable to map S/G table\n");
1421 		aead_unmap(jrdev, edesc, req);
1423 		return ERR_PTR(-ENOMEM);
1426 	edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * GCM encrypt entry point: allocates the extended descriptor, builds the
 * GCM job descriptor and enqueues it on the job ring with
 * aead_encrypt_done as completion callback. On enqueue failure the
 * mappings are released (error-path branch partially elided).
 */
1431 static int gcm_encrypt(struct aead_request *req)
1433 	struct aead_edesc *edesc;
1434 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1435 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1436 	struct device *jrdev = ctx->jrdev;
1441 	/* allocate extended descriptor */
1442 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1444 		return PTR_ERR(edesc);
1446 	/* Create and submit job descriptor */
1447 	init_gcm_job(req, edesc, all_contig, true);
/* Debug-only descriptor dump (guard presumably elided). */
1449 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1450 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1451 		       desc_bytes(edesc->hw_desc), 1);
1454 	desc = edesc->hw_desc;
1455 	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1459 		aead_unmap(jrdev, edesc, req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * ChaCha20-Poly1305 encrypt entry point: allocates the extended
 * descriptor, builds the job descriptor (encrypt = true) and enqueues it
 * on the job ring; unmaps on enqueue failure.
 */
1466 static int chachapoly_encrypt(struct aead_request *req)
1468 	struct aead_edesc *edesc;
1469 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1470 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1471 	struct device *jrdev = ctx->jrdev;
1476 	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1479 		return PTR_ERR(edesc);
1481 	desc = edesc->hw_desc;
1483 	init_chachapoly_job(req, edesc, all_contig, true);
1484 	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1485 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1488 	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1492 		aead_unmap(jrdev, edesc, req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * ChaCha20-Poly1305 decrypt entry point — mirror image of
 * chachapoly_encrypt with encrypt = false and the decrypt completion
 * callback.
 */
1499 static int chachapoly_decrypt(struct aead_request *req)
1501 	struct aead_edesc *edesc;
1502 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1503 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1504 	struct device *jrdev = ctx->jrdev;
1509 	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1512 		return PTR_ERR(edesc);
1514 	desc = edesc->hw_desc;
1516 	init_chachapoly_job(req, edesc, all_contig, false);
1517 	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1518 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1521 	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1525 		aead_unmap(jrdev, edesc, req);
/*
 * RFC4106/RFC4543 (IPsec ESP) encrypt wrapper: these modes require at
 * least 8 bytes of associated data (SPI + sequence number), so shorter
 * assoclen is rejected (error return elided here) before delegating to
 * the plain GCM path.
 */
1532 static int ipsec_gcm_encrypt(struct aead_request *req)
1534 	if (req->assoclen < 8)
1537 	return gcm_encrypt(req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * Generic authenc (hash-then-cipher) encrypt entry point: allocates the
 * extended descriptor, builds the authenc job descriptor and enqueues it;
 * unmaps on enqueue failure.
 */
1540 static int aead_encrypt(struct aead_request *req)
1542 	struct aead_edesc *edesc;
1543 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1544 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1545 	struct device *jrdev = ctx->jrdev;
1550 	/* allocate extended descriptor */
1551 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1554 		return PTR_ERR(edesc);
1556 	/* Create and submit job descriptor */
1557 	init_authenc_job(req, edesc, all_contig, true);
/* Debug-only descriptor dump (guard presumably elided). */
1559 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1560 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1561 		       desc_bytes(edesc->hw_desc), 1);
1564 	desc = edesc->hw_desc;
1565 	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1569 		aead_unmap(jrdev, edesc, req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * GCM decrypt entry point — same flow as gcm_encrypt but builds the job
 * with encrypt = false and completes through aead_decrypt_done.
 */
1576 static int gcm_decrypt(struct aead_request *req)
1578 	struct aead_edesc *edesc;
1579 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1580 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1581 	struct device *jrdev = ctx->jrdev;
1586 	/* allocate extended descriptor */
1587 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1589 		return PTR_ERR(edesc);
1591 	/* Create and submit job descriptor*/
1592 	init_gcm_job(req, edesc, all_contig, false);
/* Debug-only descriptor dump (guard presumably elided). */
1594 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1595 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1596 		       desc_bytes(edesc->hw_desc), 1);
1599 	desc = edesc->hw_desc;
1600 	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1604 		aead_unmap(jrdev, edesc, req);
/*
 * RFC4106/RFC4543 (IPsec ESP) decrypt wrapper: rejects assoclen < 8
 * (error return elided here) before delegating to the plain GCM path —
 * counterpart of ipsec_gcm_encrypt.
 */
1611 static int ipsec_gcm_decrypt(struct aead_request *req)
1613 	if (req->assoclen < 8)
1616 	return gcm_decrypt(req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * Generic authenc decrypt entry point: dumps the incoming ciphertext
 * (debug), allocates the extended descriptor, builds the authenc job with
 * encrypt = false and enqueues it; unmaps on enqueue failure.
 */
1619 static int aead_decrypt(struct aead_request *req)
1621 	struct aead_edesc *edesc;
1622 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1623 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1624 	struct device *jrdev = ctx->jrdev;
1629 	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1630 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1631 		     req->assoclen + req->cryptlen, 1);
1633 	/* allocate extended descriptor */
1634 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1635 				 &all_contig, false);
1637 		return PTR_ERR(edesc);
1639 	/* Create and submit job descriptor*/
1640 	init_authenc_job(req, edesc, all_contig, false);
/* Debug-only descriptor dump (guard presumably elided). */
1642 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1643 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1644 		       desc_bytes(edesc->hw_desc), 1);
1647 	desc = edesc->hw_desc;
1648 	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1652 		aead_unmap(jrdev, edesc, req);
1660  * allocate and map the skcipher extended descriptor for skcipher
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * Counts and DMA-maps src/dst scatterlists, allocates one buffer holding
 * the skcipher_edesc, the HW descriptor commands, the sec4 S/G link table
 * AND a DMA-able copy of the IV, then fills and maps the table. Every
 * error path unwinds the mappings taken so far (via dma_unmap_sg or
 * caam_unmap) before returning an ERR_PTR.
 */
1662 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1665 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1666 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1667 	struct device *jrdev = ctx->jrdev;
/* Allocation context depends on whether the caller may sleep. */
1668 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1669 		       GFP_KERNEL : GFP_ATOMIC;
1670 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1671 	struct skcipher_edesc *edesc;
1672 	dma_addr_t iv_dma = 0;
1674 	int ivsize = crypto_skcipher_ivsize(skcipher);
1675 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1677 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
1678 	if (unlikely(src_nents < 0)) {
1679 		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1681 		return ERR_PTR(src_nents);
1684 	if (req->dst != req->src) {
1685 		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1686 		if (unlikely(dst_nents < 0)) {
1687 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1689 			return ERR_PTR(dst_nents);
/* In-place maps src bidirectionally; out-of-place maps src TO_DEVICE and
 * dst separately (direction args partially elided). */
1693 	if (likely(req->src == req->dst)) {
1694 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1696 		if (unlikely(!mapped_src_nents)) {
1697 			dev_err(jrdev, "unable to map source\n");
1698 			return ERR_PTR(-ENOMEM);
1701 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1703 		if (unlikely(!mapped_src_nents)) {
1704 			dev_err(jrdev, "unable to map source\n");
1705 			return ERR_PTR(-ENOMEM);
1707 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1709 		if (unlikely(!mapped_dst_nents)) {
1710 			dev_err(jrdev, "unable to map destination\n");
1711 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1712 			return ERR_PTR(-ENOMEM);
/* Input table is only skippable when there is no IV and src is a single
 * mapped segment; otherwise it holds the IV entry plus src entries. */
1716 	if (!ivsize && mapped_src_nents == 1)
1717 		sec4_sg_ents = 0; // no need for an input hw s/g table
1719 		sec4_sg_ents = mapped_src_nents + !!ivsize;
1720 	dst_sg_idx = sec4_sg_ents;
1721 	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1722 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1725 	 * allocate space for base edesc and hw desc commands, link tables, IV
1727 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1730 		dev_err(jrdev, "could not allocate extended descriptor\n");
1731 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1733 		return ERR_PTR(-ENOMEM);
1736 	edesc->src_nents = src_nents;
1737 	edesc->dst_nents = dst_nents;
1738 	edesc->mapped_src_nents = mapped_src_nents;
1739 	edesc->mapped_dst_nents = mapped_dst_nents;
1740 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1741 	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1744 	/* Make sure IV is located in a DMAable area */
/* The IV is copied into the tail of this kzalloc'd buffer and mapped
 * separately, because req->iv may not be DMA-able. */
1746 		iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1747 		memcpy(iv, req->iv, ivsize);
1749 		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1750 		if (dma_mapping_error(jrdev, iv_dma)) {
1751 			dev_err(jrdev, "unable to map IV\n");
1752 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1753 				   dst_nents, 0, 0, 0, 0);
1755 			return ERR_PTR(-ENOMEM);
/* IV becomes the first entry of the input S/G table. */
1758 		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1761 		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
1764 	if (mapped_dst_nents > 1) {
1765 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1766 				   edesc->sec4_sg + dst_sg_idx, 0);
1769 	if (sec4_sg_bytes) {
1770 		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1773 		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1774 			dev_err(jrdev, "unable to map S/G table\n");
1775 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1776 				   dst_nents, iv_dma, ivsize, 0, 0);
1778 			return ERR_PTR(-ENOMEM);
1782 	edesc->iv_dma = iv_dma;
/* Debug-only dump of the finished S/G table (guard presumably elided). */
1785 	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
1786 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * skcipher encrypt entry point: allocates the extended descriptor, builds
 * the job (encrypt = true) and enqueues it on the job ring; unmaps on
 * enqueue failure.
 */
1793 static int skcipher_encrypt(struct skcipher_request *req)
1795 	struct skcipher_edesc *edesc;
1796 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1797 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1798 	struct device *jrdev = ctx->jrdev;
1802 	/* allocate extended descriptor */
1803 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1805 		return PTR_ERR(edesc);
1807 	/* Create and submit job descriptor*/
1808 	init_skcipher_job(req, edesc, true);
/* Debug-only descriptor dump (guard presumably elided). */
1810 	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1811 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1812 		       desc_bytes(edesc->hw_desc), 1);
1814 	desc = edesc->hw_desc;
1815 	ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
1820 		skcipher_unmap(jrdev, edesc, req);
/*
 * NOTE(review): lines are elided from this extraction; comments describe
 * only the visible code.
 *
 * skcipher decrypt entry point. Before submitting, it saves the last
 * ciphertext block into req->iv (the crypto API's expected "next IV" for
 * chaining modes), then builds the job with encrypt = false and enqueues
 * it; unmaps on enqueue failure.
 */
1827 static int skcipher_decrypt(struct skcipher_request *req)
1829 	struct skcipher_edesc *edesc;
1830 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1831 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1832 	int ivsize = crypto_skcipher_ivsize(skcipher);
1833 	struct device *jrdev = ctx->jrdev;
1837 	/* allocate extended descriptor */
1838 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1840 		return PTR_ERR(edesc);
1843 	 * The crypto API expects us to set the IV (req->iv) to the last
/* Copy the last ivsize bytes of the ciphertext out of req->src before the
 * hardware overwrites/consumes it (copy length expression elided). */
1847 	scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1850 	/* Create and submit job descriptor*/
1851 	init_skcipher_job(req, edesc, false);
1852 	desc = edesc->hw_desc;
/* Debug-only descriptor dump (guard presumably elided). */
1854 	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1855 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1856 		       desc_bytes(edesc->hw_desc), 1);
1859 	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
1863 		skcipher_unmap(jrdev, edesc, req);
/*
 * NOTE(review): lines are elided from this extraction (entry braces and
 * some fields are missing); comments describe only the visible fields.
 *
 * Template table of skcipher algorithms this driver registers with the
 * crypto API. Each entry pairs the generic skcipher_alg definition
 * (names, key/IV sizes, the shared skcipher_encrypt/skcipher_decrypt
 * entry points) with the CAAM class-1 algorithm/AAI selector used when
 * building shared descriptors.
 */
1870 static struct caam_skcipher_alg driver_algs[] = {
/* AES-CBC */
1874 			.cra_name = "cbc(aes)",
1875 			.cra_driver_name = "cbc-aes-caam",
1876 			.cra_blocksize = AES_BLOCK_SIZE,
1878 			.setkey = skcipher_setkey,
1879 			.encrypt = skcipher_encrypt,
1880 			.decrypt = skcipher_decrypt,
1881 			.min_keysize = AES_MIN_KEY_SIZE,
1882 			.max_keysize = AES_MAX_KEY_SIZE,
1883 			.ivsize = AES_BLOCK_SIZE,
1885 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
/* 3DES-CBC — uses the DES-specific setkey for key sanity checks */
1890 			.cra_name = "cbc(des3_ede)",
1891 			.cra_driver_name = "cbc-3des-caam",
1892 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1894 			.setkey = des_skcipher_setkey,
1895 			.encrypt = skcipher_encrypt,
1896 			.decrypt = skcipher_decrypt,
1897 			.min_keysize = DES3_EDE_KEY_SIZE,
1898 			.max_keysize = DES3_EDE_KEY_SIZE,
1899 			.ivsize = DES3_EDE_BLOCK_SIZE,
1901 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
/* DES-CBC */
1906 			.cra_name = "cbc(des)",
1907 			.cra_driver_name = "cbc-des-caam",
1908 			.cra_blocksize = DES_BLOCK_SIZE,
1910 			.setkey = des_skcipher_setkey,
1911 			.encrypt = skcipher_encrypt,
1912 			.decrypt = skcipher_decrypt,
1913 			.min_keysize = DES_KEY_SIZE,
1914 			.max_keysize = DES_KEY_SIZE,
1915 			.ivsize = DES_BLOCK_SIZE,
1917 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
/* AES-CTR (stream mode: chunksize set, blocksize line elided) */
1922 			.cra_name = "ctr(aes)",
1923 			.cra_driver_name = "ctr-aes-caam",
1926 			.setkey = skcipher_setkey,
1927 			.encrypt = skcipher_encrypt,
1928 			.decrypt = skcipher_decrypt,
1929 			.min_keysize = AES_MIN_KEY_SIZE,
1930 			.max_keysize = AES_MAX_KEY_SIZE,
1931 			.ivsize = AES_BLOCK_SIZE,
1932 			.chunksize = AES_BLOCK_SIZE,
1934 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1935 					OP_ALG_AAI_CTR_MOD128,
/* RFC3686 CTR(AES) — key carries an extra 4-byte nonce */
1940 			.cra_name = "rfc3686(ctr(aes))",
1941 			.cra_driver_name = "rfc3686-ctr-aes-caam",
1944 			.setkey = skcipher_setkey,
1945 			.encrypt = skcipher_encrypt,
1946 			.decrypt = skcipher_decrypt,
1947 			.min_keysize = AES_MIN_KEY_SIZE +
1948 				       CTR_RFC3686_NONCE_SIZE,
1949 			.max_keysize = AES_MAX_KEY_SIZE +
1950 				       CTR_RFC3686_NONCE_SIZE,
1951 			.ivsize = CTR_RFC3686_IV_SIZE,
1952 			.chunksize = AES_BLOCK_SIZE,
1955 			.class1_alg_type = OP_ALG_ALGSEL_AES |
1956 					   OP_ALG_AAI_CTR_MOD128,
/* AES-XTS — two AES keys concatenated, hence 2x key sizes */
1963 			.cra_name = "xts(aes)",
1964 			.cra_driver_name = "xts-aes-caam",
1965 			.cra_blocksize = AES_BLOCK_SIZE,
1967 			.setkey = xts_skcipher_setkey,
1968 			.encrypt = skcipher_encrypt,
1969 			.decrypt = skcipher_decrypt,
1970 			.min_keysize = 2 * AES_MIN_KEY_SIZE,
1971 			.max_keysize = 2 * AES_MAX_KEY_SIZE,
1972 			.ivsize = AES_BLOCK_SIZE,
1974 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
/* ECB modes: no IV field (ivsize lines absent for ECB entries) */
1979 			.cra_name = "ecb(des)",
1980 			.cra_driver_name = "ecb-des-caam",
1981 			.cra_blocksize = DES_BLOCK_SIZE,
1983 			.setkey = des_skcipher_setkey,
1984 			.encrypt = skcipher_encrypt,
1985 			.decrypt = skcipher_decrypt,
1986 			.min_keysize = DES_KEY_SIZE,
1987 			.max_keysize = DES_KEY_SIZE,
1989 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1994 			.cra_name = "ecb(aes)",
1995 			.cra_driver_name = "ecb-aes-caam",
1996 			.cra_blocksize = AES_BLOCK_SIZE,
1998 			.setkey = skcipher_setkey,
1999 			.encrypt = skcipher_encrypt,
2000 			.decrypt = skcipher_decrypt,
2001 			.min_keysize = AES_MIN_KEY_SIZE,
2002 			.max_keysize = AES_MAX_KEY_SIZE,
2004 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2009 			.cra_name = "ecb(des3_ede)",
2010 			.cra_driver_name = "ecb-des3-caam",
2011 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2013 			.setkey = des_skcipher_setkey,
2014 			.encrypt = skcipher_encrypt,
2015 			.decrypt = skcipher_decrypt,
2016 			.min_keysize = DES3_EDE_KEY_SIZE,
2017 			.max_keysize = DES3_EDE_KEY_SIZE,
2019 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
/* ARC4 (legacy stream cipher exposed as ECB) */
2024 			.cra_name = "ecb(arc4)",
2025 			.cra_driver_name = "ecb-arc4-caam",
2026 			.cra_blocksize = ARC4_BLOCK_SIZE,
2028 			.setkey = skcipher_setkey,
2029 			.encrypt = skcipher_encrypt,
2030 			.decrypt = skcipher_decrypt,
2031 			.min_keysize = ARC4_MIN_KEY_SIZE,
2032 			.max_keysize = ARC4_MAX_KEY_SIZE,
2034 		.caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
2038 static struct caam_aead_alg driver_aeads[] = {
2042 .cra_name = "rfc4106(gcm(aes))",
2043 .cra_driver_name = "rfc4106-gcm-aes-caam",
2046 .setkey = rfc4106_setkey,
2047 .setauthsize = rfc4106_setauthsize,
2048 .encrypt = ipsec_gcm_encrypt,
2049 .decrypt = ipsec_gcm_decrypt,
2050 .ivsize = GCM_RFC4106_IV_SIZE,
2051 .maxauthsize = AES_BLOCK_SIZE,
2054 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2060 .cra_name = "rfc4543(gcm(aes))",
2061 .cra_driver_name = "rfc4543-gcm-aes-caam",
2064 .setkey = rfc4543_setkey,
2065 .setauthsize = rfc4543_setauthsize,
2066 .encrypt = ipsec_gcm_encrypt,
2067 .decrypt = ipsec_gcm_decrypt,
2068 .ivsize = GCM_RFC4543_IV_SIZE,
2069 .maxauthsize = AES_BLOCK_SIZE,
2072 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2075 /* Galois Counter Mode */
2079 .cra_name = "gcm(aes)",
2080 .cra_driver_name = "gcm-aes-caam",
2083 .setkey = gcm_setkey,
2084 .setauthsize = gcm_setauthsize,
2085 .encrypt = gcm_encrypt,
2086 .decrypt = gcm_decrypt,
2087 .ivsize = GCM_AES_IV_SIZE,
2088 .maxauthsize = AES_BLOCK_SIZE,
2091 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2094 /* single-pass ipsec_esp descriptor */
2098 .cra_name = "authenc(hmac(md5),"
2099 "ecb(cipher_null))",
2100 .cra_driver_name = "authenc-hmac-md5-"
2101 "ecb-cipher_null-caam",
2102 .cra_blocksize = NULL_BLOCK_SIZE,
2104 .setkey = aead_setkey,
2105 .setauthsize = aead_setauthsize,
2106 .encrypt = aead_encrypt,
2107 .decrypt = aead_decrypt,
2108 .ivsize = NULL_IV_SIZE,
2109 .maxauthsize = MD5_DIGEST_SIZE,
2112 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2113 OP_ALG_AAI_HMAC_PRECOMP,
2119 .cra_name = "authenc(hmac(sha1),"
2120 "ecb(cipher_null))",
2121 .cra_driver_name = "authenc-hmac-sha1-"
2122 "ecb-cipher_null-caam",
2123 .cra_blocksize = NULL_BLOCK_SIZE,
2125 .setkey = aead_setkey,
2126 .setauthsize = aead_setauthsize,
2127 .encrypt = aead_encrypt,
2128 .decrypt = aead_decrypt,
2129 .ivsize = NULL_IV_SIZE,
2130 .maxauthsize = SHA1_DIGEST_SIZE,
2133 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2134 OP_ALG_AAI_HMAC_PRECOMP,
2140 .cra_name = "authenc(hmac(sha224),"
2141 "ecb(cipher_null))",
2142 .cra_driver_name = "authenc-hmac-sha224-"
2143 "ecb-cipher_null-caam",
2144 .cra_blocksize = NULL_BLOCK_SIZE,
2146 .setkey = aead_setkey,
2147 .setauthsize = aead_setauthsize,
2148 .encrypt = aead_encrypt,
2149 .decrypt = aead_decrypt,
2150 .ivsize = NULL_IV_SIZE,
2151 .maxauthsize = SHA224_DIGEST_SIZE,
2154 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2155 OP_ALG_AAI_HMAC_PRECOMP,
2161 .cra_name = "authenc(hmac(sha256),"
2162 "ecb(cipher_null))",
2163 .cra_driver_name = "authenc-hmac-sha256-"
2164 "ecb-cipher_null-caam",
2165 .cra_blocksize = NULL_BLOCK_SIZE,
2167 .setkey = aead_setkey,
2168 .setauthsize = aead_setauthsize,
2169 .encrypt = aead_encrypt,
2170 .decrypt = aead_decrypt,
2171 .ivsize = NULL_IV_SIZE,
2172 .maxauthsize = SHA256_DIGEST_SIZE,
2175 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2176 OP_ALG_AAI_HMAC_PRECOMP,
2182 .cra_name = "authenc(hmac(sha384),"
2183 "ecb(cipher_null))",
2184 .cra_driver_name = "authenc-hmac-sha384-"
2185 "ecb-cipher_null-caam",
2186 .cra_blocksize = NULL_BLOCK_SIZE,
2188 .setkey = aead_setkey,
2189 .setauthsize = aead_setauthsize,
2190 .encrypt = aead_encrypt,
2191 .decrypt = aead_decrypt,
2192 .ivsize = NULL_IV_SIZE,
2193 .maxauthsize = SHA384_DIGEST_SIZE,
2196 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2197 OP_ALG_AAI_HMAC_PRECOMP,
2203 .cra_name = "authenc(hmac(sha512),"
2204 "ecb(cipher_null))",
2205 .cra_driver_name = "authenc-hmac-sha512-"
2206 "ecb-cipher_null-caam",
2207 .cra_blocksize = NULL_BLOCK_SIZE,
2209 .setkey = aead_setkey,
2210 .setauthsize = aead_setauthsize,
2211 .encrypt = aead_encrypt,
2212 .decrypt = aead_decrypt,
2213 .ivsize = NULL_IV_SIZE,
2214 .maxauthsize = SHA512_DIGEST_SIZE,
2217 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2218 OP_ALG_AAI_HMAC_PRECOMP,
2224 .cra_name = "authenc(hmac(md5),cbc(aes))",
2225 .cra_driver_name = "authenc-hmac-md5-"
2227 .cra_blocksize = AES_BLOCK_SIZE,
2229 .setkey = aead_setkey,
2230 .setauthsize = aead_setauthsize,
2231 .encrypt = aead_encrypt,
2232 .decrypt = aead_decrypt,
2233 .ivsize = AES_BLOCK_SIZE,
2234 .maxauthsize = MD5_DIGEST_SIZE,
2237 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2238 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2239 OP_ALG_AAI_HMAC_PRECOMP,
2245 .cra_name = "echainiv(authenc(hmac(md5),"
2247 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2249 .cra_blocksize = AES_BLOCK_SIZE,
2251 .setkey = aead_setkey,
2252 .setauthsize = aead_setauthsize,
2253 .encrypt = aead_encrypt,
2254 .decrypt = aead_decrypt,
2255 .ivsize = AES_BLOCK_SIZE,
2256 .maxauthsize = MD5_DIGEST_SIZE,
2259 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2260 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2261 OP_ALG_AAI_HMAC_PRECOMP,
2268 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269 .cra_driver_name = "authenc-hmac-sha1-"
2271 .cra_blocksize = AES_BLOCK_SIZE,
2273 .setkey = aead_setkey,
2274 .setauthsize = aead_setauthsize,
2275 .encrypt = aead_encrypt,
2276 .decrypt = aead_decrypt,
2277 .ivsize = AES_BLOCK_SIZE,
2278 .maxauthsize = SHA1_DIGEST_SIZE,
2281 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2282 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2283 OP_ALG_AAI_HMAC_PRECOMP,
2289 .cra_name = "echainiv(authenc(hmac(sha1),"
2291 .cra_driver_name = "echainiv-authenc-"
2292 "hmac-sha1-cbc-aes-caam",
2293 .cra_blocksize = AES_BLOCK_SIZE,
2295 .setkey = aead_setkey,
2296 .setauthsize = aead_setauthsize,
2297 .encrypt = aead_encrypt,
2298 .decrypt = aead_decrypt,
2299 .ivsize = AES_BLOCK_SIZE,
2300 .maxauthsize = SHA1_DIGEST_SIZE,
2303 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2304 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2305 OP_ALG_AAI_HMAC_PRECOMP,
2312 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2313 .cra_driver_name = "authenc-hmac-sha224-"
2315 .cra_blocksize = AES_BLOCK_SIZE,
2317 .setkey = aead_setkey,
2318 .setauthsize = aead_setauthsize,
2319 .encrypt = aead_encrypt,
2320 .decrypt = aead_decrypt,
2321 .ivsize = AES_BLOCK_SIZE,
2322 .maxauthsize = SHA224_DIGEST_SIZE,
2325 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2326 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2327 OP_ALG_AAI_HMAC_PRECOMP,
2333 .cra_name = "echainiv(authenc(hmac(sha224),"
2335 .cra_driver_name = "echainiv-authenc-"
2336 "hmac-sha224-cbc-aes-caam",
2337 .cra_blocksize = AES_BLOCK_SIZE,
2339 .setkey = aead_setkey,
2340 .setauthsize = aead_setauthsize,
2341 .encrypt = aead_encrypt,
2342 .decrypt = aead_decrypt,
2343 .ivsize = AES_BLOCK_SIZE,
2344 .maxauthsize = SHA224_DIGEST_SIZE,
2347 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2348 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2349 OP_ALG_AAI_HMAC_PRECOMP,
2356 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2357 .cra_driver_name = "authenc-hmac-sha256-"
2359 .cra_blocksize = AES_BLOCK_SIZE,
2361 .setkey = aead_setkey,
2362 .setauthsize = aead_setauthsize,
2363 .encrypt = aead_encrypt,
2364 .decrypt = aead_decrypt,
2365 .ivsize = AES_BLOCK_SIZE,
2366 .maxauthsize = SHA256_DIGEST_SIZE,
2369 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2370 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2371 OP_ALG_AAI_HMAC_PRECOMP,
2377 .cra_name = "echainiv(authenc(hmac(sha256),"
2379 .cra_driver_name = "echainiv-authenc-"
2380 "hmac-sha256-cbc-aes-caam",
2381 .cra_blocksize = AES_BLOCK_SIZE,
2383 .setkey = aead_setkey,
2384 .setauthsize = aead_setauthsize,
2385 .encrypt = aead_encrypt,
2386 .decrypt = aead_decrypt,
2387 .ivsize = AES_BLOCK_SIZE,
2388 .maxauthsize = SHA256_DIGEST_SIZE,
2391 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2392 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2393 OP_ALG_AAI_HMAC_PRECOMP,
2400 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2401 .cra_driver_name = "authenc-hmac-sha384-"
2403 .cra_blocksize = AES_BLOCK_SIZE,
2405 .setkey = aead_setkey,
2406 .setauthsize = aead_setauthsize,
2407 .encrypt = aead_encrypt,
2408 .decrypt = aead_decrypt,
2409 .ivsize = AES_BLOCK_SIZE,
2410 .maxauthsize = SHA384_DIGEST_SIZE,
2413 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2414 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2415 OP_ALG_AAI_HMAC_PRECOMP,
2421 .cra_name = "echainiv(authenc(hmac(sha384),"
2423 .cra_driver_name = "echainiv-authenc-"
2424 "hmac-sha384-cbc-aes-caam",
2425 .cra_blocksize = AES_BLOCK_SIZE,
2427 .setkey = aead_setkey,
2428 .setauthsize = aead_setauthsize,
2429 .encrypt = aead_encrypt,
2430 .decrypt = aead_decrypt,
2431 .ivsize = AES_BLOCK_SIZE,
2432 .maxauthsize = SHA384_DIGEST_SIZE,
2435 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2436 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2437 OP_ALG_AAI_HMAC_PRECOMP,
2444 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2445 .cra_driver_name = "authenc-hmac-sha512-"
2447 .cra_blocksize = AES_BLOCK_SIZE,
2449 .setkey = aead_setkey,
2450 .setauthsize = aead_setauthsize,
2451 .encrypt = aead_encrypt,
2452 .decrypt = aead_decrypt,
2453 .ivsize = AES_BLOCK_SIZE,
2454 .maxauthsize = SHA512_DIGEST_SIZE,
2457 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2458 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2459 OP_ALG_AAI_HMAC_PRECOMP,
2465 .cra_name = "echainiv(authenc(hmac(sha512),"
2467 .cra_driver_name = "echainiv-authenc-"
2468 "hmac-sha512-cbc-aes-caam",
2469 .cra_blocksize = AES_BLOCK_SIZE,
2471 .setkey = aead_setkey,
2472 .setauthsize = aead_setauthsize,
2473 .encrypt = aead_encrypt,
2474 .decrypt = aead_decrypt,
2475 .ivsize = AES_BLOCK_SIZE,
2476 .maxauthsize = SHA512_DIGEST_SIZE,
2479 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2480 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2481 OP_ALG_AAI_HMAC_PRECOMP,
2488 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2489 .cra_driver_name = "authenc-hmac-md5-"
2490 "cbc-des3_ede-caam",
2491 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2493 .setkey = des3_aead_setkey,
2494 .setauthsize = aead_setauthsize,
2495 .encrypt = aead_encrypt,
2496 .decrypt = aead_decrypt,
2497 .ivsize = DES3_EDE_BLOCK_SIZE,
2498 .maxauthsize = MD5_DIGEST_SIZE,
2501 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2502 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2503 OP_ALG_AAI_HMAC_PRECOMP,
2509 .cra_name = "echainiv(authenc(hmac(md5),"
2511 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2512 "cbc-des3_ede-caam",
2513 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2515 .setkey = des3_aead_setkey,
2516 .setauthsize = aead_setauthsize,
2517 .encrypt = aead_encrypt,
2518 .decrypt = aead_decrypt,
2519 .ivsize = DES3_EDE_BLOCK_SIZE,
2520 .maxauthsize = MD5_DIGEST_SIZE,
2523 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2524 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2525 OP_ALG_AAI_HMAC_PRECOMP,
2532 .cra_name = "authenc(hmac(sha1),"
2534 .cra_driver_name = "authenc-hmac-sha1-"
2535 "cbc-des3_ede-caam",
2536 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2538 .setkey = des3_aead_setkey,
2539 .setauthsize = aead_setauthsize,
2540 .encrypt = aead_encrypt,
2541 .decrypt = aead_decrypt,
2542 .ivsize = DES3_EDE_BLOCK_SIZE,
2543 .maxauthsize = SHA1_DIGEST_SIZE,
2546 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2547 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2548 OP_ALG_AAI_HMAC_PRECOMP,
2554 .cra_name = "echainiv(authenc(hmac(sha1),"
2556 .cra_driver_name = "echainiv-authenc-"
2558 "cbc-des3_ede-caam",
2559 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2561 .setkey = des3_aead_setkey,
2562 .setauthsize = aead_setauthsize,
2563 .encrypt = aead_encrypt,
2564 .decrypt = aead_decrypt,
2565 .ivsize = DES3_EDE_BLOCK_SIZE,
2566 .maxauthsize = SHA1_DIGEST_SIZE,
2569 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2570 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2571 OP_ALG_AAI_HMAC_PRECOMP,
2578 .cra_name = "authenc(hmac(sha224),"
2580 .cra_driver_name = "authenc-hmac-sha224-"
2581 "cbc-des3_ede-caam",
2582 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2584 .setkey = des3_aead_setkey,
2585 .setauthsize = aead_setauthsize,
2586 .encrypt = aead_encrypt,
2587 .decrypt = aead_decrypt,
2588 .ivsize = DES3_EDE_BLOCK_SIZE,
2589 .maxauthsize = SHA224_DIGEST_SIZE,
2592 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2593 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2594 OP_ALG_AAI_HMAC_PRECOMP,
2600 .cra_name = "echainiv(authenc(hmac(sha224),"
2602 .cra_driver_name = "echainiv-authenc-"
2604 "cbc-des3_ede-caam",
2605 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2607 .setkey = des3_aead_setkey,
2608 .setauthsize = aead_setauthsize,
2609 .encrypt = aead_encrypt,
2610 .decrypt = aead_decrypt,
2611 .ivsize = DES3_EDE_BLOCK_SIZE,
2612 .maxauthsize = SHA224_DIGEST_SIZE,
2615 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2616 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2617 OP_ALG_AAI_HMAC_PRECOMP,
2624 .cra_name = "authenc(hmac(sha256),"
2626 .cra_driver_name = "authenc-hmac-sha256-"
2627 "cbc-des3_ede-caam",
2628 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2630 .setkey = des3_aead_setkey,
2631 .setauthsize = aead_setauthsize,
2632 .encrypt = aead_encrypt,
2633 .decrypt = aead_decrypt,
2634 .ivsize = DES3_EDE_BLOCK_SIZE,
2635 .maxauthsize = SHA256_DIGEST_SIZE,
2638 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2639 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2640 OP_ALG_AAI_HMAC_PRECOMP,
2646 .cra_name = "echainiv(authenc(hmac(sha256),"
2648 .cra_driver_name = "echainiv-authenc-"
2650 "cbc-des3_ede-caam",
2651 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2653 .setkey = des3_aead_setkey,
2654 .setauthsize = aead_setauthsize,
2655 .encrypt = aead_encrypt,
2656 .decrypt = aead_decrypt,
2657 .ivsize = DES3_EDE_BLOCK_SIZE,
2658 .maxauthsize = SHA256_DIGEST_SIZE,
2661 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2662 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2663 OP_ALG_AAI_HMAC_PRECOMP,
2670 .cra_name = "authenc(hmac(sha384),"
2672 .cra_driver_name = "authenc-hmac-sha384-"
2673 "cbc-des3_ede-caam",
2674 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2676 .setkey = des3_aead_setkey,
2677 .setauthsize = aead_setauthsize,
2678 .encrypt = aead_encrypt,
2679 .decrypt = aead_decrypt,
2680 .ivsize = DES3_EDE_BLOCK_SIZE,
2681 .maxauthsize = SHA384_DIGEST_SIZE,
2684 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2685 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2686 OP_ALG_AAI_HMAC_PRECOMP,
2692 .cra_name = "echainiv(authenc(hmac(sha384),"
2694 .cra_driver_name = "echainiv-authenc-"
2696 "cbc-des3_ede-caam",
2697 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2699 .setkey = des3_aead_setkey,
2700 .setauthsize = aead_setauthsize,
2701 .encrypt = aead_encrypt,
2702 .decrypt = aead_decrypt,
2703 .ivsize = DES3_EDE_BLOCK_SIZE,
2704 .maxauthsize = SHA384_DIGEST_SIZE,
2707 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2708 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2709 OP_ALG_AAI_HMAC_PRECOMP,
2716 .cra_name = "authenc(hmac(sha512),"
2718 .cra_driver_name = "authenc-hmac-sha512-"
2719 "cbc-des3_ede-caam",
2720 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2722 .setkey = des3_aead_setkey,
2723 .setauthsize = aead_setauthsize,
2724 .encrypt = aead_encrypt,
2725 .decrypt = aead_decrypt,
2726 .ivsize = DES3_EDE_BLOCK_SIZE,
2727 .maxauthsize = SHA512_DIGEST_SIZE,
2730 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2731 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2732 OP_ALG_AAI_HMAC_PRECOMP,
2738 .cra_name = "echainiv(authenc(hmac(sha512),"
2740 .cra_driver_name = "echainiv-authenc-"
2742 "cbc-des3_ede-caam",
2743 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2745 .setkey = des3_aead_setkey,
2746 .setauthsize = aead_setauthsize,
2747 .encrypt = aead_encrypt,
2748 .decrypt = aead_decrypt,
2749 .ivsize = DES3_EDE_BLOCK_SIZE,
2750 .maxauthsize = SHA512_DIGEST_SIZE,
2753 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2754 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2755 OP_ALG_AAI_HMAC_PRECOMP,
2762 .cra_name = "authenc(hmac(md5),cbc(des))",
2763 .cra_driver_name = "authenc-hmac-md5-"
2765 .cra_blocksize = DES_BLOCK_SIZE,
2767 .setkey = aead_setkey,
2768 .setauthsize = aead_setauthsize,
2769 .encrypt = aead_encrypt,
2770 .decrypt = aead_decrypt,
2771 .ivsize = DES_BLOCK_SIZE,
2772 .maxauthsize = MD5_DIGEST_SIZE,
2775 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2776 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2777 OP_ALG_AAI_HMAC_PRECOMP,
2783 .cra_name = "echainiv(authenc(hmac(md5),"
2785 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2787 .cra_blocksize = DES_BLOCK_SIZE,
2789 .setkey = aead_setkey,
2790 .setauthsize = aead_setauthsize,
2791 .encrypt = aead_encrypt,
2792 .decrypt = aead_decrypt,
2793 .ivsize = DES_BLOCK_SIZE,
2794 .maxauthsize = MD5_DIGEST_SIZE,
2797 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2798 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2799 OP_ALG_AAI_HMAC_PRECOMP,
2806 .cra_name = "authenc(hmac(sha1),cbc(des))",
2807 .cra_driver_name = "authenc-hmac-sha1-"
2809 .cra_blocksize = DES_BLOCK_SIZE,
2811 .setkey = aead_setkey,
2812 .setauthsize = aead_setauthsize,
2813 .encrypt = aead_encrypt,
2814 .decrypt = aead_decrypt,
2815 .ivsize = DES_BLOCK_SIZE,
2816 .maxauthsize = SHA1_DIGEST_SIZE,
2819 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2820 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2821 OP_ALG_AAI_HMAC_PRECOMP,
2827 .cra_name = "echainiv(authenc(hmac(sha1),"
2829 .cra_driver_name = "echainiv-authenc-"
2830 "hmac-sha1-cbc-des-caam",
2831 .cra_blocksize = DES_BLOCK_SIZE,
2833 .setkey = aead_setkey,
2834 .setauthsize = aead_setauthsize,
2835 .encrypt = aead_encrypt,
2836 .decrypt = aead_decrypt,
2837 .ivsize = DES_BLOCK_SIZE,
2838 .maxauthsize = SHA1_DIGEST_SIZE,
2841 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2842 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2843 OP_ALG_AAI_HMAC_PRECOMP,
2850 .cra_name = "authenc(hmac(sha224),cbc(des))",
2851 .cra_driver_name = "authenc-hmac-sha224-"
2853 .cra_blocksize = DES_BLOCK_SIZE,
2855 .setkey = aead_setkey,
2856 .setauthsize = aead_setauthsize,
2857 .encrypt = aead_encrypt,
2858 .decrypt = aead_decrypt,
2859 .ivsize = DES_BLOCK_SIZE,
2860 .maxauthsize = SHA224_DIGEST_SIZE,
2863 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2864 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2865 OP_ALG_AAI_HMAC_PRECOMP,
2871 .cra_name = "echainiv(authenc(hmac(sha224),"
2873 .cra_driver_name = "echainiv-authenc-"
2874 "hmac-sha224-cbc-des-caam",
2875 .cra_blocksize = DES_BLOCK_SIZE,
2877 .setkey = aead_setkey,
2878 .setauthsize = aead_setauthsize,
2879 .encrypt = aead_encrypt,
2880 .decrypt = aead_decrypt,
2881 .ivsize = DES_BLOCK_SIZE,
2882 .maxauthsize = SHA224_DIGEST_SIZE,
2885 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2886 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2887 OP_ALG_AAI_HMAC_PRECOMP,
2894 .cra_name = "authenc(hmac(sha256),cbc(des))",
2895 .cra_driver_name = "authenc-hmac-sha256-"
2897 .cra_blocksize = DES_BLOCK_SIZE,
2899 .setkey = aead_setkey,
2900 .setauthsize = aead_setauthsize,
2901 .encrypt = aead_encrypt,
2902 .decrypt = aead_decrypt,
2903 .ivsize = DES_BLOCK_SIZE,
2904 .maxauthsize = SHA256_DIGEST_SIZE,
2907 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2908 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2909 OP_ALG_AAI_HMAC_PRECOMP,
2915 .cra_name = "echainiv(authenc(hmac(sha256),"
2917 .cra_driver_name = "echainiv-authenc-"
2918 "hmac-sha256-cbc-des-caam",
2919 .cra_blocksize = DES_BLOCK_SIZE,
2921 .setkey = aead_setkey,
2922 .setauthsize = aead_setauthsize,
2923 .encrypt = aead_encrypt,
2924 .decrypt = aead_decrypt,
2925 .ivsize = DES_BLOCK_SIZE,
2926 .maxauthsize = SHA256_DIGEST_SIZE,
2929 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2930 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2931 OP_ALG_AAI_HMAC_PRECOMP,
2938 .cra_name = "authenc(hmac(sha384),cbc(des))",
2939 .cra_driver_name = "authenc-hmac-sha384-"
2941 .cra_blocksize = DES_BLOCK_SIZE,
2943 .setkey = aead_setkey,
2944 .setauthsize = aead_setauthsize,
2945 .encrypt = aead_encrypt,
2946 .decrypt = aead_decrypt,
2947 .ivsize = DES_BLOCK_SIZE,
2948 .maxauthsize = SHA384_DIGEST_SIZE,
2951 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2952 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2953 OP_ALG_AAI_HMAC_PRECOMP,
2959 .cra_name = "echainiv(authenc(hmac(sha384),"
2961 .cra_driver_name = "echainiv-authenc-"
2962 "hmac-sha384-cbc-des-caam",
2963 .cra_blocksize = DES_BLOCK_SIZE,
2965 .setkey = aead_setkey,
2966 .setauthsize = aead_setauthsize,
2967 .encrypt = aead_encrypt,
2968 .decrypt = aead_decrypt,
2969 .ivsize = DES_BLOCK_SIZE,
2970 .maxauthsize = SHA384_DIGEST_SIZE,
2973 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2974 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2975 OP_ALG_AAI_HMAC_PRECOMP,
2982 .cra_name = "authenc(hmac(sha512),cbc(des))",
2983 .cra_driver_name = "authenc-hmac-sha512-"
2985 .cra_blocksize = DES_BLOCK_SIZE,
2987 .setkey = aead_setkey,
2988 .setauthsize = aead_setauthsize,
2989 .encrypt = aead_encrypt,
2990 .decrypt = aead_decrypt,
2991 .ivsize = DES_BLOCK_SIZE,
2992 .maxauthsize = SHA512_DIGEST_SIZE,
2995 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2996 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2997 OP_ALG_AAI_HMAC_PRECOMP,
3003 .cra_name = "echainiv(authenc(hmac(sha512),"
3005 .cra_driver_name = "echainiv-authenc-"
3006 "hmac-sha512-cbc-des-caam",
3007 .cra_blocksize = DES_BLOCK_SIZE,
3009 .setkey = aead_setkey,
3010 .setauthsize = aead_setauthsize,
3011 .encrypt = aead_encrypt,
3012 .decrypt = aead_decrypt,
3013 .ivsize = DES_BLOCK_SIZE,
3014 .maxauthsize = SHA512_DIGEST_SIZE,
3017 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3018 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3019 OP_ALG_AAI_HMAC_PRECOMP,
3026 .cra_name = "authenc(hmac(md5),"
3027 "rfc3686(ctr(aes)))",
3028 .cra_driver_name = "authenc-hmac-md5-"
3029 "rfc3686-ctr-aes-caam",
3032 .setkey = aead_setkey,
3033 .setauthsize = aead_setauthsize,
3034 .encrypt = aead_encrypt,
3035 .decrypt = aead_decrypt,
3036 .ivsize = CTR_RFC3686_IV_SIZE,
3037 .maxauthsize = MD5_DIGEST_SIZE,
3040 .class1_alg_type = OP_ALG_ALGSEL_AES |
3041 OP_ALG_AAI_CTR_MOD128,
3042 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3043 OP_ALG_AAI_HMAC_PRECOMP,
3050 .cra_name = "seqiv(authenc("
3051 "hmac(md5),rfc3686(ctr(aes))))",
3052 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3053 "rfc3686-ctr-aes-caam",
3056 .setkey = aead_setkey,
3057 .setauthsize = aead_setauthsize,
3058 .encrypt = aead_encrypt,
3059 .decrypt = aead_decrypt,
3060 .ivsize = CTR_RFC3686_IV_SIZE,
3061 .maxauthsize = MD5_DIGEST_SIZE,
3064 .class1_alg_type = OP_ALG_ALGSEL_AES |
3065 OP_ALG_AAI_CTR_MOD128,
3066 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3067 OP_ALG_AAI_HMAC_PRECOMP,
3075 .cra_name = "authenc(hmac(sha1),"
3076 "rfc3686(ctr(aes)))",
3077 .cra_driver_name = "authenc-hmac-sha1-"
3078 "rfc3686-ctr-aes-caam",
3081 .setkey = aead_setkey,
3082 .setauthsize = aead_setauthsize,
3083 .encrypt = aead_encrypt,
3084 .decrypt = aead_decrypt,
3085 .ivsize = CTR_RFC3686_IV_SIZE,
3086 .maxauthsize = SHA1_DIGEST_SIZE,
3089 .class1_alg_type = OP_ALG_ALGSEL_AES |
3090 OP_ALG_AAI_CTR_MOD128,
3091 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3092 OP_ALG_AAI_HMAC_PRECOMP,
3099 .cra_name = "seqiv(authenc("
3100 "hmac(sha1),rfc3686(ctr(aes))))",
3101 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3102 "rfc3686-ctr-aes-caam",
3105 .setkey = aead_setkey,
3106 .setauthsize = aead_setauthsize,
3107 .encrypt = aead_encrypt,
3108 .decrypt = aead_decrypt,
3109 .ivsize = CTR_RFC3686_IV_SIZE,
3110 .maxauthsize = SHA1_DIGEST_SIZE,
3113 .class1_alg_type = OP_ALG_ALGSEL_AES |
3114 OP_ALG_AAI_CTR_MOD128,
3115 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3116 OP_ALG_AAI_HMAC_PRECOMP,
3124 .cra_name = "authenc(hmac(sha224),"
3125 "rfc3686(ctr(aes)))",
3126 .cra_driver_name = "authenc-hmac-sha224-"
3127 "rfc3686-ctr-aes-caam",
3130 .setkey = aead_setkey,
3131 .setauthsize = aead_setauthsize,
3132 .encrypt = aead_encrypt,
3133 .decrypt = aead_decrypt,
3134 .ivsize = CTR_RFC3686_IV_SIZE,
3135 .maxauthsize = SHA224_DIGEST_SIZE,
3138 .class1_alg_type = OP_ALG_ALGSEL_AES |
3139 OP_ALG_AAI_CTR_MOD128,
3140 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3141 OP_ALG_AAI_HMAC_PRECOMP,
3148 .cra_name = "seqiv(authenc("
3149 "hmac(sha224),rfc3686(ctr(aes))))",
3150 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3151 "rfc3686-ctr-aes-caam",
3154 .setkey = aead_setkey,
3155 .setauthsize = aead_setauthsize,
3156 .encrypt = aead_encrypt,
3157 .decrypt = aead_decrypt,
3158 .ivsize = CTR_RFC3686_IV_SIZE,
3159 .maxauthsize = SHA224_DIGEST_SIZE,
3162 .class1_alg_type = OP_ALG_ALGSEL_AES |
3163 OP_ALG_AAI_CTR_MOD128,
3164 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3165 OP_ALG_AAI_HMAC_PRECOMP,
3173 .cra_name = "authenc(hmac(sha256),"
3174 "rfc3686(ctr(aes)))",
3175 .cra_driver_name = "authenc-hmac-sha256-"
3176 "rfc3686-ctr-aes-caam",
3179 .setkey = aead_setkey,
3180 .setauthsize = aead_setauthsize,
3181 .encrypt = aead_encrypt,
3182 .decrypt = aead_decrypt,
3183 .ivsize = CTR_RFC3686_IV_SIZE,
3184 .maxauthsize = SHA256_DIGEST_SIZE,
3187 .class1_alg_type = OP_ALG_ALGSEL_AES |
3188 OP_ALG_AAI_CTR_MOD128,
3189 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3190 OP_ALG_AAI_HMAC_PRECOMP,
3197 .cra_name = "seqiv(authenc(hmac(sha256),"
3198 "rfc3686(ctr(aes))))",
3199 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3200 "rfc3686-ctr-aes-caam",
3203 .setkey = aead_setkey,
3204 .setauthsize = aead_setauthsize,
3205 .encrypt = aead_encrypt,
3206 .decrypt = aead_decrypt,
3207 .ivsize = CTR_RFC3686_IV_SIZE,
3208 .maxauthsize = SHA256_DIGEST_SIZE,
3211 .class1_alg_type = OP_ALG_ALGSEL_AES |
3212 OP_ALG_AAI_CTR_MOD128,
3213 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3214 OP_ALG_AAI_HMAC_PRECOMP,
3222 .cra_name = "authenc(hmac(sha384),"
3223 "rfc3686(ctr(aes)))",
3224 .cra_driver_name = "authenc-hmac-sha384-"
3225 "rfc3686-ctr-aes-caam",
3228 .setkey = aead_setkey,
3229 .setauthsize = aead_setauthsize,
3230 .encrypt = aead_encrypt,
3231 .decrypt = aead_decrypt,
3232 .ivsize = CTR_RFC3686_IV_SIZE,
3233 .maxauthsize = SHA384_DIGEST_SIZE,
3236 .class1_alg_type = OP_ALG_ALGSEL_AES |
3237 OP_ALG_AAI_CTR_MOD128,
3238 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3239 OP_ALG_AAI_HMAC_PRECOMP,
3246 .cra_name = "seqiv(authenc(hmac(sha384),"
3247 "rfc3686(ctr(aes))))",
3248 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3249 "rfc3686-ctr-aes-caam",
3252 .setkey = aead_setkey,
3253 .setauthsize = aead_setauthsize,
3254 .encrypt = aead_encrypt,
3255 .decrypt = aead_decrypt,
3256 .ivsize = CTR_RFC3686_IV_SIZE,
3257 .maxauthsize = SHA384_DIGEST_SIZE,
3260 .class1_alg_type = OP_ALG_ALGSEL_AES |
3261 OP_ALG_AAI_CTR_MOD128,
3262 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3263 OP_ALG_AAI_HMAC_PRECOMP,
3271 .cra_name = "authenc(hmac(sha512),"
3272 "rfc3686(ctr(aes)))",
3273 .cra_driver_name = "authenc-hmac-sha512-"
3274 "rfc3686-ctr-aes-caam",
3277 .setkey = aead_setkey,
3278 .setauthsize = aead_setauthsize,
3279 .encrypt = aead_encrypt,
3280 .decrypt = aead_decrypt,
3281 .ivsize = CTR_RFC3686_IV_SIZE,
3282 .maxauthsize = SHA512_DIGEST_SIZE,
3285 .class1_alg_type = OP_ALG_ALGSEL_AES |
3286 OP_ALG_AAI_CTR_MOD128,
3287 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3288 OP_ALG_AAI_HMAC_PRECOMP,
3295 .cra_name = "seqiv(authenc(hmac(sha512),"
3296 "rfc3686(ctr(aes))))",
3297 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3298 "rfc3686-ctr-aes-caam",
3301 .setkey = aead_setkey,
3302 .setauthsize = aead_setauthsize,
3303 .encrypt = aead_encrypt,
3304 .decrypt = aead_decrypt,
3305 .ivsize = CTR_RFC3686_IV_SIZE,
3306 .maxauthsize = SHA512_DIGEST_SIZE,
3309 .class1_alg_type = OP_ALG_ALGSEL_AES |
3310 OP_ALG_AAI_CTR_MOD128,
3311 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3312 OP_ALG_AAI_HMAC_PRECOMP,
3320 .cra_name = "rfc7539(chacha20,poly1305)",
3321 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3325 .setkey = chachapoly_setkey,
3326 .setauthsize = chachapoly_setauthsize,
3327 .encrypt = chachapoly_encrypt,
3328 .decrypt = chachapoly_decrypt,
3329 .ivsize = CHACHAPOLY_IV_SIZE,
3330 .maxauthsize = POLY1305_DIGEST_SIZE,
3333 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3335 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3342 .cra_name = "rfc7539esp(chacha20,poly1305)",
3343 .cra_driver_name = "rfc7539esp-chacha20-"
3347 .setkey = chachapoly_setkey,
3348 .setauthsize = chachapoly_setauthsize,
3349 .encrypt = chachapoly_encrypt,
3350 .decrypt = chachapoly_decrypt,
3352 .maxauthsize = POLY1305_DIGEST_SIZE,
3355 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3357 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
/*
 * caam_init_common - shared transform-context setup used by both the
 * skcipher and AEAD ->init hooks.
 *
 * Allocates a job ring device for this transform, selects the DMA
 * direction for the context mapping, maps the shared-descriptor/key
 * region in one go, records the per-field DMA addresses, and seeds the
 * class 1/2 algorithm-type templates from the matched alg entry.
 *
 * NOTE(review): several interior lines (trailing parameter list, braces,
 * error-path details) are elided in this view; comments describe only
 * what is visible here.
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	/* Each transform holds its own job ring; fail init if none left. */
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/*
	 * On era >= 6 hardware with uses_dkp set, the device writes derived
	 * key material back into the descriptor area, so the mapping must be
	 * bidirectional; otherwise CPU-to-device only.
	 */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * Map sh_desc_enc..key as one contiguous region; the offsetof()
	 * arithmetic below carves out the individual DMA addresses.
	 * DMA_ATTR_SKIP_CPU_SYNC: syncs are presumably issued explicitly
	 * whenever a descriptor is rewritten — TODO confirm against setkey.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		/* Undo the job ring allocation on mapping failure. */
		caam_jr_free(ctx->jrdev);

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
/*
 * caam_cra_init - crypto API ->init hook for CAAM skcipher transforms.
 *
 * Recovers the wrapping caam_skcipher_alg from the generic skcipher_alg
 * and delegates the context setup to caam_init_common().
 *
 * NOTE(review): the final argument of the caam_init_common() call is
 * elided in this view of the file.
 */
static int caam_cra_init(struct crypto_skcipher *tfm)
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3413 static int caam_aead_init(struct crypto_aead *tfm)
3415 struct aead_alg *alg = crypto_aead_alg(tfm);
3416 struct caam_aead_alg *caam_alg =
3417 container_of(alg, struct caam_aead_alg, aead);
3418 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3420 return caam_init_common(ctx, &caam_alg->caam,
3421 alg->setkey == aead_setkey);
3424 static void caam_exit_common(struct caam_ctx *ctx)
3426 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3427 offsetof(struct caam_ctx, sh_desc_enc_dma),
3428 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3429 caam_jr_free(ctx->jrdev);
/* ->exit hook for skciphers: hand the context to the common teardown. */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}
/* ->exit hook for AEADs: hand the context to the common teardown. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
3442 static void __exit caam_algapi_exit(void)
3446 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3447 struct caam_aead_alg *t_alg = driver_aeads + i;
3449 if (t_alg->registered)
3450 crypto_unregister_aead(&t_alg->aead);
3453 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3454 struct caam_skcipher_alg *t_alg = driver_algs + i;
3456 if (t_alg->registered)
3457 crypto_unregister_skcipher(&t_alg->skcipher);
3461 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3463 struct skcipher_alg *alg = &t_alg->skcipher;
3465 alg->base.cra_module = THIS_MODULE;
3466 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3467 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3468 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3470 alg->init = caam_cra_init;
3471 alg->exit = caam_cra_exit;
3474 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3476 struct aead_alg *alg = &t_alg->aead;
3478 alg->base.cra_module = THIS_MODULE;
3479 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3480 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3481 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3483 alg->init = caam_aead_init;
3484 alg->exit = caam_aead_exit;
/*
 * caam_algapi_init - module init: find the CAAM controller node, probe
 * which accelerator blocks (DES, AES, MD, ChaCha, Poly1305, ARC4) the
 * hardware instantiates, then register only the skcipher and AEAD
 * algorithms the device can actually run.
 *
 * NOTE(review): many interior lines (braces, returns, goto labels,
 * some declarations) are elided in this view; comments describe only
 * what is visible here.
 */
static int __init caam_algapi_init(void)
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct caam_drv_private *priv;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	/* Default digest limit; lowered below for LP256-class MD blocks. */
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/* Accept either compatible string used by SEC 4.x device trees. */
	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");

	pdev = of_find_device_by_node(dev_node);
		of_node_put(dev_node);

	priv = dev_get_drvdata(&pdev->dev);
	of_node_put(dev_node);
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.

	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	/* Pre-era-10 parts expose CHA info via the legacy perfmon layout. */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
			    CHA_ID_LS_ARC4_SHIFT;

		/* Low-power AES before rev 8 lacks GCM support. */
		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
		/* Era >= 10: per-CHA version registers in the vreg block. */
		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)

		 * Check support for AES modes not available
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
			/* Registration failure is non-fatal; warn and go on. */
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);

		t_alg->registered = true;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)

		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);

		t_alg->registered = true;

		pr_info("caam algorithms registered in /proc/crypto\n");

	/* Drop the reference taken by of_find_device_by_node(). */
	put_device(&pdev->dev);
/* Module entry/exit hooks and modinfo metadata. */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");