1 // SPDX-License-Identifier: GPL-2.0+
3 * Freescale FSL CAAM support for crypto API over QI backend.
6 * Copyright 2013-2016 Freescale Semiconductor, Inc.
7 * Copyright 2016-2018 NXP
14 #include "desc_constr.h"
20 #include "caamalg_desc.h"
25 #define CAAM_CRA_PRIORITY 2000
26 /* max key is the sum of AES_MAX_KEY_SIZE and the max split key size */
27 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
28 SHA512_DIGEST_SIZE * 2)
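/*
 * Note: for authenc algorithms, ctx->key holds the (padded) split
 * authentication key followed by the encryption key, so the worst case
 * is the largest split key (the HMAC ipad/opad digests, i.e. twice
 * SHA512_DIGEST_SIZE) plus the largest AES key.
 */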
30 #define DESC_MAX_USED_BYTES (DESC_QI_AEAD_GIVENC_LEN + \
32 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
34 struct caam_alg_entry {
41 struct caam_aead_alg {
43 struct caam_alg_entry caam;
47 struct caam_skcipher_alg {
48 struct skcipher_alg skcipher;
49 struct caam_alg_entry caam;
58 u32 sh_desc_enc[DESC_MAX_USED_LEN];
59 u32 sh_desc_dec[DESC_MAX_USED_LEN];
60 u8 key[CAAM_MAX_KEY_SIZE];
62 enum dma_data_direction dir;
65 unsigned int authsize;
67 spinlock_t lock; /* Protects against concurrent init of the driver contexts */
68 struct caam_drv_ctx *drv_ctx[NUM_OP];
71 static int aead_set_sh_desc(struct crypto_aead *aead)
73 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
75 struct caam_ctx *ctx = crypto_aead_ctx(aead);
76 unsigned int ivsize = crypto_aead_ivsize(aead);
79 unsigned int data_len[2];
81 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
82 OP_ALG_AAI_CTR_MOD128);
83 const bool is_rfc3686 = alg->caam.rfc3686;
84 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
86 if (!ctx->cdata.keylen || !ctx->authsize)
90 * AES-CTR needs to load IV in CONTEXT1 reg
91 * at an offset of 128 bits (16 bytes)
92 * CONTEXT1[255:128] = IV
99 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
102 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
103 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
104 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
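/*
 * For RFC3686 the encryption key supplied by the user ends with the
 * 4-byte nonce, so after setkey the buffer is
 * {split auth key (padded), AES key || nonce} and the pointer above
 * locates the nonce as the last 4 bytes of the stored cipher key.
 */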
107 data_len[0] = ctx->adata.keylen_pad;
108 data_len[1] = ctx->cdata.keylen;
113 /* aead_encrypt shared descriptor */
114 if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
115 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
116 DESC_JOB_IO_LEN, data_len, &inl_mask,
117 ARRAY_SIZE(data_len)) < 0)
121 ctx->adata.key_virt = ctx->key;
123 ctx->adata.key_dma = ctx->key_dma;
126 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
128 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
130 ctx->adata.key_inline = !!(inl_mask & 1);
131 ctx->cdata.key_inline = !!(inl_mask & 2);
133 cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
134 ivsize, ctx->authsize, is_rfc3686, nonce,
135 ctx1_iv_off, true, ctrlpriv->era);
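/*
 * desc_inline_query() decides, per key, whether it fits inline in the
 * shared descriptor or must be referenced by DMA address: bit 0 of
 * inl_mask covers the auth key, bit 1 the cipher key. The same query is
 * repeated below for the decrypt and givencrypt descriptors, whose
 * lengths differ.
 */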
138 /* aead_decrypt shared descriptor */
139 if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
140 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
141 DESC_JOB_IO_LEN, data_len, &inl_mask,
142 ARRAY_SIZE(data_len)) < 0)
146 ctx->adata.key_virt = ctx->key;
148 ctx->adata.key_dma = ctx->key_dma;
151 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
153 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
155 ctx->adata.key_inline = !!(inl_mask & 1);
156 ctx->cdata.key_inline = !!(inl_mask & 2);
158 cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
159 ivsize, ctx->authsize, alg->caam.geniv,
160 is_rfc3686, nonce, ctx1_iv_off, true,
163 if (!alg->caam.geniv)
166 /* aead_givencrypt shared descriptor */
167 if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
168 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
169 DESC_JOB_IO_LEN, data_len, &inl_mask,
170 ARRAY_SIZE(data_len)) < 0)
174 ctx->adata.key_virt = ctx->key;
176 ctx->adata.key_dma = ctx->key_dma;
179 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
181 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
183 ctx->adata.key_inline = !!(inl_mask & 1);
184 ctx->cdata.key_inline = !!(inl_mask & 2);
186 cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
187 ivsize, ctx->authsize, is_rfc3686, nonce,
188 ctx1_iv_off, true, ctrlpriv->era);
194 static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
196 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
198 ctx->authsize = authsize;
199 aead_set_sh_desc(authenc);
204 static int aead_setkey(struct crypto_aead *aead, const u8 *key,
207 struct caam_ctx *ctx = crypto_aead_ctx(aead);
208 struct device *jrdev = ctx->jrdev;
209 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
210 struct crypto_authenc_keys keys;
213 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
217 dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
218 keys.authkeylen + keys.enckeylen, keys.enckeylen,
220 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
221 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
225 * If DKP is supported, use it in the shared descriptor to generate the split key.
228 if (ctrlpriv->era >= 6) {
229 ctx->adata.keylen = keys.authkeylen;
230 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
233 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
236 memcpy(ctx->key, keys.authkey, keys.authkeylen);
237 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
239 dma_sync_single_for_device(jrdev, ctx->key_dma,
240 ctx->adata.keylen_pad +
241 keys.enckeylen, ctx->dir);
245 ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
246 keys.authkeylen, CAAM_MAX_KEY_SIZE -
251 /* append the encryption key after the auth split key */
252 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
253 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
254 keys.enckeylen, ctx->dir);
256 print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
257 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
258 ctx->adata.keylen_pad + keys.enckeylen, 1);
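/*
 * At this point ctx->key holds either {raw auth key, enc key} when the
 * Era >= 6 DKP path is taken (the split key is then derived by the
 * shared descriptor itself), or {precomputed split key, enc key} when
 * gen_split_key() ran on older parts.
 */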
262 ctx->cdata.keylen = keys.enckeylen;
264 ret = aead_set_sh_desc(aead);
268 /* Now update the driver contexts with the new shared descriptor */
269 if (ctx->drv_ctx[ENCRYPT]) {
270 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
273 dev_err(jrdev, "driver enc context update failed\n");
278 if (ctx->drv_ctx[DECRYPT]) {
279 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
282 dev_err(jrdev, "driver dec context update failed\n");
287 memzero_explicit(&keys, sizeof(keys));
290 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
291 memzero_explicit(&keys, sizeof(keys));
295 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
298 struct crypto_authenc_keys keys;
302 err = crypto_authenc_extractkeys(&keys, key, keylen);
307 if (keys.enckeylen != DES3_EDE_KEY_SIZE)
310 flags = crypto_aead_get_flags(aead);
311 err = __des3_verify_key(&flags, keys.enckey);
313 crypto_aead_set_flags(aead, flags);
317 err = aead_setkey(aead, key, keylen);
320 memzero_explicit(&keys, sizeof(keys));
324 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
328 static int gcm_set_sh_desc(struct crypto_aead *aead)
330 struct caam_ctx *ctx = crypto_aead_ctx(aead);
331 unsigned int ivsize = crypto_aead_ivsize(aead);
332 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
335 if (!ctx->cdata.keylen || !ctx->authsize)
339 * Job Descriptor and Shared Descriptor
340 * must fit into the 64-word Descriptor h/w Buffer
342 if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
343 ctx->cdata.key_inline = true;
344 ctx->cdata.key_virt = ctx->key;
346 ctx->cdata.key_inline = false;
347 ctx->cdata.key_dma = ctx->key_dma;
350 cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
351 ctx->authsize, true);
354 * Job Descriptor and Shared Descriptor
355 * must fit into the 64-word Descriptor h/w Buffer
357 if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
358 ctx->cdata.key_inline = true;
359 ctx->cdata.key_virt = ctx->key;
361 ctx->cdata.key_inline = false;
362 ctx->cdata.key_dma = ctx->key_dma;
365 cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
366 ctx->authsize, true);
371 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
373 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
375 ctx->authsize = authsize;
376 gcm_set_sh_desc(authenc);
381 static int gcm_setkey(struct crypto_aead *aead,
382 const u8 *key, unsigned int keylen)
384 struct caam_ctx *ctx = crypto_aead_ctx(aead);
385 struct device *jrdev = ctx->jrdev;
389 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
390 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
393 memcpy(ctx->key, key, keylen);
394 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
395 ctx->cdata.keylen = keylen;
397 ret = gcm_set_sh_desc(aead);
401 /* Now update the driver contexts with the new shared descriptor */
402 if (ctx->drv_ctx[ENCRYPT]) {
403 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
406 dev_err(jrdev, "driver enc context update failed\n");
411 if (ctx->drv_ctx[DECRYPT]) {
412 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
415 dev_err(jrdev, "driver dec context update failed\n");
423 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
425 struct caam_ctx *ctx = crypto_aead_ctx(aead);
426 unsigned int ivsize = crypto_aead_ivsize(aead);
427 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
430 if (!ctx->cdata.keylen || !ctx->authsize)
433 ctx->cdata.key_virt = ctx->key;
436 * Job Descriptor and Shared Descriptor
437 * must fit into the 64-word Descriptor h/w Buffer
439 if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
440 ctx->cdata.key_inline = true;
442 ctx->cdata.key_inline = false;
443 ctx->cdata.key_dma = ctx->key_dma;
446 cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
447 ctx->authsize, true);
450 * Job Descriptor and Shared Descriptor
451 * must fit into the 64-word Descriptor h/w Buffer
453 if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
454 ctx->cdata.key_inline = true;
456 ctx->cdata.key_inline = false;
457 ctx->cdata.key_dma = ctx->key_dma;
460 cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
461 ctx->authsize, true);
466 static int rfc4106_setauthsize(struct crypto_aead *authenc,
467 unsigned int authsize)
469 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
471 ctx->authsize = authsize;
472 rfc4106_set_sh_desc(authenc);
477 static int rfc4106_setkey(struct crypto_aead *aead,
478 const u8 *key, unsigned int keylen)
480 struct caam_ctx *ctx = crypto_aead_ctx(aead);
481 struct device *jrdev = ctx->jrdev;
488 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
489 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
492 memcpy(ctx->key, key, keylen);
494 * The last four bytes of the key material are used as the salt value
495 * in the nonce. Update the AES key length.
497 ctx->cdata.keylen = keylen - 4;
498 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
501 ret = rfc4106_set_sh_desc(aead);
505 /* Now update the driver contexts with the new shared descriptor */
506 if (ctx->drv_ctx[ENCRYPT]) {
507 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
510 dev_err(jrdev, "driver enc context update failed\n");
515 if (ctx->drv_ctx[DECRYPT]) {
516 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
519 dev_err(jrdev, "driver dec context update failed\n");
527 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
529 struct caam_ctx *ctx = crypto_aead_ctx(aead);
530 unsigned int ivsize = crypto_aead_ivsize(aead);
531 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
534 if (!ctx->cdata.keylen || !ctx->authsize)
537 ctx->cdata.key_virt = ctx->key;
540 * Job Descriptor and Shared Descriptor
541 * must fit into the 64-word Descriptor h/w Buffer
543 if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
544 ctx->cdata.key_inline = true;
546 ctx->cdata.key_inline = false;
547 ctx->cdata.key_dma = ctx->key_dma;
550 cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
551 ctx->authsize, true);
554 * Job Descriptor and Shared Descriptor
555 * must fit into the 64-word Descriptor h/w Buffer
557 if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
558 ctx->cdata.key_inline = true;
560 ctx->cdata.key_inline = false;
561 ctx->cdata.key_dma = ctx->key_dma;
564 cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
565 ctx->authsize, true);
570 static int rfc4543_setauthsize(struct crypto_aead *authenc,
571 unsigned int authsize)
573 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
575 ctx->authsize = authsize;
576 rfc4543_set_sh_desc(authenc);
581 static int rfc4543_setkey(struct crypto_aead *aead,
582 const u8 *key, unsigned int keylen)
584 struct caam_ctx *ctx = crypto_aead_ctx(aead);
585 struct device *jrdev = ctx->jrdev;
592 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
593 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
596 memcpy(ctx->key, key, keylen);
598 * The last four bytes of the key material are used as the salt value
599 * in the nonce. Update the AES key length.
601 ctx->cdata.keylen = keylen - 4;
602 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
605 ret = rfc4543_set_sh_desc(aead);
609 /* Now update the driver contexts with the new shared descriptor */
610 if (ctx->drv_ctx[ENCRYPT]) {
611 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
614 dev_err(jrdev, "driver enc context update failed\n");
619 if (ctx->drv_ctx[DECRYPT]) {
620 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
623 dev_err(jrdev, "driver dec context update failed\n");
631 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
634 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
635 struct caam_skcipher_alg *alg =
636 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
638 struct device *jrdev = ctx->jrdev;
639 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
641 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
642 OP_ALG_AAI_CTR_MOD128);
643 const bool is_rfc3686 = alg->caam.rfc3686;
647 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
648 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
651 * AES-CTR needs to load IV in CONTEXT1 reg
652 * at an offset of 128 bits (16 bytes)
653 * CONTEXT1[255:128] = IV
660 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
661 * | *key = {KEY, NONCE}
664 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
665 keylen -= CTR_RFC3686_NONCE_SIZE;
668 ctx->cdata.keylen = keylen;
669 ctx->cdata.key_virt = key;
670 ctx->cdata.key_inline = true;
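/*
 * Skcipher keys are always small enough to be inlined in the shared
 * descriptor. For RFC3686, keylen was reduced above so that only the
 * AES key counts as key material, while the 4-byte nonce still follows
 * it in the same buffer (key_virt + keylen) for the descriptor
 * constructor to pick up.
 */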
672 /* skcipher encrypt, decrypt shared descriptors */
673 cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
674 is_rfc3686, ctx1_iv_off);
675 cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
676 is_rfc3686, ctx1_iv_off);
678 /* Now update the driver contexts with the new shared descriptor */
679 if (ctx->drv_ctx[ENCRYPT]) {
680 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
683 dev_err(jrdev, "driver enc context update failed\n");
688 if (ctx->drv_ctx[DECRYPT]) {
689 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
692 dev_err(jrdev, "driver dec context update failed\n");
699 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
703 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
704 const u8 *key, unsigned int keylen)
706 return unlikely(des3_verify_key(skcipher, key)) ?:
707 skcipher_setkey(skcipher, key, keylen);
710 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
713 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
714 struct device *jrdev = ctx->jrdev;
717 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
718 dev_err(jrdev, "key size mismatch\n");
722 ctx->cdata.keylen = keylen;
723 ctx->cdata.key_virt = key;
724 ctx->cdata.key_inline = true;
726 /* xts skcipher encrypt, decrypt shared descriptors */
727 cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
728 cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);
730 /* Now update the driver contexts with the new shared descriptor */
731 if (ctx->drv_ctx[ENCRYPT]) {
732 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
735 dev_err(jrdev, "driver enc context update failed\n");
740 if (ctx->drv_ctx[DECRYPT]) {
741 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
744 dev_err(jrdev, "driver dec context update failed\n");
751 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
756 * aead_edesc - s/w-extended aead descriptor
757 * @src_nents: number of segments in input scatterlist
758 * @dst_nents: number of segments in output scatterlist
759 * @iv_dma: dma address of iv for checking continuity and link table
760 * @qm_sg_bytes: length of dma mapped h/w link table
761 * @qm_sg_dma: bus physical mapped address of h/w link table
762 * @assoclen: associated data length, in CAAM endianness
763 * @assoclen_dma: bus physical mapped address of req->assoclen
764 * @drv_req: driver-specific request structure
765 * @sgt: the h/w link table, followed by IV
772 dma_addr_t qm_sg_dma;
773 unsigned int assoclen;
774 dma_addr_t assoclen_dma;
775 struct caam_drv_req drv_req;
776 struct qm_sg_entry sgt[0];
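/*
 * sgt[] is a flexible array: the h/w link table starts here and, when an
 * IV is needed, the IV bytes are stored immediately after it inside the
 * same qi_cache allocation so that they live in DMA-able memory.
 */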
780 * skcipher_edesc - s/w-extended skcipher descriptor
781 * @src_nents: number of segments in input scatterlist
782 * @dst_nents: number of segments in output scatterlist
783 * @iv_dma: dma address of iv for checking continuity and link table
784 * @qm_sg_bytes: length of dma mapped h/w link table
785 * @qm_sg_dma: bus physical mapped address of h/w link table
786 * @drv_req: driver-specific request structure
787 * @sgt: the h/w link table, followed by IV
789 struct skcipher_edesc {
794 dma_addr_t qm_sg_dma;
795 struct caam_drv_req drv_req;
796 struct qm_sg_entry sgt[0];
799 static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
803 * This function is called on the fast path with values of 'type'
804 * known at compile time. Invalid arguments are not expected and
805 * thus no checks are made.
807 struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
810 if (unlikely(!drv_ctx)) {
811 spin_lock(&ctx->lock);
813 /* Re-read to check whether another core already initialized drv_ctx */
814 drv_ctx = ctx->drv_ctx[type];
819 desc = ctx->sh_desc_enc;
820 else /* (type == DECRYPT) */
821 desc = ctx->sh_desc_dec;
823 cpu = smp_processor_id();
824 drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
825 if (!IS_ERR_OR_NULL(drv_ctx))
826 drv_ctx->op_type = type;
828 ctx->drv_ctx[type] = drv_ctx;
831 spin_unlock(&ctx->lock);
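/*
 * Classic double-checked creation: the unlocked read above avoids taking
 * ctx->lock on the fast path, and the re-read under the lock ensures the
 * per-direction driver context is initialized at most once.
 */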
837 static void caam_unmap(struct device *dev, struct scatterlist *src,
838 struct scatterlist *dst, int src_nents,
839 int dst_nents, dma_addr_t iv_dma, int ivsize,
840 dma_addr_t qm_sg_dma, int qm_sg_bytes)
844 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
846 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
848 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
852 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
854 dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
857 static void aead_unmap(struct device *dev,
858 struct aead_edesc *edesc,
859 struct aead_request *req)
861 struct crypto_aead *aead = crypto_aead_reqtfm(req);
862 int ivsize = crypto_aead_ivsize(aead);
864 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
865 edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
866 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
869 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
870 struct skcipher_request *req)
872 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
873 int ivsize = crypto_skcipher_ivsize(skcipher);
875 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
876 edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
879 static void aead_done(struct caam_drv_req *drv_req, u32 status)
881 struct device *qidev;
882 struct aead_edesc *edesc;
883 struct aead_request *aead_req = drv_req->app_ctx;
884 struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
885 struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
888 qidev = caam_ctx->qidev;
890 if (unlikely(status)) {
891 u32 ssrc = status & JRSTA_SSRC_MASK;
892 u8 err_id = status & JRSTA_CCBERR_ERRID_MASK;
894 caam_jr_strstatus(qidev, status);
896 * Verify that the hardware ICV (auth) check passed; otherwise return -EBADMSG.
898 if (ssrc == JRSTA_SSRC_CCB_ERROR &&
899 err_id == JRSTA_CCBERR_ERRID_ICVCHK)
905 edesc = container_of(drv_req, typeof(*edesc), drv_req);
906 aead_unmap(qidev, edesc, aead_req);
908 aead_request_complete(aead_req, ecode);
909 qi_cache_free(edesc);
913 * allocate and map the aead extended descriptor
915 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
918 struct crypto_aead *aead = crypto_aead_reqtfm(req);
919 struct caam_ctx *ctx = crypto_aead_ctx(aead);
920 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
922 struct device *qidev = ctx->qidev;
923 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
924 GFP_KERNEL : GFP_ATOMIC;
925 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
926 struct aead_edesc *edesc;
927 dma_addr_t qm_sg_dma, iv_dma = 0;
929 unsigned int authsize = ctx->authsize;
930 int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
932 struct qm_sg_entry *sg_table, *fd_sgt;
933 struct caam_drv_ctx *drv_ctx;
935 drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
936 if (IS_ERR_OR_NULL(drv_ctx))
937 return (struct aead_edesc *)drv_ctx;
939 /* allocate space for base edesc, link tables and IV */
940 edesc = qi_cache_alloc(GFP_DMA | flags);
941 if (unlikely(!edesc)) {
942 dev_err(qidev, "could not allocate extended descriptor\n");
943 return ERR_PTR(-ENOMEM);
946 if (likely(req->src == req->dst)) {
947 src_nents = sg_nents_for_len(req->src, req->assoclen +
949 (encrypt ? authsize : 0));
950 if (unlikely(src_nents < 0)) {
951 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
952 req->assoclen + req->cryptlen +
953 (encrypt ? authsize : 0));
954 qi_cache_free(edesc);
955 return ERR_PTR(src_nents);
958 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
960 if (unlikely(!mapped_src_nents)) {
961 dev_err(qidev, "unable to map source\n");
962 qi_cache_free(edesc);
963 return ERR_PTR(-ENOMEM);
966 src_nents = sg_nents_for_len(req->src, req->assoclen +
968 if (unlikely(src_nents < 0)) {
969 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
970 req->assoclen + req->cryptlen);
971 qi_cache_free(edesc);
972 return ERR_PTR(src_nents);
975 dst_nents = sg_nents_for_len(req->dst, req->assoclen +
977 (encrypt ? authsize :
979 if (unlikely(dst_nents < 0)) {
980 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
981 req->assoclen + req->cryptlen +
982 (encrypt ? authsize : (-authsize)));
983 qi_cache_free(edesc);
984 return ERR_PTR(dst_nents);
988 mapped_src_nents = dma_map_sg(qidev, req->src,
989 src_nents, DMA_TO_DEVICE);
990 if (unlikely(!mapped_src_nents)) {
991 dev_err(qidev, "unable to map source\n");
992 qi_cache_free(edesc);
993 return ERR_PTR(-ENOMEM);
996 mapped_src_nents = 0;
1000 mapped_dst_nents = dma_map_sg(qidev, req->dst,
1003 if (unlikely(!mapped_dst_nents)) {
1004 dev_err(qidev, "unable to map destination\n");
1005 dma_unmap_sg(qidev, req->src, src_nents,
1007 qi_cache_free(edesc);
1008 return ERR_PTR(-ENOMEM);
1011 mapped_dst_nents = 0;
1015 if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
1016 ivsize = crypto_aead_ivsize(aead);
1019 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
1020 * Input is not contiguous.
1022 qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
1023 (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
1024 sg_table = &edesc->sgt[0];
1025 qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
1026 if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
1027 CAAM_QI_MEMCACHE_SIZE)) {
1028 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
1029 qm_sg_ents, ivsize);
1030 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1032 qi_cache_free(edesc);
1033 return ERR_PTR(-ENOMEM);
1037 u8 *iv = (u8 *)(sg_table + qm_sg_ents);
1039 /* Make sure IV is located in a DMAable area */
1040 memcpy(iv, req->iv, ivsize);
1042 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
1043 if (dma_mapping_error(qidev, iv_dma)) {
1044 dev_err(qidev, "unable to map IV\n");
1045 caam_unmap(qidev, req->src, req->dst, src_nents,
1046 dst_nents, 0, 0, 0, 0);
1047 qi_cache_free(edesc);
1048 return ERR_PTR(-ENOMEM);
1052 edesc->src_nents = src_nents;
1053 edesc->dst_nents = dst_nents;
1054 edesc->iv_dma = iv_dma;
1055 edesc->drv_req.app_ctx = req;
1056 edesc->drv_req.cbk = aead_done;
1057 edesc->drv_req.drv_ctx = drv_ctx;
1059 edesc->assoclen = cpu_to_caam32(req->assoclen);
1060 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
1062 if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
1063 dev_err(qidev, "unable to map assoclen\n");
1064 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1065 iv_dma, ivsize, 0, 0);
1066 qi_cache_free(edesc);
1067 return ERR_PTR(-ENOMEM);
1070 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
1073 dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
1076 sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
1077 qm_sg_index += mapped_src_nents;
1079 if (mapped_dst_nents > 1)
1080 sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
1083 qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
1084 if (dma_mapping_error(qidev, qm_sg_dma)) {
1085 dev_err(qidev, "unable to map S/G table\n");
1086 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
1087 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1088 iv_dma, ivsize, 0, 0);
1089 qi_cache_free(edesc);
1090 return ERR_PTR(-ENOMEM);
1093 edesc->qm_sg_dma = qm_sg_dma;
1094 edesc->qm_sg_bytes = qm_sg_bytes;
1096 out_len = req->assoclen + req->cryptlen +
1097 (encrypt ? ctx->authsize : (-ctx->authsize));
1098 in_len = 4 + ivsize + req->assoclen + req->cryptlen;
1100 fd_sgt = &edesc->drv_req.fd_sgt[0];
1101 dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);
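/*
 * in_len counts the 4-byte assoclen word, the IV and the AAD + payload;
 * out_len adds the ICV on encryption and subtracts it on decryption.
 * fd_sgt[1] (the compound frame input) points at the full h/w S/G table
 * built above, while fd_sgt[0] (the output) is set up below, either
 * reusing part of that table or addressing the destination directly.
 */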
1103 if (req->dst == req->src) {
1104 if (mapped_src_nents == 1)
1105 dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
1108 dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
1109 (1 + !!ivsize) * sizeof(*sg_table),
1111 } else if (mapped_dst_nents == 1) {
1112 dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
1115 dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
1116 qm_sg_index, out_len, 0);
1122 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1124 struct aead_edesc *edesc;
1125 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1126 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1129 if (unlikely(caam_congested))
1132 /* allocate extended descriptor */
1133 edesc = aead_edesc_alloc(req, encrypt);
1134 if (IS_ERR_OR_NULL(edesc))
1135 return PTR_ERR(edesc);
1137 /* Create and submit frame descriptor */
1138 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1142 aead_unmap(ctx->qidev, edesc, req);
1143 qi_cache_free(edesc);
1149 static int aead_encrypt(struct aead_request *req)
1151 return aead_crypt(req, true);
1154 static int aead_decrypt(struct aead_request *req)
1156 return aead_crypt(req, false);
1159 static int ipsec_gcm_encrypt(struct aead_request *req)
1161 if (req->assoclen < 8)
1164 return aead_crypt(req, true);
1167 static int ipsec_gcm_decrypt(struct aead_request *req)
1169 if (req->assoclen < 8)
1172 return aead_crypt(req, false);
1175 static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
1177 struct skcipher_edesc *edesc;
1178 struct skcipher_request *req = drv_req->app_ctx;
1179 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1180 struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
1181 struct device *qidev = caam_ctx->qidev;
1182 int ivsize = crypto_skcipher_ivsize(skcipher);
1185 dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
1188 edesc = container_of(drv_req, typeof(*edesc), drv_req);
1191 caam_jr_strstatus(qidev, status);
1194 print_hex_dump(KERN_ERR, "dstiv @" __stringify(__LINE__)": ",
1195 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1196 edesc->src_nents > 1 ? 100 : ivsize, 1);
1197 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
1198 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1199 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1202 skcipher_unmap(qidev, edesc, req);
1205 * The crypto API expects us to set the IV (req->iv) to the last
1206 * ciphertext block. This is used e.g. by the CTS mode.
1208 if (edesc->drv_req.drv_ctx->op_type == ENCRYPT)
1209 scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
1212 qi_cache_free(edesc);
1213 skcipher_request_complete(req, status);
1216 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1219 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1220 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1221 struct device *qidev = ctx->qidev;
1222 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1223 GFP_KERNEL : GFP_ATOMIC;
1224 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1225 struct skcipher_edesc *edesc;
1228 int ivsize = crypto_skcipher_ivsize(skcipher);
1229 int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
1230 struct qm_sg_entry *sg_table, *fd_sgt;
1231 struct caam_drv_ctx *drv_ctx;
1233 drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
1234 if (IS_ERR_OR_NULL(drv_ctx))
1235 return (struct skcipher_edesc *)drv_ctx;
1237 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1238 if (unlikely(src_nents < 0)) {
1239 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
1241 return ERR_PTR(src_nents);
1244 if (unlikely(req->src != req->dst)) {
1245 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1246 if (unlikely(dst_nents < 0)) {
1247 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
1249 return ERR_PTR(dst_nents);
1252 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
1254 if (unlikely(!mapped_src_nents)) {
1255 dev_err(qidev, "unable to map source\n");
1256 return ERR_PTR(-ENOMEM);
1259 mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
1261 if (unlikely(!mapped_dst_nents)) {
1262 dev_err(qidev, "unable to map destination\n");
1263 dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
1264 return ERR_PTR(-ENOMEM);
1267 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
1269 if (unlikely(!mapped_src_nents)) {
1270 dev_err(qidev, "unable to map source\n");
1271 return ERR_PTR(-ENOMEM);
1275 qm_sg_ents = 1 + mapped_src_nents;
1276 dst_sg_idx = qm_sg_ents;
1278 qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1279 qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
1280 if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
1281 ivsize > CAAM_QI_MEMCACHE_SIZE)) {
1282 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
1283 qm_sg_ents, ivsize);
1284 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1286 return ERR_PTR(-ENOMEM);
1289 /* allocate space for base edesc, link tables and IV */
1290 edesc = qi_cache_alloc(GFP_DMA | flags);
1291 if (unlikely(!edesc)) {
1292 dev_err(qidev, "could not allocate extended descriptor\n");
1293 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1295 return ERR_PTR(-ENOMEM);
1298 /* Make sure IV is located in a DMAable area */
1299 sg_table = &edesc->sgt[0];
1300 iv = (u8 *)(sg_table + qm_sg_ents);
1301 memcpy(iv, req->iv, ivsize);
1303 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
1304 if (dma_mapping_error(qidev, iv_dma)) {
1305 dev_err(qidev, "unable to map IV\n");
1306 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1308 qi_cache_free(edesc);
1309 return ERR_PTR(-ENOMEM);
1312 edesc->src_nents = src_nents;
1313 edesc->dst_nents = dst_nents;
1314 edesc->iv_dma = iv_dma;
1315 edesc->qm_sg_bytes = qm_sg_bytes;
1316 edesc->drv_req.app_ctx = req;
1317 edesc->drv_req.cbk = skcipher_done;
1318 edesc->drv_req.drv_ctx = drv_ctx;
1320 dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
1321 sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);
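/*
 * The input S/G table holds the IV in entry 0 followed by the source
 * entries, so the hardware sees IV || data as one contiguous input of
 * ivsize + req->cryptlen bytes (matching the fd_sgt[1] length below).
 */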
1323 if (mapped_dst_nents > 1)
1324 sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
1327 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
1329 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
1330 dev_err(qidev, "unable to map S/G table\n");
1331 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1332 iv_dma, ivsize, 0, 0);
1333 qi_cache_free(edesc);
1334 return ERR_PTR(-ENOMEM);
1337 fd_sgt = &edesc->drv_req.fd_sgt[0];
1339 dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
1340 ivsize + req->cryptlen, 0);
1342 if (req->src == req->dst) {
1343 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
1344 sizeof(*sg_table), req->cryptlen, 0);
1345 } else if (mapped_dst_nents > 1) {
1346 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
1347 sizeof(*sg_table), req->cryptlen, 0);
1349 dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
1356 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1358 struct skcipher_edesc *edesc;
1359 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1360 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1361 int ivsize = crypto_skcipher_ivsize(skcipher);
1364 if (unlikely(caam_congested))
1367 /* allocate extended descriptor */
1368 edesc = skcipher_edesc_alloc(req, encrypt);
1370 return PTR_ERR(edesc);
1373 * The crypto API expects us to set the IV (req->iv) to the last ciphertext block.
1377 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1380 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1384 skcipher_unmap(ctx->qidev, edesc, req);
1385 qi_cache_free(edesc);
1391 static int skcipher_encrypt(struct skcipher_request *req)
1393 return skcipher_crypt(req, true);
1396 static int skcipher_decrypt(struct skcipher_request *req)
1398 return skcipher_crypt(req, false);
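/*
 * Usage note (illustrative sketch, not part of this driver): the
 * algorithms registered below are consumed through the generic kernel
 * crypto API rather than called directly, e.g.:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *	err = crypto_skcipher_encrypt(req);  (may return -EINPROGRESS)
 *
 * Error checking, completion handling and freeing are omitted here.
 */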
1401 static struct caam_skcipher_alg driver_algs[] = {
1405 .cra_name = "cbc(aes)",
1406 .cra_driver_name = "cbc-aes-caam-qi",
1407 .cra_blocksize = AES_BLOCK_SIZE,
1409 .setkey = skcipher_setkey,
1410 .encrypt = skcipher_encrypt,
1411 .decrypt = skcipher_decrypt,
1412 .min_keysize = AES_MIN_KEY_SIZE,
1413 .max_keysize = AES_MAX_KEY_SIZE,
1414 .ivsize = AES_BLOCK_SIZE,
1416 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1421 .cra_name = "cbc(des3_ede)",
1422 .cra_driver_name = "cbc-3des-caam-qi",
1423 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1425 .setkey = des3_skcipher_setkey,
1426 .encrypt = skcipher_encrypt,
1427 .decrypt = skcipher_decrypt,
1428 .min_keysize = DES3_EDE_KEY_SIZE,
1429 .max_keysize = DES3_EDE_KEY_SIZE,
1430 .ivsize = DES3_EDE_BLOCK_SIZE,
1432 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1437 .cra_name = "cbc(des)",
1438 .cra_driver_name = "cbc-des-caam-qi",
1439 .cra_blocksize = DES_BLOCK_SIZE,
1441 .setkey = skcipher_setkey,
1442 .encrypt = skcipher_encrypt,
1443 .decrypt = skcipher_decrypt,
1444 .min_keysize = DES_KEY_SIZE,
1445 .max_keysize = DES_KEY_SIZE,
1446 .ivsize = DES_BLOCK_SIZE,
1448 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1453 .cra_name = "ctr(aes)",
1454 .cra_driver_name = "ctr-aes-caam-qi",
1457 .setkey = skcipher_setkey,
1458 .encrypt = skcipher_encrypt,
1459 .decrypt = skcipher_decrypt,
1460 .min_keysize = AES_MIN_KEY_SIZE,
1461 .max_keysize = AES_MAX_KEY_SIZE,
1462 .ivsize = AES_BLOCK_SIZE,
1463 .chunksize = AES_BLOCK_SIZE,
1465 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1466 OP_ALG_AAI_CTR_MOD128,
1471 .cra_name = "rfc3686(ctr(aes))",
1472 .cra_driver_name = "rfc3686-ctr-aes-caam-qi",
1475 .setkey = skcipher_setkey,
1476 .encrypt = skcipher_encrypt,
1477 .decrypt = skcipher_decrypt,
1478 .min_keysize = AES_MIN_KEY_SIZE +
1479 CTR_RFC3686_NONCE_SIZE,
1480 .max_keysize = AES_MAX_KEY_SIZE +
1481 CTR_RFC3686_NONCE_SIZE,
1482 .ivsize = CTR_RFC3686_IV_SIZE,
1483 .chunksize = AES_BLOCK_SIZE,
1486 .class1_alg_type = OP_ALG_ALGSEL_AES |
1487 OP_ALG_AAI_CTR_MOD128,
1494 .cra_name = "xts(aes)",
1495 .cra_driver_name = "xts-aes-caam-qi",
1496 .cra_blocksize = AES_BLOCK_SIZE,
1498 .setkey = xts_skcipher_setkey,
1499 .encrypt = skcipher_encrypt,
1500 .decrypt = skcipher_decrypt,
1501 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1502 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1503 .ivsize = AES_BLOCK_SIZE,
1505 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1509 static struct caam_aead_alg driver_aeads[] = {
1513 .cra_name = "rfc4106(gcm(aes))",
1514 .cra_driver_name = "rfc4106-gcm-aes-caam-qi",
1517 .setkey = rfc4106_setkey,
1518 .setauthsize = rfc4106_setauthsize,
1519 .encrypt = ipsec_gcm_encrypt,
1520 .decrypt = ipsec_gcm_decrypt,
1522 .maxauthsize = AES_BLOCK_SIZE,
1525 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1531 .cra_name = "rfc4543(gcm(aes))",
1532 .cra_driver_name = "rfc4543-gcm-aes-caam-qi",
1535 .setkey = rfc4543_setkey,
1536 .setauthsize = rfc4543_setauthsize,
1537 .encrypt = ipsec_gcm_encrypt,
1538 .decrypt = ipsec_gcm_decrypt,
1540 .maxauthsize = AES_BLOCK_SIZE,
1543 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1546 /* Galois Counter Mode */
1550 .cra_name = "gcm(aes)",
1551 .cra_driver_name = "gcm-aes-caam-qi",
1554 .setkey = gcm_setkey,
1555 .setauthsize = gcm_setauthsize,
1556 .encrypt = aead_encrypt,
1557 .decrypt = aead_decrypt,
1559 .maxauthsize = AES_BLOCK_SIZE,
1562 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1565 /* single-pass ipsec_esp descriptor */
1569 .cra_name = "authenc(hmac(md5),cbc(aes))",
1570 .cra_driver_name = "authenc-hmac-md5-"
1572 .cra_blocksize = AES_BLOCK_SIZE,
1574 .setkey = aead_setkey,
1575 .setauthsize = aead_setauthsize,
1576 .encrypt = aead_encrypt,
1577 .decrypt = aead_decrypt,
1578 .ivsize = AES_BLOCK_SIZE,
1579 .maxauthsize = MD5_DIGEST_SIZE,
1582 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1583 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1584 OP_ALG_AAI_HMAC_PRECOMP,
1590 .cra_name = "echainiv(authenc(hmac(md5),"
1592 .cra_driver_name = "echainiv-authenc-hmac-md5-"
1594 .cra_blocksize = AES_BLOCK_SIZE,
1596 .setkey = aead_setkey,
1597 .setauthsize = aead_setauthsize,
1598 .encrypt = aead_encrypt,
1599 .decrypt = aead_decrypt,
1600 .ivsize = AES_BLOCK_SIZE,
1601 .maxauthsize = MD5_DIGEST_SIZE,
1604 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1605 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1606 OP_ALG_AAI_HMAC_PRECOMP,
1613 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1614 .cra_driver_name = "authenc-hmac-sha1-"
1616 .cra_blocksize = AES_BLOCK_SIZE,
1618 .setkey = aead_setkey,
1619 .setauthsize = aead_setauthsize,
1620 .encrypt = aead_encrypt,
1621 .decrypt = aead_decrypt,
1622 .ivsize = AES_BLOCK_SIZE,
1623 .maxauthsize = SHA1_DIGEST_SIZE,
1626 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1627 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1628 OP_ALG_AAI_HMAC_PRECOMP,
1634 .cra_name = "echainiv(authenc(hmac(sha1),"
1636 .cra_driver_name = "echainiv-authenc-"
1637 "hmac-sha1-cbc-aes-caam-qi",
1638 .cra_blocksize = AES_BLOCK_SIZE,
1640 .setkey = aead_setkey,
1641 .setauthsize = aead_setauthsize,
1642 .encrypt = aead_encrypt,
1643 .decrypt = aead_decrypt,
1644 .ivsize = AES_BLOCK_SIZE,
1645 .maxauthsize = SHA1_DIGEST_SIZE,
1648 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1649 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1650 OP_ALG_AAI_HMAC_PRECOMP,
1657 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1658 .cra_driver_name = "authenc-hmac-sha224-"
1660 .cra_blocksize = AES_BLOCK_SIZE,
1662 .setkey = aead_setkey,
1663 .setauthsize = aead_setauthsize,
1664 .encrypt = aead_encrypt,
1665 .decrypt = aead_decrypt,
1666 .ivsize = AES_BLOCK_SIZE,
1667 .maxauthsize = SHA224_DIGEST_SIZE,
1670 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1671 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1672 OP_ALG_AAI_HMAC_PRECOMP,
1678 .cra_name = "echainiv(authenc(hmac(sha224),"
1680 .cra_driver_name = "echainiv-authenc-"
1681 "hmac-sha224-cbc-aes-caam-qi",
1682 .cra_blocksize = AES_BLOCK_SIZE,
1684 .setkey = aead_setkey,
1685 .setauthsize = aead_setauthsize,
1686 .encrypt = aead_encrypt,
1687 .decrypt = aead_decrypt,
1688 .ivsize = AES_BLOCK_SIZE,
1689 .maxauthsize = SHA224_DIGEST_SIZE,
1692 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1693 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1694 OP_ALG_AAI_HMAC_PRECOMP,
1701 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1702 .cra_driver_name = "authenc-hmac-sha256-"
1704 .cra_blocksize = AES_BLOCK_SIZE,
1706 .setkey = aead_setkey,
1707 .setauthsize = aead_setauthsize,
1708 .encrypt = aead_encrypt,
1709 .decrypt = aead_decrypt,
1710 .ivsize = AES_BLOCK_SIZE,
1711 .maxauthsize = SHA256_DIGEST_SIZE,
1714 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1715 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1716 OP_ALG_AAI_HMAC_PRECOMP,
1722 .cra_name = "echainiv(authenc(hmac(sha256),"
1724 .cra_driver_name = "echainiv-authenc-"
1725 "hmac-sha256-cbc-aes-"
1727 .cra_blocksize = AES_BLOCK_SIZE,
1729 .setkey = aead_setkey,
1730 .setauthsize = aead_setauthsize,
1731 .encrypt = aead_encrypt,
1732 .decrypt = aead_decrypt,
1733 .ivsize = AES_BLOCK_SIZE,
1734 .maxauthsize = SHA256_DIGEST_SIZE,
1737 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1738 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1739 OP_ALG_AAI_HMAC_PRECOMP,
1746 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1747 .cra_driver_name = "authenc-hmac-sha384-"
1749 .cra_blocksize = AES_BLOCK_SIZE,
1751 .setkey = aead_setkey,
1752 .setauthsize = aead_setauthsize,
1753 .encrypt = aead_encrypt,
1754 .decrypt = aead_decrypt,
1755 .ivsize = AES_BLOCK_SIZE,
1756 .maxauthsize = SHA384_DIGEST_SIZE,
1759 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1760 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1761 OP_ALG_AAI_HMAC_PRECOMP,
1767 .cra_name = "echainiv(authenc(hmac(sha384),"
1769 .cra_driver_name = "echainiv-authenc-"
1770 "hmac-sha384-cbc-aes-"
1772 .cra_blocksize = AES_BLOCK_SIZE,
1774 .setkey = aead_setkey,
1775 .setauthsize = aead_setauthsize,
1776 .encrypt = aead_encrypt,
1777 .decrypt = aead_decrypt,
1778 .ivsize = AES_BLOCK_SIZE,
1779 .maxauthsize = SHA384_DIGEST_SIZE,
1782 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1783 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1784 OP_ALG_AAI_HMAC_PRECOMP,
1791 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1792 .cra_driver_name = "authenc-hmac-sha512-"
1794 .cra_blocksize = AES_BLOCK_SIZE,
1796 .setkey = aead_setkey,
1797 .setauthsize = aead_setauthsize,
1798 .encrypt = aead_encrypt,
1799 .decrypt = aead_decrypt,
1800 .ivsize = AES_BLOCK_SIZE,
1801 .maxauthsize = SHA512_DIGEST_SIZE,
1804 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1805 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1806 OP_ALG_AAI_HMAC_PRECOMP,
1812 .cra_name = "echainiv(authenc(hmac(sha512),"
1814 .cra_driver_name = "echainiv-authenc-"
1815 "hmac-sha512-cbc-aes-"
1817 .cra_blocksize = AES_BLOCK_SIZE,
1819 .setkey = aead_setkey,
1820 .setauthsize = aead_setauthsize,
1821 .encrypt = aead_encrypt,
1822 .decrypt = aead_decrypt,
1823 .ivsize = AES_BLOCK_SIZE,
1824 .maxauthsize = SHA512_DIGEST_SIZE,
1827 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1828 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1829 OP_ALG_AAI_HMAC_PRECOMP,
1836 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
1837 .cra_driver_name = "authenc-hmac-md5-"
1838 "cbc-des3_ede-caam-qi",
1839 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1841 .setkey = des3_aead_setkey,
1842 .setauthsize = aead_setauthsize,
1843 .encrypt = aead_encrypt,
1844 .decrypt = aead_decrypt,
1845 .ivsize = DES3_EDE_BLOCK_SIZE,
1846 .maxauthsize = MD5_DIGEST_SIZE,
1849 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1850 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1851 OP_ALG_AAI_HMAC_PRECOMP,
1857 .cra_name = "echainiv(authenc(hmac(md5),"
1859 .cra_driver_name = "echainiv-authenc-hmac-md5-"
1860 "cbc-des3_ede-caam-qi",
1861 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1863 .setkey = des3_aead_setkey,
1864 .setauthsize = aead_setauthsize,
1865 .encrypt = aead_encrypt,
1866 .decrypt = aead_decrypt,
1867 .ivsize = DES3_EDE_BLOCK_SIZE,
1868 .maxauthsize = MD5_DIGEST_SIZE,
1871 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1872 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1873 OP_ALG_AAI_HMAC_PRECOMP,
1880 .cra_name = "authenc(hmac(sha1),"
1882 .cra_driver_name = "authenc-hmac-sha1-"
1883 "cbc-des3_ede-caam-qi",
1884 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1886 .setkey = des3_aead_setkey,
1887 .setauthsize = aead_setauthsize,
1888 .encrypt = aead_encrypt,
1889 .decrypt = aead_decrypt,
1890 .ivsize = DES3_EDE_BLOCK_SIZE,
1891 .maxauthsize = SHA1_DIGEST_SIZE,
1894 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1895 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1896 OP_ALG_AAI_HMAC_PRECOMP,
1902 .cra_name = "echainiv(authenc(hmac(sha1),"
1904 .cra_driver_name = "echainiv-authenc-"
1906 "cbc-des3_ede-caam-qi",
1907 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1909 .setkey = des3_aead_setkey,
1910 .setauthsize = aead_setauthsize,
1911 .encrypt = aead_encrypt,
1912 .decrypt = aead_decrypt,
1913 .ivsize = DES3_EDE_BLOCK_SIZE,
1914 .maxauthsize = SHA1_DIGEST_SIZE,
1917 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1918 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1919 OP_ALG_AAI_HMAC_PRECOMP,
1926 .cra_name = "authenc(hmac(sha224),"
1928 .cra_driver_name = "authenc-hmac-sha224-"
1929 "cbc-des3_ede-caam-qi",
1930 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1932 .setkey = des3_aead_setkey,
1933 .setauthsize = aead_setauthsize,
1934 .encrypt = aead_encrypt,
1935 .decrypt = aead_decrypt,
1936 .ivsize = DES3_EDE_BLOCK_SIZE,
1937 .maxauthsize = SHA224_DIGEST_SIZE,
1940 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1941 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1942 OP_ALG_AAI_HMAC_PRECOMP,
1948 .cra_name = "echainiv(authenc(hmac(sha224),"
1950 .cra_driver_name = "echainiv-authenc-"
1952 "cbc-des3_ede-caam-qi",
1953 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1955 .setkey = des3_aead_setkey,
1956 .setauthsize = aead_setauthsize,
1957 .encrypt = aead_encrypt,
1958 .decrypt = aead_decrypt,
1959 .ivsize = DES3_EDE_BLOCK_SIZE,
1960 .maxauthsize = SHA224_DIGEST_SIZE,
1963 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1964 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1965 OP_ALG_AAI_HMAC_PRECOMP,
1972 .cra_name = "authenc(hmac(sha256),"
1974 .cra_driver_name = "authenc-hmac-sha256-"
1975 "cbc-des3_ede-caam-qi",
1976 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1978 .setkey = des3_aead_setkey,
1979 .setauthsize = aead_setauthsize,
1980 .encrypt = aead_encrypt,
1981 .decrypt = aead_decrypt,
1982 .ivsize = DES3_EDE_BLOCK_SIZE,
1983 .maxauthsize = SHA256_DIGEST_SIZE,
1986 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1987 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1988 OP_ALG_AAI_HMAC_PRECOMP,
1994 .cra_name = "echainiv(authenc(hmac(sha256),"
1996 .cra_driver_name = "echainiv-authenc-"
1998 "cbc-des3_ede-caam-qi",
1999 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2001 .setkey = des3_aead_setkey,
2002 .setauthsize = aead_setauthsize,
2003 .encrypt = aead_encrypt,
2004 .decrypt = aead_decrypt,
2005 .ivsize = DES3_EDE_BLOCK_SIZE,
2006 .maxauthsize = SHA256_DIGEST_SIZE,
2009 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2010 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2011 OP_ALG_AAI_HMAC_PRECOMP,
2018 .cra_name = "authenc(hmac(sha384),"
2020 .cra_driver_name = "authenc-hmac-sha384-"
2021 "cbc-des3_ede-caam-qi",
2022 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2024 .setkey = des3_aead_setkey,
2025 .setauthsize = aead_setauthsize,
2026 .encrypt = aead_encrypt,
2027 .decrypt = aead_decrypt,
2028 .ivsize = DES3_EDE_BLOCK_SIZE,
2029 .maxauthsize = SHA384_DIGEST_SIZE,
2032 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2033 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2034 OP_ALG_AAI_HMAC_PRECOMP,
2040 .cra_name = "echainiv(authenc(hmac(sha384),"
2042 .cra_driver_name = "echainiv-authenc-"
2044 "cbc-des3_ede-caam-qi",
2045 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2047 .setkey = des3_aead_setkey,
2048 .setauthsize = aead_setauthsize,
2049 .encrypt = aead_encrypt,
2050 .decrypt = aead_decrypt,
2051 .ivsize = DES3_EDE_BLOCK_SIZE,
2052 .maxauthsize = SHA384_DIGEST_SIZE,
2055 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2056 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2057 OP_ALG_AAI_HMAC_PRECOMP,
2064 .cra_name = "authenc(hmac(sha512),"
2066 .cra_driver_name = "authenc-hmac-sha512-"
2067 "cbc-des3_ede-caam-qi",
2068 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2070 .setkey = des3_aead_setkey,
2071 .setauthsize = aead_setauthsize,
2072 .encrypt = aead_encrypt,
2073 .decrypt = aead_decrypt,
2074 .ivsize = DES3_EDE_BLOCK_SIZE,
2075 .maxauthsize = SHA512_DIGEST_SIZE,
2078 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2079 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2080 OP_ALG_AAI_HMAC_PRECOMP,
2086 .cra_name = "echainiv(authenc(hmac(sha512),"
2088 .cra_driver_name = "echainiv-authenc-"
2090 "cbc-des3_ede-caam-qi",
2091 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2093 .setkey = des3_aead_setkey,
2094 .setauthsize = aead_setauthsize,
2095 .encrypt = aead_encrypt,
2096 .decrypt = aead_decrypt,
2097 .ivsize = DES3_EDE_BLOCK_SIZE,
2098 .maxauthsize = SHA512_DIGEST_SIZE,
2101 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2102 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2103 OP_ALG_AAI_HMAC_PRECOMP,
2110 .cra_name = "authenc(hmac(md5),cbc(des))",
2111 .cra_driver_name = "authenc-hmac-md5-"
2113 .cra_blocksize = DES_BLOCK_SIZE,
2115 .setkey = aead_setkey,
2116 .setauthsize = aead_setauthsize,
2117 .encrypt = aead_encrypt,
2118 .decrypt = aead_decrypt,
2119 .ivsize = DES_BLOCK_SIZE,
2120 .maxauthsize = MD5_DIGEST_SIZE,
2123 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2124 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2125 OP_ALG_AAI_HMAC_PRECOMP,
2131 .cra_name = "echainiv(authenc(hmac(md5),"
2133 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2135 .cra_blocksize = DES_BLOCK_SIZE,
2137 .setkey = aead_setkey,
2138 .setauthsize = aead_setauthsize,
2139 .encrypt = aead_encrypt,
2140 .decrypt = aead_decrypt,
2141 .ivsize = DES_BLOCK_SIZE,
2142 .maxauthsize = MD5_DIGEST_SIZE,
2145 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2146 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2147 OP_ALG_AAI_HMAC_PRECOMP,
2154 .cra_name = "authenc(hmac(sha1),cbc(des))",
2155 .cra_driver_name = "authenc-hmac-sha1-"
2157 .cra_blocksize = DES_BLOCK_SIZE,
2159 .setkey = aead_setkey,
2160 .setauthsize = aead_setauthsize,
2161 .encrypt = aead_encrypt,
2162 .decrypt = aead_decrypt,
2163 .ivsize = DES_BLOCK_SIZE,
2164 .maxauthsize = SHA1_DIGEST_SIZE,
2167 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2168 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2169 OP_ALG_AAI_HMAC_PRECOMP,
2175 .cra_name = "echainiv(authenc(hmac(sha1),"
2177 .cra_driver_name = "echainiv-authenc-"
2178 "hmac-sha1-cbc-des-caam-qi",
2179 .cra_blocksize = DES_BLOCK_SIZE,
2181 .setkey = aead_setkey,
2182 .setauthsize = aead_setauthsize,
2183 .encrypt = aead_encrypt,
2184 .decrypt = aead_decrypt,
2185 .ivsize = DES_BLOCK_SIZE,
2186 .maxauthsize = SHA1_DIGEST_SIZE,
2189 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2190 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2191 OP_ALG_AAI_HMAC_PRECOMP,
2198 .cra_name = "authenc(hmac(sha224),cbc(des))",
2199 .cra_driver_name = "authenc-hmac-sha224-"
2201 .cra_blocksize = DES_BLOCK_SIZE,
2203 .setkey = aead_setkey,
2204 .setauthsize = aead_setauthsize,
2205 .encrypt = aead_encrypt,
2206 .decrypt = aead_decrypt,
2207 .ivsize = DES_BLOCK_SIZE,
2208 .maxauthsize = SHA224_DIGEST_SIZE,
2211 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2212 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2213 OP_ALG_AAI_HMAC_PRECOMP,
2219 .cra_name = "echainiv(authenc(hmac(sha224),"
2221 .cra_driver_name = "echainiv-authenc-"
2222 "hmac-sha224-cbc-des-"
2224 .cra_blocksize = DES_BLOCK_SIZE,
2226 .setkey = aead_setkey,
2227 .setauthsize = aead_setauthsize,
2228 .encrypt = aead_encrypt,
2229 .decrypt = aead_decrypt,
2230 .ivsize = DES_BLOCK_SIZE,
2231 .maxauthsize = SHA224_DIGEST_SIZE,
2234 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2235 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2236 OP_ALG_AAI_HMAC_PRECOMP,
2243 .cra_name = "authenc(hmac(sha256),cbc(des))",
2244 .cra_driver_name = "authenc-hmac-sha256-"
2246 .cra_blocksize = DES_BLOCK_SIZE,
2248 .setkey = aead_setkey,
2249 .setauthsize = aead_setauthsize,
2250 .encrypt = aead_encrypt,
2251 .decrypt = aead_decrypt,
2252 .ivsize = DES_BLOCK_SIZE,
2253 .maxauthsize = SHA256_DIGEST_SIZE,
2256 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2257 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2258 OP_ALG_AAI_HMAC_PRECOMP,
2264 .cra_name = "echainiv(authenc(hmac(sha256),"
2266 .cra_driver_name = "echainiv-authenc-"
2267 "hmac-sha256-cbc-des-"
2269 .cra_blocksize = DES_BLOCK_SIZE,
2271 .setkey = aead_setkey,
2272 .setauthsize = aead_setauthsize,
2273 .encrypt = aead_encrypt,
2274 .decrypt = aead_decrypt,
2275 .ivsize = DES_BLOCK_SIZE,
2276 .maxauthsize = SHA256_DIGEST_SIZE,
2279 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2280 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2281 OP_ALG_AAI_HMAC_PRECOMP,
2288 .cra_name = "authenc(hmac(sha384),cbc(des))",
2289 .cra_driver_name = "authenc-hmac-sha384-"
2291 .cra_blocksize = DES_BLOCK_SIZE,
2293 .setkey = aead_setkey,
2294 .setauthsize = aead_setauthsize,
2295 .encrypt = aead_encrypt,
2296 .decrypt = aead_decrypt,
2297 .ivsize = DES_BLOCK_SIZE,
2298 .maxauthsize = SHA384_DIGEST_SIZE,
2301 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2302 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2303 OP_ALG_AAI_HMAC_PRECOMP,
2309 .cra_name = "echainiv(authenc(hmac(sha384),"
2311 .cra_driver_name = "echainiv-authenc-"
2312 "hmac-sha384-cbc-des-"
2314 .cra_blocksize = DES_BLOCK_SIZE,
2316 .setkey = aead_setkey,
2317 .setauthsize = aead_setauthsize,
2318 .encrypt = aead_encrypt,
2319 .decrypt = aead_decrypt,
2320 .ivsize = DES_BLOCK_SIZE,
2321 .maxauthsize = SHA384_DIGEST_SIZE,
2324 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2325 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2326 OP_ALG_AAI_HMAC_PRECOMP,
2333 .cra_name = "authenc(hmac(sha512),cbc(des))",
2334 .cra_driver_name = "authenc-hmac-sha512-"
2336 .cra_blocksize = DES_BLOCK_SIZE,
2338 .setkey = aead_setkey,
2339 .setauthsize = aead_setauthsize,
2340 .encrypt = aead_encrypt,
2341 .decrypt = aead_decrypt,
2342 .ivsize = DES_BLOCK_SIZE,
2343 .maxauthsize = SHA512_DIGEST_SIZE,
2346 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2347 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2348 OP_ALG_AAI_HMAC_PRECOMP,
2354 .cra_name = "echainiv(authenc(hmac(sha512),"
2356 .cra_driver_name = "echainiv-authenc-"
2357 "hmac-sha512-cbc-des-"
2359 .cra_blocksize = DES_BLOCK_SIZE,
2361 .setkey = aead_setkey,
2362 .setauthsize = aead_setauthsize,
2363 .encrypt = aead_encrypt,
2364 .decrypt = aead_decrypt,
2365 .ivsize = DES_BLOCK_SIZE,
2366 .maxauthsize = SHA512_DIGEST_SIZE,
2369 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2370 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2371 OP_ALG_AAI_HMAC_PRECOMP,
2377 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
2380 struct caam_drv_private *priv;
2383 * distribute tfms across job rings to ensure in-order
2384 * crypto request processing per tfm
2386 ctx->jrdev = caam_jr_alloc();
2387 if (IS_ERR(ctx->jrdev)) {
2388 pr_err("Job Ring Device allocation for transform failed\n");
2389 return PTR_ERR(ctx->jrdev);
2392 priv = dev_get_drvdata(ctx->jrdev->parent);
2393 if (priv->era >= 6 && uses_dkp)
2394 ctx->dir = DMA_BIDIRECTIONAL;
2396 ctx->dir = DMA_TO_DEVICE;
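/*
 * With DKP (Era >= 6) the derived split key is written back into
 * ctx->key by the device, hence the bidirectional mapping above;
 * otherwise the key material only ever flows to the device.
 */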
2398 ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
2400 if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
2401 dev_err(ctx->jrdev, "unable to map key\n");
2402 caam_jr_free(ctx->jrdev);
2406 /* copy descriptor header template value */
2407 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
2408 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
2410 ctx->qidev = priv->qidev;
2412 spin_lock_init(&ctx->lock);
2413 ctx->drv_ctx[ENCRYPT] = NULL;
2414 ctx->drv_ctx[DECRYPT] = NULL;
2419 static int caam_cra_init(struct crypto_skcipher *tfm)
2421 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
2422 struct caam_skcipher_alg *caam_alg =
2423 container_of(alg, typeof(*caam_alg), skcipher);
2425 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
2429 static int caam_aead_init(struct crypto_aead *tfm)
2431 struct aead_alg *alg = crypto_aead_alg(tfm);
2432 struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
2434 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
2436 return caam_init_common(ctx, &caam_alg->caam,
2437 alg->setkey == aead_setkey);
2440 static void caam_exit_common(struct caam_ctx *ctx)
2442 caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
2443 caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
2445 dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key), ctx->dir);
2447 caam_jr_free(ctx->jrdev);
2450 static void caam_cra_exit(struct crypto_skcipher *tfm)
2452 caam_exit_common(crypto_skcipher_ctx(tfm));
2455 static void caam_aead_exit(struct crypto_aead *tfm)
2457 caam_exit_common(crypto_aead_ctx(tfm));
2460 static void __exit caam_qi_algapi_exit(void)
2464 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2465 struct caam_aead_alg *t_alg = driver_aeads + i;
2467 if (t_alg->registered)
2468 crypto_unregister_aead(&t_alg->aead);
2471 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2472 struct caam_skcipher_alg *t_alg = driver_algs + i;
2474 if (t_alg->registered)
2475 crypto_unregister_skcipher(&t_alg->skcipher);
2479 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
2481 struct skcipher_alg *alg = &t_alg->skcipher;
2483 alg->base.cra_module = THIS_MODULE;
2484 alg->base.cra_priority = CAAM_CRA_PRIORITY;
2485 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2486 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2488 alg->init = caam_cra_init;
2489 alg->exit = caam_cra_exit;
2492 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
2494 struct aead_alg *alg = &t_alg->aead;
2496 alg->base.cra_module = THIS_MODULE;
2497 alg->base.cra_priority = CAAM_CRA_PRIORITY;
2498 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2499 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2501 alg->init = caam_aead_init;
2502 alg->exit = caam_aead_exit;
2505 static int __init caam_qi_algapi_init(void)
2507 struct device_node *dev_node;
2508 struct platform_device *pdev;
2509 struct device *ctrldev;
2510 struct caam_drv_private *priv;
2512 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst;
2513 unsigned int md_limit = SHA512_DIGEST_SIZE;
2514 bool registered = false;
2516 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
2518 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
2523 pdev = of_find_device_by_node(dev_node);
2524 of_node_put(dev_node);
2528 ctrldev = &pdev->dev;
2529 priv = dev_get_drvdata(ctrldev);
2532 * If priv is NULL, it's probably because the caam driver wasn't
2533 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
2535 if (!priv || !priv->qi_present) {
2541 dev_info(ctrldev, "caam/qi frontend driver not suitable for DPAA 2.x, aborting...\n");
2547 * Register crypto algorithms the device supports.
2548 * First, detect presence and attributes of DES, AES, and MD blocks.
2550 if (priv->era < 10) {
2551 u32 cha_vid, cha_inst;
2553 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2554 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
2555 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2557 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2558 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
2559 CHA_ID_LS_DES_SHIFT;
2560 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
2561 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2565 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
2566 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
2568 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2569 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2571 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
2572 aes_inst = aesa & CHA_VER_NUM_MASK;
2573 md_inst = mdha & CHA_VER_NUM_MASK;
2576 /* If MD is present, limit digest size based on LP256 */
2577 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
2578 md_limit = SHA256_DIGEST_SIZE;
2580 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2581 struct caam_skcipher_alg *t_alg = driver_algs + i;
2582 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
2584 /* Skip DES algorithms if not supported by device */
2586 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
2587 (alg_sel == OP_ALG_ALGSEL_DES)))
2590 /* Skip AES algorithms if not supported by device */
2591 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
2594 caam_skcipher_alg_init(t_alg);
2596 err = crypto_register_skcipher(&t_alg->skcipher);
2598 dev_warn(priv->qidev, "%s alg registration failed\n",
2599 t_alg->skcipher.base.cra_driver_name);
2603 t_alg->registered = true;
2607 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2608 struct caam_aead_alg *t_alg = driver_aeads + i;
2609 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
2611 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
2613 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
2615 /* Skip DES algorithms if not supported by device */
2617 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
2618 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
2621 /* Skip AES algorithms if not supported by device */
2622 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
2626 * Check support for AES algorithms not available on LP devices.
2629 if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
2633 * Skip algorithms requiring message digests
2634 * if MD or MD size is not supported by device.
2637 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
2640 caam_aead_alg_init(t_alg);
2642 err = crypto_register_aead(&t_alg->aead);
2644 pr_warn("%s alg registration failed\n",
2645 t_alg->aead.base.cra_driver_name);
2649 t_alg->registered = true;
2654 dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");
2657 put_device(ctrldev);
2661 module_init(caam_qi_algapi_init);
2662 module_exit(caam_qi_algapi_exit);
2664 MODULE_LICENSE("GPL");
2665 MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
2666 MODULE_AUTHOR("Freescale Semiconductor");