// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	bool needs_inv;
};

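/*
 * Build the command token for a plain skcipher request: for CBC mode the IV
 * is copied into the context control data first, then a single DIRECTION
 * instruction covers the whole payload.
 */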
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;
	unsigned int offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			offset = DES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			offset = AES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

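/*
 * Build the command token for an AEAD request: hash the associated data,
 * crypt+hash the payload, then insert the computed digest on encryption, or
 * retrieve and verify it on decryption (where the digest is carved off the
 * end of cryptlen first).
 */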
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned int offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

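/*
 * Set the AES key for a plain skcipher transform. If the engine caches
 * transform records (EIP197_TRC_CACHE) and a context record already exists,
 * the new expanded key is compared against the cached one so a stale cached
 * context can be invalidated before reuse.
 */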
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

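/*
 * Set the combined authenc() key: split it into the encryption key and the
 * HMAC authentication key, precompute the HMAC ipad/opad digests for the
 * selected hash, and flag the context for invalidation if either half
 * changed while a cached context record exists.
 */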
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

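/*
 * Fill in the per-request context control words: operation type (crypto
 * only, or combined encrypt+hash / hash+decrypt for AEAD), cipher algorithm
 * and key size, mode, and the total context record size in 32-bit words.
 */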
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

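/*
 * Walk the result descriptor ring for a completed request, report the first
 * error seen, and unmap the source/destination scatterlists.
 */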
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, cryptlen),
			     DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}

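/*
 * Map the scatterlists and queue one command descriptor per source segment
 * and one result descriptor per destination segment. The first command
 * descriptor carries the context control words and the token built by
 * safexcel_aead_token() or safexcel_skcipher_token(); on any ring-full
 * error, all descriptors written so far are rolled back.
 */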
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst,
				    sg_nents_for_len(dst, totlen),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src,
				     sg_nents_for_len(src, totlen),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = (i == nr_dst - 1);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, totlen),
			     DMA_FROM_DEVICE);
	}

	return ret;
}

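/*
 * Handle the result of a context invalidation request: either free the
 * context record when the transform is being torn down (exit_inv), or
 * re-enqueue the original request on a freshly selected ring now that the
 * stale cached context is gone.
 */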
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

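/*
 * Invalidation requests only make sense when the engine caches transform
 * records, hence the BUG_ON below for engines without EIP197_TRC_CACHE.
 */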
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	return ret;
}

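/*
 * Synchronously invalidate the cached context record: queue a dummy request
 * marked needs_inv/exit_inv and wait for its completion callback.
 */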
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

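/*
 * Common request entry point: record the direction, mode and algorithm,
 * allocate a context record on first use (or schedule an invalidation if a
 * cached record went stale), then enqueue the request on the context's ring
 * and kick the ring worker.
 */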
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir, u32 mode,
			enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

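/*
 * des_ekey() returns 0 for weak keys, so a zero return combined with
 * CRYPTO_TFM_REQ_FORBID_WEAK_KEYS means the key must be rejected.
 */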
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* If the context exists and the key changed, it needs to be invalidated */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_ecb_des_encrypt,
		.decrypt = safexcel_ecb_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (len != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* If the context exists and the key changed, it needs to be invalidated */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_ecb_des3_ede_encrypt,
		.decrypt = safexcel_ecb_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

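/*
 * AEAD transform init: like the skcipher variant, but flags the context as
 * AEAD; the per-hash init helpers below fill in hash_alg and state_sz
 * (SHA-224/384 share the SHA-256/512 state sizes).
 */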
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};