GitHub Repository: torvalds/linux
Path: blob/master/drivers/crypto/amcc/crypto4xx_alg.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <[email protected]>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha1.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

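/*
 * set_dynamic_sa_command_0/1 fill in the two 32-bit command words of a
 * dynamic Security Association (SA) record; every setkey routine below
 * builds its inbound and outbound SAs through these helpers.
 */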
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

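/*
 * Common skcipher path: optionally reject requests that are not a multiple
 * of the AES block size, convert the IV to little-endian words and hand the
 * request to the packet descriptor builder with the inbound (decrypt) or
 * outbound (encrypt) SA.
 */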
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

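/*
 * Thin wrappers (registered with the crypto API from crypto4xx_core.c) that
 * fix the IV length, direction and block-size check for the block (ECB/CBC)
 * and stream (CTR) modes.
 */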
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/*
 * AES Functions
 */
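/*
 * Build the inbound SA for the requested AES mode, copy the key into the
 * SA key field, then clone it into the outbound SA and flip the direction.
 * Only 128-, 192- and 256-bit keys are accepted.
 */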
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT;
	 * it's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

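/*
 * RFC 3686 (CTR for IPsec/ESP): the last four key bytes carry the per-SA
 * nonce, the rest is the AES key. The nonce is stashed in the context and
 * later becomes the first word of the counter block.
 */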
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

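/*
 * RFC 3686 requests assemble the 16-byte counter block as
 * nonce || 8-byte IV || 0x00000001 and always use the outbound SA,
 * since CTR encryption and decryption are the same operation.
 */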
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fall back to the software cipher if
	 * the counter is going to overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			      : crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}

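/*
 * Keep the software fallback skcipher's request flags in sync with the
 * hardware tfm and give it the same key, so fallback requests behave like
 * the offloaded ones.
 */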
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

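/*
 * Decide whether an AEAD request can be handled by the engine or must be
 * punted to the software fallback; the hardware limits are spelled out in
 * the individual checks below.
 */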
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

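/*
 * Forward the request to the software AEAD transform, preserving the
 * caller's completion callback and context.
 */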
static int crypto4xx_aead_fallback(struct aead_request *req,
				   struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
	       crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}

/*
 * AES-CCM Functions
 */

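/*
 * CCM is implemented as AES-CTR for the payload plus CBC-MAC for the tag:
 * the inbound SA performs hash+decrypt, the outbound SA encrypt+hash.
 */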
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

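/*
 * The SA is copied to a stack buffer so that the digest length (and the
 * ICM crypto mode used for 2-byte counter fields) can be patched per
 * request without touching the shared context.
 */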
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[4];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

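/*
 * The GHASH hash key H is AES_K(0^128). The engine does not derive it by
 * itself, so compute it in software and store it in the SA's inner digest.
 */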
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}

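/*
 * As with CCM, GCM sets up an inbound (hash+decrypt) and an outbound
 * (encrypt+hash) SA around the AES-CTR engine, with GHASH as the hash
 * algorithm and the hash key precomputed above.
 */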
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

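/*
 * GCM requests use the 96-bit IV from the request followed by an initial
 * counter value of 1 (GCM's J0 block for 96-bit IVs).
 */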
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}