Path: drivers/crypto/intel/ixp4xx/ixp4xx_crypto.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Intel IXP4xx NPE-C crypto driver
 *
 * Copyright (C) 2008 Christian Hohnstaedt <[email protected]>
 */

#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/of.h>

#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/sha1.h>
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>

#include <linux/soc/ixp4xx/npe.h>
#include <linux/soc/ixp4xx/qmgr.h>

/* Intermittent includes, delete this after v5.14-rc1 */
#include <linux/soc/ixp4xx/cpu.h>

#define MAX_KEYLEN 32

/* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
#define NPE_CTX_LEN 80
#define AES_BLOCK128 16

#define NPE_OP_HASH_VERIFY   0x01
#define NPE_OP_CCM_ENABLE    0x04
#define NPE_OP_CRYPT_ENABLE  0x08
#define NPE_OP_HASH_ENABLE   0x10
#define NPE_OP_NOT_IN_PLACE  0x20
#define NPE_OP_HMAC_DISABLE  0x40
#define NPE_OP_CRYPT_ENCRYPT 0x80

#define NPE_OP_CCM_GEN_MIC   0xcc
#define NPE_OP_HASH_GEN_ICV  0x50
#define NPE_OP_ENC_GEN_KEY   0xc9

#define MOD_ECB     0x0000
#define MOD_CTR     0x1000
#define MOD_CBC_ENC 0x2000
#define MOD_CBC_DEC 0x3000
#define MOD_CCM_ENC 0x4000
#define MOD_CCM_DEC 0x5000

#define KEYLEN_128  4
#define KEYLEN_192  6
#define KEYLEN_256  8

#define CIPH_DECR   0x0000
#define CIPH_ENCR   0x0400

#define MOD_DES     0x0000
#define MOD_TDEA2   0x0100
#define MOD_3DES    0x0200
#define MOD_AES     0x0800
#define MOD_AES128  (0x0800 | KEYLEN_128)
#define MOD_AES192  (0x0900 | KEYLEN_192)
#define MOD_AES256  (0x0a00 | KEYLEN_256)

#define MAX_IVLEN   16
#define NPE_QLEN    16
/* Space for registering when the first
 * NPE_QLEN crypt_ctl are busy */
#define NPE_QLEN_TOTAL 64

#define CTL_FLAG_UNUSED		0x0000
#define CTL_FLAG_USED		0x1000
#define CTL_FLAG_PERFORM_ABLK	0x0001
#define CTL_FLAG_GEN_ICV	0x0002
#define CTL_FLAG_GEN_REVAES	0x0004
#define CTL_FLAG_PERFORM_AEAD	0x0008
#define CTL_FLAG_MASK		0x000f

#define HMAC_PAD_BLOCKLEN SHA1_BLOCK_SIZE

#define MD5_DIGEST_SIZE		16

struct buffer_desc {
	u32 phys_next;
#ifdef __ARMEB__
	u16 buf_len;
	u16 pkt_len;
#else
	u16 pkt_len;
	u16 buf_len;
#endif
	dma_addr_t phys_addr;
	u32 __reserved[4];
	struct buffer_desc *next;
	enum dma_data_direction dir;
};

struct crypt_ctl {
#ifdef __ARMEB__
	u8 mode;		/* NPE_OP_*  operation mode */
	u8 init_len;
	u16 reserved;
#else
	u16 reserved;
	u8 init_len;
	u8 mode;		/* NPE_OP_*  operation mode */
#endif
	u8 iv[MAX_IVLEN];	/* IV for CBC mode or CTR IV for CTR mode */
	u32 icv_rev_aes;	/* icv or rev aes */
	u32 src_buf;
	u32 dst_buf;
#ifdef __ARMEB__
	u16 auth_offs;		/* Authentication start offset */
	u16 auth_len;		/* Authentication data length */
	u16 crypt_offs;		/* Cryption start offset */
	u16 crypt_len;		/* Cryption data length */
#else
	u16 auth_len;		/* Authentication data length */
	u16 auth_offs;		/* Authentication start offset */
	u16 crypt_len;		/* Cryption data length */
	u16 crypt_offs;		/* Cryption start offset */
#endif
	u32 aadAddr;		/* Additional Auth Data Addr for CCM mode */
	u32 crypto_ctx;		/* NPE Crypto Param structure address */
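
	/*
	 * Note: the queue manager passes descriptors around by physical
	 * address, and crypt_virt2phys()/crypt_phys2virt() below assume a
	 * fixed descriptor stride.  setup_crypt_desc() therefore asserts
	 * with a BUILD_BUG_ON that this struct stays 64 bytes on 32-bit
	 * builds, host-only fields included.
	 */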

	/* Used by Host: 4*4 bytes*/
	unsigned int ctl_flags;
	union {
		struct skcipher_request *ablk_req;
		struct aead_request *aead_req;
		struct crypto_tfm *tfm;
	} data;
	struct buffer_desc *regist_buf;
	u8 *regist_ptr;
};

struct ablk_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	u8 iv[MAX_IVLEN];
	bool encrypt;
	struct skcipher_request fallback_req;   // keep at the end
};

struct aead_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	struct scatterlist ivlist;
	/* used when the hmac is not on one sg entry */
	u8 *hmac_virt;
	int encrypt;
};

struct ix_hash_algo {
	u32 cfgword;
	unsigned char *icv;
};

struct ix_sa_dir {
	unsigned char *npe_ctx;
	dma_addr_t npe_ctx_phys;
	int npe_ctx_idx;
	u8 npe_mode;
};

struct ixp_ctx {
	struct ix_sa_dir encrypt;
	struct ix_sa_dir decrypt;
	int authkey_len;
	u8 authkey[MAX_KEYLEN];
	int enckey_len;
	u8 enckey[MAX_KEYLEN];
	u8 salt[MAX_IVLEN];
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
	unsigned int salted;
	atomic_t configuring;
	struct completion completion;
	struct crypto_skcipher *fallback_tfm;
};

struct ixp_alg {
	struct skcipher_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

struct ixp_aead_alg {
	struct aead_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

static const struct ix_hash_algo hash_alg_md5 = {
	.cfgword	= 0xAA010004,
	.icv		= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
};

static const struct ix_hash_algo hash_alg_sha1 = {
	.cfgword	= 0x00000005,
	.icv		= "\x67\x45\x23\x01\xEF\xCD\xAB\x89\x98\xBA"
			  "\xDC\xFE\x10\x32\x54\x76\xC3\xD2\xE1\xF0",
};

static struct npe *npe_c;

static unsigned int send_qid;
static unsigned int recv_qid;
static struct dma_pool *buffer_pool;
static struct dma_pool *ctx_pool;

static struct crypt_ctl *crypt_virt;
static dma_addr_t crypt_phys;

static int support_aes = 1;

static struct platform_device *pdev;

static inline dma_addr_t crypt_virt2phys(struct crypt_ctl *virt)
{
	return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
}

static inline struct crypt_ctl *crypt_phys2virt(dma_addr_t phys)
{
	return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
}

static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_enc;
}

static inline u32 cipher_cfg_dec(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_dec;
}

static inline const struct ix_hash_algo *ix_hash(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->hash;
}

static int setup_crypt_desc(void)
{
	struct device *dev = &pdev->dev;

	BUILD_BUG_ON(!(IS_ENABLED(CONFIG_COMPILE_TEST) &&
		       IS_ENABLED(CONFIG_64BIT)) &&
		     sizeof(struct crypt_ctl) != 64);
	crypt_virt = dma_alloc_coherent(dev,
					NPE_QLEN * sizeof(struct crypt_ctl),
					&crypt_phys, GFP_ATOMIC);
	if (!crypt_virt)
		return -ENOMEM;
	return 0;
}

static DEFINE_SPINLOCK(desc_lock);
static struct crypt_ctl *get_crypt_desc(void)
{
	int i;
	static int idx;
	unsigned long flags;

	spin_lock_irqsave(&desc_lock, flags);

	if (unlikely(!crypt_virt))
		setup_crypt_desc();
	if (unlikely(!crypt_virt)) {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
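	/*
	 * Only the slot at the current ring index is tried; its
	 * CTL_FLAG_USED marker is cleared again by one_packet() once the
	 * NPE has returned the descriptor.  If the slot is still busy the
	 * caller gets NULL and is expected to back off and retry.
	 */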
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN)
			idx = 0;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&desc_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
}

static DEFINE_SPINLOCK(emerg_lock);
static struct crypt_ctl *get_crypt_desc_emerg(void)
{
	int i;
	static int idx = NPE_QLEN;
	struct crypt_ctl *desc;
	unsigned long flags;

	desc = get_crypt_desc();
	if (desc)
		return desc;
	if (unlikely(!crypt_virt))
		return NULL;

	spin_lock_irqsave(&emerg_lock, flags);
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN_TOTAL)
			idx = NPE_QLEN;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&emerg_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&emerg_lock, flags);
		return NULL;
	}
}

static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
			   dma_addr_t phys)
{
	while (buf) {
		struct buffer_desc *buf1;
		u32 phys1;

		buf1 = buf->next;
		phys1 = buf->phys_next;
		dma_unmap_single(dev, buf->phys_addr, buf->buf_len, buf->dir);
		dma_pool_free(buffer_pool, buf, phys);
		buf = buf1;
		phys = phys1;
	}
}

static struct tasklet_struct crypto_done_tasklet;

static void finish_scattered_hmac(struct crypt_ctl *crypt)
{
	struct aead_request *req = crypt->data.aead_req;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	int decryptlen = req->assoclen + req->cryptlen - authsize;

	if (req_ctx->encrypt) {
		scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst,
					 decryptlen, authsize, 1);
	}
	dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
}

static void one_packet(dma_addr_t phys)
{
	struct device *dev = &pdev->dev;
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx;
	int failed;
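
	/*
	 * The NPE reports per-packet status in the low bits of the
	 * descriptor's physical address: bit 0 set is treated as an
	 * authentication failure (-EBADMSG).  Mask the low bits off before
	 * translating the address back into a descriptor pointer.
	 */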
	failed = phys & 0x1 ? -EBADMSG : 0;
	phys &= ~0x3;
	crypt = crypt_phys2virt(phys);

	switch (crypt->ctl_flags & CTL_FLAG_MASK) {
	case CTL_FLAG_PERFORM_AEAD: {
		struct aead_request *req = crypt->data.aead_req;
		struct aead_ctx *req_ctx = aead_request_ctx(req);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		if (req_ctx->hmac_virt)
			finish_scattered_hmac(crypt);

		aead_request_complete(req, failed);
		break;
	}
	case CTL_FLAG_PERFORM_ABLK: {
		struct skcipher_request *req = crypt->data.ablk_req;
		struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		unsigned int ivsize = crypto_skcipher_ivsize(tfm);
		unsigned int offset;

		if (ivsize > 0) {
			offset = req->cryptlen - ivsize;
			if (req_ctx->encrypt) {
				scatterwalk_map_and_copy(req->iv, req->dst,
							 offset, ivsize, 0);
			} else {
				memcpy(req->iv, req_ctx->iv, ivsize);
				memzero_explicit(req_ctx->iv, ivsize);
			}
		}

		if (req_ctx->dst)
			free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		skcipher_request_complete(req, failed);
		break;
	}
	case CTL_FLAG_GEN_ICV:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		dma_pool_free(ctx_pool, crypt->regist_ptr,
			      crypt->regist_buf->phys_addr);
		dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	case CTL_FLAG_GEN_REVAES:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		*(__be32 *)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	default:
		BUG();
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
}

static void irqhandler(void *_unused)
{
	tasklet_schedule(&crypto_done_tasklet);
}

static void crypto_done_action(unsigned long arg)
{
	int i;

	for (i = 0; i < 4; i++) {
		dma_addr_t phys = qmgr_get_entry(recv_qid);
		if (!phys)
			return;
		one_packet(phys);
	}
	tasklet_schedule(&crypto_done_tasklet);
}

static int init_ixp_crypto(struct device *dev)
{
	struct device_node *np = dev->of_node;
	u32 msg[2] = { 0, 0 };
	int ret = -ENODEV;
	u32 npe_id;

	dev_info(dev, "probing...\n");

	/* Locate the NPE and queue manager to use from device tree */
	if (IS_ENABLED(CONFIG_OF) && np) {
		struct of_phandle_args queue_spec;
		struct of_phandle_args npe_spec;

		ret = of_parse_phandle_with_fixed_args(np, "intel,npe-handle",
						       1, 0, &npe_spec);
		if (ret) {
			dev_err(dev, "no NPE engine specified\n");
			return -ENODEV;
		}
		npe_id = npe_spec.args[0];
		of_node_put(npe_spec.np);

		ret = of_parse_phandle_with_fixed_args(np, "queue-rx", 1, 0,
						       &queue_spec);
		if (ret) {
			dev_err(dev, "no rx queue phandle\n");
			return -ENODEV;
		}
		recv_qid = queue_spec.args[0];
		of_node_put(queue_spec.np);

		ret = of_parse_phandle_with_fixed_args(np, "queue-txready", 1, 0,
						       &queue_spec);
		if (ret) {
			dev_err(dev, "no txready queue phandle\n");
			return -ENODEV;
		}
		send_qid = queue_spec.args[0];
		of_node_put(queue_spec.np);
	} else {
		/*
		 * Hardcoded engine when using platform data, this goes away
		 * when we switch to using DT only.
		 */
		npe_id = 2;
		send_qid = 29;
		recv_qid = 30;
	}

	npe_c = npe_request(npe_id);
	if (!npe_c)
		return ret;
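
	/*
	 * Exchange a STATUS_MSG with the NPE, loading firmware first if it
	 * is not already running.  One byte of the reply,
	 * (msg[1] >> 16) & 0xff, encodes the firmware's crypto
	 * capabilities and is checked below to decide whether AES, or any
	 * crypto at all, is supported.
	 */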
	if (!npe_running(npe_c)) {
		ret = npe_load_firmware(npe_c, npe_name(npe_c), dev);
		if (ret)
			goto npe_release;
		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	} else {
		if (npe_send_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;

		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	}

	switch ((msg[1] >> 16) & 0xff) {
	case 3:
		dev_warn(dev, "Firmware of %s lacks AES support\n", npe_name(npe_c));
		support_aes = 0;
		break;
	case 4:
	case 5:
		support_aes = 1;
		break;
	default:
		dev_err(dev, "Firmware of %s lacks crypto support\n", npe_name(npe_c));
		ret = -ENODEV;
		goto npe_release;
	}
	/* buffer_pool will also be used to sometimes store the hmac,
	 * so assure it is large enough
	 */
	BUILD_BUG_ON(SHA1_DIGEST_SIZE > sizeof(struct buffer_desc));
	buffer_pool = dma_pool_create("buffer", dev, sizeof(struct buffer_desc),
				      32, 0);
	ret = -ENOMEM;
	if (!buffer_pool)
		goto err;

	ctx_pool = dma_pool_create("context", dev, NPE_CTX_LEN, 16, 0);
	if (!ctx_pool)
		goto err;

	ret = qmgr_request_queue(send_qid, NPE_QLEN_TOTAL, 0, 0,
				 "ixp_crypto:out", NULL);
	if (ret)
		goto err;
	ret = qmgr_request_queue(recv_qid, NPE_QLEN, 0, 0,
				 "ixp_crypto:in", NULL);
	if (ret) {
		qmgr_release_queue(send_qid);
		goto err;
	}
	qmgr_set_irq(recv_qid, QUEUE_IRQ_SRC_NOT_EMPTY, irqhandler, NULL);
	tasklet_init(&crypto_done_tasklet, crypto_done_action, 0);

	qmgr_enable_irq(recv_qid);
	return 0;

npe_error:
	dev_err(dev, "%s not responding\n", npe_name(npe_c));
	ret = -EIO;
err:
	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);
npe_release:
	npe_release(npe_c);
	return ret;
}

static void release_ixp_crypto(struct device *dev)
{
	qmgr_disable_irq(recv_qid);
	tasklet_kill(&crypto_done_tasklet);

	qmgr_release_queue(send_qid);
	qmgr_release_queue(recv_qid);

	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);

	npe_release(npe_c);

	if (crypt_virt)
		dma_free_coherent(dev, NPE_QLEN * sizeof(struct crypt_ctl),
				  crypt_virt, crypt_phys);
}

static void reset_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dir->npe_ctx_idx = 0;
	dir->npe_mode = 0;
}

static int init_sa_dir(struct ix_sa_dir *dir)
{
	dir->npe_ctx = dma_pool_alloc(ctx_pool, GFP_KERNEL, &dir->npe_ctx_phys);
	if (!dir->npe_ctx)
		return -ENOMEM;

	reset_sa_dir(dir);
	return 0;
}

static void free_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dma_pool_free(ctx_pool, dir->npe_ctx, dir->npe_ctx_phys);
}

static int init_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	atomic_set(&ctx->configuring, 0);
	ret = init_sa_dir(&ctx->encrypt);
	if (ret)
		return ret;
	ret = init_sa_dir(&ctx->decrypt);
	if (ret)
		free_sa_dir(&ctx->encrypt);

	return ret;
}
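
/*
 * Each skcipher tfm also allocates a software fallback of the same
 * algorithm (hence CRYPTO_ALG_NEED_FALLBACK in the algorithm flags);
 * ablk_perform() hands requests the NPE path cannot take to it.
 */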
static int init_tfm_ablk(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *ctfm = crypto_skcipher_tfm(tfm);
	struct ixp_ctx *ctx = crypto_tfm_ctx(ctfm);
	const char *name = crypto_tfm_alg_name(ctfm);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		pr_err("ERROR: Cannot allocate fallback for %s %ld\n",
		       name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	pr_info("Fallback for %s is %s\n",
		crypto_tfm_alg_driver_name(&tfm->base),
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(ctx->fallback_tfm))
		);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct ablk_ctx) + crypto_skcipher_reqsize(ctx->fallback_tfm));
	return init_tfm(crypto_skcipher_tfm(tfm));
}

static int init_tfm_aead(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct aead_ctx));
	return init_tfm(crypto_aead_tfm(tfm));
}

static void exit_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);

	free_sa_dir(&ctx->encrypt);
	free_sa_dir(&ctx->decrypt);
}

static void exit_tfm_ablk(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *ctfm = crypto_skcipher_tfm(tfm);
	struct ixp_ctx *ctx = crypto_tfm_ctx(ctfm);

	crypto_free_skcipher(ctx->fallback_tfm);
	exit_tfm(crypto_skcipher_tfm(tfm));
}

static void exit_tfm_aead(struct crypto_aead *tfm)
{
	exit_tfm(crypto_aead_tfm(tfm));
}

static int register_chain_var(struct crypto_tfm *tfm, u8 xpad, u32 target,
			      int init_len, u32 ctx_addr, const u8 *key,
			      int key_len)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypt_ctl *crypt;
	struct buffer_desc *buf;
	int i;
	u8 *pad;
	dma_addr_t pad_phys, buf_phys;

	BUILD_BUG_ON(NPE_CTX_LEN < HMAC_PAD_BLOCKLEN);
	pad = dma_pool_alloc(ctx_pool, GFP_KERNEL, &pad_phys);
	if (!pad)
		return -ENOMEM;
	buf = dma_pool_alloc(buffer_pool, GFP_KERNEL, &buf_phys);
	if (!buf) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		return -ENOMEM;
	}
	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		dma_pool_free(buffer_pool, buf, buf_phys);
		return -EAGAIN;
	}

	memcpy(pad, key, key_len);
	memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
	for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
		pad[i] ^= xpad;

	crypt->data.tfm = tfm;
	crypt->regist_ptr = pad;
	crypt->regist_buf = buf;

	crypt->auth_offs = 0;
	crypt->auth_len = HMAC_PAD_BLOCKLEN;
	crypt->crypto_ctx = ctx_addr;
	crypt->src_buf = buf_phys;
	crypt->icv_rev_aes = target;
	crypt->mode = NPE_OP_HASH_GEN_ICV;
	crypt->init_len = init_len;
	crypt->ctl_flags |= CTL_FLAG_GEN_ICV;

	buf->next = NULL;
	buf->buf_len = HMAC_PAD_BLOCKLEN;
	buf->pkt_len = 0;
	buf->phys_addr = pad_phys;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(send_qid));
	return 0;
}
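
/*
 * HMAC setup: rather than computing the inner/outer pad digests on the
 * CPU, the driver XORs the key with the ipad/opad value and asks the
 * NPE itself to hash each padded block (register_chain_var() above),
 * depositing the intermediate digests at itarget/otarget inside the SA
 * context.
 */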
static int setup_auth(struct crypto_tfm *tfm, int encrypt, unsigned int authsize,
		      const u8 *key, int key_len, unsigned int digest_len)
{
	u32 itarget, otarget, npe_ctx_addr;
	unsigned char *cinfo;
	int init_len, ret = 0;
	u32 cfgword;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	const struct ix_hash_algo *algo;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx + dir->npe_ctx_idx;
	algo = ix_hash(tfm);

	/* write cfg word to cryptinfo */
	cfgword = algo->cfgword | (authsize << 6); /* (authsize/4) << 8 */
#ifndef __ARMEB__
	cfgword ^= 0xAA000000; /* change the "byte swap" flags */
#endif
	*(__be32 *)cinfo = cpu_to_be32(cfgword);
	cinfo += sizeof(cfgword);

	/* write ICV to cryptinfo */
	memcpy(cinfo, algo->icv, digest_len);
	cinfo += digest_len;

	itarget = dir->npe_ctx_phys + dir->npe_ctx_idx
		  + sizeof(algo->cfgword);
	otarget = itarget + digest_len;
	init_len = cinfo - (dir->npe_ctx + dir->npe_ctx_idx);
	npe_ctx_addr = dir->npe_ctx_phys + dir->npe_ctx_idx;

	dir->npe_ctx_idx += init_len;
	dir->npe_mode |= NPE_OP_HASH_ENABLE;

	if (!encrypt)
		dir->npe_mode |= NPE_OP_HASH_VERIFY;

	ret = register_chain_var(tfm, HMAC_OPAD_VALUE, otarget,
				 init_len, npe_ctx_addr, key, key_len);
	if (ret)
		return ret;
	return register_chain_var(tfm, HMAC_IPAD_VALUE, itarget,
				  init_len, npe_ctx_addr, key, key_len);
}

static int gen_rev_aes_key(struct crypto_tfm *tfm)
{
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct ix_sa_dir *dir = &ctx->decrypt;

	crypt = get_crypt_desc_emerg();
	if (!crypt)
		return -EAGAIN;

	*(__be32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);

	crypt->data.tfm = tfm;
	crypt->crypt_offs = 0;
	crypt->crypt_len = AES_BLOCK128;
	crypt->src_buf = 0;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
	crypt->mode = NPE_OP_ENC_GEN_KEY;
	crypt->init_len = dir->npe_ctx_idx;
	crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(send_qid));
	return 0;
}

static int setup_cipher(struct crypto_tfm *tfm, int encrypt, const u8 *key,
			int key_len)
{
	u8 *cinfo;
	u32 cipher_cfg;
	u32 keylen_cfg = 0;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

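	/*
	 * The NPE cipher config word is composed of the direction
	 * (CIPH_ENCR/CIPH_DECR), the algorithm (MOD_DES/MOD_3DES/MOD_AES),
	 * the chaining mode (MOD_ECB/MOD_CBC_*/MOD_CTR) and, for AES, a
	 * key-length field (KEYLEN_128/192/256).
	 */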
	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx;

	if (encrypt) {
		cipher_cfg = cipher_cfg_enc(tfm);
		dir->npe_mode |= NPE_OP_CRYPT_ENCRYPT;
	} else {
		cipher_cfg = cipher_cfg_dec(tfm);
	}
	if (cipher_cfg & MOD_AES) {
		switch (key_len) {
		case 16:
			keylen_cfg = MOD_AES128;
			break;
		case 24:
			keylen_cfg = MOD_AES192;
			break;
		case 32:
			keylen_cfg = MOD_AES256;
			break;
		default:
			return -EINVAL;
		}
		cipher_cfg |= keylen_cfg;
	} else {
		err = crypto_des_verify_key(tfm, key);
		if (err)
			return err;
	}
	/* write cfg word to cryptinfo */
	*(__be32 *)cinfo = cpu_to_be32(cipher_cfg);
	cinfo += sizeof(cipher_cfg);

	/* write cipher key to cryptinfo */
	memcpy(cinfo, key, key_len);
	/* NPE wants keylen set to DES3_EDE_KEY_SIZE even for single DES */
	if (key_len < DES3_EDE_KEY_SIZE && !(cipher_cfg & MOD_AES)) {
		memset(cinfo + key_len, 0, DES3_EDE_KEY_SIZE - key_len);
		key_len = DES3_EDE_KEY_SIZE;
	}
	dir->npe_ctx_idx = sizeof(cipher_cfg) + key_len;
	dir->npe_mode |= NPE_OP_CRYPT_ENABLE;
	if ((cipher_cfg & MOD_AES) && !encrypt)
		return gen_rev_aes_key(tfm);

	return 0;
}

static struct buffer_desc *chainup_buffers(struct device *dev,
					   struct scatterlist *sg, unsigned int nbytes,
					   struct buffer_desc *buf, gfp_t flags,
					   enum dma_data_direction dir)
{
	for (; nbytes > 0; sg = sg_next(sg)) {
		unsigned int len = min(nbytes, sg->length);
		struct buffer_desc *next_buf;
		dma_addr_t next_buf_phys;
		void *ptr;

		nbytes -= len;
		ptr = sg_virt(sg);
		next_buf = dma_pool_alloc(buffer_pool, flags, &next_buf_phys);
		if (!next_buf) {
			buf = NULL;
			break;
		}
		sg_dma_address(sg) = dma_map_single(dev, ptr, len, dir);
		buf->next = next_buf;
		buf->phys_next = next_buf_phys;
		buf = next_buf;

		buf->phys_addr = sg_dma_address(sg);
		buf->buf_len = len;
		buf->dir = dir;
	}
	buf->next = NULL;
	buf->phys_next = 0;
	return buf;
}

static int ablk_setkey(struct crypto_skcipher *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE;
	ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE;

	ret = setup_cipher(&tfm->base, 0, key, key_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, key, key_len);
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	if (ret)
		return ret;
	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, key_len);
}

static int ablk_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int key_len)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       ablk_setkey(tfm, key, key_len);
}

static int ablk_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* the nonce is stored in bytes at end of key */
	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;
	return ablk_setkey(tfm, key, key_len);
}
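
/*
 * Software fallback path.  ablk_perform() routes any request whose
 * source or destination scatterlist has more than one entry here, so
 * only single-buffer requests ever reach the NPE.
 */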
static int ixp4xx_cipher_fallback(struct skcipher_request *areq, int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct ixp_ctx *op = crypto_skcipher_ctx(tfm);
	struct ablk_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (encrypt)
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	return err;
}

static int ablk_perform(struct skcipher_request *req, int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int nbytes = req->cryptlen;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
	struct buffer_desc src_hook;
	struct device *dev = &pdev->dev;
	unsigned int offset;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
		      GFP_KERNEL : GFP_ATOMIC;

	if (sg_nents(req->src) > 1 || sg_nents(req->dst) > 1)
		return ixp4xx_cipher_fallback(req, encrypt);

	if (qmgr_stat_full(send_qid))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	req_ctx->encrypt = encrypt;

	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.ablk_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = 0;
	crypt->crypt_len = nbytes;

	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);
	if (ivsize > 0 && !encrypt) {
		offset = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->iv, req->src, offset, ivsize, 0);
	}
	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		/* This was never tested by Intel
		 * for more than one dst buffer, I think. */
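		/*
		 * Out-of-place operation: the source is mapped to the
		 * device and the destination from it; in-place requests
		 * keep the default DMA_BIDIRECTIONAL mapping.
		 */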
		req_ctx->dst = NULL;
		if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
				     flags, DMA_FROM_DEVICE))
			goto free_buf_dest;
		src_direction = DMA_TO_DEVICE;
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;
	} else {
		req_ctx->dst = NULL;
	}
	req_ctx->src = NULL;
	if (!chainup_buffers(dev, req->src, nbytes, &src_hook, flags,
			     src_direction))
		goto free_buf_src;

	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
	qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(send_qid));
	return -EINPROGRESS;

free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
free_buf_dest:
	if (req->src != req->dst)
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);

	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int ablk_encrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 1);
}

static int ablk_decrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 0);
}

static int ablk_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	u8 *info = req->iv;
	int ret;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	req->iv = iv;
	ret = ablk_perform(req, 1);
	req->iv = info;
	return ret;
}

static int aead_perform(struct aead_request *req, int encrypt,
			int cryptoffset, int eff_cryptlen, u8 *iv)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int ivsize = crypto_aead_ivsize(tfm);
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int cryptlen;
	struct buffer_desc *buf, src_hook;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
		      GFP_KERNEL : GFP_ATOMIC;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	unsigned int lastlen;

	if (qmgr_stat_full(send_qid))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	if (encrypt) {
		dir = &ctx->encrypt;
		cryptlen = req->cryptlen;
	} else {
		dir = &ctx->decrypt;
		/* req->cryptlen includes the authsize when decrypting */
		cryptlen = req->cryptlen - authsize;
		eff_cryptlen -= authsize;
	}
	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.aead_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = cryptoffset;
	crypt->crypt_len = eff_cryptlen;

	crypt->auth_offs = 0;
	crypt->auth_len = req->assoclen + cryptlen;
	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);

	buf = chainup_buffers(dev, req->src, crypt->auth_len,
			      &src_hook, flags, src_direction);
	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	if (!buf)
		goto free_buf_src;

	lastlen = buf->buf_len;
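	/*
	 * If the tag fits inside the last chained source/destination
	 * buffer, the NPE can read or write the ICV in place; otherwise a
	 * bounce buffer (hmac_virt) is set up further down.
	 */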
	if (lastlen >= authsize)
		crypt->icv_rev_aes = buf->phys_addr +
				     buf->buf_len - authsize;

	req_ctx->dst = NULL;

	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		src_direction = DMA_TO_DEVICE;

		buf = chainup_buffers(dev, req->dst, crypt->auth_len,
				      &dst_hook, flags, DMA_FROM_DEVICE);
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;

		if (!buf)
			goto free_buf_dst;

		if (encrypt) {
			lastlen = buf->buf_len;
			if (lastlen >= authsize)
				crypt->icv_rev_aes = buf->phys_addr +
						     buf->buf_len - authsize;
		}
	}

	if (unlikely(lastlen < authsize)) {
		dma_addr_t dma;
		/* The 12 hmac bytes are scattered,
		 * we need to copy them into a safe buffer */
		req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags, &dma);
		if (unlikely(!req_ctx->hmac_virt))
			goto free_buf_dst;
		crypt->icv_rev_aes = dma;
		if (!encrypt) {
			scatterwalk_map_and_copy(req_ctx->hmac_virt,
						 req->src, cryptlen, authsize, 0);
		}
		req_ctx->encrypt = encrypt;
	} else {
		req_ctx->hmac_virt = NULL;
	}

	crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
	qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(send_qid));
	return -EINPROGRESS;

free_buf_dst:
	free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int digest_len = crypto_aead_maxauthsize(tfm);
	int ret;

	if (!ctx->enckey_len && !ctx->authkey_len)
		return 0;
	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
			 ctx->authkey_len, digest_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
			 ctx->authkey_len, digest_len);
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int max = crypto_aead_maxauthsize(tfm) >> 2;

	if ((authsize >> 2) < 1 || (authsize >> 2) > max || (authsize & 3))
		return -EINVAL;
	return aead_setup(tfm, authsize);
}

static int aead_setkey(struct crypto_aead *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->enckey))
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
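
/*
 * Like aead_setkey(), but additionally rejects weak 3DES keys via
 * verify_aead_des3_key() before accepting the authenc() key blob.
 */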
static int des3_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	err = verify_aead_des3_key(tfm, keys.enckey, keys.enckeylen);
	if (err)
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
}

static struct ixp_alg ixp4xx_algos[] = {
{
	.crypto	= {
		.base.cra_name		= "cbc(des)",
		.base.cra_blocksize	= DES_BLOCK_SIZE,

		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,

}, {
	.crypto	= {
		.base.cra_name		= "ecb(des)",
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,

		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.setkey			= ablk_des3_setkey,
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,

		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= ablk_des3_setkey,
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "cbc(aes)",
		.base.cra_blocksize	= AES_BLOCK_SIZE,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.base.cra_name		= "ecb(aes)",
		.base.cra_blocksize	= AES_BLOCK_SIZE,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_ECB,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_ECB,
}, {
	.crypto	= {
		.base.cra_name		= "ctr(aes)",
		.base.cra_blocksize	= 1,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
}, {
	.crypto	= {
		.base.cra_name		= "rfc3686(ctr(aes))",
		.base.cra_blocksize	= 1,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= ablk_rfc3686_setkey,
		.encrypt		= ablk_rfc3686_crypt,
		.decrypt		= ablk_rfc3686_crypt,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
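	/* CTR runs the block cipher forward for both directions */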
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
} };

static struct ixp_aead_alg ixp4xx_aeads[] = {
{
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
} };

#define IXP_POSTFIX "-ixp4xx"

static int ixp_crypto_probe(struct platform_device *_pdev)
{
	struct device *dev = &_pdev->dev;
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i, err;

	pdev = _pdev;

	err = init_ixp_crypto(dev);
	if (err)
		return err;

	for (i = 0; i < num; i++) {
		struct skcipher_alg *cra = &ixp4xx_algos[i].crypto;

		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			continue;
		if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES))
			continue;

		/* block ciphers */
		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				      CRYPTO_ALG_ASYNC |
				      CRYPTO_ALG_ALLOCATES_MEMORY |
				      CRYPTO_ALG_NEED_FALLBACK;
		if (!cra->setkey)
			cra->setkey = ablk_setkey;
		if (!cra->encrypt)
			cra->encrypt = ablk_encrypt;
		if (!cra->decrypt)
			cra->decrypt = ablk_decrypt;
		cra->init = init_tfm_ablk;
		cra->exit = exit_tfm_ablk;

		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
		cra->base.cra_module = THIS_MODULE;
		cra->base.cra_alignmask = 3;
		cra->base.cra_priority = 300;
		if (crypto_register_skcipher(cra))
			dev_err(&pdev->dev, "Failed to register '%s'\n",
				cra->base.cra_name);
		else
			ixp4xx_algos[i].registered = 1;
	}

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		struct aead_alg *cra = &ixp4xx_aeads[i].crypto;

		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			continue;
		if (!support_aes && (ixp4xx_aeads[i].cfg_enc & MOD_AES))
			continue;

		/* authenc */
		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				      CRYPTO_ALG_ASYNC |
				      CRYPTO_ALG_ALLOCATES_MEMORY;
		cra->setkey = cra->setkey ?: aead_setkey;
		cra->setauthsize = aead_setauthsize;
		cra->encrypt = aead_encrypt;
		cra->decrypt = aead_decrypt;
		cra->init = init_tfm_aead;
		cra->exit = exit_tfm_aead;

		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
		cra->base.cra_module = THIS_MODULE;
		cra->base.cra_alignmask = 3;
		cra->base.cra_priority = 300;

		if (crypto_register_aead(cra))
			dev_err(&pdev->dev, "Failed to register '%s'\n",
				cra->base.cra_driver_name);
		else
			ixp4xx_aeads[i].registered = 1;
	}
	return 0;
}

static void ixp_crypto_remove(struct platform_device *pdev)
{
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i;

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		if (ixp4xx_aeads[i].registered)
			crypto_unregister_aead(&ixp4xx_aeads[i].crypto);
	}

	for (i = 0; i < num; i++) {
		if (ixp4xx_algos[i].registered)
			crypto_unregister_skcipher(&ixp4xx_algos[i].crypto);
	}
	release_ixp_crypto(&pdev->dev);
}

static const struct of_device_id ixp4xx_crypto_of_match[] = {
	{
		.compatible = "intel,ixp4xx-crypto",
	},
	{},
};

static struct platform_driver ixp_crypto_driver = {
	.probe = ixp_crypto_probe,
	.remove = ixp_crypto_remove,
	.driver = {
		.name = "ixp4xx_crypto",
		.of_match_table = ixp4xx_crypto_of_match,
	},
};
module_platform_driver(ixp_crypto_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Christian Hohnstaedt <[email protected]>");
MODULE_DESCRIPTION("IXP4xx hardware crypto");