Path: blob/master/drivers/crypto/amlogic/amlogic-gxl-cipher.c
// SPDX-License-Identifier: GPL-2.0
/*
 * amlogic-cipher.c - hardware cryptographic offloader for Amlogic GXL SoC
 *
 * Copyright (C) 2018-2019 Corentin LABBE <[email protected]>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit keys
 * in CBC and ECB mode.
 */

#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <crypto/scatterwalk.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <crypto/internal/skcipher.h>
#include "amlogic-gxl.h"

static int get_engine_number(struct meson_dev *mc)
{
	return atomic_inc_return(&mc->flow) % MAXFLOW;
}

static bool meson_cipher_need_fallback(struct skcipher_request *areq)
{
	struct scatterlist *src_sg = areq->src;
	struct scatterlist *dst_sg = areq->dst;

	if (areq->cryptlen == 0)
		return true;

	if (sg_nents(src_sg) != sg_nents(dst_sg))
		return true;

	/* KEY/IV descriptors use 3 desc */
	if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3)
		return true;

	while (src_sg && dst_sg) {
		if ((src_sg->length % 16) != 0)
			return true;
		if ((dst_sg->length % 16) != 0)
			return true;
		if (src_sg->length != dst_sg->length)
			return true;
		if (!IS_ALIGNED(src_sg->offset, sizeof(u32)))
			return true;
		if (!IS_ALIGNED(dst_sg->offset, sizeof(u32)))
			return true;
		src_sg = sg_next(src_sg);
		dst_sg = sg_next(dst_sg);
	}

	return false;
}

static int meson_cipher_do_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct meson_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;
#ifdef CONFIG_CRYPTO_DEV_AMLOGIC_GXL_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct meson_alg_template *algt;

	algt = container_of(alg, struct meson_alg_template, alg.skcipher.base);
	algt->stat_fb++;
#endif
	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->op_dir == MESON_DECRYPT)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

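/*
 * meson_cipher() runs one request directly on the hardware: it fills the
 * descriptor list of the selected flow (up to three 16-byte descriptors
 * carrying the key and IV, then one descriptor per data scatterlist entry),
 * writes the physical address of that list to the flow register to start
 * the engine, and waits for the per-flow completion signalled from the
 * driver's interrupt handler. Requests the hardware cannot handle never
 * reach this function; meson_skencrypt()/meson_skdecrypt() route them to
 * the fallback cipher instead.
 */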
static int meson_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct meson_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct meson_dev *mc = op->mc;
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct meson_alg_template *algt;
	int flow = rctx->flow;
	unsigned int todo, eat, len;
	struct scatterlist *src_sg = areq->src;
	struct scatterlist *dst_sg = areq->dst;
	struct meson_desc *desc;
	int nr_sgs, nr_sgd;
	int i, err = 0;
	unsigned int keyivlen, ivsize, offset, tloffset;
	dma_addr_t phykeyiv;
	void *backup_iv = NULL, *bkeyiv;
	u32 v;

	algt = container_of(alg, struct meson_alg_template, alg.skcipher.base);

	dev_dbg(mc->dev, "%s %s %u %x IV(%u) key=%u flow=%d\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, crypto_skcipher_ivsize(tfm),
		op->keylen, flow);

#ifdef CONFIG_CRYPTO_DEV_AMLOGIC_GXL_DEBUG
	algt->stat_req++;
	mc->chanlist[flow].stat_req++;
#endif

	/*
	 * The hardware expects a list of meson_desc structures.
	 * The first two structures store the key,
	 * the third stores the IV.
	 */
	bkeyiv = kzalloc(48, GFP_KERNEL | GFP_DMA);
	if (!bkeyiv)
		return -ENOMEM;

	memcpy(bkeyiv, op->key, op->keylen);
	keyivlen = op->keylen;

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		if (ivsize > areq->cryptlen) {
			dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen);
			err = -EINVAL;
			goto theend;
		}
		memcpy(bkeyiv + 32, areq->iv, ivsize);
		keyivlen = 48;
		if (rctx->op_dir == MESON_DECRYPT) {
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				goto theend;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
	}
	if (keyivlen == 24)
		keyivlen = 32;

	phykeyiv = dma_map_single(mc->dev, bkeyiv, keyivlen,
				  DMA_TO_DEVICE);
	err = dma_mapping_error(mc->dev, phykeyiv);
	if (err) {
		dev_err(mc->dev, "Cannot DMA MAP KEY IV\n");
		goto theend;
	}

	tloffset = 0;
	eat = 0;
	i = 0;
	while (keyivlen > eat) {
		desc = &mc->chanlist[flow].tl[tloffset];
		memset(desc, 0, sizeof(struct meson_desc));
		todo = min(keyivlen - eat, 16u);
		desc->t_src = cpu_to_le32(phykeyiv + i * 16);
		desc->t_dst = cpu_to_le32(i * 16);
		v = (MODE_KEY << 20) | DESC_OWN | 16;
		desc->t_status = cpu_to_le32(v);

		eat += todo;
		i++;
		tloffset++;
	}

	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(mc->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (!nr_sgs) {
			dev_err(mc->dev, "Invalid SG count %d\n", nr_sgs);
			err = -EINVAL;
			goto theend;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(mc->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (!nr_sgs || nr_sgs > MAXDESC - 3) {
			dev_err(mc->dev, "Invalid SG count %d\n", nr_sgs);
			err = -EINVAL;
			goto theend;
		}
		nr_sgd = dma_map_sg(mc->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (!nr_sgd || nr_sgd > MAXDESC - 3) {
			dev_err(mc->dev, "Invalid SG count %d\n", nr_sgd);
			err = -EINVAL;
			goto theend;
		}
	}

	src_sg = areq->src;
	dst_sg = areq->dst;
	len = areq->cryptlen;
	while (src_sg) {
		desc = &mc->chanlist[flow].tl[tloffset];
		memset(desc, 0, sizeof(struct meson_desc));

		desc->t_src = cpu_to_le32(sg_dma_address(src_sg));
		desc->t_dst = cpu_to_le32(sg_dma_address(dst_sg));
		todo = min(len, sg_dma_len(src_sg));
		v = (op->keymode << 20) | DESC_OWN | todo | (algt->blockmode << 26);
		if (rctx->op_dir)
			v |= DESC_ENCRYPTION;
		len -= todo;

		if (!sg_next(src_sg))
			v |= DESC_LAST;
		desc->t_status = cpu_to_le32(v);
		tloffset++;
		src_sg = sg_next(src_sg);
		dst_sg = sg_next(dst_sg);
	}

	reinit_completion(&mc->chanlist[flow].complete);
	mc->chanlist[flow].status = 0;
	writel(mc->chanlist[flow].t_phy | 2, mc->base + (flow << 2));
	wait_for_completion_interruptible_timeout(&mc->chanlist[flow].complete,
						  msecs_to_jiffies(500));
	if (mc->chanlist[flow].status == 0) {
		dev_err(mc->dev, "DMA timeout for flow %d\n", flow);
		err = -EINVAL;
	}

	dma_unmap_single(mc->dev, phykeyiv, keyivlen, DMA_TO_DEVICE);

	if (areq->src == areq->dst) {
		dma_unmap_sg(mc->dev, areq->src, sg_nents(areq->src), DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(mc->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
		dma_unmap_sg(mc->dev, areq->dst, sg_nents(areq->dst), DMA_FROM_DEVICE);
	}

	if (areq->iv && ivsize > 0) {
		if (rctx->op_dir == MESON_DECRYPT) {
			memcpy(areq->iv, backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst,
						 areq->cryptlen - ivsize,
						 ivsize, 0);
		}
	}
theend:
	kfree_sensitive(bkeyiv);
	kfree_sensitive(backup_iv);

	return err;
}

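/*
 * meson_handle_cipher_request() is the request handler invoked by the
 * crypto engine worker. It runs the request synchronously through
 * meson_cipher() and then reports the result;
 * crypto_finalize_skcipher_request() is called with bottom halves disabled
 * so that the request's completion callback runs in a BH-disabled context.
 */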
int meson_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = meson_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

int meson_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct meson_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = MESON_DECRYPT;
	if (meson_cipher_need_fallback(areq))
		return meson_cipher_do_fallback(areq);
	e = get_engine_number(op->mc);
	engine = op->mc->chanlist[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int meson_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct meson_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = MESON_ENCRYPT;
	if (meson_cipher_need_fallback(areq))
		return meson_cipher_do_fallback(areq);
	e = get_engine_number(op->mc);
	engine = op->mc->chanlist[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int meson_cipher_init(struct crypto_tfm *tfm)
{
	struct meson_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct meson_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);

	memset(op, 0, sizeof(struct meson_cipher_tfm_ctx));

	algt = container_of(alg, struct meson_alg_template, alg.skcipher.base);
	op->mc = algt->mc;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->mc->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct meson_cipher_req_ctx) +
					   crypto_skcipher_reqsize(op->fallback_tfm));

	return 0;
}

void meson_cipher_exit(struct crypto_tfm *tfm)
{
	struct meson_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
}

int meson_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
		     unsigned int keylen)
{
	struct meson_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct meson_dev *mc = op->mc;

	switch (keylen) {
	case 128 / 8:
		op->keymode = MODE_AES_128;
		break;
	case 192 / 8:
		op->keymode = MODE_AES_192;
		break;
	case 256 / 8:
		op->keymode = MODE_AES_256;
		break;
	default:
		dev_dbg(mc->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}