Path: blob/master/drivers/crypto/intel/keembay/keembay-ocs-aes-core.c
29278 views
// SPDX-License-Identifier: GPL-2.0-only1/*2* Intel Keem Bay OCS AES Crypto Driver.3*4* Copyright (C) 2018-2020 Intel Corporation5*/67#include <crypto/aes.h>8#include <crypto/engine.h>9#include <crypto/gcm.h>10#include <crypto/internal/aead.h>11#include <crypto/internal/skcipher.h>12#include <crypto/scatterwalk.h>13#include <linux/clk.h>14#include <linux/completion.h>15#include <linux/dma-mapping.h>16#include <linux/err.h>17#include <linux/interrupt.h>18#include <linux/io.h>19#include <linux/kernel.h>20#include <linux/module.h>21#include <linux/of.h>22#include <linux/platform_device.h>23#include <linux/string.h>2425#include "ocs-aes.h"2627#define KMB_OCS_PRIORITY 35028#define DRV_NAME "keembay-ocs-aes"2930#define OCS_AES_MIN_KEY_SIZE 1631#define OCS_AES_MAX_KEY_SIZE 3232#define OCS_AES_KEYSIZE_128 1633#define OCS_AES_KEYSIZE_192 2434#define OCS_AES_KEYSIZE_256 3235#define OCS_SM4_KEY_SIZE 163637/**38* struct ocs_aes_tctx - OCS AES Transform context39* @aes_dev: The OCS AES device.40* @key: AES/SM4 key.41* @key_len: The length (in bytes) of @key.42* @cipher: OCS cipher to use (either AES or SM4).43* @sw_cipher: The cipher to use as fallback.44* @use_fallback: Whether or not fallback cipher should be used.45*/46struct ocs_aes_tctx {47struct ocs_aes_dev *aes_dev;48u8 key[OCS_AES_KEYSIZE_256];49unsigned int key_len;50enum ocs_cipher cipher;51union {52struct crypto_sync_skcipher *sk;53struct crypto_aead *aead;54} sw_cipher;55bool use_fallback;56};5758/**59* struct ocs_aes_rctx - OCS AES Request context.60* @instruction: Instruction to be executed (encrypt / decrypt).61* @mode: Mode to use (ECB, CBC, CTR, CCm, GCM, CTS)62* @src_nents: Number of source SG entries.63* @dst_nents: Number of destination SG entries.64* @src_dma_count: The number of DMA-mapped entries of the source SG.65* @dst_dma_count: The number of DMA-mapped entries of the destination SG.66* @in_place: Whether or not this is an in place request, i.e.,67* src_sg == dst_sg.68* @src_dll: OCS DMA linked list 
for input data.69* @dst_dll: OCS DMA linked list for output data.70* @last_ct_blk: Buffer to hold last cipher text block (only used in CBC71* mode).72* @cts_swap: Whether or not CTS swap must be performed.73* @aad_src_dll: OCS DMA linked list for input AAD data.74* @aad_dst_dll: OCS DMA linked list for output AAD data.75* @in_tag: Buffer to hold input encrypted tag (only used for76* CCM/GCM decrypt).77* @out_tag: Buffer to hold output encrypted / decrypted tag (only78* used for GCM encrypt / decrypt).79*/80struct ocs_aes_rctx {81/* Fields common across all modes. */82enum ocs_instruction instruction;83enum ocs_mode mode;84int src_nents;85int dst_nents;86int src_dma_count;87int dst_dma_count;88bool in_place;89struct ocs_dll_desc src_dll;90struct ocs_dll_desc dst_dll;9192/* CBC specific */93u8 last_ct_blk[AES_BLOCK_SIZE];9495/* CTS specific */96int cts_swap;9798/* CCM/GCM specific */99struct ocs_dll_desc aad_src_dll;100struct ocs_dll_desc aad_dst_dll;101u8 in_tag[AES_BLOCK_SIZE];102103/* GCM specific */104u8 out_tag[AES_BLOCK_SIZE];105};106107/* Driver data. */108struct ocs_aes_drv {109struct list_head dev_list;110spinlock_t lock; /* Protects dev_list. 
*/111};112113static struct ocs_aes_drv ocs_aes = {114.dev_list = LIST_HEAD_INIT(ocs_aes.dev_list),115.lock = __SPIN_LOCK_UNLOCKED(ocs_aes.lock),116};117118static struct ocs_aes_dev *kmb_ocs_aes_find_dev(struct ocs_aes_tctx *tctx)119{120struct ocs_aes_dev *aes_dev;121122spin_lock(&ocs_aes.lock);123124if (tctx->aes_dev) {125aes_dev = tctx->aes_dev;126goto exit;127}128129/* Only a single OCS device available */130aes_dev = list_first_entry(&ocs_aes.dev_list, struct ocs_aes_dev, list);131tctx->aes_dev = aes_dev;132133exit:134spin_unlock(&ocs_aes.lock);135136return aes_dev;137}138139/*140* Ensure key is 128-bit or 256-bit for AES or 128-bit for SM4 and an actual141* key is being passed in.142*143* Return: 0 if key is valid, -EINVAL otherwise.144*/145static int check_key(const u8 *in_key, size_t key_len, enum ocs_cipher cipher)146{147if (!in_key)148return -EINVAL;149150/* For AES, only 128-byte or 256-byte keys are supported. */151if (cipher == OCS_AES && (key_len == OCS_AES_KEYSIZE_128 ||152key_len == OCS_AES_KEYSIZE_256))153return 0;154155/* For SM4, only 128-byte keys are supported. */156if (cipher == OCS_SM4 && key_len == OCS_AES_KEYSIZE_128)157return 0;158159/* Everything else is unsupported. */160return -EINVAL;161}162163/* Save key into transformation context. */164static int save_key(struct ocs_aes_tctx *tctx, const u8 *in_key, size_t key_len,165enum ocs_cipher cipher)166{167int ret;168169ret = check_key(in_key, key_len, cipher);170if (ret)171return ret;172173memcpy(tctx->key, in_key, key_len);174tctx->key_len = key_len;175tctx->cipher = cipher;176177return 0;178}179180/* Set key for symmetric cypher. */181static int kmb_ocs_sk_set_key(struct crypto_skcipher *tfm, const u8 *in_key,182size_t key_len, enum ocs_cipher cipher)183{184struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);185186/* Fallback is used for AES with 192-bit key. 
*/187tctx->use_fallback = (cipher == OCS_AES &&188key_len == OCS_AES_KEYSIZE_192);189190if (!tctx->use_fallback)191return save_key(tctx, in_key, key_len, cipher);192193crypto_sync_skcipher_clear_flags(tctx->sw_cipher.sk,194CRYPTO_TFM_REQ_MASK);195crypto_sync_skcipher_set_flags(tctx->sw_cipher.sk,196tfm->base.crt_flags &197CRYPTO_TFM_REQ_MASK);198199return crypto_sync_skcipher_setkey(tctx->sw_cipher.sk, in_key, key_len);200}201202/* Set key for AEAD cipher. */203static int kmb_ocs_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,204size_t key_len, enum ocs_cipher cipher)205{206struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);207208/* Fallback is used for AES with 192-bit key. */209tctx->use_fallback = (cipher == OCS_AES &&210key_len == OCS_AES_KEYSIZE_192);211212if (!tctx->use_fallback)213return save_key(tctx, in_key, key_len, cipher);214215crypto_aead_clear_flags(tctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);216crypto_aead_set_flags(tctx->sw_cipher.aead,217crypto_aead_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);218219return crypto_aead_setkey(tctx->sw_cipher.aead, in_key, key_len);220}221222/* Swap two AES blocks in SG lists. */223static void sg_swap_blocks(struct scatterlist *sgl, unsigned int nents,224off_t blk1_offset, off_t blk2_offset)225{226u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE];227228/*229* No easy way to copy within sg list, so copy both blocks to temporary230* buffers first.231*/232sg_pcopy_to_buffer(sgl, nents, tmp_buf1, AES_BLOCK_SIZE, blk1_offset);233sg_pcopy_to_buffer(sgl, nents, tmp_buf2, AES_BLOCK_SIZE, blk2_offset);234sg_pcopy_from_buffer(sgl, nents, tmp_buf1, AES_BLOCK_SIZE, blk2_offset);235sg_pcopy_from_buffer(sgl, nents, tmp_buf2, AES_BLOCK_SIZE, blk1_offset);236}237238/* Initialize request context to default values. */239static void ocs_aes_init_rctx(struct ocs_aes_rctx *rctx)240{241/* Zero everything. */242memset(rctx, 0, sizeof(*rctx));243244/* Set initial value for DMA addresses. 
*/245rctx->src_dll.dma_addr = DMA_MAPPING_ERROR;246rctx->dst_dll.dma_addr = DMA_MAPPING_ERROR;247rctx->aad_src_dll.dma_addr = DMA_MAPPING_ERROR;248rctx->aad_dst_dll.dma_addr = DMA_MAPPING_ERROR;249}250251static int kmb_ocs_sk_validate_input(struct skcipher_request *req,252enum ocs_mode mode)253{254struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);255int iv_size = crypto_skcipher_ivsize(tfm);256257switch (mode) {258case OCS_MODE_ECB:259/* Ensure input length is multiple of block size */260if (req->cryptlen % AES_BLOCK_SIZE != 0)261return -EINVAL;262263return 0;264265case OCS_MODE_CBC:266/* Ensure input length is multiple of block size */267if (req->cryptlen % AES_BLOCK_SIZE != 0)268return -EINVAL;269270/* Ensure IV is present and block size in length */271if (!req->iv || iv_size != AES_BLOCK_SIZE)272return -EINVAL;273/*274* NOTE: Since req->cryptlen == 0 case was already handled in275* kmb_ocs_sk_common(), the above two conditions also guarantee276* that: cryptlen >= iv_size277*/278return 0;279280case OCS_MODE_CTR:281/* Ensure IV is present and block size in length */282if (!req->iv || iv_size != AES_BLOCK_SIZE)283return -EINVAL;284return 0;285286case OCS_MODE_CTS:287/* Ensure input length >= block size */288if (req->cryptlen < AES_BLOCK_SIZE)289return -EINVAL;290291/* Ensure IV is present and block size in length */292if (!req->iv || iv_size != AES_BLOCK_SIZE)293return -EINVAL;294295return 0;296default:297return -EINVAL;298}299}300301/*302* Called by encrypt() / decrypt() skcipher functions.303*304* Use fallback if needed, otherwise initialize context and enqueue request305* into engine.306*/307static int kmb_ocs_sk_common(struct skcipher_request *req,308enum ocs_cipher cipher,309enum ocs_instruction instruction,310enum ocs_mode mode)311{312struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);313struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);314struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);315struct ocs_aes_dev *aes_dev;316int rc;317318if 
(tctx->use_fallback) {319SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tctx->sw_cipher.sk);320321skcipher_request_set_sync_tfm(subreq, tctx->sw_cipher.sk);322skcipher_request_set_callback(subreq, req->base.flags, NULL,323NULL);324skcipher_request_set_crypt(subreq, req->src, req->dst,325req->cryptlen, req->iv);326327if (instruction == OCS_ENCRYPT)328rc = crypto_skcipher_encrypt(subreq);329else330rc = crypto_skcipher_decrypt(subreq);331332skcipher_request_zero(subreq);333334return rc;335}336337/*338* If cryptlen == 0, no processing needed for ECB, CBC and CTR.339*340* For CTS continue: kmb_ocs_sk_validate_input() will return -EINVAL.341*/342if (!req->cryptlen && mode != OCS_MODE_CTS)343return 0;344345rc = kmb_ocs_sk_validate_input(req, mode);346if (rc)347return rc;348349aes_dev = kmb_ocs_aes_find_dev(tctx);350if (!aes_dev)351return -ENODEV;352353if (cipher != tctx->cipher)354return -EINVAL;355356ocs_aes_init_rctx(rctx);357rctx->instruction = instruction;358rctx->mode = mode;359360return crypto_transfer_skcipher_request_to_engine(aes_dev->engine, req);361}362363static void cleanup_ocs_dma_linked_list(struct device *dev,364struct ocs_dll_desc *dll)365{366if (dll->vaddr)367dma_free_coherent(dev, dll->size, dll->vaddr, dll->dma_addr);368dll->vaddr = NULL;369dll->size = 0;370dll->dma_addr = DMA_MAPPING_ERROR;371}372373static void kmb_ocs_sk_dma_cleanup(struct skcipher_request *req)374{375struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);376struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);377struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);378struct device *dev = tctx->aes_dev->dev;379380if (rctx->src_dma_count) {381dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);382rctx->src_dma_count = 0;383}384385if (rctx->dst_dma_count) {386dma_unmap_sg(dev, req->dst, rctx->dst_nents, rctx->in_place ?387DMA_BIDIRECTIONAL :388DMA_FROM_DEVICE);389rctx->dst_dma_count = 0;390}391392/* Clean up OCS DMA linked lists */393cleanup_ocs_dma_linked_list(dev, 
&rctx->src_dll);394cleanup_ocs_dma_linked_list(dev, &rctx->dst_dll);395}396397static int kmb_ocs_sk_prepare_inplace(struct skcipher_request *req)398{399struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);400struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);401struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);402int iv_size = crypto_skcipher_ivsize(tfm);403int rc;404405/*406* For CBC decrypt, save last block (iv) to last_ct_blk buffer.407*408* Note: if we are here, we already checked that cryptlen >= iv_size409* and iv_size == AES_BLOCK_SIZE (i.e., the size of last_ct_blk); see410* kmb_ocs_sk_validate_input().411*/412if (rctx->mode == OCS_MODE_CBC && rctx->instruction == OCS_DECRYPT)413scatterwalk_map_and_copy(rctx->last_ct_blk, req->src,414req->cryptlen - iv_size, iv_size, 0);415416/* For CTS decrypt, swap last two blocks, if needed. */417if (rctx->cts_swap && rctx->instruction == OCS_DECRYPT)418sg_swap_blocks(req->dst, rctx->dst_nents,419req->cryptlen - AES_BLOCK_SIZE,420req->cryptlen - (2 * AES_BLOCK_SIZE));421422/* src and dst buffers are the same, use bidirectional DMA mapping. 
*/423rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,424rctx->dst_nents, DMA_BIDIRECTIONAL);425if (rctx->dst_dma_count == 0) {426dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");427return -ENOMEM;428}429430/* Create DST linked list */431rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,432rctx->dst_dma_count, &rctx->dst_dll,433req->cryptlen, 0);434if (rc)435return rc;436/*437* If descriptor creation was successful, set the src_dll.dma_addr to438* the value of dst_dll.dma_addr, as we do in-place AES operation on439* the src.440*/441rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;442443return 0;444}445446static int kmb_ocs_sk_prepare_notinplace(struct skcipher_request *req)447{448struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);449struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);450struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);451int rc;452453rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);454if (rctx->src_nents < 0)455return -EBADMSG;456457/* Map SRC SG. */458rctx->src_dma_count = dma_map_sg(tctx->aes_dev->dev, req->src,459rctx->src_nents, DMA_TO_DEVICE);460if (rctx->src_dma_count == 0) {461dev_err(tctx->aes_dev->dev, "Failed to map source sg\n");462return -ENOMEM;463}464465/* Create SRC linked list */466rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,467rctx->src_dma_count, &rctx->src_dll,468req->cryptlen, 0);469if (rc)470return rc;471472/* Map DST SG. */473rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,474rctx->dst_nents, DMA_FROM_DEVICE);475if (rctx->dst_dma_count == 0) {476dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");477return -ENOMEM;478}479480/* Create DST linked list */481rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,482rctx->dst_dma_count, &rctx->dst_dll,483req->cryptlen, 0);484if (rc)485return rc;486487/* If this is not a CTS decrypt operation with swapping, we are done. 
*/488if (!(rctx->cts_swap && rctx->instruction == OCS_DECRYPT))489return 0;490491/*492* Otherwise, we have to copy src to dst (as we cannot modify src).493* Use OCS AES bypass mode to copy src to dst via DMA.494*495* NOTE: for anything other than small data sizes this is rather496* inefficient.497*/498rc = ocs_aes_bypass_op(tctx->aes_dev, rctx->dst_dll.dma_addr,499rctx->src_dll.dma_addr, req->cryptlen);500if (rc)501return rc;502503/*504* Now dst == src, so clean up what we did so far and use in_place505* logic.506*/507kmb_ocs_sk_dma_cleanup(req);508rctx->in_place = true;509510return kmb_ocs_sk_prepare_inplace(req);511}512513static int kmb_ocs_sk_run(struct skcipher_request *req)514{515struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);516struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);517struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);518struct ocs_aes_dev *aes_dev = tctx->aes_dev;519int iv_size = crypto_skcipher_ivsize(tfm);520int rc;521522rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);523if (rctx->dst_nents < 0)524return -EBADMSG;525526/*527* If 2 blocks or greater, and multiple of block size swap last two528* blocks to be compatible with other crypto API CTS implementations:529* OCS mode uses CBC-CS2, whereas other crypto API implementations use530* CBC-CS3.531* CBC-CS2 and CBC-CS3 defined by:532* https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38a-add.pdf533*/534rctx->cts_swap = (rctx->mode == OCS_MODE_CTS &&535req->cryptlen > AES_BLOCK_SIZE &&536req->cryptlen % AES_BLOCK_SIZE == 0);537538rctx->in_place = (req->src == req->dst);539540if (rctx->in_place)541rc = kmb_ocs_sk_prepare_inplace(req);542else543rc = kmb_ocs_sk_prepare_notinplace(req);544545if (rc)546goto error;547548rc = ocs_aes_op(aes_dev, rctx->mode, tctx->cipher, rctx->instruction,549rctx->dst_dll.dma_addr, rctx->src_dll.dma_addr,550req->cryptlen, req->iv, iv_size);551if (rc)552goto error;553554/* Clean-up DMA before further processing output. 
*/555kmb_ocs_sk_dma_cleanup(req);556557/* For CTS Encrypt, swap last 2 blocks, if needed. */558if (rctx->cts_swap && rctx->instruction == OCS_ENCRYPT) {559sg_swap_blocks(req->dst, rctx->dst_nents,560req->cryptlen - AES_BLOCK_SIZE,561req->cryptlen - (2 * AES_BLOCK_SIZE));562return 0;563}564565/* For CBC copy IV to req->IV. */566if (rctx->mode == OCS_MODE_CBC) {567/* CBC encrypt case. */568if (rctx->instruction == OCS_ENCRYPT) {569scatterwalk_map_and_copy(req->iv, req->dst,570req->cryptlen - iv_size,571iv_size, 0);572return 0;573}574/* CBC decrypt case. */575if (rctx->in_place)576memcpy(req->iv, rctx->last_ct_blk, iv_size);577else578scatterwalk_map_and_copy(req->iv, req->src,579req->cryptlen - iv_size,580iv_size, 0);581return 0;582}583/* For all other modes there's nothing to do. */584585return 0;586587error:588kmb_ocs_sk_dma_cleanup(req);589590return rc;591}592593static int kmb_ocs_aead_validate_input(struct aead_request *req,594enum ocs_instruction instruction,595enum ocs_mode mode)596{597struct crypto_aead *tfm = crypto_aead_reqtfm(req);598int tag_size = crypto_aead_authsize(tfm);599int iv_size = crypto_aead_ivsize(tfm);600601/* For decrypt crytplen == len(PT) + len(tag). */602if (instruction == OCS_DECRYPT && req->cryptlen < tag_size)603return -EINVAL;604605/* IV is mandatory. 
*/606if (!req->iv)607return -EINVAL;608609switch (mode) {610case OCS_MODE_GCM:611if (iv_size != GCM_AES_IV_SIZE)612return -EINVAL;613614return 0;615616case OCS_MODE_CCM:617/* Ensure IV is present and block size in length */618if (iv_size != AES_BLOCK_SIZE)619return -EINVAL;620621return 0;622623default:624return -EINVAL;625}626}627628/*629* Called by encrypt() / decrypt() aead functions.630*631* Use fallback if needed, otherwise initialize context and enqueue request632* into engine.633*/634static int kmb_ocs_aead_common(struct aead_request *req,635enum ocs_cipher cipher,636enum ocs_instruction instruction,637enum ocs_mode mode)638{639struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));640struct ocs_aes_rctx *rctx = aead_request_ctx(req);641struct ocs_aes_dev *dd;642int rc;643644if (tctx->use_fallback) {645struct aead_request *subreq = aead_request_ctx(req);646647aead_request_set_tfm(subreq, tctx->sw_cipher.aead);648aead_request_set_callback(subreq, req->base.flags,649req->base.complete, req->base.data);650aead_request_set_crypt(subreq, req->src, req->dst,651req->cryptlen, req->iv);652aead_request_set_ad(subreq, req->assoclen);653rc = crypto_aead_setauthsize(tctx->sw_cipher.aead,654crypto_aead_authsize(crypto_aead_reqtfm(req)));655if (rc)656return rc;657658return (instruction == OCS_ENCRYPT) ?659crypto_aead_encrypt(subreq) :660crypto_aead_decrypt(subreq);661}662663rc = kmb_ocs_aead_validate_input(req, instruction, mode);664if (rc)665return rc;666667dd = kmb_ocs_aes_find_dev(tctx);668if (!dd)669return -ENODEV;670671if (cipher != tctx->cipher)672return -EINVAL;673674ocs_aes_init_rctx(rctx);675rctx->instruction = instruction;676rctx->mode = mode;677678return crypto_transfer_aead_request_to_engine(dd->engine, req);679}680681static void kmb_ocs_aead_dma_cleanup(struct aead_request *req)682{683struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));684struct ocs_aes_rctx *rctx = aead_request_ctx(req);685struct device *dev = 
tctx->aes_dev->dev;686687if (rctx->src_dma_count) {688dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);689rctx->src_dma_count = 0;690}691692if (rctx->dst_dma_count) {693dma_unmap_sg(dev, req->dst, rctx->dst_nents, rctx->in_place ?694DMA_BIDIRECTIONAL :695DMA_FROM_DEVICE);696rctx->dst_dma_count = 0;697}698/* Clean up OCS DMA linked lists */699cleanup_ocs_dma_linked_list(dev, &rctx->src_dll);700cleanup_ocs_dma_linked_list(dev, &rctx->dst_dll);701cleanup_ocs_dma_linked_list(dev, &rctx->aad_src_dll);702cleanup_ocs_dma_linked_list(dev, &rctx->aad_dst_dll);703}704705/**706* kmb_ocs_aead_dma_prepare() - Do DMA mapping for AEAD processing.707* @req: The AEAD request being processed.708* @src_dll_size: Where to store the length of the data mapped into the709* src_dll OCS DMA list.710*711* Do the following:712* - DMA map req->src and req->dst713* - Initialize the following OCS DMA linked lists: rctx->src_dll,714* rctx->dst_dll, rctx->aad_src_dll and rxtc->aad_dst_dll.715*716* Return: 0 on success, negative error code otherwise.717*/718static int kmb_ocs_aead_dma_prepare(struct aead_request *req, u32 *src_dll_size)719{720struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));721const int tag_size = crypto_aead_authsize(crypto_aead_reqtfm(req));722struct ocs_aes_rctx *rctx = aead_request_ctx(req);723u32 in_size; /* The length of the data to be mapped by src_dll. */724u32 out_size; /* The length of the data to be mapped by dst_dll. */725u32 dst_size; /* The length of the data in dst_sg. */726int rc;727728/* Get number of entries in input data SG list. */729rctx->src_nents = sg_nents_for_len(req->src,730req->assoclen + req->cryptlen);731if (rctx->src_nents < 0)732return -EBADMSG;733734if (rctx->instruction == OCS_DECRYPT) {735/*736* For decrypt:737* - src sg list is: AAD|CT|tag738* - dst sg list expects: AAD|PT739*740* in_size == len(CT); out_size == len(PT)741*/742743/* req->cryptlen includes both CT and tag. 
*/744in_size = req->cryptlen - tag_size;745746/* out_size = PT size == CT size */747out_size = in_size;748749/* len(dst_sg) == len(AAD) + len(PT) */750dst_size = req->assoclen + out_size;751752/*753* Copy tag from source SG list to 'in_tag' buffer.754*755* Note: this needs to be done here, before DMA mapping src_sg.756*/757sg_pcopy_to_buffer(req->src, rctx->src_nents, rctx->in_tag,758tag_size, req->assoclen + in_size);759760} else { /* OCS_ENCRYPT */761/*762* For encrypt:763* src sg list is: AAD|PT764* dst sg list expects: AAD|CT|tag765*/766/* in_size == len(PT) */767in_size = req->cryptlen;768769/*770* In CCM mode the OCS engine appends the tag to the ciphertext,771* but in GCM mode the tag must be read from the tag registers772* and appended manually below773*/774out_size = (rctx->mode == OCS_MODE_CCM) ? in_size + tag_size :775in_size;776/* len(dst_sg) == len(AAD) + len(CT) + len(tag) */777dst_size = req->assoclen + in_size + tag_size;778}779*src_dll_size = in_size;780781/* Get number of entries in output data SG list. */782rctx->dst_nents = sg_nents_for_len(req->dst, dst_size);783if (rctx->dst_nents < 0)784return -EBADMSG;785786rctx->in_place = (req->src == req->dst) ? 1 : 0;787788/* Map destination; use bidirectional mapping for in-place case. */789rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,790rctx->dst_nents,791rctx->in_place ? DMA_BIDIRECTIONAL :792DMA_FROM_DEVICE);793if (rctx->dst_dma_count == 0 && rctx->dst_nents != 0) {794dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");795return -ENOMEM;796}797798/* Create AAD DST list: maps dst[0:AAD_SIZE-1]. 
*/799rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,800rctx->dst_dma_count,801&rctx->aad_dst_dll, req->assoclen,8020);803if (rc)804return rc;805806/* Create DST list: maps dst[AAD_SIZE:out_size] */807rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,808rctx->dst_dma_count, &rctx->dst_dll,809out_size, req->assoclen);810if (rc)811return rc;812813if (rctx->in_place) {814/* If this is not CCM encrypt, we are done. */815if (!(rctx->mode == OCS_MODE_CCM &&816rctx->instruction == OCS_ENCRYPT)) {817/*818* SRC and DST are the same, so re-use the same DMA819* addresses (to avoid allocating new DMA lists820* identical to the dst ones).821*/822rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;823rctx->aad_src_dll.dma_addr = rctx->aad_dst_dll.dma_addr;824825return 0;826}827/*828* For CCM encrypt the input and output linked lists contain829* different amounts of data, so, we need to create different830* SRC and AAD SRC lists, even for the in-place case.831*/832rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,833rctx->dst_dma_count,834&rctx->aad_src_dll,835req->assoclen, 0);836if (rc)837return rc;838rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,839rctx->dst_dma_count,840&rctx->src_dll, in_size,841req->assoclen);842if (rc)843return rc;844845return 0;846}847/* Not in-place case. */848849/* Map source SG. */850rctx->src_dma_count = dma_map_sg(tctx->aes_dev->dev, req->src,851rctx->src_nents, DMA_TO_DEVICE);852if (rctx->src_dma_count == 0 && rctx->src_nents != 0) {853dev_err(tctx->aes_dev->dev, "Failed to map source sg\n");854return -ENOMEM;855}856857/* Create AAD SRC list. */858rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,859rctx->src_dma_count,860&rctx->aad_src_dll,861req->assoclen, 0);862if (rc)863return rc;864865/* Create SRC list. 
*/866rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,867rctx->src_dma_count,868&rctx->src_dll, in_size,869req->assoclen);870if (rc)871return rc;872873if (req->assoclen == 0)874return 0;875876/* Copy AAD from src sg to dst sg using OCS DMA. */877rc = ocs_aes_bypass_op(tctx->aes_dev, rctx->aad_dst_dll.dma_addr,878rctx->aad_src_dll.dma_addr, req->cryptlen);879if (rc)880dev_err(tctx->aes_dev->dev,881"Failed to copy source AAD to destination AAD\n");882883return rc;884}885886static int kmb_ocs_aead_run(struct aead_request *req)887{888struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));889const int tag_size = crypto_aead_authsize(crypto_aead_reqtfm(req));890struct ocs_aes_rctx *rctx = aead_request_ctx(req);891u32 in_size; /* The length of the data mapped by src_dll. */892int rc;893894rc = kmb_ocs_aead_dma_prepare(req, &in_size);895if (rc)896goto exit;897898/* For CCM, we just call the OCS processing and we are done. */899if (rctx->mode == OCS_MODE_CCM) {900rc = ocs_aes_ccm_op(tctx->aes_dev, tctx->cipher,901rctx->instruction, rctx->dst_dll.dma_addr,902rctx->src_dll.dma_addr, in_size,903req->iv,904rctx->aad_src_dll.dma_addr, req->assoclen,905rctx->in_tag, tag_size);906goto exit;907}908/* GCM case; invoke OCS processing. */909rc = ocs_aes_gcm_op(tctx->aes_dev, tctx->cipher,910rctx->instruction,911rctx->dst_dll.dma_addr,912rctx->src_dll.dma_addr, in_size,913req->iv,914rctx->aad_src_dll.dma_addr, req->assoclen,915rctx->out_tag, tag_size);916if (rc)917goto exit;918919/* For GCM decrypt, we have to compare in_tag with out_tag. */920if (rctx->instruction == OCS_DECRYPT) {921rc = memcmp(rctx->in_tag, rctx->out_tag, tag_size) ?922-EBADMSG : 0;923goto exit;924}925926/* For GCM encrypt, we must manually copy out_tag to DST sg. */927928/* Clean-up must be called before the sg_pcopy_from_buffer() below. */929kmb_ocs_aead_dma_cleanup(req);930931/* Copy tag to destination sg after AAD and CT. 
*/932sg_pcopy_from_buffer(req->dst, rctx->dst_nents, rctx->out_tag,933tag_size, req->assoclen + req->cryptlen);934935/* Return directly as DMA cleanup already done. */936return 0;937938exit:939kmb_ocs_aead_dma_cleanup(req);940941return rc;942}943944static int kmb_ocs_aes_sk_do_one_request(struct crypto_engine *engine,945void *areq)946{947struct skcipher_request *req =948container_of(areq, struct skcipher_request, base);949struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);950struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);951int err;952953if (!tctx->aes_dev) {954err = -ENODEV;955goto exit;956}957958err = ocs_aes_set_key(tctx->aes_dev, tctx->key_len, tctx->key,959tctx->cipher);960if (err)961goto exit;962963err = kmb_ocs_sk_run(req);964965exit:966crypto_finalize_skcipher_request(engine, req, err);967968return 0;969}970971static int kmb_ocs_aes_aead_do_one_request(struct crypto_engine *engine,972void *areq)973{974struct aead_request *req = container_of(areq,975struct aead_request, base);976struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));977int err;978979if (!tctx->aes_dev)980return -ENODEV;981982err = ocs_aes_set_key(tctx->aes_dev, tctx->key_len, tctx->key,983tctx->cipher);984if (err)985goto exit;986987err = kmb_ocs_aead_run(req);988989exit:990crypto_finalize_aead_request(tctx->aes_dev->engine, req, err);991992return 0;993}994995static int kmb_ocs_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,996unsigned int key_len)997{998return kmb_ocs_sk_set_key(tfm, in_key, key_len, OCS_AES);999}10001001static int kmb_ocs_aes_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,1002unsigned int key_len)1003{1004return kmb_ocs_aead_set_key(tfm, in_key, key_len, OCS_AES);1005}10061007#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB1008static int kmb_ocs_aes_ecb_encrypt(struct skcipher_request *req)1009{1010return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_ECB);1011}10121013static int kmb_ocs_aes_ecb_decrypt(struct 
skcipher_request *req)1014{1015return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_ECB);1016}1017#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB */10181019static int kmb_ocs_aes_cbc_encrypt(struct skcipher_request *req)1020{1021return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CBC);1022}10231024static int kmb_ocs_aes_cbc_decrypt(struct skcipher_request *req)1025{1026return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CBC);1027}10281029static int kmb_ocs_aes_ctr_encrypt(struct skcipher_request *req)1030{1031return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CTR);1032}10331034static int kmb_ocs_aes_ctr_decrypt(struct skcipher_request *req)1035{1036return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CTR);1037}10381039#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS1040static int kmb_ocs_aes_cts_encrypt(struct skcipher_request *req)1041{1042return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CTS);1043}10441045static int kmb_ocs_aes_cts_decrypt(struct skcipher_request *req)1046{1047return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CTS);1048}1049#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS */10501051static int kmb_ocs_aes_gcm_encrypt(struct aead_request *req)1052{1053return kmb_ocs_aead_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_GCM);1054}10551056static int kmb_ocs_aes_gcm_decrypt(struct aead_request *req)1057{1058return kmb_ocs_aead_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_GCM);1059}10601061static int kmb_ocs_aes_ccm_encrypt(struct aead_request *req)1062{1063return kmb_ocs_aead_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CCM);1064}10651066static int kmb_ocs_aes_ccm_decrypt(struct aead_request *req)1067{1068return kmb_ocs_aead_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CCM);1069}10701071static int kmb_ocs_sm4_set_key(struct crypto_skcipher *tfm, const u8 *in_key,1072unsigned int key_len)1073{1074return kmb_ocs_sk_set_key(tfm, in_key, key_len, OCS_SM4);1075}10761077static int 
kmb_ocs_sm4_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,1078unsigned int key_len)1079{1080return kmb_ocs_aead_set_key(tfm, in_key, key_len, OCS_SM4);1081}10821083#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB1084static int kmb_ocs_sm4_ecb_encrypt(struct skcipher_request *req)1085{1086return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_ECB);1087}10881089static int kmb_ocs_sm4_ecb_decrypt(struct skcipher_request *req)1090{1091return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_ECB);1092}1093#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB */10941095static int kmb_ocs_sm4_cbc_encrypt(struct skcipher_request *req)1096{1097return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CBC);1098}10991100static int kmb_ocs_sm4_cbc_decrypt(struct skcipher_request *req)1101{1102return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CBC);1103}11041105static int kmb_ocs_sm4_ctr_encrypt(struct skcipher_request *req)1106{1107return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CTR);1108}11091110static int kmb_ocs_sm4_ctr_decrypt(struct skcipher_request *req)1111{1112return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CTR);1113}11141115#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS1116static int kmb_ocs_sm4_cts_encrypt(struct skcipher_request *req)1117{1118return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CTS);1119}11201121static int kmb_ocs_sm4_cts_decrypt(struct skcipher_request *req)1122{1123return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CTS);1124}1125#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS */11261127static int kmb_ocs_sm4_gcm_encrypt(struct aead_request *req)1128{1129return kmb_ocs_aead_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_GCM);1130}11311132static int kmb_ocs_sm4_gcm_decrypt(struct aead_request *req)1133{1134return kmb_ocs_aead_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_GCM);1135}11361137static int kmb_ocs_sm4_ccm_encrypt(struct aead_request *req)1138{1139return 
kmb_ocs_aead_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CCM);
}

static int kmb_ocs_sm4_ccm_decrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CCM);
}

/*
 * skcipher tfm init for AES: allocate a software fallback cipher with the
 * same algorithm name up front, so it is available if it is ever needed;
 * tfm init fails if the fallback cannot be allocated.
 */
static int ocs_aes_init_tfm(struct crypto_skcipher *tfm)
{
	const char *alg_name = crypto_tfm_alg_name(&tfm->base);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *blk;

	/* set fallback cipher in case it will be needed */
	blk = crypto_alloc_sync_skcipher(alg_name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(blk))
		return PTR_ERR(blk);

	tctx->sw_cipher.sk = blk;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return 0;
}

/* skcipher tfm init for SM4: no fallback; only set the request size. */
static int ocs_sm4_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return 0;
}

/*
 * Scrub key material: wipe the key held in the transform context
 * (memzero_explicit() so the clear cannot be optimized away) and, if a
 * device is bound, write the now-zeroed buffer over the HW key registers.
 */
static inline void clear_key(struct ocs_aes_tctx *tctx)
{
	memzero_explicit(tctx->key, OCS_AES_KEYSIZE_256);

	/* Zero key registers if set */
	if (tctx->aes_dev)
		ocs_aes_set_key(tctx->aes_dev, OCS_AES_KEYSIZE_256,
				tctx->key, OCS_AES);
}

/* skcipher tfm exit: scrub keys and release the fallback, if any. */
static void ocs_exit_tfm(struct crypto_skcipher *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);

	clear_key(tctx);

	if (tctx->sw_cipher.sk) {
		crypto_free_sync_skcipher(tctx->sw_cipher.sk);
		tctx->sw_cipher.sk = NULL;
	}
}

/*
 * AEAD tfm init for AES: allocate an AEAD fallback with the same algorithm
 * name and size the request context to fit either our own request context
 * or a fallback sub-request, whichever is larger.
 */
static int ocs_aes_aead_cra_init(struct crypto_aead *tfm)
{
	const char *alg_name = crypto_tfm_alg_name(&tfm->base);
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *blk;

	/* Set fallback cipher in case it will be needed */
	blk = crypto_alloc_aead(alg_name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(blk))
		return PTR_ERR(blk);

	tctx->sw_cipher.aead = blk;

	crypto_aead_set_reqsize(tfm,
				max(sizeof(struct ocs_aes_rctx),
				    (sizeof(struct aead_request)
 + crypto_aead_reqsize(tctx->sw_cipher.aead))));

	return 0;
}

/*
 * CCM permits only the even tag lengths 4..16 (minus 2), per RFC 3610;
 * reject anything else. Validation only — no state is stored.
 */
static int kmb_ocs_aead_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		return 0;
	default:
		return -EINVAL;
	}
}

/* GCM tag-length validation is delegated to the generic helper. */
static int kmb_ocs_aead_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

/* AEAD tfm init for SM4: no fallback; only set the request size. */
static int ocs_sm4_aead_cra_init(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return 0;
}

/* AEAD tfm exit: scrub keys and release the fallback, if any. */
static void ocs_aead_cra_exit(struct crypto_aead *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);

	clear_key(tctx);

	if (tctx->sw_cipher.aead) {
		crypto_free_aead(tctx->sw_cipher.aead);
		tctx->sw_cipher.aead = NULL;
	}
}

/* Skcipher algorithms exposed to the Crypto API; run via crypto_engine. */
static struct skcipher_engine_alg algs[] = {
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
	{
		.base.base.cra_name = "ecb(aes)",
		.base.base.cra_driver_name = "ecb-aes-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY |
				       CRYPTO_ALG_NEED_FALLBACK,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.base.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.base.setkey = kmb_ocs_aes_set_key,
		.base.encrypt = kmb_ocs_aes_ecb_encrypt,
		.base.decrypt = kmb_ocs_aes_ecb_decrypt,
		.base.init = ocs_aes_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB */
	{
		.base.base.cra_name = "cbc(aes)",
		.base.base.cra_driver_name = "cbc-aes-keembay-ocs",
		.base.base.cra_priority =
KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY |
				       CRYPTO_ALG_NEED_FALLBACK,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.base.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_aes_set_key,
		.base.encrypt = kmb_ocs_aes_cbc_encrypt,
		.base.decrypt = kmb_ocs_aes_cbc_decrypt,
		.base.init = ocs_aes_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
	/* ctr(aes): stream-cipher semantics, hence cra_blocksize = 1. */
	{
		.base.base.cra_name = "ctr(aes)",
		.base.base.cra_driver_name = "ctr-aes-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY |
				       CRYPTO_ALG_NEED_FALLBACK,
		.base.base.cra_blocksize = 1,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.base.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_aes_set_key,
		.base.encrypt = kmb_ocs_aes_ctr_encrypt,
		.base.decrypt = kmb_ocs_aes_ctr_decrypt,
		.base.init = ocs_aes_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
	{
		.base.base.cra_name = "cts(cbc(aes))",
		.base.base.cra_driver_name = "cts-aes-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY |
				       CRYPTO_ALG_NEED_FALLBACK,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize =
OCS_AES_MIN_KEY_SIZE,
		.base.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_aes_set_key,
		.base.encrypt = kmb_ocs_aes_cts_encrypt,
		.base.decrypt = kmb_ocs_aes_cts_decrypt,
		.base.init = ocs_aes_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS */
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
	/*
	 * SM4 entries: fixed 16-byte key (OCS_SM4_KEY_SIZE) and no
	 * CRYPTO_ALG_NEED_FALLBACK flag, unlike the AES entries above.
	 */
	{
		.base.base.cra_name = "ecb(sm4)",
		.base.base.cra_driver_name = "ecb-sm4-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_SM4_KEY_SIZE,
		.base.max_keysize = OCS_SM4_KEY_SIZE,
		.base.setkey = kmb_ocs_sm4_set_key,
		.base.encrypt = kmb_ocs_sm4_ecb_encrypt,
		.base.decrypt = kmb_ocs_sm4_ecb_decrypt,
		.base.init = ocs_sm4_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB */
	{
		.base.base.cra_name = "cbc(sm4)",
		.base.base.cra_driver_name = "cbc-sm4-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_SM4_KEY_SIZE,
		.base.max_keysize = OCS_SM4_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_sm4_set_key,
		.base.encrypt = kmb_ocs_sm4_cbc_encrypt,
		.base.decrypt = kmb_ocs_sm4_cbc_decrypt,
		.base.init = ocs_sm4_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request =
kmb_ocs_aes_sk_do_one_request,
	},
	/* ctr(sm4): stream-cipher semantics, hence cra_blocksize = 1. */
	{
		.base.base.cra_name = "ctr(sm4)",
		.base.base.cra_driver_name = "ctr-sm4-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.base.cra_blocksize = 1,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_SM4_KEY_SIZE,
		.base.max_keysize = OCS_SM4_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_sm4_set_key,
		.base.encrypt = kmb_ocs_sm4_ctr_encrypt,
		.base.decrypt = kmb_ocs_sm4_ctr_decrypt,
		.base.init = ocs_sm4_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	},
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
	{
		.base.base.cra_name = "cts(cbc(sm4))",
		.base.base.cra_driver_name = "cts-sm4-keembay-ocs",
		.base.base.cra_priority = KMB_OCS_PRIORITY,
		.base.base.cra_flags = CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.base.cra_module = THIS_MODULE,
		.base.base.cra_alignmask = 0,

		.base.min_keysize = OCS_SM4_KEY_SIZE,
		.base.max_keysize = OCS_SM4_KEY_SIZE,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.setkey = kmb_ocs_sm4_set_key,
		.base.encrypt = kmb_ocs_sm4_cts_encrypt,
		.base.decrypt = kmb_ocs_sm4_cts_decrypt,
		.base.init = ocs_sm4_init_tfm,
		.base.exit = ocs_exit_tfm,
		.op.do_one_request = kmb_ocs_aes_sk_do_one_request,
	}
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS */
};

/* AEAD algorithms exposed to the Crypto API; run via crypto_engine. */
static struct aead_engine_alg algs_aead[] = {
	{
		.base.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "gcm-aes-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize =
1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.base.init = ocs_aes_aead_cra_init,
		.base.exit = ocs_aead_cra_exit,
		.base.ivsize = GCM_AES_IV_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.setauthsize = kmb_ocs_aead_gcm_setauthsize,
		.base.setkey = kmb_ocs_aes_aead_set_key,
		.base.encrypt = kmb_ocs_aes_gcm_encrypt,
		.base.decrypt = kmb_ocs_aes_gcm_decrypt,
		.op.do_one_request = kmb_ocs_aes_aead_do_one_request,
	},
	/* ccm(aes): full-block IV (16 bytes), CCM-specific authsize check. */
	{
		.base.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "ccm-aes-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.base.init = ocs_aes_aead_cra_init,
		.base.exit = ocs_aead_cra_exit,
		.base.ivsize = AES_BLOCK_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.setauthsize = kmb_ocs_aead_ccm_setauthsize,
		.base.setkey = kmb_ocs_aes_aead_set_key,
		.base.encrypt = kmb_ocs_aes_ccm_encrypt,
		.base.decrypt = kmb_ocs_aes_ccm_decrypt,
		.op.do_one_request = kmb_ocs_aes_aead_do_one_request,
	},
	/* SM4 AEAD entries: no CRYPTO_ALG_NEED_FALLBACK, SM4 init/setkey. */
	{
		.base.base = {
			.cra_name = "gcm(sm4)",
			.cra_driver_name = "gcm-sm4-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.base.init = ocs_sm4_aead_cra_init,
		.base.exit = ocs_aead_cra_exit,
		.base.ivsize = GCM_AES_IV_SIZE,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.setauthsize = kmb_ocs_aead_gcm_setauthsize,
		.base.setkey = kmb_ocs_sm4_aead_set_key,
		.base.encrypt = kmb_ocs_sm4_gcm_encrypt,
		.base.decrypt = kmb_ocs_sm4_gcm_decrypt,
		.op.do_one_request =
kmb_ocs_aes_aead_do_one_request,1506},1507{1508.base.base = {1509.cra_name = "ccm(sm4)",1510.cra_driver_name = "ccm-sm4-keembay-ocs",1511.cra_priority = KMB_OCS_PRIORITY,1512.cra_flags = CRYPTO_ALG_ASYNC |1513CRYPTO_ALG_KERN_DRIVER_ONLY,1514.cra_blocksize = 1,1515.cra_ctxsize = sizeof(struct ocs_aes_tctx),1516.cra_alignmask = 0,1517.cra_module = THIS_MODULE,1518},1519.base.init = ocs_sm4_aead_cra_init,1520.base.exit = ocs_aead_cra_exit,1521.base.ivsize = AES_BLOCK_SIZE,1522.base.maxauthsize = AES_BLOCK_SIZE,1523.base.setauthsize = kmb_ocs_aead_ccm_setauthsize,1524.base.setkey = kmb_ocs_sm4_aead_set_key,1525.base.encrypt = kmb_ocs_sm4_ccm_encrypt,1526.base.decrypt = kmb_ocs_sm4_ccm_decrypt,1527.op.do_one_request = kmb_ocs_aes_aead_do_one_request,1528}1529};15301531static void unregister_aes_algs(struct ocs_aes_dev *aes_dev)1532{1533crypto_engine_unregister_aeads(algs_aead, ARRAY_SIZE(algs_aead));1534crypto_engine_unregister_skciphers(algs, ARRAY_SIZE(algs));1535}15361537static int register_aes_algs(struct ocs_aes_dev *aes_dev)1538{1539int ret;15401541/*1542* If any algorithm fails to register, all preceding algorithms that1543* were successfully registered will be automatically unregistered.1544*/1545ret = crypto_engine_register_aeads(algs_aead, ARRAY_SIZE(algs_aead));1546if (ret)1547return ret;15481549ret = crypto_engine_register_skciphers(algs, ARRAY_SIZE(algs));1550if (ret)1551crypto_engine_unregister_aeads(algs_aead, ARRAY_SIZE(algs));15521553return ret;1554}15551556/* Device tree driver match. 
 */
/*
 * NOTE(review): there is no MODULE_DEVICE_TABLE(of, kmb_ocs_aes_of_match)
 * here, so the module presumably does not autoload on OF match — confirm
 * whether that is intentional.
 */
static const struct of_device_id kmb_ocs_aes_of_match[] = {
	{
		.compatible = "intel,keembay-ocs-aes",
	},
	{}
};

/*
 * Tear down in reverse probe order: unregister the algorithms first (so no
 * new requests arrive), drop the device from the global list, then shut
 * down the crypto engine.
 */
static void kmb_ocs_aes_remove(struct platform_device *pdev)
{
	struct ocs_aes_dev *aes_dev;

	aes_dev = platform_get_drvdata(pdev);

	unregister_aes_algs(aes_dev);

	spin_lock(&ocs_aes.lock);
	list_del(&aes_dev->list);
	spin_unlock(&ocs_aes.lock);

	crypto_engine_exit(aes_dev->engine);
}

/*
 * Probe: map registers, request the IRQ, add the device to the global
 * device list, then start a crypto engine and register the algorithms.
 * Resources obtained via devm_* are released automatically on failure.
 */
static int kmb_ocs_aes_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct ocs_aes_dev *aes_dev;
	int rc;

	aes_dev = devm_kzalloc(dev, sizeof(*aes_dev), GFP_KERNEL);
	if (!aes_dev)
		return -ENOMEM;

	aes_dev->dev = dev;

	platform_set_drvdata(pdev, aes_dev);

	/* The OCS DMA engine addresses only 32 bits. */
	rc = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (rc) {
		dev_err(dev, "Failed to set 32 bit dma mask %d\n", rc);
		return rc;
	}

	/* Get base register address. */
	aes_dev->base_reg = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(aes_dev->base_reg))
		return PTR_ERR(aes_dev->base_reg);

	/* Get and request IRQ */
	aes_dev->irq = platform_get_irq(pdev, 0);
	if (aes_dev->irq < 0)
		return aes_dev->irq;

	rc = devm_request_threaded_irq(dev, aes_dev->irq, ocs_aes_irq_handler,
				       NULL, 0, "keembay-ocs-aes", aes_dev);
	if (rc < 0) {
		dev_err(dev, "Could not request IRQ\n");
		return rc;
	}

	/* Publish the device on the driver-global list. */
	INIT_LIST_HEAD(&aes_dev->list);
	spin_lock(&ocs_aes.lock);
	list_add_tail(&aes_dev->list, &ocs_aes.dev_list);
	spin_unlock(&ocs_aes.lock);

	init_completion(&aes_dev->irq_completion);

	/* Initialize crypto engine */
	aes_dev->engine = crypto_engine_alloc_init(dev, true);
	if (!aes_dev->engine) {
		rc = -ENOMEM;
		goto list_del;
	}

	rc = crypto_engine_start(aes_dev->engine);
	if (rc) {
		dev_err(dev, "Could not start crypto engine\n");
		goto cleanup;
	}

	rc = register_aes_algs(aes_dev);
	if (rc)
{
		dev_err(dev,
			"Could not register OCS algorithms with Crypto API\n");
		goto cleanup;
	}

	return 0;

	/* Error unwinding, in reverse order of acquisition. */
cleanup:
	crypto_engine_exit(aes_dev->engine);
list_del:
	spin_lock(&ocs_aes.lock);
	list_del(&aes_dev->list);
	spin_unlock(&ocs_aes.lock);

	return rc;
}

/* The OCS driver is a platform device. */
static struct platform_driver kmb_ocs_aes_driver = {
	.probe = kmb_ocs_aes_probe,
	.remove = kmb_ocs_aes_remove,
	.driver = {
		.name = DRV_NAME,
		.of_match_table = kmb_ocs_aes_of_match,
	},
};

module_platform_driver(kmb_ocs_aes_driver);

MODULE_DESCRIPTION("Intel Keem Bay Offload and Crypto Subsystem (OCS) AES/SM4 Driver");
MODULE_LICENSE("GPL");

/* Aliases so the module loads when a driver-name algorithm is requested. */
MODULE_ALIAS_CRYPTO("cbc-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ctr-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("gcm-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ccm-aes-keembay-ocs");

MODULE_ALIAS_CRYPTO("cbc-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("ctr-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("gcm-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("ccm-sm4-keembay-ocs");

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
MODULE_ALIAS_CRYPTO("ecb-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ecb-sm4-keembay-ocs");
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB */

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
MODULE_ALIAS_CRYPTO("cts-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("cts-sm4-keembay-ocs");
#endif /* CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS */