get:
Show a patch.

patch:
Partially update a patch.

put:
Update a patch.
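
For orientation, a minimal client-side sketch of the operations above, assuming the Python requests package and an API token exported in a hypothetical PW_TOKEN environment variable (GET needs no authentication on public instances; PATCH and PUT require a token with maintainer rights on the project, and the state name used below is illustrative):

import os
import requests

BASE = "https://patches.dpdk.org/api"
PATCH_ID = 94852

# get: show a patch (read-only, no token needed)
patch = requests.get(f"{BASE}/patches/{PATCH_ID}/").json()
print(patch["name"], patch["state"])

# patch: partial update, e.g. move the patch to another state
# (assumes PW_TOKEN holds a maintainer API token and the project
# defines an "accepted" state)
resp = requests.patch(
    f"{BASE}/patches/{PATCH_ID}/",
    headers={"Authorization": f"Token {os.environ['PW_TOKEN']}"},
    json={"state": "accepted"},
)
resp.raise_for_status()
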

GET /api/patches/94852/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 94852,
    "url": "https://patches.dpdk.org/api/patches/94852/?format=api",
    "web_url": "https://patches.dpdk.org/project/dpdk/patch/1624602307-30098-3-git-send-email-anoobj@marvell.com/",
    "project": {
        "id": 1,
        "url": "https://patches.dpdk.org/api/projects/1/?format=api",
        "name": "DPDK",
        "link_name": "dpdk",
        "list_id": "dev.dpdk.org",
        "list_email": "dev@dpdk.org",
        "web_url": "http://core.dpdk.org",
        "scm_url": "git://dpdk.org/dpdk",
        "webscm_url": "http://git.dpdk.org/dpdk",
        "list_archive_url": "https://inbox.dpdk.org/dev",
        "list_archive_url_format": "https://inbox.dpdk.org/dev/{}",
        "commit_url_format": ""
    },
    "msgid": "<1624602307-30098-3-git-send-email-anoobj@marvell.com>",
    "list_archive_url": "https://inbox.dpdk.org/dev/1624602307-30098-3-git-send-email-anoobj@marvell.com",
    "date": "2021-06-25T06:25:05",
    "name": "[v2,2/4] crypto/cnxk: add asymmetric datapath ops",
    "commit_ref": null,
    "pull_url": null,
    "state": "superseded",
    "archived": true,
    "hash": "72ef6586606c95f7fbc484fe890d555e5663e036",
    "submitter": {
        "id": 1205,
        "url": "https://patches.dpdk.org/api/people/1205/?format=api",
        "name": "Anoob Joseph",
        "email": "anoobj@marvell.com"
    },
    "delegate": {
        "id": 6690,
        "url": "https://patches.dpdk.org/api/users/6690/?format=api",
        "username": "akhil",
        "first_name": "akhil",
        "last_name": "goyal",
        "email": "gakhil@marvell.com"
    },
    "mbox": "https://patches.dpdk.org/project/dpdk/patch/1624602307-30098-3-git-send-email-anoobj@marvell.com/mbox/",
    "series": [
        {
            "id": 17485,
            "url": "https://patches.dpdk.org/api/series/17485/?format=api",
            "web_url": "https://patches.dpdk.org/project/dpdk/list/?series=17485",
            "date": "2021-06-25T06:25:03",
            "name": "Add asymmetric ops in crypto cnxk PMDs",
            "version": 2,
            "mbox": "https://patches.dpdk.org/series/17485/mbox/"
        }
    ],
    "comments": "https://patches.dpdk.org/api/patches/94852/comments/",
    "check": "success",
    "checks": "https://patches.dpdk.org/api/patches/94852/checks/",
    "tags": {},
    "related": [],
    "headers": {
        "Return-Path": "<dev-bounces@dpdk.org>",
        "X-Original-To": "patchwork@inbox.dpdk.org",
        "Delivered-To": "patchwork@inbox.dpdk.org",
        "Received": [
            "from mails.dpdk.org (mails.dpdk.org [217.70.189.124])\n\tby inbox.dpdk.org (Postfix) with ESMTP id A8003A0C40;\n\tFri, 25 Jun 2021 08:25:34 +0200 (CEST)",
            "from [217.70.189.124] (localhost [127.0.0.1])\n\tby mails.dpdk.org (Postfix) with ESMTP id A1BC1410E6;\n\tFri, 25 Jun 2021 08:25:32 +0200 (CEST)",
            "from mx0b-0016f401.pphosted.com (mx0b-0016f401.pphosted.com\n [67.231.156.173])\n by mails.dpdk.org (Postfix) with ESMTP id 0FB7A40E46\n for <dev@dpdk.org>; Fri, 25 Jun 2021 08:25:30 +0200 (CEST)",
            "from pps.filterd (m0045851.ppops.net [127.0.0.1])\n by mx0b-0016f401.pphosted.com (8.16.0.43/8.16.0.43) with SMTP id\n 15P6POTM000801; Thu, 24 Jun 2021 23:25:30 -0700",
            "from dc5-exch01.marvell.com ([199.233.59.181])\n by mx0b-0016f401.pphosted.com with ESMTP id 39d241smau-2\n (version=TLSv1.2 cipher=ECDHE-RSA-AES256-SHA384 bits=256 verify=NOT);\n Thu, 24 Jun 2021 23:25:30 -0700",
            "from DC5-EXCH02.marvell.com (10.69.176.39) by DC5-EXCH01.marvell.com\n (10.69.176.38) with Microsoft SMTP Server (TLS) id 15.0.1497.18;\n Thu, 24 Jun 2021 23:25:27 -0700",
            "from maili.marvell.com (10.69.176.80) by DC5-EXCH02.marvell.com\n (10.69.176.39) with Microsoft SMTP Server id 15.0.1497.18 via Frontend\n Transport; Thu, 24 Jun 2021 23:25:27 -0700",
            "from HY-LT1002.marvell.com (HY-LT1002.marvell.com [10.28.176.218])\n by maili.marvell.com (Postfix) with ESMTP id 8F1463F704C;\n Thu, 24 Jun 2021 23:25:24 -0700 (PDT)"
        ],
        "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed; d=marvell.com;\n h=from : to : cc :\n subject : date : message-id : in-reply-to : references : mime-version :\n content-transfer-encoding : content-type; s=pfpt0220;\n bh=6koba9UcsE9RXvSFplIt7s7pM9o/85hpuzI3jXRZbV8=;\n b=Cvx4jSGC0BMY8kZh5KLxX+7CVanx3czG5W15TE+q1jC8p/DNYNRAEZsAelAwIelFwQId\n YuS0HNkPTsbclBmBSaJRKxxfhR0Db/VXb76cHIzXuqQijsVGrn2YoSA3bSmdgoOgB3oe\n p4+DJTtgNn35m2LbvHDyWt6ZTWf/zHj+99AF2woO48uPLWHT0AeYqh8IUHVVtyj9CF6k\n sV4y64xGKT+6jaxkGUHltxwFCxL/cmO0QauLARBnYcuE78VE+HdZTr0/x2BCjDIMFU1J\n QKOfh/+iRoRl44nFb6VDWV49uflj7XSalHQIA7HmF7MV8pwcJHKSG7qklz4QwQvKeBD2 fA==",
        "From": "Anoob Joseph <anoobj@marvell.com>",
        "To": "Akhil Goyal <gakhil@marvell.com>, Thomas Monjalon <thomas@monjalon.net>",
        "CC": "Kiran Kumar K <kirankumark@marvell.com>, Jerin Jacob <jerinj@marvell.com>,\n Ankur Dwivedi <adwivedi@marvell.com>, Tejasree Kondoj\n <ktejasree@marvell.com>, <dev@dpdk.org>",
        "Date": "Fri, 25 Jun 2021 11:55:05 +0530",
        "Message-ID": "<1624602307-30098-3-git-send-email-anoobj@marvell.com>",
        "X-Mailer": "git-send-email 2.7.4",
        "In-Reply-To": "<1624602307-30098-1-git-send-email-anoobj@marvell.com>",
        "References": "<1622655994-24480-1-git-send-email-anoobj@marvell.com>\n <1624602307-30098-1-git-send-email-anoobj@marvell.com>",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit",
        "Content-Type": "text/plain",
        "X-Proofpoint-GUID": "AX3XBQ2-1usThYBEV4Wioh-bARqdSJUa",
        "X-Proofpoint-ORIG-GUID": "AX3XBQ2-1usThYBEV4Wioh-bARqdSJUa",
        "X-Proofpoint-Virus-Version": "vendor=fsecure engine=2.50.10434:6.0.391, 18.0.790\n definitions=2021-06-25_02:2021-06-24,\n 2021-06-25 signatures=0",
        "Subject": "[dpdk-dev] [PATCH v2 2/4] crypto/cnxk: add asymmetric datapath ops",
        "X-BeenThere": "dev@dpdk.org",
        "X-Mailman-Version": "2.1.29",
        "Precedence": "list",
        "List-Id": "DPDK patches and discussions <dev.dpdk.org>",
        "List-Unsubscribe": "<https://mails.dpdk.org/options/dev>,\n <mailto:dev-request@dpdk.org?subject=unsubscribe>",
        "List-Archive": "<http://mails.dpdk.org/archives/dev/>",
        "List-Post": "<mailto:dev@dpdk.org>",
        "List-Help": "<mailto:dev-request@dpdk.org?subject=help>",
        "List-Subscribe": "<https://mails.dpdk.org/listinfo/dev>,\n <mailto:dev-request@dpdk.org?subject=subscribe>",
        "Errors-To": "dev-bounces@dpdk.org",
        "Sender": "\"dev\" <dev-bounces@dpdk.org>"
    },
    "content": "From: Kiran Kumar K <kirankumark@marvell.com>\n\nAdd asymmetric crypto datapath ops.\n\n\nSigned-off-by: Kiran Kumar K <kirankumark@marvell.com>\n---\n drivers/crypto/cnxk/cn10k_cryptodev_ops.c |  27 ++\n drivers/crypto/cnxk/cn9k_cryptodev_ops.c  |  29 +-\n drivers/crypto/cnxk/cnxk_ae.h             | 625 ++++++++++++++++++++++++++++++\n 3 files changed, 679 insertions(+), 2 deletions(-)",
    "diff": "diff --git a/drivers/crypto/cnxk/cn10k_cryptodev_ops.c b/drivers/crypto/cnxk/cn10k_cryptodev_ops.c\nindex 8b33764..6d12cc3 100644\n--- a/drivers/crypto/cnxk/cn10k_cryptodev_ops.c\n+++ b/drivers/crypto/cnxk/cn10k_cryptodev_ops.c\n@@ -10,6 +10,7 @@\n #include \"cn10k_cryptodev_ops.h\"\n #include \"cn10k_ipsec_la_ops.h\"\n #include \"cn10k_ipsec.h\"\n+#include \"cnxk_ae.h\"\n #include \"cnxk_cryptodev.h\"\n #include \"cnxk_cryptodev_ops.h\"\n #include \"cnxk_se.h\"\n@@ -100,7 +101,9 @@ cn10k_cpt_fill_inst(struct cnxk_cpt_qp *qp, struct rte_crypto_op *ops[],\n \t\t    struct cpt_inst_s inst[], struct cpt_inflight_req *infl_req)\n {\n \tstruct cn10k_sec_session *sec_sess;\n+\tstruct rte_crypto_asym_op *asym_op;\n \tstruct rte_crypto_sym_op *sym_op;\n+\tstruct cnxk_ae_sess *ae_sess;\n \tstruct cnxk_se_sess *sess;\n \tstruct rte_crypto_op *op;\n \tuint64_t w7;\n@@ -148,6 +151,21 @@ cn10k_cpt_fill_inst(struct cnxk_cpt_qp *qp, struct rte_crypto_op *ops[],\n \t\t\t}\n \t\t\tw7 = sess->cpt_inst_w7;\n \t\t}\n+\t} else if (op->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC) {\n+\n+\t\tif (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {\n+\t\t\tasym_op = op->asym;\n+\t\t\tae_sess = get_asym_session_private_data(\n+\t\t\t\tasym_op->session, cn10k_cryptodev_driver_id);\n+\t\t\tret = cnxk_ae_enqueue(qp, op, infl_req, &inst[0],\n+\t\t\t\t\t      ae_sess);\n+\t\t\tif (unlikely(ret))\n+\t\t\t\treturn 0;\n+\t\t\tw7 = ae_sess->cpt_inst_w7;\n+\t\t} else {\n+\t\t\tplt_dp_err(\"Not supported Asym op without session\");\n+\t\t\treturn 0;\n+\t\t}\n \t} else {\n \t\tplt_dp_err(\"Unsupported op type\");\n \t\treturn 0;\n@@ -303,6 +321,15 @@ cn10k_cpt_dequeue_post_process(struct cnxk_cpt_qp *qp,\n \t\t\t\tcompl_auth_verify(cop, (uint8_t *)rsp[0],\n \t\t\t\t\t\t  rsp[1]);\n \t\t\t}\n+\t\t} else if (cop->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC) {\n+\t\t\tstruct rte_crypto_asym_op *op = cop->asym;\n+\t\t\tuintptr_t *mdata = infl_req->mdata;\n+\t\t\tstruct cnxk_ae_sess *sess;\n+\n+\t\t\tsess = get_asym_session_private_data(\n+\t\t\t\top->session, cn10k_cryptodev_driver_id);\n+\n+\t\t\tcnxk_ae_post_process(cop, sess, (uint8_t *)mdata[0]);\n \t\t}\n \t} else {\n \t\tcop->status = RTE_CRYPTO_OP_STATUS_ERROR;\ndiff --git a/drivers/crypto/cnxk/cn9k_cryptodev_ops.c b/drivers/crypto/cnxk/cn9k_cryptodev_ops.c\nindex d8b2aea..e367cc4 100644\n--- a/drivers/crypto/cnxk/cn9k_cryptodev_ops.c\n+++ b/drivers/crypto/cnxk/cn9k_cryptodev_ops.c\n@@ -7,6 +7,7 @@\n \n #include \"cn9k_cryptodev.h\"\n #include \"cn9k_cryptodev_ops.h\"\n+#include \"cnxk_ae.h\"\n #include \"cnxk_cryptodev.h\"\n #include \"cnxk_cryptodev_ops.h\"\n #include \"cnxk_se.h\"\n@@ -65,11 +66,11 @@ static uint16_t\n cn9k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops)\n {\n \tstruct cpt_inflight_req *infl_req;\n+\tstruct rte_crypto_asym_op *asym_op;\n \tstruct rte_crypto_sym_op *sym_op;\n \tuint16_t nb_allowed, count = 0;\n \tstruct cnxk_cpt_qp *qp = qptr;\n \tstruct pending_queue *pend_q;\n-\tstruct cnxk_se_sess *sess;\n \tstruct rte_crypto_op *op;\n \tstruct cpt_inst_s inst;\n \tuint64_t lmt_status;\n@@ -95,6 +96,8 @@ cn9k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops)\n \t\tinfl_req->op_flags = 0;\n \n \t\tif (op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC) {\n+\t\t\tstruct cnxk_se_sess *sess;\n+\n \t\t\tif (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {\n \t\t\t\tsym_op = op->sym;\n \t\t\t\tsess = get_sym_session_private_data(\n@@ -120,6 +123,20 @@ cn9k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t 
nb_ops)\n \t\t\t\t\t\t\top->sym->session);\n \t\t\t\t}\n \t\t\t}\n+\t\t\tinst.w7.u64 = sess->cpt_inst_w7;\n+\t\t} else if (op->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC) {\n+\t\t\tstruct cnxk_ae_sess *sess;\n+\n+\t\t\tret = -EINVAL;\n+\t\t\tif (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {\n+\t\t\t\tasym_op = op->asym;\n+\t\t\t\tsess = get_asym_session_private_data(\n+\t\t\t\t\tasym_op->session,\n+\t\t\t\t\tcn9k_cryptodev_driver_id);\n+\t\t\t\tret = cnxk_ae_enqueue(qp, op, infl_req, &inst,\n+\t\t\t\t\t\t      sess);\n+\t\t\t\tinst.w7.u64 = sess->cpt_inst_w7;\n+\t\t\t}\n \t\t} else {\n \t\t\tplt_dp_err(\"Unsupported op type\");\n \t\t\tbreak;\n@@ -134,7 +151,6 @@ cn9k_cpt_enqueue_burst(void *qptr, struct rte_crypto_op **ops, uint16_t nb_ops)\n \n \t\tinfl_req->res.cn9k.compcode = CPT_COMP_NOT_DONE;\n \t\tinst.res_addr = (uint64_t)&infl_req->res;\n-\t\tinst.w7.u64 = sess->cpt_inst_w7;\n \n \t\tdo {\n \t\t\t/* Copy CPT command to LMTLINE */\n@@ -189,6 +205,15 @@ cn9k_cpt_dequeue_post_process(struct cnxk_cpt_qp *qp, struct rte_crypto_op *cop,\n \t\t\t\tcompl_auth_verify(cop, (uint8_t *)rsp[0],\n \t\t\t\t\t\t  rsp[1]);\n \t\t\t}\n+\t\t} else if (cop->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC) {\n+\t\t\tstruct rte_crypto_asym_op *op = cop->asym;\n+\t\t\tuintptr_t *mdata = infl_req->mdata;\n+\t\t\tstruct cnxk_ae_sess *sess;\n+\n+\t\t\tsess = get_asym_session_private_data(\n+\t\t\t\top->session, cn9k_cryptodev_driver_id);\n+\n+\t\t\tcnxk_ae_post_process(cop, sess, (uint8_t *)mdata[0]);\n \t\t}\n \t} else {\n \t\tcop->status = RTE_CRYPTO_OP_STATUS_ERROR;\ndiff --git a/drivers/crypto/cnxk/cnxk_ae.h b/drivers/crypto/cnxk/cnxk_ae.h\nindex e3dd63b..c752e62 100644\n--- a/drivers/crypto/cnxk/cnxk_ae.h\n+++ b/drivers/crypto/cnxk/cnxk_ae.h\n@@ -208,4 +208,629 @@ cnxk_ae_free_session_parameters(struct cnxk_ae_sess *sess)\n \t\tbreak;\n \t}\n }\n+\n+static __rte_always_inline int\n+cnxk_ae_modex_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,\n+\t\t   struct rte_crypto_modex_xform *mod, struct cpt_inst_s *inst)\n+{\n+\tuint32_t exp_len = mod->exponent.length;\n+\tuint32_t mod_len = mod->modulus.length;\n+\tstruct rte_crypto_mod_op_param mod_op;\n+\tuint64_t total_key_len;\n+\tunion cpt_inst_w4 w4;\n+\tuint32_t base_len;\n+\tuint32_t dlen;\n+\tuint8_t *dptr;\n+\n+\tmod_op = op->asym->modex;\n+\n+\tbase_len = mod_op.base.length;\n+\tif (unlikely(base_len > mod_len)) {\n+\t\top->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\treturn -ENOTSUP;\n+\t}\n+\n+\ttotal_key_len = mod_len + exp_len;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\tmemcpy(dptr, mod->modulus.data, total_key_len);\n+\tdptr += total_key_len;\n+\tmemcpy(dptr, mod_op.base.data, base_len);\n+\tdptr += base_len;\n+\tdlen = total_key_len + base_len;\n+\n+\t/* Setup opcodes */\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;\n+\tw4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;\n+\n+\tw4.s.param1 = mod_len;\n+\tw4.s.param2 = exp_len;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+\n+\treturn 0;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_rsa_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,\n+\t\t struct rte_crypto_rsa_xform *rsa,\n+\t\t rte_crypto_param *crypto_param, struct cpt_inst_s *inst)\n+{\n+\tstruct rte_crypto_rsa_op_param rsa_op;\n+\tuint32_t mod_len = rsa->n.length;\n+\tuint32_t exp_len = rsa->e.length;\n+\tuint64_t total_key_len;\n+\tunion cpt_inst_w4 w4;\n+\tuint32_t in_size;\n+\tuint32_t dlen;\n+\tuint8_t *dptr;\n+\n+\trsa_op = 
op->asym->rsa;\n+\ttotal_key_len = mod_len + exp_len;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\tmemcpy(dptr, rsa->n.data, total_key_len);\n+\tdptr += total_key_len;\n+\n+\tin_size = crypto_param->length;\n+\tmemcpy(dptr, crypto_param->data, in_size);\n+\n+\tdptr += in_size;\n+\tdlen = total_key_len + in_size;\n+\n+\tif (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {\n+\t\t/* Use mod_exp operation for no_padding type */\n+\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX;\n+\t\tw4.s.param2 = exp_len;\n+\t} else {\n+\t\tif (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {\n+\t\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC;\n+\t\t\t/* Public key encrypt, use BT2*/\n+\t\t\tw4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2 |\n+\t\t\t\t      ((uint16_t)(exp_len) << 1);\n+\t\t} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {\n+\t\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC;\n+\t\t\t/* Public key decrypt, use BT1 */\n+\t\t\tw4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;\n+\t\t}\n+\t}\n+\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;\n+\n+\tw4.s.param1 = mod_len;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_rsa_crt_prep(struct rte_crypto_op *op, struct roc_ae_buf_ptr *meta_buf,\n+\t\t     struct rte_crypto_rsa_xform *rsa,\n+\t\t     rte_crypto_param *crypto_param, struct cpt_inst_s *inst)\n+{\n+\tuint32_t qInv_len = rsa->qt.qInv.length;\n+\tstruct rte_crypto_rsa_op_param rsa_op;\n+\tuint32_t dP_len = rsa->qt.dP.length;\n+\tuint32_t dQ_len = rsa->qt.dQ.length;\n+\tuint32_t p_len = rsa->qt.p.length;\n+\tuint32_t q_len = rsa->qt.q.length;\n+\tuint32_t mod_len = rsa->n.length;\n+\tuint64_t total_key_len;\n+\tunion cpt_inst_w4 w4;\n+\tuint32_t in_size;\n+\tuint32_t dlen;\n+\tuint8_t *dptr;\n+\n+\trsa_op = op->asym->rsa;\n+\ttotal_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\tmemcpy(dptr, rsa->qt.q.data, total_key_len);\n+\tdptr += total_key_len;\n+\n+\tin_size = crypto_param->length;\n+\tmemcpy(dptr, crypto_param->data, in_size);\n+\n+\tdptr += in_size;\n+\tdlen = total_key_len + in_size;\n+\n+\tif (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {\n+\t\t/*Use mod_exp operation for no_padding type */\n+\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_MODEX_CRT;\n+\t} else {\n+\t\tif (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {\n+\t\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_ENC_CRT;\n+\t\t\t/* Private encrypt, use BT1 */\n+\t\t\tw4.s.param2 = ROC_AE_CPT_BLOCK_TYPE1;\n+\t\t} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {\n+\t\t\tw4.s.opcode_minor = ROC_AE_MINOR_OP_PKCS_DEC_CRT;\n+\t\t\t/* Private decrypt, use BT2 */\n+\t\t\tw4.s.param2 = ROC_AE_CPT_BLOCK_TYPE2;\n+\t\t}\n+\t}\n+\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_MODEX;\n+\n+\tw4.s.param1 = mod_len;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+}\n+\n+static __rte_always_inline int __rte_hot\n+cnxk_ae_enqueue_rsa_op(struct rte_crypto_op *op,\n+\t\t       struct roc_ae_buf_ptr *meta_buf,\n+\t\t       struct cnxk_ae_sess *sess, struct cpt_inst_s *inst)\n+{\n+\tstruct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;\n+\n+\tswitch (rsa->op_type) {\n+\tcase RTE_CRYPTO_ASYM_OP_VERIFY:\n+\t\tcnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->sign,\n+\t\t\t\t inst);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_ENCRYPT:\n+\t\tcnxk_ae_rsa_prep(op, meta_buf, &sess->rsa_ctx, &rsa->message,\n+\t\t\t\t 
inst);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_SIGN:\n+\t\tcnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx,\n+\t\t\t\t     &rsa->message, inst);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_DECRYPT:\n+\t\tcnxk_ae_rsa_crt_prep(op, meta_buf, &sess->rsa_ctx, &rsa->cipher,\n+\t\t\t\t     inst);\n+\t\tbreak;\n+\tdefault:\n+\t\top->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\treturn -EINVAL;\n+\t}\n+\treturn 0;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,\n+\t\t\tstruct roc_ae_buf_ptr *meta_buf,\n+\t\t\tuint64_t fpm_table_iova, struct roc_ae_ec_group *ec_grp,\n+\t\t\tuint8_t curveid, struct cpt_inst_s *inst)\n+{\n+\tuint16_t message_len = ecdsa->message.length;\n+\tuint16_t pkey_len = ecdsa->pkey.length;\n+\tuint16_t p_align, k_align, m_align;\n+\tuint16_t k_len = ecdsa->k.length;\n+\tuint16_t order_len, prime_len;\n+\tuint16_t o_offset, pk_offset;\n+\tunion cpt_inst_w4 w4;\n+\tuint16_t dlen;\n+\tuint8_t *dptr;\n+\n+\tprime_len = ec_grp->prime.length;\n+\torder_len = ec_grp->order.length;\n+\n+\t/* Truncate input length to curve prime length */\n+\tif (message_len > prime_len)\n+\t\tmessage_len = prime_len;\n+\tm_align = RTE_ALIGN_CEIL(message_len, 8);\n+\n+\tp_align = RTE_ALIGN_CEIL(prime_len, 8);\n+\tk_align = RTE_ALIGN_CEIL(k_len, 8);\n+\n+\t/* Set write offset for order and private key */\n+\to_offset = prime_len - order_len;\n+\tpk_offset = prime_len - pkey_len;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\n+\t/*\n+\t * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),\n+\t * ROUNDUP8(priv key len, prime len, order len)).\n+\t * Please note, private key, order cannot exceed prime\n+\t * length i.e 3 * p_align.\n+\t */\n+\tdlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 3;\n+\n+\tmemset(dptr, 0, dlen);\n+\n+\t*(uint64_t *)dptr = fpm_table_iova;\n+\tdptr += sizeof(fpm_table_iova);\n+\n+\tmemcpy(dptr, ecdsa->k.data, k_len);\n+\tdptr += k_align;\n+\n+\tmemcpy(dptr, ec_grp->prime.data, prime_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr + o_offset, ec_grp->order.data, order_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr, ecdsa->message.data, message_len);\n+\tdptr += m_align;\n+\n+\t/* Setup opcodes */\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;\n+\tw4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_SIGN;\n+\n+\tw4.s.param1 = curveid | (message_len << 8);\n+\tw4.s.param2 = k_len;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,\n+\t\t\t  struct roc_ae_buf_ptr *meta_buf,\n+\t\t\t  uint64_t fpm_table_iova,\n+\t\t\t  struct roc_ae_ec_group *ec_grp, uint8_t curveid,\n+\t\t\t  struct cpt_inst_s *inst)\n+{\n+\tuint32_t message_len = ecdsa->message.length;\n+\tuint16_t o_offset, r_offset, s_offset;\n+\tuint16_t qx_len = ecdsa->q.x.length;\n+\tuint16_t qy_len = ecdsa->q.y.length;\n+\tuint16_t r_len = ecdsa->r.length;\n+\tuint16_t s_len = ecdsa->s.length;\n+\tuint16_t order_len, prime_len;\n+\tuint16_t qx_offset, qy_offset;\n+\tuint16_t p_align, m_align;\n+\tunion cpt_inst_w4 w4;\n+\tuint16_t dlen;\n+\tuint8_t *dptr;\n+\n+\tprime_len = ec_grp->prime.length;\n+\torder_len = ec_grp->order.length;\n+\n+\t/* Truncate input length to curve prime length */\n+\tif (message_len > prime_len)\n+\t\tmessage_len = prime_len;\n+\n+\tm_align = 
RTE_ALIGN_CEIL(message_len, 8);\n+\tp_align = RTE_ALIGN_CEIL(prime_len, 8);\n+\n+\t/* Set write offset for sign, order and public key coordinates */\n+\to_offset = prime_len - order_len;\n+\tqx_offset = prime_len - qx_len;\n+\tqy_offset = prime_len - qy_len;\n+\tr_offset = prime_len - r_len;\n+\ts_offset = prime_len - s_len;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\n+\t/*\n+\t * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),\n+\t * ROUNDUP8(sign len(r and s), public key len(x and y coordinates),\n+\t * prime len, order len)).\n+\t * Please note sign, public key and order can not exceed prime length\n+\t * i.e. 6 * p_align\n+\t */\n+\tdlen = sizeof(fpm_table_iova) + m_align + (6 * p_align);\n+\n+\tmemset(dptr, 0, dlen);\n+\n+\t*(uint64_t *)dptr = fpm_table_iova;\n+\tdptr += sizeof(fpm_table_iova);\n+\n+\tmemcpy(dptr + r_offset, ecdsa->r.data, r_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr + s_offset, ecdsa->s.data, s_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr, ecdsa->message.data, message_len);\n+\tdptr += m_align;\n+\n+\tmemcpy(dptr + o_offset, ec_grp->order.data, order_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr, ec_grp->prime.data, prime_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);\n+\tdptr += p_align;\n+\n+\tmemcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);\n+\tdptr += p_align;\n+\n+\t/* Setup opcodes */\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_ECDSA;\n+\tw4.s.opcode_minor = ROC_AE_MINOR_OP_ECDSA_VERIFY;\n+\n+\tw4.s.param1 = curveid | (message_len << 8);\n+\tw4.s.param2 = 0;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+}\n+\n+static __rte_always_inline int __rte_hot\n+cnxk_ae_enqueue_ecdsa_op(struct rte_crypto_op *op,\n+\t\t\t struct roc_ae_buf_ptr *meta_buf,\n+\t\t\t struct cnxk_ae_sess *sess, uint64_t *fpm_iova,\n+\t\t\t struct roc_ae_ec_group **ec_grp,\n+\t\t\t struct cpt_inst_s *inst)\n+{\n+\tstruct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;\n+\tuint8_t curveid = sess->ec_ctx.curveid;\n+\n+\tif (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)\n+\t\tcnxk_ae_ecdsa_sign_prep(ecdsa, meta_buf, fpm_iova[curveid],\n+\t\t\t\t\tec_grp[curveid], curveid, inst);\n+\telse if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)\n+\t\tcnxk_ae_ecdsa_verify_prep(ecdsa, meta_buf, fpm_iova[curveid],\n+\t\t\t\t\t  ec_grp[curveid], curveid, inst);\n+\telse {\n+\t\top->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\treturn -EINVAL;\n+\t}\n+\treturn 0;\n+}\n+\n+static __rte_always_inline int\n+cnxk_ae_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,\n+\t\t  struct roc_ae_buf_ptr *meta_buf,\n+\t\t  struct roc_ae_ec_group *ec_grp, uint8_t curveid,\n+\t\t  struct cpt_inst_s *inst)\n+{\n+\tuint16_t x1_len = ecpm->p.x.length;\n+\tuint16_t y1_len = ecpm->p.y.length;\n+\tuint16_t scalar_align, p_align;\n+\tuint16_t x1_offset, y1_offset;\n+\tuint16_t dlen, prime_len;\n+\tunion cpt_inst_w4 w4;\n+\tuint8_t *dptr;\n+\n+\tprime_len = ec_grp->prime.length;\n+\n+\t/* Input buffer */\n+\tdptr = meta_buf->vaddr;\n+\tinst->dptr = (uintptr_t)dptr;\n+\n+\tp_align = RTE_ALIGN_CEIL(prime_len, 8);\n+\tscalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);\n+\n+\t/*\n+\t * Set dlen = sum(ROUNDUP8(input point(x and y coordinates), prime,\n+\t * scalar length),\n+\t * Please note point length is equivalent to prime of the curve\n+\t */\n+\tdlen = 3 * p_align + scalar_align;\n+\n+\tx1_offset = prime_len - x1_len;\n+\ty1_offset = prime_len - y1_len;\n+\n+\tmemset(dptr, 0, 
dlen);\n+\n+\t/* Copy input point, scalar, prime */\n+\tmemcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);\n+\tdptr += p_align;\n+\tmemcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);\n+\tdptr += p_align;\n+\tmemcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);\n+\tdptr += scalar_align;\n+\tmemcpy(dptr, ec_grp->prime.data, ec_grp->prime.length);\n+\tdptr += p_align;\n+\n+\t/* Setup opcodes */\n+\tw4.s.opcode_major = ROC_AE_MAJOR_OP_ECC;\n+\tw4.s.opcode_minor = ROC_AE_MINOR_OP_ECC_UMP;\n+\n+\tw4.s.param1 = curveid;\n+\tw4.s.param2 = ecpm->scalar.length;\n+\tw4.s.dlen = dlen;\n+\n+\tinst->w4.u64 = w4.u64;\n+\tinst->rptr = (uintptr_t)dptr;\n+\n+\treturn 0;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_dequeue_rsa_op(struct rte_crypto_op *cop, uint8_t *rptr,\n+\t\t       struct rte_crypto_rsa_xform *rsa_ctx)\n+{\n+\tstruct rte_crypto_rsa_op_param *rsa = &cop->asym->rsa;\n+\n+\tswitch (rsa->op_type) {\n+\tcase RTE_CRYPTO_ASYM_OP_ENCRYPT:\n+\t\trsa->cipher.length = rsa_ctx->n.length;\n+\t\tmemcpy(rsa->cipher.data, rptr, rsa->cipher.length);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_DECRYPT:\n+\t\tif (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {\n+\t\t\trsa->message.length = rsa_ctx->n.length;\n+\t\t\tmemcpy(rsa->message.data, rptr, rsa->message.length);\n+\t\t} else {\n+\t\t\t/* Get length of decrypted output */\n+\t\t\trsa->message.length =\n+\t\t\t\trte_cpu_to_be_16(*((uint16_t *)rptr));\n+\t\t\t/*\n+\t\t\t * Offset output data pointer by length field\n+\t\t\t * (2 bytes) and copy decrypted data.\n+\t\t\t */\n+\t\t\tmemcpy(rsa->message.data, rptr + 2,\n+\t\t\t       rsa->message.length);\n+\t\t}\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_SIGN:\n+\t\trsa->sign.length = rsa_ctx->n.length;\n+\t\tmemcpy(rsa->sign.data, rptr, rsa->sign.length);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_OP_VERIFY:\n+\t\tif (rsa->pad == RTE_CRYPTO_RSA_PADDING_NONE) {\n+\t\t\trsa->sign.length = rsa_ctx->n.length;\n+\t\t\tmemcpy(rsa->sign.data, rptr, rsa->sign.length);\n+\t\t} else {\n+\t\t\t/* Get length of signed output */\n+\t\t\trsa->sign.length =\n+\t\t\t\trte_cpu_to_be_16(*((uint16_t *)rptr));\n+\t\t\t/*\n+\t\t\t * Offset output data pointer by length field\n+\t\t\t * (2 bytes) and copy signed data.\n+\t\t\t */\n+\t\t\tmemcpy(rsa->sign.data, rptr + 2, rsa->sign.length);\n+\t\t}\n+\t\tif (memcmp(rsa->sign.data, rsa->message.data,\n+\t\t\t   rsa->message.length)) {\n+\t\t\tcop->status = RTE_CRYPTO_OP_STATUS_ERROR;\n+\t\t}\n+\t\tbreak;\n+\tdefault:\n+\t\tcop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\tbreak;\n+\t}\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_dequeue_ecdsa_op(struct rte_crypto_ecdsa_op_param *ecdsa, uint8_t *rptr,\n+\t\t\t struct roc_ae_ec_ctx *ec,\n+\t\t\t struct roc_ae_ec_group **ec_grp)\n+{\n+\tint prime_len = ec_grp[ec->curveid]->prime.length;\n+\n+\tif (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)\n+\t\treturn;\n+\n+\t/* Separate out sign r and s components */\n+\tmemcpy(ecdsa->r.data, rptr, prime_len);\n+\tmemcpy(ecdsa->s.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);\n+\tecdsa->r.length = prime_len;\n+\tecdsa->s.length = prime_len;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_dequeue_ecpm_op(struct rte_crypto_ecpm_op_param *ecpm, uint8_t *rptr,\n+\t\t\tstruct roc_ae_ec_ctx *ec,\n+\t\t\tstruct roc_ae_ec_group **ec_grp)\n+{\n+\tint prime_len = ec_grp[ec->curveid]->prime.length;\n+\n+\tmemcpy(ecpm->r.x.data, rptr, prime_len);\n+\tmemcpy(ecpm->r.y.data, rptr + RTE_ALIGN_CEIL(prime_len, 8), prime_len);\n+\tecpm->r.x.length = prime_len;\n+\tecpm->r.y.length = 
prime_len;\n+}\n+\n+static __rte_always_inline void *\n+cnxk_ae_alloc_meta(struct roc_ae_buf_ptr *buf,\n+\t\t   struct rte_mempool *cpt_meta_pool,\n+\t\t   struct cpt_inflight_req *infl_req)\n+{\n+\tuint8_t *mdata;\n+\n+\tif (unlikely(rte_mempool_get(cpt_meta_pool, (void **)&mdata) < 0))\n+\t\treturn NULL;\n+\n+\tbuf->vaddr = mdata;\n+\n+\tinfl_req->mdata = mdata;\n+\tinfl_req->op_flags |= CPT_OP_FLAGS_METABUF;\n+\n+\treturn mdata;\n+}\n+\n+static __rte_always_inline int32_t __rte_hot\n+cnxk_ae_enqueue(struct cnxk_cpt_qp *qp, struct rte_crypto_op *op,\n+\t\tstruct cpt_inflight_req *infl_req, struct cpt_inst_s *inst,\n+\t\tstruct cnxk_ae_sess *sess)\n+{\n+\tstruct cpt_qp_meta_info *minfo = &qp->meta_info;\n+\tstruct rte_crypto_asym_op *asym_op = op->asym;\n+\tstruct roc_ae_buf_ptr meta_buf;\n+\tuint64_t *mop;\n+\tvoid *mdata;\n+\tint ret;\n+\n+\tmdata = cnxk_ae_alloc_meta(&meta_buf, minfo->pool, infl_req);\n+\tif (mdata == NULL)\n+\t\treturn -ENOMEM;\n+\n+\t/* Reserve 8B for RPTR */\n+\tmeta_buf.vaddr = PLT_PTR_ADD(mdata, sizeof(uint64_t));\n+\n+\tswitch (sess->xfrm_type) {\n+\tcase RTE_CRYPTO_ASYM_XFORM_MODEX:\n+\t\tret = cnxk_ae_modex_prep(op, &meta_buf, &sess->mod_ctx, inst);\n+\t\tif (unlikely(ret))\n+\t\t\tgoto req_fail;\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_RSA:\n+\t\tret = cnxk_ae_enqueue_rsa_op(op, &meta_buf, sess, inst);\n+\t\tif (unlikely(ret))\n+\t\t\tgoto req_fail;\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_ECDSA:\n+\t\tret = cnxk_ae_enqueue_ecdsa_op(op, &meta_buf, sess,\n+\t\t\t\t\t       sess->cnxk_fpm_iova,\n+\t\t\t\t\t       sess->ec_grp, inst);\n+\t\tif (unlikely(ret))\n+\t\t\tgoto req_fail;\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_ECPM:\n+\t\tret = cnxk_ae_ecpm_prep(&asym_op->ecpm, &meta_buf,\n+\t\t\t\t\tsess->ec_grp[sess->ec_ctx.curveid],\n+\t\t\t\t\tsess->ec_ctx.curveid, inst);\n+\t\tif (unlikely(ret))\n+\t\t\tgoto req_fail;\n+\t\tbreak;\n+\tdefault:\n+\t\top->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\tret = -EINVAL;\n+\t\tgoto req_fail;\n+\t}\n+\n+\tmop = mdata;\n+\tmop[0] = inst->rptr;\n+\treturn 0;\n+\n+req_fail:\n+\trte_mempool_put(minfo->pool, infl_req->mdata);\n+\treturn ret;\n+}\n+\n+static __rte_always_inline void\n+cnxk_ae_post_process(struct rte_crypto_op *cop, struct cnxk_ae_sess *sess,\n+\t\t     uint8_t *rptr)\n+{\n+\tstruct rte_crypto_asym_op *op = cop->asym;\n+\n+\tswitch (sess->xfrm_type) {\n+\tcase RTE_CRYPTO_ASYM_XFORM_RSA:\n+\t\tcnxk_ae_dequeue_rsa_op(cop, rptr, &sess->rsa_ctx);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_MODEX:\n+\t\top->modex.result.length = sess->mod_ctx.modulus.length;\n+\t\tmemcpy(op->modex.result.data, rptr, op->modex.result.length);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_ECDSA:\n+\t\tcnxk_ae_dequeue_ecdsa_op(&op->ecdsa, rptr, &sess->ec_ctx,\n+\t\t\t\t\t sess->ec_grp);\n+\t\tbreak;\n+\tcase RTE_CRYPTO_ASYM_XFORM_ECPM:\n+\t\tcnxk_ae_dequeue_ecpm_op(&op->ecpm, rptr, &sess->ec_ctx,\n+\t\t\t\t\tsess->ec_grp);\n+\t\tbreak;\n+\tdefault:\n+\t\tcop->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;\n+\t\tbreak;\n+\t}\n+}\n #endif /* _CNXK_AE_H_ */\n",
    "prefixes": [
        "v2",
        "2/4"
    ]
}
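
The response above can also be consumed programmatically. A small sketch, again assuming the Python requests package, that follows the "mbox" link so the patch can be applied with git am and lists the series it belongs to (the output filename is illustrative):

import requests

patch = requests.get("https://patches.dpdk.org/api/patches/94852/").json()

# "mbox" is the raw patch email, suitable for `git am`
mbox = requests.get(patch["mbox"])
mbox.raise_for_status()
with open("94852.mbox", "wb") as f:
    f.write(mbox.content)

# "series" lists the series this patch is part of; "comments" and
# "checks" are further API URLs that can be fetched the same way
for series in patch["series"]:
    print(f'series {series["id"]}: {series["name"]} (v{series["version"]})')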