@@ -37,6 +37,9 @@ SHA256 HMAC = Y
AES GCM (128) = Y
AES GCM (192) = Y
AES GCM (256) = Y
+AES CCM (128) = Y
+AES CCM (192) = Y
+AES CCM (256) = Y
;
; Supported Asymmetric algorithms of the 'nitrox' crypto driver.
@@ -29,6 +29,7 @@ Hash algorithms:
Supported AEAD algorithms:
* ``RTE_CRYPTO_AEAD_AES_GCM``
+* ``RTE_CRYPTO_AEAD_AES_CCM``
Limitations
-----------
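For reference, an application opts into the algorithm added above through the
standard AEAD transform of the symmetric crypto API. A minimal sketch (the key
buffer, the parameter values, and the IV_OFFSET convention below are
illustrative assumptions, not part of this patch):

    #include <string.h>
    #include <rte_crypto.h>

    /* Common convention: carry the IV in the op's private area. */
    #define IV_OFFSET (sizeof(struct rte_crypto_op) + \
                       sizeof(struct rte_crypto_sym_op))

    static uint8_t ccm_key[16];     /* 128-bit key, filled elsewhere */

    static void
    setup_ccm_xform(struct rte_crypto_sym_xform *xform)
    {
            memset(xform, 0, sizeof(*xform));
            xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
            xform->aead.algo = RTE_CRYPTO_AEAD_AES_CCM;
            xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
            xform->aead.key.data = ccm_key;
            xform->aead.key.length = sizeof(ccm_key);
            xform->aead.iv.offset = IV_OFFSET;
            xform->aead.iv.length = 13;     /* 13-byte nonce, so L = 2 */
            xform->aead.digest_length = 16; /* even value in [4, 16] */
            xform->aead.aad_length = 8;
    }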
@@ -72,6 +72,10 @@ New Features
Also, make sure to start the actual text at the margin.
=======================================================
+* **Updated Marvell NITROX symmetric crypto PMD.**
+
+  * Added support for the AES-CCM algorithm.
+
Removed Items
-------------
@@ -492,7 +492,8 @@ configure_aead_ctx(struct rte_crypto_aead_xform *xform,
return -ENOTSUP;
}
- if (unlikely(xform->algo != RTE_CRYPTO_AEAD_AES_GCM))
+ if (unlikely(xform->algo != RTE_CRYPTO_AEAD_AES_GCM &&
+ xform->algo != RTE_CRYPTO_AEAD_AES_CCM))
return -ENOTSUP;
aes_keylen = flexi_aes_keylen(xform->key.length, true);
@@ -506,8 +507,29 @@ configure_aead_ctx(struct rte_crypto_aead_xform *xform,
if (unlikely(xform->iv.length > MAX_IV_LEN))
return -EINVAL;
+ if (xform->algo == RTE_CRYPTO_AEAD_AES_CCM) {
+ int L;
+
+ /* digest_length must be 4, 6, 8, 10, 12, 14, 16 bytes */
+ if (unlikely(xform->digest_length < 4 ||
+ xform->digest_length > 16 ||
+ (xform->digest_length & 1) == 1)) {
+ NITROX_LOG(ERR, "Invalid digest length %d\n",
+ xform->digest_length);
+ return -EINVAL;
+ }
+
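+		/* Length-of-length field: L = 15 - nonce length; CCM allows 2..8 */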
+ L = 15 - xform->iv.length;
+ if (unlikely(L < 2 || L > 8)) {
+ NITROX_LOG(ERR, "Invalid iv length %d\n",
+ xform->iv.length);
+ return -EINVAL;
+ }
+ }
+
fctx->flags = rte_be_to_cpu_64(fctx->flags);
- fctx->w0.cipher_type = CIPHER_AES_GCM;
+ fctx->w0.cipher_type = (xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ?
+ CIPHER_AES_GCM : CIPHER_AES_CCM;
fctx->w0.aes_keylen = aes_keylen;
fctx->w0.iv_source = IV_FROM_DPTR;
fctx->w0.hash_type = AUTH_NULL;
@@ -526,6 +548,7 @@ configure_aead_ctx(struct rte_crypto_aead_xform *xform,
ctx->iv.length = xform->iv.length;
ctx->digest_length = xform->digest_length;
ctx->aad_length = xform->aad_length;
+ ctx->aead_algo = xform->algo;
return 0;
}
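The two checks above encode CCM's parameter rules: the tag length t must be an
even value in [4, 16], and the length-of-length field L = 15 - nonce length
must fall in [2, 8], i.e. nonces of 7 to 13 bytes. Note that L also caps the
payload: a 13-byte nonce leaves only L = 2 counter bytes, limiting the
plaintext to 2^16 - 1 bytes. A standalone restatement, for illustration only:

    #include <stdint.h>

    /* Illustrative helper mirroring the validation above; returns 0 if
     * the (nonce length, tag length) pair is valid for CCM. */
    static int
    ccm_params_ok(uint16_t nonce_len, uint16_t tag_len)
    {
            int L = 15 - nonce_len; /* bytes left for the block counter */

            if (tag_len < 4 || tag_len > 16 || (tag_len & 1))
                    return -1;      /* tag must be even and in [4, 16] */
            if (L < 2 || L > 8)     /* i.e. nonce length in [7, 13] */
                    return -1;
            return 0;
    }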
@@ -138,6 +138,36 @@ static const struct rte_cryptodev_capabilities nitrox_capabilities[] = {
}, }
}, }
},
+ { /* AES CCM */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,
+ {.aead = {
+ .algo = RTE_CRYPTO_AEAD_AES_CCM,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .digest_size = {
+ .min = 4,
+ .max = 16,
+ .increment = 2
+ },
+ .aad_size = {
+ .min = 0,
+ .max = 512,
+ .increment = 1
+ },
+ .iv_size = {
+ .min = 7,
+ .max = 13,
+ .increment = 1
+ },
+ }, }
+ }, }
+ },
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
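Applications can confirm that the PMD now advertises this entry through the
standard capability query. A sketch (dev_id and the parameter values are
assumptions for illustration):

    #include <rte_cryptodev.h>

    /* Returns 0 if dev_id supports AES-CCM with a 16-byte key, 16-byte
     * tag, 8 bytes of AAD and a 13-byte nonce. */
    static int
    check_ccm_support(uint8_t dev_id)
    {
            const struct rte_cryptodev_symmetric_capability *cap;
            struct rte_cryptodev_sym_capability_idx idx = {
                    .type = RTE_CRYPTO_SYM_XFORM_AEAD,
                    .algo.aead = RTE_CRYPTO_AEAD_AES_CCM,
            };

            cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
            if (cap == NULL)
                    return -1;
            return rte_cryptodev_sym_capability_check_aead(cap, 16, 16,
                                                           8, 13);
    }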
@@ -70,6 +70,7 @@ struct flexi_crypto_context {
struct nitrox_crypto_ctx {
struct flexi_crypto_context fctx;
enum nitrox_chain nitrox_chain;
+ enum rte_crypto_aead_algorithm aead_algo;
struct {
uint16_t offset;
uint16_t length;
@@ -23,6 +23,8 @@
#define SOLICIT_BASE_DPORT 256
#define PENDING_SIG 0xFFFFFFFFFFFFFFFFUL
#define CMD_TIMEOUT 2
+/* Per the DPDK symmetric crypto API, the actual AAD for AES-CCM starts
+ * 18 bytes into the AAD buffer (16-byte B0 block plus 2-byte AAD length
+ * encoding).
+ */
+#define DPDK_AES_CCM_AAD_OFFSET 18
struct gphdr {
uint16_t param0;
@@ -486,10 +488,15 @@ create_combined_sglist(struct nitrox_softreq *sr, struct nitrox_sgtable *sgtbl,
struct rte_mbuf *mbuf)
{
struct rte_crypto_op *op = sr->op;
+ uint32_t aad_offset = 0;
+
+ if (sr->ctx->aead_algo == RTE_CRYPTO_AEAD_AES_CCM)
+		aad_offset = DPDK_AES_CCM_AAD_OFFSET;
fill_sglist(sgtbl, sr->iv.len, sr->iv.iova, sr->iv.virt);
- fill_sglist(sgtbl, sr->ctx->aad_length, op->sym->aead.aad.phys_addr,
- op->sym->aead.aad.data);
+ fill_sglist(sgtbl, sr->ctx->aad_length,
+ op->sym->aead.aad.phys_addr + aad_offset,
+ op->sym->aead.aad.data + aad_offset);
return create_sglist_from_mbuf(sgtbl, mbuf, op->sym->cipher.data.offset,
op->sym->cipher.data.length);
}
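The offset applied above matches what the API expects from the caller: the
first 18 bytes of the AAD buffer are reserved (room for the B0 block and the
2-byte AAD length encoding), and the real AAD begins at offset 18. An
application-side sketch of filling that buffer (names are illustrative):

    #include <string.h>
    #include <rte_crypto.h>

    static void
    set_ccm_aad(struct rte_crypto_op *op, uint8_t *aad_buf,
                rte_iova_t aad_iova, const uint8_t *aad, uint16_t aad_len)
    {
            /* Real AAD goes after the 18 reserved bytes. */
            memcpy(aad_buf + 18, aad, aad_len);
            op->sym->aead.aad.data = aad_buf;
            op->sym->aead.aad.phys_addr = aad_iova;
    }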
@@ -721,11 +728,53 @@ process_combined_data(struct nitrox_softreq *sr)
struct nitrox_sglist digest;
struct rte_crypto_op *op = sr->op;
- err = softreq_copy_salt(sr);
- if (unlikely(err))
- return err;
+ if (sr->ctx->aead_algo == RTE_CRYPTO_AEAD_AES_GCM) {
+ err = softreq_copy_salt(sr);
+ if (unlikely(err))
+ return err;
+
+ softreq_copy_iv(sr, AES_GCM_SALT_SIZE);
+ } else if (sr->ctx->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
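+		/* Build the CCM flags octet (first byte of the counter block):
+		 * bit 7 reserved, bit 6 Adata, bits 5..3 M' = (t - 2) / 2,
+		 * bits 2..0 L' = L - 1.
+		 */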
+ union {
+ uint8_t value;
+ struct {
+#if RTE_BYTE_ORDER == RTE_BIG_ENDIAN
+ uint8_t rsvd: 1;
+ uint8_t adata: 1;
+ uint8_t mstar: 3;
+ uint8_t lstar: 3;
+#else
+ uint8_t lstar: 3;
+ uint8_t mstar: 3;
+ uint8_t adata: 1;
+ uint8_t rsvd: 1;
+#endif
+ };
+ } flags;
+ uint8_t L;
+ uint8_t *iv_addr;
+
+ flags.value = 0;
+ flags.rsvd = 0;
+ flags.adata = (sr->ctx->aad_length > 0) ? 1 : 0;
+ flags.mstar = (sr->ctx->digest_length - 2) / 2;
+ L = 15 - sr->ctx->iv.length;
+ flags.lstar = L - 1;
+ iv_addr = rte_crypto_op_ctod_offset(sr->op, uint8_t *,
+ sr->ctx->iv.offset);
+ /* initialize IV flags */
+ iv_addr[0] = flags.value;
+ /* initialize IV counter to 0 */
+ memset(&iv_addr[1] + sr->ctx->iv.length, 0, L);
+		sr->iv.virt = iv_addr;
+ sr->iv.iova = rte_crypto_op_ctophys_offset(sr->op,
+ sr->ctx->iv.offset);
+ sr->iv.len = 16;
+ } else {
+ return -EINVAL;
+ }
- softreq_copy_iv(sr, AES_GCM_SALT_SIZE);
err = extract_combined_digest(sr, &digest);
if (unlikely(err))
return err;
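To make the flag construction above concrete, consider an operation with AAD
present, a 16-byte tag and a 13-byte nonce: Adata = 1, M' = (16 - 2) / 2 = 7,
L = 15 - 13 = 2, so L' = 1, giving a flags octet of 0x79. A self-checking
restatement (values assumed for illustration):

    #include <assert.h>
    #include <stdint.h>

    int
    main(void)
    {
            uint8_t adata = 1;              /* AAD present */
            uint8_t mstar = (16 - 2) / 2;   /* 16-byte tag -> M' = 7 */
            uint8_t lstar = (15 - 13) - 1;  /* 13-byte nonce -> L' = 1 */

            /* Same layout the driver's bit-field union produces. */
            assert(((adata << 6) | (mstar << 3) | lstar) == 0x79);
            return 0;
    }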