[v2,18/33] common/cpt: add common code for fill session data

Message ID 1536033560-21541-19-git-send-email-ajoseph@caviumnetworks.com (mailing list archive)
State Superseded, archived
Delegated to: Akhil Goyal
Headers
Series Adding Cavium's OcteonTX crypto PMD |

Checks

Context Check Description
ci/checkpatch success coding style OK
ci/Intel-compilation success Compilation OK

Commit Message

Anoob Joseph Sept. 4, 2018, 3:59 a.m. UTC
  From: Nithin Dabilpuram <nithin.dabilpuram@caviumnetworks.com>

Adding common code required for filling session data for AEAD, cipher &
auth sessions.

Signed-off-by: Ankur Dwivedi <ankur.dwivedi@caviumnetworks.com>
Signed-off-by: Anoob Joseph <anoob.joseph@caviumnetworks.com>
Signed-off-by: Murthy NSSR <nidadavolu.murthy@caviumnetworks.com>
Signed-off-by: Nithin Dabilpuram <nithin.dabilpuram@caviumnetworks.com>
Signed-off-by: Ragothaman Jayaraman <rjayaraman@caviumnetworks.com>
Signed-off-by: Srisivasubramanian S <ssrinivasan@caviumnetworks.com>
Signed-off-by: Tejasree Kondoj <kondoj.tejasree@caviumnetworks.com>
---
 drivers/common/cpt/cpt_mcode_defines.h |  88 +++++
 drivers/common/cpt/cpt_ucode.h         | 574 +++++++++++++++++++++++++++++++++
 2 files changed, 662 insertions(+)
  

Patch

diff --git a/drivers/common/cpt/cpt_mcode_defines.h b/drivers/common/cpt/cpt_mcode_defines.h
index 5b1566e..0d2d0db 100644
--- a/drivers/common/cpt/cpt_mcode_defines.h
+++ b/drivers/common/cpt/cpt_mcode_defines.h
@@ -25,9 +25,86 @@ 
 #define SG_LIST_HDR_SIZE	(8u)
 #define SG_ENTRY_SIZE		sizeof(sg_comp_t)
 
+#define CPT_DMA_MODE		(1 << 7)  /* Default support is with SG */
+
+/* IV source selector programmed into enc_ctrl.iv_source */
+#define CPT_FROM_CTX		0	/* IV taken from the session context */
+#define CPT_FROM_DPTR		1	/* IV taken from per-op input (DPTR) */
+
+/* Values for cpt_ctx->fc_type: selects the microcode processing path */
+#define FC_GEN			0x1	/* Flexi-crypto (AES/DES/hash) */
+#define ZUC_SNOW3G		0x2
+#define KASUMI			0x3
+#define HASH_HMAC		0x4
+
+/* Values stored in sess->zsk_flag by the fill_sess_* helpers */
+#define ZS_EA			0x1	/* ZUC/SNOW3G encryption */
+#define ZS_IA			0x2	/* ZUC/SNOW3G authentication */
+#define K_F8			0x4	/* KASUMI F8 (cipher) */
+#define K_F9			0x8	/* KASUMI F9 (auth) */
+
+/* Bits recorded in sess->cpt_op */
+#define CPT_OP_CIPHER_ENCRYPT	0x1
+#define CPT_OP_CIPHER_DECRYPT	0x2
+#define CPT_OP_CIPHER_MASK	0x3
+
+#define CPT_OP_AUTH_VERIFY	0x4
+#define CPT_OP_AUTH_GENERATE	0x8
+#define CPT_OP_AUTH_MASK	0xC
+
+/* Combined encrypt+generate / decrypt+verify, used for AEAD-style ops */
+#define CPT_OP_ENCODE	(CPT_OP_CIPHER_ENCRYPT | CPT_OP_AUTH_GENERATE)
+#define CPT_OP_DECODE	(CPT_OP_CIPHER_DECRYPT | CPT_OP_AUTH_VERIFY)
+
 /* #define CPT_ALWAYS_USE_SG_MODE */
 #define CPT_ALWAYS_USE_SEPARATE_BUF
 
+/* Hash algorithm identifiers; values >= 0x90 are software-only and are
+ * never programmed into the microcode hash_type field (see below).
+ */
+typedef enum {
+	MD5_TYPE        = 1,
+	SHA1_TYPE       = 2,
+	SHA2_SHA224     = 3,
+	SHA2_SHA256     = 4,
+	SHA2_SHA384     = 5,
+	SHA2_SHA512     = 6,
+	GMAC_TYPE       = 7,
+	XCBC_TYPE       = 8,
+	SHA3_SHA224     = 10,
+	SHA3_SHA256     = 11,
+	SHA3_SHA384     = 12,
+	SHA3_SHA512     = 13,
+	SHA3_SHAKE256   = 14,
+	SHA3_SHAKE512   = 15,
+
+	/* These are only for software use */
+	ZUC_EIA3        = 0x90,
+	SNOW3G_UIA2     = 0x91,
+	KASUMI_F9_CBC   = 0x92,
+	KASUMI_F9_ECB   = 0x93,
+} mc_hash_type_t;
+
+/* Cipher algorithm identifiers understood by cpt_fc_ciph_set_key() */
+typedef enum {
+	/*
+	 * These are defined by MC for Flexi crypto
+	 * for field of 4 bits
+	 */
+	DES3_CBC    = 0x1,
+	DES3_ECB    = 0x2,
+	AES_CBC     = 0x3,
+	AES_ECB     = 0x4,
+	AES_CFB     = 0x5,
+	AES_CTR     = 0x6,
+	AES_GCM     = 0x7,
+	AES_XTS     = 0x8,
+
+	/* These are only for software use */
+	ZUC_EEA3        = 0x90,
+	SNOW3G_UEA2     = 0x91,
+	KASUMI_F8_CBC   = 0x92,
+	KASUMI_F8_ECB   = 0x93,
+} mc_cipher_type_t;
+
+/* AES key-size encoding for the enc_ctrl.aes_key field */
+typedef enum {
+	AES_128_BIT = 0x1,
+	AES_192_BIT = 0x2,
+	AES_256_BIT = 0x3
+} mc_aes_type_t;
+
+
 typedef struct sglist_comp {
 	union {
 		uint64_t len;
@@ -143,6 +220,17 @@  struct cpt_ctx {
 	uint8_t  auth_key[64];
 };
 
+/* Alias for digest-only operations; struct fc_params is declared elsewhere */
+typedef struct fc_params digest_params_t;
+
+/* Cipher Algorithms */
+typedef mc_cipher_type_t cipher_type_t;
+
+/* Auth Algorithms */
+typedef mc_hash_type_t auth_type_t;
+
 #define CPT_P_ENC_CTRL(fctx)  fctx->enc.enc_ctrl.e
 
+/* Algorithm-private area (struct cpt_ctx) that immediately follows the
+ * struct cpt_sess_misc header of a session.
+ */
+#define SESS_PRIV(__sess) \
+	(void *)((uint8_t *)__sess + sizeof(struct cpt_sess_misc))
+
 #endif /* _CPT_MCODE_DEFINES_H_ */
diff --git a/drivers/common/cpt/cpt_ucode.h b/drivers/common/cpt/cpt_ucode.h
index e4f16fe..a9aef68 100644
--- a/drivers/common/cpt/cpt_ucode.h
+++ b/drivers/common/cpt/cpt_ucode.h
@@ -12,6 +12,13 @@ 
  *
  */
 
+/* ZUC initialization constants D0..D15 (15-bit values from the ZUC spec,
+ * two bytes each); copied into zs_ctx.zuc_const at key setup.
+ */
+static uint8_t zuc_d[32] = {
+	0x44, 0xD7, 0x26, 0xBC, 0x62, 0x6B, 0x13, 0x5E,
+	0x57, 0x89, 0x35, 0xE2, 0x71, 0x35, 0x09, 0xAF,
+	0x4D, 0x78, 0x2F, 0x13, 0x6B, 0xC4, 0x1A, 0xF1,
+	0x5E, 0x26, 0x3C, 0x4D, 0x78, 0x9A, 0x47, 0xAC
+};
+
 static __rte_always_inline int
 cpt_is_algo_supported(struct rte_crypto_sym_xform *xform)
 {
@@ -44,4 +51,571 @@  cpt_is_algo_supported(struct rte_crypto_sym_xform *xform)
 	return 0;
 }
 
+/* Expand a 16-byte SNOW 3G cipher key into four 32-bit words.
+ * Each 4-byte group is packed big-endian-first and the words are stored
+ * in reverse order, byte-swapped to the layout the microcode expects.
+ */
+static __rte_always_inline void
+gen_key_snow3g(uint8_t *ck, uint32_t *keyx)
+{
+	int i;
+
+	for (i = 0; i < 4; i++) {
+		const uint8_t *p = &ck[4 * i];
+		uint32_t w = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
+			     ((uint32_t)p[2] << 8) | (uint32_t)p[3];
+
+		keyx[3 - i] = rte_cpu_to_be_32(w);
+	}
+}
+
+/*
+ * Program the cipher algorithm and key into the session's private context.
+ *
+ * ctx points to the struct cpt_ctx private area (see SESS_PRIV).  For
+ * AES_GCM, a 4-byte salt may be refreshed independently of the key by
+ * passing key == NULL.  Returns 0 on success, -1 on invalid parameters.
+ */
+static __rte_always_inline int
+cpt_fc_ciph_set_key(void *ctx, cipher_type_t type, uint8_t *key,
+		    uint16_t key_len, uint8_t *salt)
+{
+	struct cpt_ctx *cpt_ctx = ctx;
+	mc_fc_context_t *fctx = &cpt_ctx->fctx;
+	mc_aes_type_t aes_key_type = 0;
+	uint64_t *ctrl_flags;
+
+	if (!type) {
+		/* to support passthrough case */
+
+		cpt_ctx->fc_type = FC_GEN;
+		ctrl_flags = (uint64_t *)&(fctx->enc.enc_ctrl.flags);
+		cpt_ctx->enc_cipher = 0;
+
+		/* Control flags live big-endian in the context: convert to
+		 * CPU order, clear the cipher field, convert back.
+		 */
+		*ctrl_flags = rte_be_to_cpu_64(*ctrl_flags);
+		CPT_P_ENC_CTRL(fctx).enc_cipher = 0;
+		*ctrl_flags = rte_cpu_to_be_64(*ctrl_flags);
+
+		return 0;
+	}
+
+	/* Wireless algos use dedicated zs_ctx/k_ctx contexts, not fctx */
+	if ((type >= ZUC_EEA3) && (type <= KASUMI_F8_ECB)) {
+		uint32_t keyx[4];
+
+		/* All wireless algos here take a 128-bit key */
+		if (key_len != 16)
+			return -1;
+
+		/* No support for AEAD yet */
+		if (cpt_ctx->hash_type)
+			return -1;
+
+		/* For ZUC/SNOW3G/Kasumi */
+		switch (type) {
+		case SNOW3G_UEA2:
+			cpt_ctx->snow3g = 1;
+			gen_key_snow3g(key, keyx);
+			memcpy(cpt_ctx->zs_ctx.ci_key, keyx, key_len);
+			cpt_ctx->fc_type = ZUC_SNOW3G;
+			cpt_ctx->zsk_flags = 0;
+			break;
+		case ZUC_EEA3:
+			cpt_ctx->snow3g = 0;
+			memcpy(cpt_ctx->zs_ctx.ci_key, key, key_len);
+			/* ZUC also needs the D initialization constants */
+			memcpy(cpt_ctx->zs_ctx.zuc_const, zuc_d, 32);
+			cpt_ctx->fc_type = ZUC_SNOW3G;
+			cpt_ctx->zsk_flags = 0;
+			break;
+		case KASUMI_F8_ECB:
+			/* Kasumi ECB mode */
+			cpt_ctx->k_ecb = 1;
+			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
+			cpt_ctx->zsk_flags = 0;
+			cpt_ctx->fc_type = KASUMI;
+			break;
+		case KASUMI_F8_CBC:
+			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
+			cpt_ctx->zsk_flags = 0;
+			cpt_ctx->fc_type = KASUMI;
+			break;
+		default:
+			return -1;
+		}
+		cpt_ctx->enc_cipher = type;
+		return 0;
+	}
+
+	fctx = &cpt_ctx->fctx;
+	/* Even though iv source is from dptr,
+	 * aes_gcm salt is taken from ctx
+	 */
+	if (salt && (type == AES_GCM)) {
+		memcpy(fctx->enc.encr_iv, salt, 4);
+		/* Assuming it was just salt update
+		 * and nothing else
+		 */
+		if (!key)
+			return 0;
+	}
+
+	cpt_ctx->fc_type = FC_GEN;
+	ctrl_flags = (uint64_t *)&(fctx->enc.enc_ctrl.flags);
+	/* Flip to CPU order for the bitfield updates below; flipped back
+	 * to big-endian just before returning.
+	 */
+	*ctrl_flags = rte_be_to_cpu_64(*ctrl_flags);
+
+	cpt_ctx->enc_cipher = type;
+
+	/* For GMAC auth, cipher must be NULL */
+	if (cpt_ctx->hash_type != GMAC_TYPE)
+		CPT_P_ENC_CTRL(fctx).enc_cipher = type;
+
+	/* XTS key is key1 || key2; each half is key_len/2 bytes */
+	if (type == AES_XTS)
+		key_len = key_len / 2;
+
+	/* key len only for AES */
+	if ((type != DES3_CBC) &&
+	    (type != DES3_ECB)) {
+		switch (key_len) {
+		case CPT_BYTE_16:
+			aes_key_type = AES_128_BIT;
+			break;
+		case CPT_BYTE_24:
+			aes_key_type = AES_192_BIT;
+			if (type == AES_XTS) {
+				CPT_LOG_DP_ERR("Invalid AES key len for"
+					    " XTS\n");
+				return -1;
+			}
+			break;
+		case CPT_BYTE_32:
+			aes_key_type = AES_256_BIT;
+			break;
+		default:
+			CPT_LOG_DP_ERR("Invalid AES key len\n");
+			return -1;
+		}
+
+		CPT_P_ENC_CTRL(fctx).aes_key = aes_key_type;
+	}
+
+	/*
+	 * We need to always say iv is from DPTR as user can
+	 * sometimes override IV per operation.
+	 * For DES3_ECB IV need to be from CTX.
+	 */
+	if (type == DES3_ECB)
+		CPT_P_ENC_CTRL(fctx).iv_source = CPT_FROM_CTX;
+	else
+		CPT_P_ENC_CTRL(fctx).iv_source = CPT_FROM_DPTR;
+
+	memcpy(fctx->enc.encr_key, key, key_len);
+
+	if ((type == DES3_CBC) && (key_len == 8)) {
+		/* CPT performs DES using 3DES with the 8B DES-key
+		 * replicated 2 more times to match the 24B 3DES-key.
+		 * Eg. If org. key is "0x0a 0x0b", then new key is
+		 * "0x0a 0x0b 0x0a 0x0b 0x0a 0x0b"
+		 */
+		memcpy(fctx->enc.encr_key+key_len, key, key_len);
+		memcpy(fctx->enc.encr_key+2*key_len, key, key_len);
+	}
+
+	if (type == AES_XTS) {
+		/* Copy key2 for XTS into ipad */
+		memset(fctx->hmac.ipad, 0, sizeof(fctx->hmac.ipad));
+		memcpy(fctx->hmac.ipad, &key[key_len], key_len);
+	}
+
+	*ctrl_flags = rte_cpu_to_be_64(*ctrl_flags);
+
+	return 0;
+}
+
+/*
+ * Program the auth algorithm, key and MAC length into the session's
+ * private context (struct cpt_ctx).  Returns 0 on success, -1 on invalid
+ * parameters.
+ */
+static __rte_always_inline int
+cpt_fc_auth_set_key(void *ctx, auth_type_t type, uint8_t *key,
+		    uint16_t key_len, uint16_t mac_len)
+{
+	struct cpt_ctx *cpt_ctx = ctx;
+	mc_fc_context_t *fctx = &cpt_ctx->fctx;
+	uint64_t *ctrl_flags = NULL;
+
+	/* Wireless algos use dedicated zs_ctx/k_ctx contexts, not fctx */
+	if ((type >= ZUC_EIA3) && (type <= KASUMI_F9_ECB)) {
+		uint32_t keyx[4];
+
+		/* All wireless algos here take a 128-bit key */
+		if (key_len != 16)
+			return -1;
+		/* No support for AEAD yet */
+		if (cpt_ctx->enc_cipher)
+			return -1;
+		/* For ZUC/SNOW3G/Kasumi; zsk_flags = 0x1 marks an auth op
+		 * (cipher setup leaves it 0)
+		 */
+		switch (type) {
+		case SNOW3G_UIA2:
+			cpt_ctx->snow3g = 1;
+			gen_key_snow3g(key, keyx);
+			memcpy(cpt_ctx->zs_ctx.ci_key, keyx, key_len);
+			cpt_ctx->fc_type = ZUC_SNOW3G;
+			cpt_ctx->zsk_flags = 0x1;
+			break;
+		case ZUC_EIA3:
+			cpt_ctx->snow3g = 0;
+			memcpy(cpt_ctx->zs_ctx.ci_key, key, key_len);
+			memcpy(cpt_ctx->zs_ctx.zuc_const, zuc_d, 32);
+			cpt_ctx->fc_type = ZUC_SNOW3G;
+			cpt_ctx->zsk_flags = 0x1;
+			break;
+		case KASUMI_F9_ECB:
+			/* Kasumi ECB mode */
+			cpt_ctx->k_ecb = 1;
+			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
+			cpt_ctx->fc_type = KASUMI;
+			cpt_ctx->zsk_flags = 0x1;
+			break;
+		case KASUMI_F9_CBC:
+			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
+			cpt_ctx->fc_type = KASUMI;
+			cpt_ctx->zsk_flags = 0x1;
+			break;
+		default:
+			return -1;
+		}
+		/* These algos produce a fixed 32-bit MAC */
+		cpt_ctx->mac_len = 4;
+		cpt_ctx->hash_type = type;
+		return 0;
+	}
+
+	/* Auth-only session (no cipher configured yet) runs on HASH_HMAC */
+	if (!(cpt_ctx->fc_type == FC_GEN && !type)) {
+		if (!cpt_ctx->fc_type || !cpt_ctx->enc_cipher)
+			cpt_ctx->fc_type = HASH_HMAC;
+	}
+
+	/* Control flags live big-endian in the context; flip to CPU order
+	 * for the bitfield updates, flipped back before returning.
+	 */
+	ctrl_flags = (uint64_t *)&fctx->enc.enc_ctrl.flags;
+	*ctrl_flags = rte_be_to_cpu_64(*ctrl_flags);
+
+	/* For GMAC auth, cipher must be NULL */
+	if (type == GMAC_TYPE)
+		CPT_P_ENC_CTRL(fctx).enc_cipher = 0;
+
+	CPT_P_ENC_CTRL(fctx).hash_type = cpt_ctx->hash_type = type;
+	CPT_P_ENC_CTRL(fctx).mac_len = cpt_ctx->mac_len = mac_len;
+
+	if (key_len) {
+		cpt_ctx->hmac = 1;
+		memset(cpt_ctx->auth_key, 0, sizeof(cpt_ctx->auth_key));
+		memcpy(cpt_ctx->auth_key, key, key_len);
+		cpt_ctx->auth_key_len = key_len;
+		memset(fctx->hmac.ipad, 0, sizeof(fctx->hmac.ipad));
+		memset(fctx->hmac.opad, 0, sizeof(fctx->hmac.opad));
+		/* Raw key is placed in opad; presumably the microcode
+		 * derives ipad/opad from it — TODO confirm
+		 */
+		memcpy(fctx->hmac.opad, key, key_len);
+		CPT_P_ENC_CTRL(fctx).auth_input_type = 1;
+	}
+	*ctrl_flags = rte_cpu_to_be_64(*ctrl_flags);
+	return 0;
+}
+
+/*
+ * Fill session parameters for an AEAD (AES-GCM) transform.
+ *
+ * Records cipher/auth directions and IV/AAD/digest parameters in the misc
+ * area, then programs the key into the private context that follows it.
+ * Returns 0 on success, -1 on unsupported algo or bad parameters.
+ */
+static __rte_always_inline int
+fill_sess_aead(struct rte_crypto_sym_xform *xform,
+		 struct cpt_sess_misc *sess)
+{
+	struct rte_crypto_aead_xform *aead_form;
+	cipher_type_t enc_type = 0; /* NULL Cipher type */
+	auth_type_t auth_type = 0; /* NULL Auth type */
+	uint32_t cipher_key_len = 0;
+	uint8_t zsk_flag = 0, aes_gcm = 0;
+
+	/* Guard on xform type, consistent with fill_sess_cipher() */
+	if (xform->type != RTE_CRYPTO_SYM_XFORM_AEAD)
+		return -1;
+
+	aead_form = &xform->aead;
+
+	if (aead_form->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
+	   aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+		sess->cpt_op |= CPT_OP_CIPHER_ENCRYPT;
+		sess->cpt_op |= CPT_OP_AUTH_GENERATE;
+	} else if (aead_form->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
+		aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
+		sess->cpt_op |= CPT_OP_CIPHER_DECRYPT;
+		sess->cpt_op |= CPT_OP_AUTH_VERIFY;
+	} else {
+		CPT_LOG_DP_ERR("Unknown cipher operation\n");
+		return -1;
+	}
+	switch (aead_form->algo) {
+	case RTE_CRYPTO_AEAD_AES_GCM:
+		enc_type = AES_GCM;
+		cipher_key_len = 16;
+		aes_gcm = 1;
+		break;
+	case RTE_CRYPTO_AEAD_AES_CCM:
+		CPT_LOG_DP_ERR("Crypto: Unsupported cipher algo %u",
+			       aead_form->algo);
+		return -1;
+	default:
+		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
+			       aead_form->algo);
+		return -1;
+	}
+	if (aead_form->key.length < cipher_key_len) {
+		CPT_LOG_DP_ERR("Invalid cipher params keylen %lu",
+			       (unsigned long)aead_form->key.length);
+		return -1;
+	}
+	sess->zsk_flag = zsk_flag;
+	sess->aes_gcm = aes_gcm;
+	sess->mac_len = aead_form->digest_length;
+	sess->iv_offset = aead_form->iv.offset;
+	sess->iv_length = aead_form->iv.length;
+	sess->aad_length = aead_form->aad_length;
+
+	/* Program keys into the private context following the misc area */
+	cpt_fc_ciph_set_key(SESS_PRIV(sess), enc_type, aead_form->key.data,
+			    aead_form->key.length, NULL);
+	cpt_fc_auth_set_key(SESS_PRIV(sess), auth_type, NULL, 0,
+			    aead_form->digest_length);
+
+	return 0;
+}
+
+/*
+ * Fill session parameters for a cipher-only transform.
+ *
+ * Maps the rte_crypto cipher algo onto the microcode cipher type, records
+ * the direction and IV parameters, then programs the key into the private
+ * context.  Returns 0 on success, -1 on unsupported algo or bad key length.
+ */
+static __rte_always_inline int
+fill_sess_cipher(struct rte_crypto_sym_xform *xform,
+		 struct cpt_sess_misc *sess)
+{
+	struct rte_crypto_cipher_xform *cipher_xform;
+	cipher_type_t ctype = 0; /* NULL Cipher type */
+	uint32_t key_len_min = 0;
+	uint8_t zsk_flag = 0, aes_gcm = 0, aes_ctr = 0, is_null = 0;
+
+	if (xform->type != RTE_CRYPTO_SYM_XFORM_CIPHER)
+		return -1;
+
+	cipher_xform = &xform->cipher;
+
+	/* Record the requested direction */
+	switch (cipher_xform->op) {
+	case RTE_CRYPTO_CIPHER_OP_ENCRYPT:
+		sess->cpt_op |= CPT_OP_CIPHER_ENCRYPT;
+		break;
+	case RTE_CRYPTO_CIPHER_OP_DECRYPT:
+		sess->cpt_op |= CPT_OP_CIPHER_DECRYPT;
+		break;
+	default:
+		CPT_LOG_DP_ERR("Unknown cipher operation\n");
+		return -1;
+	}
+
+	/* Map the algo and the minimum acceptable key length for it */
+	switch (cipher_xform->algo) {
+	case RTE_CRYPTO_CIPHER_AES_CBC:
+		ctype = AES_CBC;
+		key_len_min = 16;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_CBC:
+		ctype = DES3_CBC;
+		key_len_min = 24;
+		break;
+	case RTE_CRYPTO_CIPHER_DES_CBC:
+		/* DES is implemented using 3DES in hardware */
+		ctype = DES3_CBC;
+		key_len_min = 8;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_CTR:
+		ctype = AES_CTR;
+		key_len_min = 16;
+		aes_ctr = 1;
+		break;
+	case RTE_CRYPTO_CIPHER_NULL:
+		ctype = 0;
+		is_null = 1;
+		break;
+	case RTE_CRYPTO_CIPHER_KASUMI_F8:
+		ctype = KASUMI_F8_ECB;
+		key_len_min = 16;
+		zsk_flag = K_F8;
+		break;
+	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
+		ctype = SNOW3G_UEA2;
+		key_len_min = 16;
+		zsk_flag = ZS_EA;
+		break;
+	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
+		ctype = ZUC_EEA3;
+		key_len_min = 16;
+		zsk_flag = ZS_EA;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_XTS:
+		ctype = AES_XTS;
+		key_len_min = 16;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_ECB:
+		ctype = DES3_ECB;
+		key_len_min = 24;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_ECB:
+		ctype = AES_ECB;
+		key_len_min = 16;
+		break;
+	case RTE_CRYPTO_CIPHER_3DES_CTR:
+	case RTE_CRYPTO_CIPHER_AES_F8:
+	case RTE_CRYPTO_CIPHER_ARC4:
+		CPT_LOG_DP_ERR("Crypto: Unsupported cipher algo %u",
+			       cipher_xform->algo);
+		return -1;
+	default:
+		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
+			       cipher_xform->algo);
+		return -1;
+	}
+
+	if (cipher_xform->key.length < key_len_min) {
+		CPT_LOG_DP_ERR("Invalid cipher params keylen %lu",
+			       (unsigned long) cipher_xform->key.length);
+		return -1;
+	}
+
+	sess->is_null = is_null;
+	sess->zsk_flag = zsk_flag;
+	sess->aes_gcm = aes_gcm;
+	sess->aes_ctr = aes_ctr;
+	sess->iv_offset = cipher_xform->iv.offset;
+	sess->iv_length = cipher_xform->iv.length;
+
+	cpt_fc_ciph_set_key(SESS_PRIV(sess), ctype, cipher_xform->key.data,
+			    cipher_xform->key.length, NULL);
+
+	return 0;
+}
+
+/*
+ * Fill session parameters for an auth-only transform.
+ *
+ * Maps the rte_crypto auth algo onto the microcode hash type, records
+ * direction, digest length and (for wireless algos) the auth IV, then
+ * programs the key.  Returns 0 on success, -1 on error.
+ */
+static __rte_always_inline int
+fill_sess_auth(struct rte_crypto_sym_xform *xform,
+	       struct cpt_sess_misc *sess)
+{
+	struct rte_crypto_auth_xform *a_form;
+	auth_type_t auth_type = 0; /* NULL Auth type */
+	uint8_t zsk_flag = 0, aes_gcm = 0, is_null = 0;
+
+	if (xform->type != RTE_CRYPTO_SYM_XFORM_AUTH)
+		return -1;
+
+	a_form = &xform->auth;
+
+	if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
+		sess->cpt_op |= CPT_OP_AUTH_VERIFY;
+	else if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
+		sess->cpt_op |= CPT_OP_AUTH_GENERATE;
+	else {
+		CPT_LOG_DP_ERR("Unknown auth operation");
+		return -1;
+	}
+
+	/* Key is copied into the 64-byte auth_key context field */
+	if (a_form->key.length > 64) {
+		CPT_LOG_DP_ERR("Auth key length is big");
+		return -1;
+	}
+
+	switch (a_form->algo) {
+	case RTE_CRYPTO_AUTH_SHA1_HMAC:
+		/* Fall through */
+	case RTE_CRYPTO_AUTH_SHA1:
+		auth_type = SHA1_TYPE;
+		break;
+	case RTE_CRYPTO_AUTH_SHA256_HMAC:
+	case RTE_CRYPTO_AUTH_SHA256:
+		auth_type = SHA2_SHA256;
+		break;
+	case RTE_CRYPTO_AUTH_SHA512_HMAC:
+	case RTE_CRYPTO_AUTH_SHA512:
+		auth_type = SHA2_SHA512;
+		break;
+	case RTE_CRYPTO_AUTH_AES_GMAC:
+		auth_type = GMAC_TYPE;
+		aes_gcm = 1;
+		break;
+	case RTE_CRYPTO_AUTH_SHA224_HMAC:
+	case RTE_CRYPTO_AUTH_SHA224:
+		auth_type = SHA2_SHA224;
+		break;
+	case RTE_CRYPTO_AUTH_SHA384_HMAC:
+	case RTE_CRYPTO_AUTH_SHA384:
+		auth_type = SHA2_SHA384;
+		break;
+	case RTE_CRYPTO_AUTH_MD5_HMAC:
+	case RTE_CRYPTO_AUTH_MD5:
+		auth_type = MD5_TYPE;
+		break;
+	case RTE_CRYPTO_AUTH_KASUMI_F9:
+		auth_type = KASUMI_F9_ECB;
+		/*
+		 * Indicate that direction needs to be taken out
+		 * from end of src
+		 */
+		zsk_flag = K_F9;
+		break;
+	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
+		auth_type = SNOW3G_UIA2;
+		zsk_flag = ZS_IA;
+		break;
+	case RTE_CRYPTO_AUTH_ZUC_EIA3:
+		auth_type = ZUC_EIA3;
+		zsk_flag = ZS_IA;
+		break;
+	case RTE_CRYPTO_AUTH_NULL:
+		auth_type = 0;
+		is_null = 1;
+		break;
+	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
+	case RTE_CRYPTO_AUTH_AES_CMAC:
+	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
+		CPT_LOG_DP_ERR("Crypto: Unsupported hash algo %u",
+			       a_form->algo);
+		return -1;
+	default:
+		CPT_LOG_DP_ERR("Crypto: Undefined Hash algo %u specified",
+			       a_form->algo);
+		return -1;
+	}
+
+	sess->zsk_flag = zsk_flag;
+	sess->aes_gcm = aes_gcm;
+	sess->mac_len = a_form->digest_length;
+	sess->is_null = is_null;
+	/* Wireless algos carry a per-op auth IV */
+	if (zsk_flag) {
+		sess->auth_iv_offset = a_form->iv.offset;
+		sess->auth_iv_length = a_form->iv.length;
+	}
+	cpt_fc_auth_set_key(SESS_PRIV(sess), auth_type, a_form->key.data,
+			    a_form->key.length, a_form->digest_length);
+
+	return 0;
+}
+
+/*
+ * Fill session parameters for an AES-GMAC transform (modelled as an auth
+ * xform but run as GCM with no payload cipher).  Returns 0 on success,
+ * -1 on error.
+ */
+static __rte_always_inline int
+fill_sess_gmac(struct rte_crypto_sym_xform *xform,
+		 struct cpt_sess_misc *sess)
+{
+	struct rte_crypto_auth_xform *a_form;
+	cipher_type_t enc_type = 0; /* NULL Cipher type */
+	auth_type_t auth_type = 0; /* NULL Auth type */
+	uint8_t zsk_flag = 0, aes_gcm = 0;
+
+	if (xform->type != RTE_CRYPTO_SYM_XFORM_AUTH)
+		return -1;
+
+	a_form = &xform->auth;
+
+	/* GMAC runs the combined encode/decode path in hardware */
+	if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
+		sess->cpt_op |= CPT_OP_ENCODE;
+	else if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
+		sess->cpt_op |= CPT_OP_DECODE;
+	else {
+		CPT_LOG_DP_ERR("Unknown auth operation");
+		return -1;
+	}
+
+	switch (a_form->algo) {
+	case RTE_CRYPTO_AUTH_AES_GMAC:
+		enc_type = AES_GCM;
+		auth_type = GMAC_TYPE;
+		break;
+	default:
+		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
+			       a_form->algo);
+		return -1;
+	}
+
+	sess->zsk_flag = zsk_flag;
+	sess->aes_gcm = aes_gcm;
+	sess->is_gmac = 1;
+	sess->iv_offset = a_form->iv.offset;
+	sess->iv_length = a_form->iv.length;
+	sess->mac_len = a_form->digest_length;
+
+	/* Program keys into the private context following the misc area */
+	cpt_fc_ciph_set_key(SESS_PRIV(sess), enc_type, a_form->key.data,
+			    a_form->key.length, NULL);
+	cpt_fc_auth_set_key(SESS_PRIV(sess), auth_type, NULL, 0,
+			    a_form->digest_length);
+
+	return 0;
+}
+
 #endif /*_CPT_UCODE_H_ */