Files
vpp/src/plugins/dev_octeon/crypto.h
T
Nithinsen Kaithakadan 6937c0b2df octeon: add crypto framework
Configure crypto device.
Add crypto support in control plane and data plane.
Control plane
 - Handle vnet crypto key add and delete
 - Register crypto async enqueue and dequeue handlers
Data plane
 - Add encryption and decryption support for
    - AES-GCM
    - AES-CBC with HMAC-SHA1/256/384/512
    - AES-CTR with HMAC-SHA1
    - 3DES-CBC with HMAC-MD5/SHA1/256/384/512

Type: feature

Signed-off-by: Nithinsen Kaithakadan <nkaithakadan@marvell.com>
Signed-off-by: Monendra Singh Kushwaha <kmonendra@marvell.com>
Change-Id: Ia9e16c61ed84800a59e0c932a4ba6aa1423c1ec8
2024-10-17 11:53:39 +00:00

175 lines
6.3 KiB
C

/*
* Copyright (c) 2024 Marvell.
* SPDX-License-Identifier: Apache-2.0
* https://spdx.org/licenses/Apache-2.0.html
*/
#ifndef _CRYPTO_H_
#define _CRYPTO_H_
#include <vnet/crypto/crypto.h>
#include <vnet/ip/ip.h>
/** Maximum number of CPT (crypto) devices the plugin supports */
#define OCT_MAX_N_CPT_DEV 2
/** Maximum number of descriptors per CPT local function (queue) */
#define OCT_CPT_LF_MAX_NB_DESC 128000
/*
 * Supported AEAD algorithms. Each `_` entry expands to:
 *   CRYPTO_ID, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN
 * Every GCM key size is listed with both 8- and 12-byte AAD
 * (presumably the ESP non-ESN/ESN variants — confirm against vnet
 * async op registration).
 */
/* CRYPTO_ID, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_oct_crypto_aead_async_alg \
_ (AES_128_GCM, 16, 16, 8) \
_ (AES_128_GCM, 16, 16, 12) \
_ (AES_192_GCM, 24, 16, 8) \
_ (AES_192_GCM, 24, 16, 12) \
_ (AES_256_GCM, 32, 16, 8) \
_ (AES_256_GCM, 32, 16, 12)
/*
 * Supported cipher+auth (linked) algorithms. Each `_` entry expands to:
 *   CRYPTO_ID, INTEG_ID, KEY_LENGTH_IN_BYTES, DIGEST_LEN
 * where DIGEST_LEN is the truncated HMAC digest length in bytes.
 */
/* CRYPTO_ID, INTEG_ID, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_oct_crypto_link_async_alg \
_ (AES_128_CBC, SHA1, 16, 12) \
_ (AES_192_CBC, SHA1, 24, 12) \
_ (AES_256_CBC, SHA1, 32, 12) \
_ (AES_128_CBC, SHA256, 16, 16) \
_ (AES_192_CBC, SHA256, 24, 16) \
_ (AES_256_CBC, SHA256, 32, 16) \
_ (AES_128_CBC, SHA384, 16, 24) \
_ (AES_192_CBC, SHA384, 24, 24) \
_ (AES_256_CBC, SHA384, 32, 24) \
_ (AES_128_CBC, SHA512, 16, 32) \
_ (AES_192_CBC, SHA512, 24, 32) \
_ (AES_256_CBC, SHA512, 32, 32) \
_ (3DES_CBC, MD5, 24, 12) \
_ (3DES_CBC, SHA1, 24, 12) \
_ (3DES_CBC, SHA256, 24, 16) \
_ (3DES_CBC, SHA384, 24, 24) \
_ (3DES_CBC, SHA512, 24, 32) \
_ (AES_128_CTR, SHA1, 16, 12) \
_ (AES_192_CTR, SHA1, 24, 12) \
_ (AES_256_CTR, SHA1, 32, 12)
/**
 * Increment index @p i modulo limit @p l (wrap to 0 on reaching l - 1).
 * Fix: @p l is now fully parenthesized — previously OCT_MOD_INC (i, 1 << 2)
 * mis-expanded to `1 << 2 - 1`, i.e. a limit of 2 instead of 4.
 * NOTE: @p i is evaluated more than once and assigned to; both arguments
 * must be side-effect-free, and @p i must be an lvalue.
 */
#define OCT_MOD_INC(i, l) ((i) == ((l) -1) ? (i) = 0 : (i)++)
/** Size in bytes of the per-element scatter/gather scratch area */
#define OCT_SCATTER_GATHER_BUFFER_SIZE 1024
/** Number of 16-byte LMT words occupied by one CPT instruction */
#define CPT_LMT_SIZE_COPY (sizeof (struct cpt_inst_s) / 16)
/** Maximum LMT lines available per submission region */
#define OCT_MAX_LMT_SZ 16
/** Bytes needed for an roc_se_iov_ptr carrying ROC_MAX_SG_CNT buffers */
#define SRC_IOV_SIZE \
(sizeof (struct roc_se_iov_ptr) + \
(sizeof (struct roc_se_buf_ptr) * ROC_MAX_SG_CNT))
/** Address of LMT line @p lmt_num within the region at @p lmt_addr */
#define OCT_CPT_LMT_GET_LINE_ADDR(lmt_addr, lmt_num) \
((void *) ((u64) (lmt_addr) + ((u64) (lmt_num) << ROC_LMT_LINE_SIZE_LOG2)))
/** Per-CPT-device state. */
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
/** Base roc CPT device handle */
struct roc_cpt *roc_cpt;
/** LMT line state used when submitting CPT instructions */
struct roc_cpt_lmtline lmtline;
/** CPT local function (hardware queue) */
struct roc_cpt_lf lf;
/** Back-pointer to the owning vnet device */
vnet_dev_t *dev;
} oct_crypto_dev_t;
/** Per-key crypto session: algorithm parameters and CPT context. */
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
/** CPT opcode */
u16 cpt_op : 4;
/** Flag for AES GCM */
u16 aes_gcm : 1;
/** IV length in bytes */
u8 iv_length;
/** Auth IV length in bytes */
u8 auth_iv_length;
/** IV offset in bytes */
u16 iv_offset;
/** Auth IV offset in bytes */
u16 auth_iv_offset;
/** CPT inst word 7 */
u64 cpt_inst_w7;
/* initialise as part of first packet */
u8 initialised;
/* store link key index in case of linked algo */
vnet_crypto_key_index_t key_index;
/** Crypto device this session is bound to */
oct_crypto_dev_t *crypto_dev;
/** roc security-engine context holding the programmed key state */
struct roc_se_ctx cpt_ctx;
} oct_crypto_sess_t;
/** Per-vnet-key state: the session created for the key and its device. */
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
/** Session derived from this key (NULL until first use — see
 *  oct_crypto_sess_t.initialised) */
oct_crypto_sess_t *sess;
/** Device the session was created on */
oct_crypto_dev_t *crypto_dev;
} oct_crypto_key_t;
/** Fixed-size scratch buffer used to build CPT scatter/gather lists. */
typedef struct oct_crypto_scatter_gather
{
u8 buf[OCT_SCATTER_GATHER_BUFFER_SIZE];
} oct_crypto_scatter_gather_t;
/** One in-flight async frame: per-element hardware results plus progress. */
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
/** Result data of all entries in the frame (written by hardware, hence
 *  volatile) */
volatile union cpt_res_s res[VNET_CRYPTO_FRAME_SIZE];
/** Scatter gather data */
void *sg_data;
/** Frame pointer */
vnet_crypto_async_frame_t *frame;
/** Number of async elements in frame */
u16 elts;
/** Next read entry in frame, when dequeue */
u16 deq_elts;
} oct_crypto_inflight_req_t;
/** Ring of in-flight requests between enqueue and dequeue handlers. */
typedef struct
{
/** Array of pending request */
oct_crypto_inflight_req_t *req_queue;
/** Number of inflight operations in queue */
u32 n_crypto_inflight;
/** Tail of queue to be used for enqueue */
u16 enq_tail;
/** Head of queue to be used for dequeue */
u16 deq_head;
/** Number of descriptors */
u16 n_desc;
} oct_crypto_pending_queue_t;
/** Plugin-wide crypto state (single instance: oct_crypto_main). */
typedef struct
{
/** Configured CPT devices */
oct_crypto_dev_t *crypto_dev[OCT_MAX_N_CPT_DEV];
/** Per-async-op-type vectors of key state */
oct_crypto_key_t *keys[VNET_CRYPTO_ASYNC_OP_N_TYPES];
/** Pending queues (vector — presumably one per worker thread; confirm
 *  against oct_conf_sw_queue) */
oct_crypto_pending_queue_t *pend_q;
/** Number of CPT devices in use */
int n_cpt;
/** Set once the crypto engine has been started */
u8 started;
} oct_crypto_main_t;
/** Single global instance of the plugin crypto state. */
extern oct_crypto_main_t oct_crypto_main;
/** Handle vnet crypto key delete for @p key_index. */
void oct_crypto_key_del_handler (vlib_main_t *vm,
vnet_crypto_key_index_t key_index);
/** Handle vnet crypto key add for @p key_index. */
void oct_crypto_key_add_handler (vlib_main_t *vm,
vnet_crypto_key_index_t key_index);
/** Dispatch a vnet crypto key operation @p kop for key @p idx. */
void oct_crypto_key_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop,
vnet_crypto_key_index_t idx);
/** Enqueue a linked-algorithm (cipher+auth) encrypt frame. */
int oct_crypto_enqueue_linked_alg_enc (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Enqueue a linked-algorithm (cipher+auth) decrypt frame. */
int oct_crypto_enqueue_linked_alg_dec (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Enqueue an AEAD encrypt frame with 8-byte AAD. */
int oct_crypto_enqueue_aead_aad_8_enc (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Enqueue an AEAD encrypt frame with 12-byte AAD. */
int oct_crypto_enqueue_aead_aad_12_enc (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Enqueue an AEAD decrypt frame with 8-byte AAD. */
int oct_crypto_enqueue_aead_aad_8_dec (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Enqueue an AEAD decrypt frame with 12-byte AAD. */
int oct_crypto_enqueue_aead_aad_12_dec (vlib_main_t *vm,
vnet_crypto_async_frame_t *frame);
/** Dequeue a completed async frame, if any; reports the number of
 *  processed elements and the enqueuing thread index. */
vnet_crypto_async_frame_t *oct_crypto_frame_dequeue (vlib_main_t *vm,
u32 *nb_elts_processed,
u32 *enqueue_thread_idx);
/** Register crypto engine enqueue/dequeue handlers for @p dev. */
int oct_init_crypto_engine_handlers (vlib_main_t *vm, vnet_dev_t *dev);
/** Configure the software pending queue(s) for @p dev. */
int oct_conf_sw_queue (vlib_main_t *vm, vnet_dev_t *dev);
#endif /* _CRYPTO_H_ */