DPDK 23.11.2
Loading...
Searching...
No Matches
rte_cryptodev.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015-2020 Intel Corporation.
3 */
4
5#ifndef _RTE_CRYPTODEV_H_
6#define _RTE_CRYPTODEV_H_
7
17#ifdef __cplusplus
18extern "C" {
19#endif
20
21#include <rte_compat.h>
22#include "rte_kvargs.h"
23#include "rte_crypto.h"
24#include <rte_common.h>
25#include <rte_rcu_qsbr.h>
26
27#include "rte_cryptodev_trace_fp.h"
28
29/* Logging Macros */
30
/* Log an error message, prefixed with the calling function name and line. */
#define CDEV_LOG_ERR(...) \
	RTE_LOG(ERR, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
35
/* Log an informational message (no function/line prefix, unlike ERR/DEBUG). */
#define CDEV_LOG_INFO(...) \
	RTE_LOG(INFO, CRYPTODEV, \
		RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			RTE_FMT_TAIL(__VA_ARGS__,)))
40
/* Log a debug message, prefixed with the calling function name and line. */
#define CDEV_LOG_DEBUG(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
45
/*
 * PMD trace macro: logs "[dev] func: ..." at DEBUG level.
 * NOTE: the expansion references a free variable `dev` which must be in
 * scope at the call site and printable with %s (presumably a device or
 * driver name string — confirm against PMD usage).
 */
#define CDEV_PMD_TRACE(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
50
/**
 * Return a pointer of type @p t at byte offset @p o from the start of the
 * crypto operation @p c (virtual-address arithmetic only).
 */
#define rte_crypto_op_ctod_offset(c, t, o) \
	((t)((char *)(c) + (o)))
66
/**
 * Return the IOVA (physical) address at byte offset @p o from the start of
 * the crypto operation @p c, computed from the op's phys_addr field.
 */
#define rte_crypto_op_ctophys_offset(c, o) \
	(rte_iova_t)((c)->phys_addr + (o))
80
85 uint16_t min;
86 uint16_t max;
87 uint16_t increment;
93};
94
/*
 * Cipher data-unit length capability flags (bitmask values; see the
 * `dataunit_set` field of the cipher capability structure).
 */
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
103
110 union {
111 struct {
114 uint16_t block_size;
124 } auth;
126 struct {
129 uint16_t block_size;
131 struct rte_crypto_param_range key_size;
133 struct rte_crypto_param_range iv_size;
135 uint32_t dataunit_set;
141 } cipher;
143 struct {
146 uint16_t block_size;
148 struct rte_crypto_param_range key_size;
150 struct rte_crypto_param_range digest_size;
152 struct rte_crypto_param_range aad_size;
154 struct rte_crypto_param_range iv_size;
156 } aead;
157 };
158};
159
167 uint32_t op_types;
176 __extension__
177 union {
188 };
189
190 uint64_t hash_algos;
192};
193
200
201
214
218 union {
219 enum rte_crypto_cipher_algorithm cipher;
222 } algo;
223};
224
233
246 const struct rte_cryptodev_sym_capability_idx *idx);
247
260 const struct rte_cryptodev_asym_capability_idx *idx);
261
274int
276 const struct rte_cryptodev_symmetric_capability *capability,
277 uint16_t key_size, uint16_t iv_size);
278
292int
294 const struct rte_cryptodev_symmetric_capability *capability,
295 uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
296
311int
313 const struct rte_cryptodev_symmetric_capability *capability,
314 uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
315 uint16_t iv_size);
316
327int
329 const struct rte_cryptodev_asymmetric_xform_capability *capability,
330 enum rte_crypto_asym_op_type op_type);
331
342int
344 const struct rte_cryptodev_asymmetric_xform_capability *capability,
345 uint16_t modlen);
346
357bool
359 const struct rte_cryptodev_asymmetric_xform_capability *capability,
360 enum rte_crypto_auth_algorithm hash);
361
373int
375 const char *algo_string);
376
388int
390 const char *algo_string);
391
403int
405 const char *algo_string);
406
418int
420 const char *xform_string);
421
431__rte_experimental
432const char *
434
444__rte_experimental
445const char *
447
457__rte_experimental
458const char *
460
470__rte_experimental
471const char *
473
474
/* Sentinel entry terminating an rte_cryptodev_capabilities array. */
#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
	{ RTE_CRYPTO_OP_TYPE_UNDEFINED }
478
479
/*
 * Crypto device feature flags.
 *
 * Each flag is one bit of a 64-bit mask; rte_cryptodev_get_feature_name()
 * translates a single flag into its printable name.
 */
#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
/* CPU instruction-set / acceleration related flags. */
#define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
#define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
#define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
#define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
#define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
#define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
/* Scatter-gather list support, in-place and out-of-place (OOP) variants. */
#define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
#define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
#define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
#define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
#define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
#define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
#define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
#define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
#define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
#define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
#define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
#define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
#define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
#define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT (1ULL << 28)
566const char *
568
570/* Structure rte_cryptodev_info 8< */
572 const char *driver_name;
573 uint8_t driver_id;
574 struct rte_device *device;
591 struct {
597 } sym;
598};
599/* >8 End of structure rte_cryptodev_info. */
600
/* Device attachment states. */
#define RTE_CRYPTODEV_DETACHED (0)
#define RTE_CRYPTODEV_ATTACHED (1)
603
610
612/* Structure rte_cryptodev_qp_conf 8<*/
618/* >8 End of structure rte_cryptodev_qp_conf. */
619
/**
 * Function type for user-supplied queue-pair enqueue/dequeue callbacks.
 * Receives the device id, queue pair id, the burst of operations and the
 * user parameter registered with the callback; returns the (possibly
 * modified) number of operations in the burst.
 */
typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
		struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
643
/**
 * Function type for device-event notification callbacks (see
 * rte_cryptodev_callback_register()/unregister()).
 */
typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
		enum rte_cryptodev_event_type event, void *cb_arg);
655
656
669
/** Maximum length of a crypto device name. */
#define RTE_CRYPTODEV_NAME_MAX_LEN (64)
682int
683rte_cryptodev_get_dev_id(const char *name);
684
695const char *
697
705uint8_t
707
716uint8_t
718
730uint8_t
731rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
732 uint8_t nb_devices);
/*
 * Return the NUMA socket to which a device is connected.
 *
 * @param dev_id
 *   The identifier of the device.
 * @return
 *   The NUMA socket id to which the device is connected, or a default
 *   of zero if the socket could not be determined.
 *   -1 if the dev_id value is out of range.
 */
int
rte_cryptodev_socket_id(uint8_t dev_id);
745
747/* Structure rte_cryptodev_config 8< */
760/* >8 End of structure rte_cryptodev_config. */
761
776int
777rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
778
794int
795rte_cryptodev_start(uint8_t dev_id);
796
803void
804rte_cryptodev_stop(uint8_t dev_id);
805
815int
816rte_cryptodev_close(uint8_t dev_id);
817
839int
840rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
841 const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
842
856int
857rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
858
866uint16_t
868
869
881int
882rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
883
889void
891
905void
906rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
907
908
922int
924 enum rte_cryptodev_event_type event,
925 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
926
940int
942 enum rte_cryptodev_event_type event,
943 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
944
960__rte_experimental
961int
962rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id);
963
/* Opaque registered-callback entry; the definition is library-internal. */
struct rte_cryptodev_callback;

/** Tail-queue head type for the list of registered device-event callbacks. */
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);

981
/** RCU-protected head of a queue pair's enqueue/dequeue callback list. */
struct rte_cryptodev_cb_rcu {
	/* First callback in the list; loaded/stored atomically by the
	 * fast-path burst functions. */
	RTE_ATOMIC(struct rte_cryptodev_cb *) next;
	/* QSBR variable readers go online/offline on while walking the list. */
	struct rte_rcu_qsbr *qsbr;
};
992
1002void *
1004
1034struct rte_mempool *
1035rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
1036 uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
1037 int socket_id);
1038
1039
1060struct rte_mempool *
1061rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
1062 uint32_t cache_size, uint16_t user_data_size, int socket_id);
1063
1080void *
1082 struct rte_crypto_sym_xform *xforms,
1083 struct rte_mempool *mp);
1101int
1103 struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1104 void **session);
1105
1118int
1120 void *sess);
1121
1133int
1134rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1135
1142unsigned int
1144
1156unsigned int
1158
1169unsigned int
1171
1180unsigned int
1182
1192
1201const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1202
1215int
1217 void *data,
1218 uint16_t size);
1219
/* Offset, in uint64_t units, of the opaque data word inside a session. */
#define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1224static inline uint64_t
1226{
1227 return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1228}
1229
1233static inline void
1235{
1236 uint64_t *data;
1237 data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1238 *data = opaque;
1239}
1240
1251void *
1253
1267int
1268rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1269
1280void *
1282
1295uint32_t
1297 void *sess, union rte_crypto_sym_ofs ofs,
1298 struct rte_crypto_sym_vec *vec);
1299
1309int
1311
1327int
1328rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1329 enum rte_crypto_op_type op_type,
1330 enum rte_crypto_op_sess_type sess_type,
1331 void *ev_mdata, uint16_t size);
1332
/** Session context carried into the raw data-path API configuration. */
union rte_cryptodev_session_ctx {
	/* Generic crypto session handle. */
	void *crypto_sess;
	/* Symmetric transform chain (session-less usage). */
	struct rte_crypto_sym_xform *xform;
	/* rte_security session handle. */
	struct rte_security_session *sec_sess;
};
1341
1368 void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1369 union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1370
1393 void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1394 uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1395 struct rte_crypto_va_iova_ptr *iv,
1396 struct rte_crypto_va_iova_ptr *digest,
1397 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1398 void *user_data);
1399
/**
 * Driver function prototype notifying that @p n previously issued raw
 * operations on queue pair @p qp are done (see
 * rte_cryptodev_raw_enqueue_done()/rte_cryptodev_raw_dequeue_done()).
 */
typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
		uint32_t n);
1413
/**
 * User callback returning the number of operations to dequeue; used as the
 * get_dequeue_count argument of rte_cryptodev_raw_dequeue_burst().
 */
typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1424
/**
 * User callback invoked by rte_cryptodev_raw_dequeue_burst() to report an
 * operation's user data, its index within the burst and a success flag.
 */
typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
		uint32_t index, uint8_t is_op_success);
1435
1477typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1478 uint8_t *drv_ctx,
1479 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1480 uint32_t max_nb_to_dequeue,
1482 void **out_user_data, uint8_t is_user_data_array,
1483 uint32_t *n_success, int *dequeue_status);
1484
/**
 * Driver function prototype dequeuing a single raw operation; returns an
 * opaque pointer (the dequeued operation's user data — see
 * rte_cryptodev_raw_dequeue()) and reports status via @p dequeue_status
 * and @p op_status.
 */
typedef void * (*cryptodev_sym_raw_dequeue_t)(
	void *qp, uint8_t *drv_ctx, int *dequeue_status,
	enum rte_crypto_op_status *op_status);
1511
1518 void *qp_data;
1519
1526
1527 /* Driver specific context data */
1528 __extension__ uint8_t drv_ctx_data[];
1529};
1530
1552int
1553rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1554 struct rte_crypto_raw_dp_ctx *ctx,
1555 enum rte_crypto_op_sess_type sess_type,
1556 union rte_cryptodev_session_ctx session_ctx,
1557 uint8_t is_update);
1558
1583uint32_t
1585 struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1586 void **user_data, int *enqueue_status);
1587
1608__rte_experimental
1609static __rte_always_inline int
1611 struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1612 union rte_crypto_sym_ofs ofs,
1613 struct rte_crypto_va_iova_ptr *iv,
1614 struct rte_crypto_va_iova_ptr *digest,
1615 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1616 void *user_data)
1617{
1618 return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1619 n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1620}
1621
1632int
1634 uint32_t n);
1635
1677uint32_t
1679 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1680 uint32_t max_nb_to_dequeue,
1682 void **out_user_data, uint8_t is_user_data_array,
1683 uint32_t *n_success, int *dequeue_status);
1684
1708__rte_experimental
1709static __rte_always_inline void *
1711 int *dequeue_status, enum rte_crypto_op_status *op_status)
1712{
1713 return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1714 op_status);
1715}
1716
1726int
1728 uint32_t n);
1729
1765struct rte_cryptodev_cb *
1767 uint16_t qp_id,
1769 void *cb_arg);
1770
1793 uint16_t qp_id,
1794 struct rte_cryptodev_cb *cb);
1795
1830struct rte_cryptodev_cb *
1832 uint16_t qp_id,
1834 void *cb_arg);
1835
1858 uint16_t qp_id,
1859 struct rte_cryptodev_cb *cb);
1860
1861#include <rte_cryptodev_core.h>
1898static inline uint16_t
1899rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1900 struct rte_crypto_op **ops, uint16_t nb_ops)
1901{
1902 const struct rte_crypto_fp_ops *fp_ops;
1903 void *qp;
1904
1905 rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1906
1907 fp_ops = &rte_crypto_fp_ops[dev_id];
1908 qp = fp_ops->qp.data[qp_id];
1909
1910 nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1911
1912#ifdef RTE_CRYPTO_CALLBACKS
1913 if (unlikely(fp_ops->qp.deq_cb[qp_id].next != NULL)) {
1914 struct rte_cryptodev_cb_rcu *list;
1915 struct rte_cryptodev_cb *cb;
1916
1917 /* rte_memory_order_release memory order was used when the
1918 * call back was inserted into the list.
1919 * Since there is a clear dependency between loading
1920 * cb and cb->fn/cb->next, rte_memory_order_acquire memory order is
1921 * not required.
1922 */
1923 list = &fp_ops->qp.deq_cb[qp_id];
1924 rte_rcu_qsbr_thread_online(list->qsbr, 0);
1925 cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);
1926
1927 while (cb != NULL) {
1928 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1929 cb->arg);
1930 cb = cb->next;
1931 };
1932
1933 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1934 }
1935#endif
1936 return nb_ops;
1937}
1938
1970static inline uint16_t
1971rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1972 struct rte_crypto_op **ops, uint16_t nb_ops)
1973{
1974 const struct rte_crypto_fp_ops *fp_ops;
1975 void *qp;
1976
1977 fp_ops = &rte_crypto_fp_ops[dev_id];
1978 qp = fp_ops->qp.data[qp_id];
1979#ifdef RTE_CRYPTO_CALLBACKS
1980 if (unlikely(fp_ops->qp.enq_cb[qp_id].next != NULL)) {
1981 struct rte_cryptodev_cb_rcu *list;
1982 struct rte_cryptodev_cb *cb;
1983
1984 /* rte_memory_order_release memory order was used when the
1985 * call back was inserted into the list.
1986 * Since there is a clear dependency between loading
1987 * cb and cb->fn/cb->next, rte_memory_order_acquire memory order is
1988 * not required.
1989 */
1990 list = &fp_ops->qp.enq_cb[qp_id];
1991 rte_rcu_qsbr_thread_online(list->qsbr, 0);
1992 cb = rte_atomic_load_explicit(&list->next, rte_memory_order_relaxed);
1993
1994 while (cb != NULL) {
1995 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1996 cb->arg);
1997 cb = cb->next;
1998 };
1999
2000 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
2001 }
2002#endif
2003
2004 rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
2005 return fp_ops->enqueue_burst(qp, ops, nb_ops);
2006}
2007
2008
2009
2010#ifdef __cplusplus
2011}
2012#endif
2013
2014#endif /* _RTE_CRYPTODEV_H_ */
#define unlikely(x)
#define __rte_always_inline
Definition rte_common.h:331
rte_crypto_op_sess_type
Definition rte_crypto.h:61
rte_crypto_op_type
Definition rte_crypto.h:28
rte_crypto_op_status
Definition rte_crypto.h:38
rte_crypto_asym_op_type
rte_crypto_asym_xform_type
rte_crypto_auth_algorithm
rte_crypto_sym_xform_type
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
int rte_cryptodev_close(uint8_t dev_id)
struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
rte_cryptodev_event_type
@ RTE_CRYPTODEV_EVENT_ERROR
@ RTE_CRYPTODEV_EVENT_UNKNOWN
@ RTE_CRYPTODEV_EVENT_MAX
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
uint8_t rte_cryptodev_count(void)
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_start(uint8_t dev_id)
bool rte_cryptodev_asym_xform_capability_check_hash(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_auth_algorithm hash)
uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental const char * rte_cryptodev_get_aead_algo_string(enum rte_crypto_aead_algorithm algo_enum)
int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
void rte_cryptodev_stop(uint8_t dev_id)
struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
const char * rte_cryptodev_name_get(uint8_t dev_id)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
__rte_experimental const char * rte_cryptodev_asym_get_xform_string(enum rte_crypto_asym_xform_type xform_enum)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
void rte_cryptodev_stats_reset(uint8_t dev_id)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
void * rte_cryptodev_sym_session_get_user_data(void *sess)
void * rte_cryptodev_asym_session_get_user_data(void *sess)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_get_dev_id(const char *name)
void * rte_cryptodev_get_sec_ctx(uint8_t dev_id)
unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
__rte_experimental const char * rte_cryptodev_get_cipher_algo_string(enum rte_crypto_cipher_algorithm algo_enum)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
unsigned int rte_cryptodev_asym_get_header_session_size(void)
__rte_experimental const char * rte_cryptodev_get_auth_algo_string(enum rte_crypto_auth_algorithm algo_enum)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
__rte_experimental int rte_cryptodev_queue_pair_event_error_query(uint8_t dev_id, uint16_t qp_id)
struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
enum rte_crypto_asym_xform_type type
struct rte_crypto_param_range modlen
enum rte_crypto_asym_xform_type xform_type
enum rte_crypto_op_type op
struct rte_cryptodev_symmetric_capability sym
struct rte_cryptodev_asymmetric_capability asym
struct rte_cryptodev_cb * next
rte_cryptodev_callback_fn fn
unsigned max_nb_queue_pairs
struct rte_device * device
uint16_t min_mbuf_headroom_req
const struct rte_cryptodev_capabilities * capabilities
uint16_t min_mbuf_tailroom_req
const char * driver_name
struct rte_mempool * mp_session
enum rte_crypto_auth_algorithm algo
enum rte_crypto_cipher_algorithm algo
enum rte_crypto_aead_algorithm algo
struct rte_crypto_param_range iv_size
struct rte_crypto_param_range digest_size
struct rte_cryptodev_symmetric_capability::@101::@103 auth
struct rte_cryptodev_symmetric_capability::@101::@104 cipher
struct rte_crypto_param_range aad_size
struct rte_crypto_param_range key_size
enum rte_crypto_sym_xform_type xform_type
char name[RTE_MEMPOOL_NAMESIZE]
uint32_t size
uint32_t cache_size
uint32_t elt_size