DPDK 22.11.2
rte_cryptodev.h
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2015-2020 Intel Corporation.
3  */
4 
5 #ifndef _RTE_CRYPTODEV_H_
6 #define _RTE_CRYPTODEV_H_
7 
17 #ifdef __cplusplus
18 extern "C" {
19 #endif
20 
21 #include <rte_compat.h>
22 #include "rte_kvargs.h"
23 #include "rte_crypto.h"
24 #include <rte_common.h>
25 #include <rte_rcu_qsbr.h>
26 
27 #include "rte_cryptodev_trace_fp.h"
28 
29 extern const char **rte_cyptodev_names;
30 
31 /* Logging Macros */
32 
33 #define CDEV_LOG_ERR(...) \
34  RTE_LOG(ERR, CRYPTODEV, \
35  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
36  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
37 
38 #define CDEV_LOG_INFO(...) \
39  RTE_LOG(INFO, CRYPTODEV, \
40  RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
41  RTE_FMT_TAIL(__VA_ARGS__,)))
42 
43 #define CDEV_LOG_DEBUG(...) \
44  RTE_LOG(DEBUG, CRYPTODEV, \
45  RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
46  __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
47 
48 #define CDEV_PMD_TRACE(...) \
49  RTE_LOG(DEBUG, CRYPTODEV, \
50  RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
51  dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
52 
66 #define rte_crypto_op_ctod_offset(c, t, o) \
67  ((t)((char *)(c) + (o)))
68 
80 #define rte_crypto_op_ctophys_offset(c, o) \
81  (rte_iova_t)((c)->phys_addr + (o))
82 
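A minimal usage sketch (not part of the header): the two macros above compute a virtual address and an IO address at a byte offset inside a crypto operation. IV_OFFSET and copy_iv_into_op() are hypothetical names for an IV area the application reserved after the symmetric op when sizing its crypto-op mempool.

/* hypothetical per-op IV area reserved by the application */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

static void
copy_iv_into_op(struct rte_crypto_op *op, const uint8_t *iv, uint16_t iv_len)
{
	uint8_t *iv_va = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
	rte_iova_t iv_io = rte_crypto_op_ctophys_offset(op, IV_OFFSET);

	memcpy(iv_va, iv, iv_len);	/* <string.h> assumed */
	RTE_SET_USED(iv_io);		/* usually programmed into a device descriptor */
}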
87  uint16_t min;
88  uint16_t max;
89  uint16_t increment;
95 };
96 
102 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
103 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
104 #define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
105 
113  union {
114  struct {
117  uint16_t block_size;
127  } auth;
129  struct {
132  uint16_t block_size;
138  uint32_t dataunit_set;
144  } cipher;
146  struct {
149  uint16_t block_size;
159  } aead;
160  };
161 };
162 
171  uint32_t op_types;
180  __extension__
181  union {
186  };
187 };
188 
195 };
196 
197 
200  enum rte_crypto_op_type op;
204  union {
209  };
210 };
211 
214  enum rte_crypto_sym_xform_type type;
215  union {
216  enum rte_crypto_cipher_algorithm cipher;
217  enum rte_crypto_auth_algorithm auth;
218  enum rte_crypto_aead_algorithm aead;
219  } algo;
220 };
221 
230 };
231 
244  const struct rte_cryptodev_sym_capability_idx *idx);
245 
256 __rte_experimental
259  const struct rte_cryptodev_asym_capability_idx *idx);
260 
273 int
275  const struct rte_cryptodev_symmetric_capability *capability,
276  uint16_t key_size, uint16_t iv_size);
277 
291 int
293  const struct rte_cryptodev_symmetric_capability *capability,
294  uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
295 
310 int
312  const struct rte_cryptodev_symmetric_capability *capability,
313  uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
314  uint16_t iv_size);
315 
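A hedged sketch of how the capability query and check helpers above are typically combined; dev_id is assumed to identify a probed crypto device, and AES-CBC with a 128-bit key and 16-byte IV is only an illustrative choice.

static int
dev_supports_aes_cbc_128(uint8_t dev_id)
{
	const struct rte_cryptodev_symmetric_capability *cap;
	struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.algo.cipher = RTE_CRYPTO_CIPHER_AES_CBC,
	};

	cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
	if (cap == NULL)
		return 0;

	/* 16-byte key, 16-byte IV; the check returns 0 when both are in range */
	return rte_cryptodev_sym_capability_check_cipher(cap, 16, 16) == 0;
}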
326 __rte_experimental
327 int
329  const struct rte_cryptodev_asymmetric_xform_capability *capability,
330  enum rte_crypto_asym_op_type op_type);
331 
342 __rte_experimental
343 int
345  const struct rte_cryptodev_asymmetric_xform_capability *capability,
346  uint16_t modlen);
347 
359 int
361  const char *algo_string);
362 
374 int
376  const char *algo_string);
377 
389 int
391  const char *algo_string);
392 
404 __rte_experimental
405 int
407  const char *xform_string);
408 
409 
411 #define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
412  { RTE_CRYPTO_OP_TYPE_UNDEFINED }
413 
414 
423 #define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
425 #define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
427 #define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
429 #define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
431 #define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
433 #define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
435 #define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
437 #define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
441 #define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
443 #define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
447 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
451 #define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
456 #define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
460 #define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
462 #define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
464 #define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
466 #define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
468 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
470 #define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
472 #define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
474 #define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
476 #define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
478 #define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
480 #define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
482 #define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
484 #define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
486 #define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
488 #define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
500 extern const char *
502 
504 /* Structure rte_cryptodev_info 8< */
506  const char *driver_name;
507  uint8_t driver_id;
508  struct rte_device *device;
510  uint64_t feature_flags;
525  struct {
526  unsigned max_nb_sessions;
531  } sym;
532 };
533 /* >8 End of structure rte_cryptodev_info. */
534 
535 #define RTE_CRYPTODEV_DETACHED (0)
536 #define RTE_CRYPTODEV_ATTACHED (1)
537 
543 };
544 
546 /* Structure rte_cryptodev_qp_conf 8<*/
548  uint32_t nb_descriptors;
551 };
552 /* >8 End of structure rte_cryptodev_qp_conf. */
553 
575 typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
576  struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
577 
587 typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
588  enum rte_cryptodev_event_type event, void *cb_arg);
589 
590 
593  uint64_t enqueued_count;
595  uint64_t dequeued_count;
602 };
603 
604 #define RTE_CRYPTODEV_NAME_MAX_LEN (64)
616 extern int
617 rte_cryptodev_get_dev_id(const char *name);
618 
629 extern const char *
630 rte_cryptodev_name_get(uint8_t dev_id);
631 
639 extern uint8_t
641 
650 extern uint8_t
652 
664 uint8_t
665 rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
666  uint8_t nb_devices);
667 /*
668  * Return the NUMA socket to which a device is connected
669  *
670  * @param dev_id
671  * The identifier of the device
672  * @return
673  * The NUMA socket id to which the device is connected or
674  * a default of zero if the socket could not be determined.
675  * -1 if the dev_id value is out of range.
676  */
677 extern int
678 rte_cryptodev_socket_id(uint8_t dev_id);
679 
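A small enumeration sketch using the helpers above (illustrative only; assumes crypto PMDs have already been probed by the EAL and <stdio.h> is available):

uint8_t nb_devs = rte_cryptodev_count();

for (uint8_t dev_id = 0; dev_id < nb_devs; dev_id++) {
	if (!rte_cryptodev_is_valid_dev(dev_id))
		continue;
	printf("cryptodev %u: %s (socket %d)\n", dev_id,
	       rte_cryptodev_name_get(dev_id),
	       rte_cryptodev_socket_id(dev_id));
}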
681 /* Structure rte_cryptodev_config 8< */
683  int socket_id;
684  uint16_t nb_queue_pairs;
686  uint64_t ff_disable;
693 };
694 /* >8 End of structure rte_cryptodev_config. */
695 
710 extern int
711 rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
712 
728 extern int
729 rte_cryptodev_start(uint8_t dev_id);
730 
737 extern void
738 rte_cryptodev_stop(uint8_t dev_id);
739 
749 extern int
750 rte_cryptodev_close(uint8_t dev_id);
751 
773 extern int
774 rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
775  const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
776 
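A hedged bring-up sketch for the configure/queue-pair/start calls declared above. session_pool is assumed to have been created with rte_cryptodev_sym_session_pool_create(); the queue and descriptor counts are arbitrary application choices.

static int
crypto_dev_init(uint8_t dev_id, struct rte_mempool *session_pool)
{
	int socket = rte_cryptodev_socket_id(dev_id);
	struct rte_cryptodev_config conf = {
		.socket_id = socket,
		.nb_queue_pairs = 1,
		.ff_disable = 0,	/* keep all device features enabled */
	};
	struct rte_cryptodev_qp_conf qp_conf = {
		.nb_descriptors = 2048,
		.mp_session = session_pool,
	};

	if (rte_cryptodev_configure(dev_id, &conf) < 0)
		return -1;
	if (rte_cryptodev_queue_pair_setup(dev_id, 0, &qp_conf, socket) < 0)
		return -1;
	return rte_cryptodev_start(dev_id);
}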
790 __rte_experimental
791 int
792 rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
793 
801 extern uint16_t
803 
804 
816 extern int
817 rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
818 
824 extern void
826 
840 extern void
841 rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
842 
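A short sketch of how rte_cryptodev_info_get() is commonly paired with the RTE_CRYPTODEV_FF_* flags defined earlier; dev_id is assumed valid.

struct rte_cryptodev_info info;

rte_cryptodev_info_get(dev_id, &info);

if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))
	printf("%s cannot chain cipher and auth in a single op\n",
	       info.driver_name);
if (info.sym.max_nb_sessions != 0)
	printf("device limits sessions to %u\n", info.sym.max_nb_sessions);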
843 
857 extern int
859  enum rte_cryptodev_event_type event,
860  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
861 
875 extern int
877  enum rte_cryptodev_event_type event,
878  rte_cryptodev_cb_fn cb_fn, void *cb_arg);
879 
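A hedged example of the event-callback registration declared above; the handler follows the rte_cryptodev_cb_fn signature and the error-only filtering is just an illustration.

static void
crypto_event_handler(uint8_t dev_id, enum rte_cryptodev_event_type event,
		void *cb_arg)
{
	RTE_SET_USED(cb_arg);
	if (event == RTE_CRYPTODEV_EVENT_ERROR)
		printf("cryptodev %u raised an error event\n", dev_id);
}

/* ... at init time ... */
rte_cryptodev_callback_register(dev_id, RTE_CRYPTODEV_EVENT_ERROR,
		crypto_event_handler, NULL);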
880 struct rte_cryptodev_callback;
881 
883 RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
884 
894  void *arg;
896 };
897 
902 struct rte_cryptodev_cb_rcu {
903  struct rte_cryptodev_cb *next;
905  struct rte_rcu_qsbr *qsbr;
907 };
908 
909 void *
910 rte_cryptodev_get_sec_ctx(uint8_t dev_id);
911 
941 __rte_experimental
942 struct rte_mempool *
943 rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
944  uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
945  int socket_id);
946 
947 
968 __rte_experimental
969 struct rte_mempool *
970 rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
971  uint32_t cache_size, uint16_t user_data_size, int socket_id);
972 
986 void *
988  struct rte_crypto_sym_xform *xforms,
989  struct rte_mempool *mp);
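A sketch of session setup using the pool-create and session-create calls above. The key buffer, the IV_OFFSET constant from the earlier sketch, and the pool sizing (here driven by rte_cryptodev_sym_get_private_session_size()) are application choices, not requirements of this header.

static void *
create_aes_cbc_session(uint8_t dev_id, const uint8_t key[16],
		struct rte_mempool **pool_out)
{
	struct rte_crypto_sym_xform xform = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.cipher = {
			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
			.algo = RTE_CRYPTO_CIPHER_AES_CBC,
			.key = { .data = key, .length = 16 },
			.iv = { .offset = IV_OFFSET, .length = 16 },
		},
	};
	struct rte_mempool *pool;

	pool = rte_cryptodev_sym_session_pool_create("sym_sess_pool", 1024,
			rte_cryptodev_sym_get_private_session_size(dev_id),
			32, 0, rte_cryptodev_socket_id(dev_id));
	if (pool == NULL)
		return NULL;

	*pool_out = pool;
	return rte_cryptodev_sym_session_create(dev_id, &xform, pool);
}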
1007 __rte_experimental
1008 int
1010  struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1011  void **session);
1012 
1025 int
1027  void *sess);
1028 
1040 __rte_experimental
1041 int
1042 rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1043 
1050 __rte_experimental
1051 unsigned int
1053 
1065 unsigned int
1067 
1078 __rte_experimental
1079 unsigned int
1081 
1090 unsigned int
1092 
1102 
1111 const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1112 
1125 __rte_experimental
1126 int
1128  void *data,
1129  uint16_t size);
1130 
1131 #define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1135 static inline uint64_t
1137 {
1138  return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1139 }
1140 
1144 static inline void
1145 rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
1146 {
1147  uint64_t *data;
1148  data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1149  *data = opaque;
1150 }
1151 
1162 __rte_experimental
1163 void *
1165 
1179 __rte_experimental
1180 int
1181 rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1182 
1193 __rte_experimental
1194 void *
1196 
1209 __rte_experimental
1210 uint32_t
1212  void *sess, union rte_crypto_sym_ofs ofs,
1213  struct rte_crypto_sym_vec *vec);
1214 
1224 __rte_experimental
1225 int
1227 
1243 __rte_experimental
1244 int
1245 rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1246  enum rte_crypto_op_type op_type,
1247  enum rte_crypto_op_sess_type sess_type,
1248  void *ev_mdata, uint16_t size);
1249 
1254 union rte_cryptodev_session_ctx {void *crypto_sess;
1255  struct rte_crypto_sym_xform *xform;
1256  struct rte_security_session *sec_sess;
1257 };
1258 
1285  void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1286  union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1287 
1310  void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1311  uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1312  struct rte_crypto_va_iova_ptr *iv,
1313  struct rte_crypto_va_iova_ptr *digest,
1314  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1315  void *user_data);
1316 
1328 typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1329  uint32_t n);
1330 
1340 typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1341 
1350 typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1351  uint32_t index, uint8_t is_op_success);
1352 
1394 typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1395  uint8_t *drv_ctx,
1396  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1397  uint32_t max_nb_to_dequeue,
1398  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1399  void **out_user_data, uint8_t is_user_data_array,
1400  uint32_t *n_success, int *dequeue_status);
1401 
1425 typedef void * (*cryptodev_sym_raw_dequeue_t)(
1426  void *qp, uint8_t *drv_ctx, int *dequeue_status,
1427  enum rte_crypto_op_status *op_status);
1428 
1435  void *qp_data;
1436 
1438  cryptodev_sym_raw_enqueue_burst_t enqueue_burst;
1441  cryptodev_sym_raw_dequeue_burst_t dequeue_burst;
1443 
1444  /* Driver specific context data */
1445  __extension__ uint8_t drv_ctx_data[];
1446 };
1447 
1471 __rte_experimental
1472 int
1473 rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1474  struct rte_crypto_raw_dp_ctx *ctx,
1475  enum rte_crypto_op_sess_type sess_type,
1476  union rte_cryptodev_session_ctx session_ctx,
1477  uint8_t is_update);
1478 
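A hedged sketch of raw data-path context setup with the declarations above; sess is assumed to be a symmetric session created for dev_id, and rte_zmalloc()/rte_free() (rte_malloc.h) are just one way to obtain the per-queue-pair context storage.

static struct rte_crypto_raw_dp_ctx *
raw_dp_ctx_create(uint8_t dev_id, uint16_t qp_id, void *sess)
{
	union rte_cryptodev_session_ctx sess_ctx = { .crypto_sess = sess };
	struct rte_crypto_raw_dp_ctx *ctx;
	int size = rte_cryptodev_get_raw_dp_ctx_size(dev_id);

	if (size < 0)	/* device lacks RTE_CRYPTODEV_FF_SYM_RAW_DP */
		return NULL;

	ctx = rte_zmalloc(NULL, size, RTE_CACHE_LINE_SIZE);
	if (ctx == NULL)
		return NULL;

	if (rte_cryptodev_configure_raw_dp_ctx(dev_id, qp_id, ctx,
			RTE_CRYPTO_OP_WITH_SESSION, sess_ctx, 0) < 0) {
		rte_free(ctx);
		return NULL;
	}
	return ctx;
}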
1503 __rte_experimental
1504 uint32_t
1506  struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1507  void **user_data, int *enqueue_status);
1508 
1529 __rte_experimental
1530 static __rte_always_inline int
1532  struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1533  union rte_crypto_sym_ofs ofs,
1534  struct rte_crypto_va_iova_ptr *iv,
1535  struct rte_crypto_va_iova_ptr *digest,
1536  struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1537  void *user_data)
1538 {
1539  return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1540  n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1541 }
1542 
1553 __rte_experimental
1554 int
1556  uint32_t n);
1557 
1599 __rte_experimental
1600 uint32_t
1602  rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1603  uint32_t max_nb_to_dequeue,
1604  rte_cryptodev_raw_post_dequeue_t post_dequeue,
1605  void **out_user_data, uint8_t is_user_data_array,
1606  uint32_t *n_success, int *dequeue_status);
1607 
1631 __rte_experimental
1632 static __rte_always_inline void *
1634  int *dequeue_status, enum rte_crypto_op_status *op_status)
1635 {
1636  return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1637  op_status);
1638 }
1639 
1649 __rte_experimental
1650 int
1652  uint32_t n);
1653 
1690 __rte_experimental
1691 struct rte_cryptodev_cb *
1693  uint16_t qp_id,
1695  void *cb_arg);
1696 
1719 __rte_experimental
1721  uint16_t qp_id,
1722  struct rte_cryptodev_cb *cb);
1723 
1759 __rte_experimental
1760 struct rte_cryptodev_cb *
1762  uint16_t qp_id,
1764  void *cb_arg);
1765 
1787 __rte_experimental
1789  uint16_t qp_id,
1790  struct rte_cryptodev_cb *cb);
1791 
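A sketch of a per-queue-pair enqueue callback using the add/remove helpers above (they only take effect when RTE_CRYPTO_CALLBACKS is enabled at build time); the counter passed as user_param is purely illustrative.

static uint64_t enq_total;

static uint16_t
count_enq_cb(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops,
		uint16_t nb_ops, void *user_param)
{
	RTE_SET_USED(dev_id);
	RTE_SET_USED(qp_id);
	RTE_SET_USED(ops);
	*(uint64_t *)user_param += nb_ops;
	return nb_ops;	/* returning fewer would truncate the burst */
}

/* ... */
struct rte_cryptodev_cb *cb =
	rte_cryptodev_add_enq_callback(dev_id, 0, count_enq_cb, &enq_total);
/* ... later ... */
rte_cryptodev_remove_enq_callback(dev_id, 0, cb);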
1792 #include <rte_cryptodev_core.h>
1829 static inline uint16_t
1830 rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1831  struct rte_crypto_op **ops, uint16_t nb_ops)
1832 {
1833  const struct rte_crypto_fp_ops *fp_ops;
1834  void *qp;
1835 
1836  rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1837 
1838  fp_ops = &rte_crypto_fp_ops[dev_id];
1839  qp = fp_ops->qp.data[qp_id];
1840 
1841  nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1842 
1843 #ifdef RTE_CRYPTO_CALLBACKS
1844  if (unlikely(fp_ops->qp.deq_cb != NULL)) {
1845  struct rte_cryptodev_cb_rcu *list;
1846  struct rte_cryptodev_cb *cb;
1847 
1848  /* __ATOMIC_RELEASE memory order was used when the
1849  * callback was inserted into the list.
1850  * Since there is a clear dependency between loading
1851  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1852  * not required.
1853  */
1854  list = &fp_ops->qp.deq_cb[qp_id];
1855  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1856  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1857 
1858  while (cb != NULL) {
1859  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1860  cb->arg);
1861  cb = cb->next;
1862  }
1863 
1864  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1865  }
1866 #endif
1867  return nb_ops;
1868 }
1869 
1901 static inline uint16_t
1902 rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1903  struct rte_crypto_op **ops, uint16_t nb_ops)
1904 {
1905  const struct rte_crypto_fp_ops *fp_ops;
1906  void *qp;
1907 
1908  fp_ops = &rte_crypto_fp_ops[dev_id];
1909  qp = fp_ops->qp.data[qp_id];
1910 #ifdef RTE_CRYPTO_CALLBACKS
1911  if (unlikely(fp_ops->qp.enq_cb != NULL)) {
1912  struct rte_cryptodev_cb_rcu *list;
1913  struct rte_cryptodev_cb *cb;
1914 
1915  /* __ATOMIC_RELEASE memory order was used when the
1916  * callback was inserted into the list.
1917  * Since there is a clear dependency between loading
1918  * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1919  * not required.
1920  */
1921  list = &fp_ops->qp.enq_cb[qp_id];
1922  rte_rcu_qsbr_thread_online(list->qsbr, 0);
1923  cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1924 
1925  while (cb != NULL) {
1926  nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1927  cb->arg);
1928  cb = cb->next;
1929  }
1930 
1931  rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1932  }
1933 #endif
1934 
1935  rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1936  return fp_ops->enqueue_burst(qp, ops, nb_ops);
1937 }
1938 
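A hedged end-to-end sketch of the fast-path calls defined above; ops[], nb_ops, dev_id and BURST_SZ are application-side assumptions, with ops[] already attached to a session and populated with source mbufs.

uint16_t enq = 0, deq = 0, errors = 0;
struct rte_crypto_op *done[BURST_SZ];	/* BURST_SZ: application-chosen bound */

while (enq < nb_ops)
	enq += rte_cryptodev_enqueue_burst(dev_id, 0, &ops[enq], nb_ops - enq);

while (deq < nb_ops) {
	uint16_t n = rte_cryptodev_dequeue_burst(dev_id, 0, done, BURST_SZ);

	for (uint16_t i = 0; i < n; i++)
		if (done[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS)
			errors++;
	deq += n;
}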
1939 
1940 
1941 #ifdef __cplusplus
1942 }
1943 #endif
1944 
1945 #endif /* _RTE_CRYPTODEV_H_ */
#define unlikely(x)
#define RTE_STD_C11
Definition: rte_common.h:39
#define __rte_always_inline
Definition: rte_common.h:255
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
rte_crypto_op_type
Definition: rte_crypto.h:29
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_asym_op_type
rte_crypto_asym_xform_type
rte_crypto_auth_algorithm
rte_crypto_sym_xform_type
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
int rte_cryptodev_close(uint8_t dev_id)
rte_cryptodev_event_type
@ RTE_CRYPTODEV_EVENT_ERROR
@ RTE_CRYPTODEV_EVENT_UNKNOWN
@ RTE_CRYPTODEV_EVENT_MAX
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
uint8_t rte_cryptodev_count(void)
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_start(uint8_t dev_id)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
__rte_experimental struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
void rte_cryptodev_stop(uint8_t dev_id)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
__rte_experimental void * rte_cryptodev_asym_session_get_user_data(void *sess)
__rte_experimental int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(void *sess)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
__rte_experimental int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
const char * rte_cryptodev_name_get(uint8_t dev_id)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_get_dev_id(const char *name)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:301
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:354
enum rte_crypto_asym_xform_type type
struct rte_crypto_param_range modlen
enum rte_crypto_asym_xform_type xform_type
enum rte_crypto_op_type op
struct rte_cryptodev_symmetric_capability sym
struct rte_cryptodev_asymmetric_capability asym
struct rte_cryptodev_cb * next
rte_cryptodev_callback_fn fn
unsigned max_nb_queue_pairs
struct rte_device * device
uint16_t min_mbuf_headroom_req
const struct rte_cryptodev_capabilities * capabilities
uint16_t min_mbuf_tailroom_req
const char * driver_name
unsigned max_nb_sessions
struct rte_mempool * mp_session
uint64_t enqueue_err_count
uint64_t dequeue_err_count
enum rte_crypto_auth_algorithm algo
struct rte_crypto_param_range iv_size
struct rte_crypto_param_range digest_size
struct rte_crypto_param_range aad_size
struct rte_cryptodev_symmetric_capability::@97::@99 auth
struct rte_cryptodev_symmetric_capability::@97::@100 cipher
struct rte_crypto_param_range key_size
enum rte_crypto_sym_xform_type xform_type
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:220
uint32_t size
Definition: rte_mempool.h:230
uint32_t cache_size
Definition: rte_mempool.h:231
uint32_t elt_size
Definition: rte_mempool.h:234