DPDK 22.11.0-rc2
rte_cryptodev.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015-2020 Intel Corporation.
3 */
4
5#ifndef _RTE_CRYPTODEV_H_
6#define _RTE_CRYPTODEV_H_
7
17#ifdef __cplusplus
18extern "C" {
19#endif
20
21#include "rte_kvargs.h"
22#include "rte_crypto.h"
23#include <rte_common.h>
24#include <rte_rcu_qsbr.h>
25
26#include "rte_cryptodev_trace_fp.h"
27
/*
 * Exported table of crypto device name strings.
 * NOTE(review): the identifier spelling "cyptodev" (missing 'r') matches the
 * exported symbol — presumably preserved for ABI compatibility; confirm
 * before renaming.
 */
extern const char **rte_cyptodev_names;
29
30/* Logging Macros */
31
/* Log an error, prefixed with the calling function name and line number. */
#define CDEV_LOG_ERR(...) \
	RTE_LOG(ERR, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
36
/* Log an informational message (no function/line prefix). */
#define CDEV_LOG_INFO(...) \
	RTE_LOG(INFO, CRYPTODEV, \
		RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			RTE_FMT_TAIL(__VA_ARGS__,)))
41
/* Log a debug message, prefixed with the calling function name and line. */
#define CDEV_LOG_DEBUG(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			__func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
46
/*
 * PMD-side trace helper: logs at DEBUG level, prefixed with the device name
 * (expects a variable `dev` in scope at the expansion site) and function name.
 */
#define CDEV_PMD_TRACE(...) \
	RTE_LOG(DEBUG, CRYPTODEV, \
		RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
			dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
51
/*
 * Return a pointer of type @p t located @p o bytes past the start of the
 * crypto operation @p c (byte-granular pointer arithmetic via char *).
 */
#define rte_crypto_op_ctod_offset(c, t, o) \
	((t)((char *)(c) + (o)))
67
/*
 * Return the IOVA (physical) address @p o bytes past the start of the
 * crypto operation @p c, based on its recorded phys_addr.
 */
#define rte_crypto_op_ctophys_offset(c, o) \
	(rte_iova_t)((c)->phys_addr + (o))
81
86 uint16_t min;
87 uint16_t max;
88 uint16_t increment;
94};
95
/* Supported cipher data-unit lengths (bitmask values for dataunit_set). */
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES	RTE_BIT32(0)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES	RTE_BIT32(1)
#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES	RTE_BIT32(2)
104
112 union {
113 struct {
116 uint16_t block_size;
126 } auth;
128 struct {
131 uint16_t block_size;
137 uint32_t dataunit_set;
143 } cipher;
145 struct {
148 uint16_t block_size;
158 } aead;
159 };
160};
161
170 uint32_t op_types;
179 __extension__
180 union {
185 };
186};
187
194};
195
196
203 union {
208 };
209};
210
214 union {
215 enum rte_crypto_cipher_algorithm cipher;
218 } algo;
219};
220
229};
230
243 const struct rte_cryptodev_sym_capability_idx *idx);
244
255__rte_experimental
258 const struct rte_cryptodev_asym_capability_idx *idx);
259
272int
274 const struct rte_cryptodev_symmetric_capability *capability,
275 uint16_t key_size, uint16_t iv_size);
276
290int
292 const struct rte_cryptodev_symmetric_capability *capability,
293 uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
294
309int
311 const struct rte_cryptodev_symmetric_capability *capability,
312 uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
313 uint16_t iv_size);
314
325__rte_experimental
326int
328 const struct rte_cryptodev_asymmetric_xform_capability *capability,
329 enum rte_crypto_asym_op_type op_type);
330
341__rte_experimental
342int
344 const struct rte_cryptodev_asymmetric_xform_capability *capability,
345 uint16_t modlen);
346
358int
360 const char *algo_string);
361
373int
375 const char *algo_string);
376
388int
390 const char *algo_string);
391
403__rte_experimental
404int
406 const char *xform_string);
407
408
/* Terminating entry for a capabilities array (op type UNDEFINED). */
#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
	{ RTE_CRYPTO_OP_TYPE_UNDEFINED }
412
413
/*
 * Crypto device feature flags — one bit per capability, OR-ed together in
 * the device's feature_flags field.
 */
#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO		(1ULL << 0)
#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO		(1ULL << 1)
#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING		(1ULL << 2)
#define RTE_CRYPTODEV_FF_CPU_SSE			(1ULL << 3)
#define RTE_CRYPTODEV_FF_CPU_AVX			(1ULL << 4)
#define RTE_CRYPTODEV_FF_CPU_AVX2			(1ULL << 5)
#define RTE_CRYPTODEV_FF_CPU_AESNI			(1ULL << 6)
#define RTE_CRYPTODEV_FF_HW_ACCELERATED			(1ULL << 7)
#define RTE_CRYPTODEV_FF_CPU_AVX512			(1ULL << 8)
#define RTE_CRYPTODEV_FF_IN_PLACE_SGL			(1ULL << 9)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT		(1ULL << 10)
#define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT		(1ULL << 11)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT		(1ULL << 12)
#define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT		(1ULL << 13)
#define RTE_CRYPTODEV_FF_CPU_NEON			(1ULL << 14)
#define RTE_CRYPTODEV_FF_CPU_ARM_CE			(1ULL << 15)
#define RTE_CRYPTODEV_FF_SECURITY			(1ULL << 16)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP		(1ULL << 17)
#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT		(1ULL << 18)
#define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED		(1ULL << 19)
#define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS		(1ULL << 20)
#define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO			(1ULL << 21)
#define RTE_CRYPTODEV_FF_SYM_SESSIONLESS		(1ULL << 22)
#define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA		(1ULL << 23)
#define RTE_CRYPTODEV_FF_SYM_RAW_DP			(1ULL << 24)
#define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS	(1ULL << 25)
#define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY		(1ULL << 26)
#define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM		(1ULL << 27)
499extern const char *
501
504 const char *driver_name;
505 uint8_t driver_id;
506 struct rte_device *device;
523 struct {
529 } sym;
530};
531
/* Device attachment states. */
#define RTE_CRYPTODEV_DETACHED	(0)
#define RTE_CRYPTODEV_ATTACHED	(1)
534
541
544 uint32_t nb_descriptors;
547};
548
570typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
571 struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
572
582typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
583 enum rte_cryptodev_event_type event, void *cb_arg);
584
585
597};
598
/* Maximum length of a crypto device name, including the NUL terminator. */
#define RTE_CRYPTODEV_NAME_MAX_LEN (64)
611extern int
612rte_cryptodev_get_dev_id(const char *name);
613
624extern const char *
626
634extern uint8_t
636
645extern uint8_t
647
659uint8_t
660rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
661 uint8_t nb_devices);
662/*
663 * Return the NUMA socket to which a device is connected
664 *
665 * @param dev_id
666 * The identifier of the device
667 * @return
668 * The NUMA socket id to which the device is connected or
669 * a default of zero if the socket could not be determined.
670 * -1 if returned is the dev_id value is out of range.
671 */
672extern int
673rte_cryptodev_socket_id(uint8_t dev_id);
674
680 uint64_t ff_disable;
687};
688
703extern int
704rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
705
721extern int
722rte_cryptodev_start(uint8_t dev_id);
723
730extern void
731rte_cryptodev_stop(uint8_t dev_id);
732
742extern int
743rte_cryptodev_close(uint8_t dev_id);
744
766extern int
767rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
768 const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
769
783__rte_experimental
784int
785rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
786
794extern uint16_t
796
797
809extern int
810rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
811
817extern void
819
833extern void
834rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
835
836
850extern int
852 enum rte_cryptodev_event_type event,
853 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
854
868extern int
870 enum rte_cryptodev_event_type event,
871 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
872
873struct rte_cryptodev_callback;
874
876RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
877
887 void *arg;
889};
890
/* Head of a queue pair's callback list, paired with its RCU QSBR variable. */
struct rte_cryptodev_cb_rcu {
	struct rte_cryptodev_cb *next;	/* first callback in the list */
	struct rte_rcu_qsbr *qsbr;	/* RCU state guarding list traversal */
};
901
902void *
903rte_cryptodev_get_sec_ctx(uint8_t dev_id);
904
931__rte_experimental
932struct rte_mempool *
933rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
934 uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
935 int socket_id);
936
937
958__rte_experimental
959struct rte_mempool *
960rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts,
961 uint32_t cache_size, uint16_t user_data_size, int socket_id);
962
976void *
978 struct rte_crypto_sym_xform *xforms,
979 struct rte_mempool *mp);
997__rte_experimental
998int
1000 struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp,
1001 void **session);
1002
1015int
1017 void *sess);
1018
1030__rte_experimental
1031int
1032rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess);
1033
1040__rte_experimental
1041unsigned int
1043
1055unsigned int
1057
1068__rte_experimental
1069unsigned int
1071
1080unsigned int
1082
1092
1101const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1102
1115__rte_experimental
1116int
1118 void *data,
1119 uint16_t size);
1120
1121#define CRYPTO_SESS_OPAQUE_DATA_OFF 0
1125static inline uint64_t
1127{
1128 return *((uint64_t *)sess + CRYPTO_SESS_OPAQUE_DATA_OFF);
1129}
1130
1134static inline void
1136{
1137 uint64_t *data;
1138 data = (((uint64_t *)sess) + CRYPTO_SESS_OPAQUE_DATA_OFF);
1139 *data = opaque;
1140}
1141
1152__rte_experimental
1153void *
1155
1169__rte_experimental
1170int
1171rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size);
1172
1183__rte_experimental
1184void *
1186
1199__rte_experimental
1200uint32_t
1202 void *sess, union rte_crypto_sym_ofs ofs,
1203 struct rte_crypto_sym_vec *vec);
1204
1214__rte_experimental
1215int
1217
1233__rte_experimental
1234int
1235rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess,
1236 enum rte_crypto_op_type op_type,
1237 enum rte_crypto_op_sess_type sess_type,
1238 void *ev_mdata, uint16_t size);
1239
/* Session context handed to the raw data-path API: exactly one member is
 * meaningful, selected by the accompanying rte_crypto_op_sess_type. */
union rte_cryptodev_session_ctx {
	void *crypto_sess;			/* symmetric session */
	struct rte_crypto_sym_xform *xform;	/* sessionless transform chain */
	struct rte_security_session *sec_sess;	/* rte_security session */
};
1248
1275 void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1276 union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1277
1300 void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1301 uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1302 struct rte_crypto_va_iova_ptr *iv,
1303 struct rte_crypto_va_iova_ptr *digest,
1304 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1305 void *user_data);
1306
1318typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
1319 uint32_t n);
1320
1330typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
1331
1340typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
1341 uint32_t index, uint8_t is_op_success);
1342
1384typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
1385 uint8_t *drv_ctx,
1386 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1387 uint32_t max_nb_to_dequeue,
1389 void **out_user_data, uint8_t is_user_data_array,
1390 uint32_t *n_success, int *dequeue_status);
1391
1415typedef void * (*cryptodev_sym_raw_dequeue_t)(
1416 void *qp, uint8_t *drv_ctx, int *dequeue_status,
1417 enum rte_crypto_op_status *op_status);
1418
1425 void *qp_data;
1426
1433
1434 /* Driver specific context data */
1435 __extension__ uint8_t drv_ctx_data[];
1436};
1437
1461__rte_experimental
1462int
1463rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1464 struct rte_crypto_raw_dp_ctx *ctx,
1465 enum rte_crypto_op_sess_type sess_type,
1466 union rte_cryptodev_session_ctx session_ctx,
1467 uint8_t is_update);
1468
1493__rte_experimental
1494uint32_t
1496 struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1497 void **user_data, int *enqueue_status);
1498
1519__rte_experimental
1520static __rte_always_inline int
1522 struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1523 union rte_crypto_sym_ofs ofs,
1524 struct rte_crypto_va_iova_ptr *iv,
1525 struct rte_crypto_va_iova_ptr *digest,
1526 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1527 void *user_data)
1528{
1529 return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1530 n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1531}
1532
1543__rte_experimental
1544int
1546 uint32_t n);
1547
1589__rte_experimental
1590uint32_t
1592 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1593 uint32_t max_nb_to_dequeue,
1595 void **out_user_data, uint8_t is_user_data_array,
1596 uint32_t *n_success, int *dequeue_status);
1597
1621__rte_experimental
1622static __rte_always_inline void *
1624 int *dequeue_status, enum rte_crypto_op_status *op_status)
1625{
1626 return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1627 op_status);
1628}
1629
1639__rte_experimental
1640int
1642 uint32_t n);
1643
1680__rte_experimental
1681struct rte_cryptodev_cb *
1683 uint16_t qp_id,
1685 void *cb_arg);
1686
1709__rte_experimental
1711 uint16_t qp_id,
1712 struct rte_cryptodev_cb *cb);
1713
1749__rte_experimental
1750struct rte_cryptodev_cb *
1752 uint16_t qp_id,
1754 void *cb_arg);
1755
1777__rte_experimental
1779 uint16_t qp_id,
1780 struct rte_cryptodev_cb *cb);
1781
1782#include <rte_cryptodev_core.h>
1819static inline uint16_t
1820rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
1821 struct rte_crypto_op **ops, uint16_t nb_ops)
1822{
1823 const struct rte_crypto_fp_ops *fp_ops;
1824 void *qp;
1825
1826 rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1827
1828 fp_ops = &rte_crypto_fp_ops[dev_id];
1829 qp = fp_ops->qp.data[qp_id];
1830
1831 nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
1832
1833#ifdef RTE_CRYPTO_CALLBACKS
1834 if (unlikely(fp_ops->qp.deq_cb != NULL)) {
1835 struct rte_cryptodev_cb_rcu *list;
1836 struct rte_cryptodev_cb *cb;
1837
1838 /* __ATOMIC_RELEASE memory order was used when the
1839 * call back was inserted into the list.
1840 * Since there is a clear dependency between loading
1841 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1842 * not required.
1843 */
1844 list = &fp_ops->qp.deq_cb[qp_id];
1845 rte_rcu_qsbr_thread_online(list->qsbr, 0);
1846 cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1847
1848 while (cb != NULL) {
1849 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1850 cb->arg);
1851 cb = cb->next;
1852 };
1853
1854 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1855 }
1856#endif
1857 return nb_ops;
1858}
1859
1891static inline uint16_t
1892rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
1893 struct rte_crypto_op **ops, uint16_t nb_ops)
1894{
1895 const struct rte_crypto_fp_ops *fp_ops;
1896 void *qp;
1897
1898 fp_ops = &rte_crypto_fp_ops[dev_id];
1899 qp = fp_ops->qp.data[qp_id];
1900#ifdef RTE_CRYPTO_CALLBACKS
1901 if (unlikely(fp_ops->qp.enq_cb != NULL)) {
1902 struct rte_cryptodev_cb_rcu *list;
1903 struct rte_cryptodev_cb *cb;
1904
1905 /* __ATOMIC_RELEASE memory order was used when the
1906 * call back was inserted into the list.
1907 * Since there is a clear dependency between loading
1908 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
1909 * not required.
1910 */
1911 list = &fp_ops->qp.enq_cb[qp_id];
1912 rte_rcu_qsbr_thread_online(list->qsbr, 0);
1913 cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
1914
1915 while (cb != NULL) {
1916 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
1917 cb->arg);
1918 cb = cb->next;
1919 };
1920
1921 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
1922 }
1923#endif
1924
1925 rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
1926 return fp_ops->enqueue_burst(qp, ops, nb_ops);
1927}
1928
1929
1930
1931#ifdef __cplusplus
1932}
1933#endif
1934
1935#endif /* _RTE_CRYPTODEV_H_ */
#define unlikely(x)
#define RTE_STD_C11
Definition: rte_common.h:39
#define __rte_always_inline
Definition: rte_common.h:255
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
rte_crypto_op_type
Definition: rte_crypto.h:29
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_asym_op_type
rte_crypto_asym_xform_type
rte_crypto_auth_algorithm
rte_crypto_sym_xform_type
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
int rte_cryptodev_close(uint8_t dev_id)
rte_cryptodev_event_type
@ RTE_CRYPTODEV_EVENT_ERROR
@ RTE_CRYPTODEV_EVENT_UNKNOWN
@ RTE_CRYPTODEV_EVENT_MAX
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
__rte_experimental struct rte_mempool * rte_cryptodev_asym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t cache_size, uint16_t user_data_size, int socket_id)
uint8_t rte_cryptodev_count(void)
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_start(uint8_t dev_id)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
static uint64_t rte_cryptodev_sym_session_opaque_data_get(void *sess)
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, void *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
void rte_cryptodev_stop(uint8_t dev_id)
const char * rte_cryptodev_name_get(uint8_t dev_id)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
static void rte_cryptodev_sym_session_opaque_data_set(void *sess, uint64_t opaque)
__rte_experimental void * rte_cryptodev_asym_session_get_user_data(void *sess)
__rte_experimental int rte_cryptodev_asym_session_set_user_data(void *sess, void *data, uint16_t size)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
void * rte_cryptodev_sym_session_create(uint8_t dev_id, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mp)
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_session_create(uint8_t dev_id, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mp, void **session)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
__rte_experimental int rte_cryptodev_asym_session_free(uint8_t dev_id, void *sess)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_get_dev_id(const char *name)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
__rte_experimental int rte_cryptodev_session_event_mdata_set(uint8_t dev_id, void *sess, enum rte_crypto_op_type op_type, enum rte_crypto_op_sess_type sess_type, void *ev_mdata, uint16_t size)
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(void *sess, void *data, uint16_t size)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(void *sess)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
int rte_cryptodev_sym_session_free(uint8_t dev_id, void *sess)
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:300
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:353
enum rte_crypto_asym_xform_type type
struct rte_crypto_param_range modlen
enum rte_crypto_asym_xform_type xform_type
enum rte_crypto_op_type op
struct rte_cryptodev_symmetric_capability sym
struct rte_cryptodev_asymmetric_capability asym
struct rte_cryptodev_cb * next
rte_cryptodev_callback_fn fn
unsigned max_nb_queue_pairs
struct rte_device * device
uint16_t min_mbuf_headroom_req
const struct rte_cryptodev_capabilities * capabilities
uint16_t min_mbuf_tailroom_req
const char * driver_name
unsigned max_nb_sessions
struct rte_mempool * mp_session
uint64_t enqueue_err_count
uint64_t dequeue_err_count
enum rte_crypto_auth_algorithm algo
enum rte_crypto_cipher_algorithm algo
enum rte_crypto_aead_algorithm algo
struct rte_crypto_param_range iv_size
struct rte_crypto_param_range digest_size
struct rte_crypto_param_range aad_size
struct rte_cryptodev_symmetric_capability::@97::@99 auth
struct rte_cryptodev_symmetric_capability::@97::@100 cipher
struct rte_crypto_param_range key_size
enum rte_crypto_sym_xform_type xform_type
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:205
uint32_t size
Definition: rte_mempool.h:215
uint32_t cache_size
Definition: rte_mempool.h:216
uint32_t elt_size
Definition: rte_mempool.h:219