1
2
3
4
5#include <string.h>
6
7#include <rte_common.h>
8#include <rte_malloc.h>
9#include <rte_cryptodev_pmd.h>
10
11#include "armv8_pmd_private.h"
12
/*
 * Symmetric crypto capabilities advertised by this PMD:
 * SHA1-HMAC and SHA256-HMAC authentication, and AES-128-CBC cipher.
 * Sizes are in bytes.
 */
static const struct rte_cryptodev_capabilities
	armv8_crypto_pmd_capabilities[] = {
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 20,	/* full SHA1 digest */
					.increment = 1
				},
				.iv_size = { 0 }	/* no IV for HMAC */
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 32,	/* full SHA256 digest */
					.increment = 1
				},
				.iv_size = { 0 }	/* no IV for HMAC */
			}, }
		}, }
	},
	{	/* AES CBC (128-bit keys only) */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},

	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
80
81
82
83static int
84armv8_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev,
85 __rte_unused struct rte_cryptodev_config *config)
86{
87 return 0;
88}
89
90
91static int
92armv8_crypto_pmd_start(__rte_unused struct rte_cryptodev *dev)
93{
94 return 0;
95}
96
97
98static void
99armv8_crypto_pmd_stop(__rte_unused struct rte_cryptodev *dev)
100{
101}
102
103
104static int
105armv8_crypto_pmd_close(__rte_unused struct rte_cryptodev *dev)
106{
107 return 0;
108}
109
110
111
112static void
113armv8_crypto_pmd_stats_get(struct rte_cryptodev *dev,
114 struct rte_cryptodev_stats *stats)
115{
116 int qp_id;
117
118 for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
119 struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];
120
121 stats->enqueued_count += qp->stats.enqueued_count;
122 stats->dequeued_count += qp->stats.dequeued_count;
123
124 stats->enqueue_err_count += qp->stats.enqueue_err_count;
125 stats->dequeue_err_count += qp->stats.dequeue_err_count;
126 }
127}
128
129
130static void
131armv8_crypto_pmd_stats_reset(struct rte_cryptodev *dev)
132{
133 int qp_id;
134
135 for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
136 struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];
137
138 memset(&qp->stats, 0, sizeof(qp->stats));
139 }
140}
141
142
143
144static void
145armv8_crypto_pmd_info_get(struct rte_cryptodev *dev,
146 struct rte_cryptodev_info *dev_info)
147{
148 struct armv8_crypto_private *internals = dev->data->dev_private;
149
150 if (dev_info != NULL) {
151 dev_info->driver_id = dev->driver_id;
152 dev_info->feature_flags = dev->feature_flags;
153 dev_info->capabilities = armv8_crypto_pmd_capabilities;
154 dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
155
156 dev_info->sym.max_nb_sessions = 0;
157 }
158}
159
160
161static int
162armv8_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
163{
164
165 if (dev->data->queue_pairs[qp_id] != NULL) {
166 rte_free(dev->data->queue_pairs[qp_id]);
167 dev->data->queue_pairs[qp_id] = NULL;
168 }
169
170 return 0;
171}
172
173
174static int
175armv8_crypto_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
176 struct armv8_crypto_qp *qp)
177{
178 unsigned int n;
179
180 n = snprintf(qp->name, sizeof(qp->name), "armv8_crypto_pmd_%u_qp_%u",
181 dev->data->dev_id, qp->id);
182
183 if (n >= sizeof(qp->name))
184 return -1;
185
186 return 0;
187}
188
189
190
191static struct rte_ring *
192armv8_crypto_pmd_qp_create_processed_ops_ring(struct armv8_crypto_qp *qp,
193 unsigned int ring_size, int socket_id)
194{
195 struct rte_ring *r;
196
197 r = rte_ring_lookup(qp->name);
198 if (r) {
199 if (rte_ring_get_size(r) >= ring_size) {
200 ARMV8_CRYPTO_LOG_INFO(
201 "Reusing existing ring %s for processed ops",
202 qp->name);
203 return r;
204 }
205
206 ARMV8_CRYPTO_LOG_ERR(
207 "Unable to reuse existing ring %s for processed ops",
208 qp->name);
209 return NULL;
210 }
211
212 return rte_ring_create(qp->name, ring_size, socket_id,
213 RING_F_SP_ENQ | RING_F_SC_DEQ);
214}
215
216
217
218static int
219armv8_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
220 const struct rte_cryptodev_qp_conf *qp_conf,
221 int socket_id)
222{
223 struct armv8_crypto_qp *qp = NULL;
224
225
226 if (dev->data->queue_pairs[qp_id] != NULL)
227 armv8_crypto_pmd_qp_release(dev, qp_id);
228
229
230 qp = rte_zmalloc_socket("ARMv8 PMD Queue Pair", sizeof(*qp),
231 RTE_CACHE_LINE_SIZE, socket_id);
232 if (qp == NULL)
233 return -ENOMEM;
234
235 qp->id = qp_id;
236 dev->data->queue_pairs[qp_id] = qp;
237
238 if (armv8_crypto_pmd_qp_set_unique_name(dev, qp) != 0)
239 goto qp_setup_cleanup;
240
241 qp->processed_ops = armv8_crypto_pmd_qp_create_processed_ops_ring(qp,
242 qp_conf->nb_descriptors, socket_id);
243 if (qp->processed_ops == NULL)
244 goto qp_setup_cleanup;
245
246 qp->sess_mp = qp_conf->mp_session;
247 qp->sess_mp_priv = qp_conf->mp_session_private;
248
249 memset(&qp->stats, 0, sizeof(qp->stats));
250
251 return 0;
252
253qp_setup_cleanup:
254 if (qp)
255 rte_free(qp);
256
257 return -1;
258}
259
260
261static unsigned
262armv8_crypto_pmd_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
263{
264 return sizeof(struct armv8_crypto_session);
265}
266
267
268static int
269armv8_crypto_pmd_sym_session_configure(struct rte_cryptodev *dev,
270 struct rte_crypto_sym_xform *xform,
271 struct rte_cryptodev_sym_session *sess,
272 struct rte_mempool *mempool)
273{
274 void *sess_private_data;
275 int ret;
276
277 if (unlikely(sess == NULL)) {
278 ARMV8_CRYPTO_LOG_ERR("invalid session struct");
279 return -EINVAL;
280 }
281
282 if (rte_mempool_get(mempool, &sess_private_data)) {
283 CDEV_LOG_ERR(
284 "Couldn't get object from session mempool");
285 return -ENOMEM;
286 }
287
288 ret = armv8_crypto_set_session_parameters(sess_private_data, xform);
289 if (ret != 0) {
290 ARMV8_CRYPTO_LOG_ERR("failed configure session parameters");
291
292
293 rte_mempool_put(mempool, sess_private_data);
294 return ret;
295 }
296
297 set_sym_session_private_data(sess, dev->driver_id,
298 sess_private_data);
299
300 return 0;
301}
302
303
304static void
305armv8_crypto_pmd_sym_session_clear(struct rte_cryptodev *dev,
306 struct rte_cryptodev_sym_session *sess)
307{
308 uint8_t index = dev->driver_id;
309 void *sess_priv = get_sym_session_private_data(sess, index);
310
311
312 if (sess_priv) {
313 memset(sess_priv, 0, sizeof(struct armv8_crypto_session));
314 struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);
315 set_sym_session_private_data(sess, index, NULL);
316 rte_mempool_put(sess_mp, sess_priv);
317 }
318}
319
/* Operations table wiring this PMD's callbacks into the cryptodev API. */
struct rte_cryptodev_ops armv8_crypto_pmd_ops = {
	.dev_configure		= armv8_crypto_pmd_config,
	.dev_start		= armv8_crypto_pmd_start,
	.dev_stop		= armv8_crypto_pmd_stop,
	.dev_close		= armv8_crypto_pmd_close,

	.stats_get		= armv8_crypto_pmd_stats_get,
	.stats_reset		= armv8_crypto_pmd_stats_reset,

	.dev_infos_get		= armv8_crypto_pmd_info_get,

	.queue_pair_setup	= armv8_crypto_pmd_qp_setup,
	.queue_pair_release	= armv8_crypto_pmd_qp_release,

	.sym_session_get_size	= armv8_crypto_pmd_sym_session_get_size,
	.sym_session_configure	= armv8_crypto_pmd_sym_session_configure,
	.sym_session_clear	= armv8_crypto_pmd_sym_session_clear
};

/* Exported pointer to the ops table, used at device registration. */
struct rte_cryptodev_ops *rte_armv8_crypto_pmd_ops = &armv8_crypto_pmd_ops;
340