#include <string.h>
#include <zlib.h>
#include <math.h>
#include <stdlib.h>
#include <unistd.h>
#include <stdio.h>

#include <rte_cycles.h>
#include <rte_malloc.h>
#include <rte_mempool.h>
#include <rte_mbuf.h>
#include <rte_compressdev.h>
#include <rte_string_fns.h>

#include "test_compressdev_test_buffer.h"
#include "test.h"
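
/* Integer ceiling division, e.g. DIV_CEIL(10, 4) == 3. */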
#define DIV_CEIL(a, b) ((a) / (b) + ((a) % (b) != 0))

#define DEFAULT_WINDOW_SIZE 15
#define DEFAULT_MEM_LEVEL 8
#define MAX_DEQD_RETRIES 10
#define DEQUEUE_WAIT_TIME 10000
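
/*
 * Destination buffer sizing ratios: allow 30% of headroom over the input for
 * normal compression (incompressible data can grow because of the DEFLATE
 * block headers), no headroom when the ratio check is disabled, and a
 * deliberately undersized 0.2 ratio to provoke out-of-space handling in the
 * overflow tests.
 */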
#define COMPRESS_BUF_SIZE_RATIO 1.3
#define COMPRESS_BUF_SIZE_RATIO_DISABLED 1.0
#define COMPRESS_BUF_SIZE_RATIO_OVERFLOW 0.2
#define NUM_LARGE_MBUFS 16
#define SMALL_SEG_SIZE 256
#define MAX_SEGS 16
#define NUM_OPS 16
#define NUM_MAX_XFORMS 16
#define NUM_MAX_INFLIGHT_OPS 128
#define CACHE_SIZE 0

#define ZLIB_CRC_CHECKSUM_WINDOW_BITS 31
#define ZLIB_HEADER_SIZE 2
#define ZLIB_TRAILER_SIZE 4
#define GZIP_HEADER_SIZE 10
#define GZIP_TRAILER_SIZE 8

#define OUT_OF_SPACE_BUF 1

#define MAX_MBUF_SEGMENT_SIZE 65535
#define MAX_DATA_MBUF_SIZE (MAX_MBUF_SEGMENT_SIZE - RTE_PKTMBUF_HEADROOM)
#define NUM_BIG_MBUFS (512 + 1)
#define BIG_DATA_TEST_SIZE (MAX_DATA_MBUF_SIZE * 2)
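
/*
 * Data sizes used by the IM (intermediate) buffer test cases: a linear-buffer
 * case, an SGL case spanning IM_BUF_NUM_MBUFS segments, and larger variants
 * sized relative to NUM_MAX_INFLIGHT_OPS.
 */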
#define IM_BUF_NUM_MBUFS 3
#define IM_BUF_DATA_TEST_SIZE_LB 59600
#define IM_BUF_DATA_TEST_SIZE_SGL (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS)
#define IM_BUF_NUM_MBUFS_OVER (NUM_MAX_INFLIGHT_OPS + 1)
#define IM_BUF_DATA_TEST_SIZE_OVER (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_OVER)
#define IM_BUF_NUM_MBUFS_MID ((NUM_MAX_INFLIGHT_OPS / 3) + 1)
#define IM_BUF_DATA_TEST_SIZE_MID (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_MID)

const char *
huffman_type_strings[] = {
	[RTE_COMP_HUFFMAN_DEFAULT] = "PMD default",
	[RTE_COMP_HUFFMAN_FIXED] = "Fixed",
	[RTE_COMP_HUFFMAN_DYNAMIC] = "Dynamic"
};
81
82enum zlib_direction {
83 ZLIB_NONE,
84 ZLIB_COMPRESS,
85 ZLIB_DECOMPRESS,
86 ZLIB_ALL
87};
88
89enum varied_buff {
90 LB_BOTH = 0,
91 SGL_BOTH,
92 SGL_TO_LB,
93 LB_TO_SGL
94};
95
96enum overflow_test {
97 OVERFLOW_DISABLED,
98 OVERFLOW_ENABLED
99};
100
101enum ratio_switch {
102 RATIO_DISABLED,
103 RATIO_ENABLED
104};
105
106enum operation_type {
107 OPERATION_COMPRESSION,
108 OPERATION_DECOMPRESSION
109};
110
111struct priv_op_data {
112 uint16_t orig_idx;
113};
114
115struct comp_testsuite_params {
116 struct rte_mempool *large_mbuf_pool;
117 struct rte_mempool *small_mbuf_pool;
118 struct rte_mempool *big_mbuf_pool;
119 struct rte_mempool *op_pool;
120 struct rte_comp_xform *def_comp_xform;
121 struct rte_comp_xform *def_decomp_xform;
122};
123
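/* Test buffers plus the compress/decompress xforms a test case operates on. */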
124struct interim_data_params {
125 const char * const *test_bufs;
126 unsigned int num_bufs;
127 uint16_t *buf_idx;
128 struct rte_comp_xform **compress_xforms;
129 struct rte_comp_xform **decompress_xforms;
130 unsigned int num_xforms;
131};
132
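/*
 * Per-test-case parameters: stateless/stateful operation types, buffer
 * layout (linear or SGL on each side), which directions are handled by zlib
 * rather than the PMD, out-of-space and overflow behaviour, big-data and
 * external mbuf settings, and the destination buffer sizing ratio.
 */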
133struct test_data_params {
134 enum rte_comp_op_type compress_state;
135 enum rte_comp_op_type decompress_state;
136 enum varied_buff buff_type;
137 enum zlib_direction zlib_dir;
138 unsigned int out_of_space;
139 unsigned int big_data;
140
141 unsigned int decompress_output_block_size;
142 unsigned int decompress_steps_max;
143
144 unsigned int use_external_mbufs;
145 unsigned int inbuf_data_size;
146 const struct rte_memzone *inbuf_memzone;
147 const struct rte_memzone *compbuf_memzone;
148 const struct rte_memzone *uncompbuf_memzone;
149
150 enum overflow_test overflow;
151 enum ratio_switch ratio;
152};
153
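/* Working arrays shared between the stages of a single comp/decomp run. */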
154struct test_private_arrays {
155 struct rte_mbuf **uncomp_bufs;
156 struct rte_mbuf **comp_bufs;
157 struct rte_comp_op **ops;
158 struct rte_comp_op **ops_processed;
159 void **priv_xforms;
160 uint64_t *compress_checksum;
161 uint32_t *compressed_data_size;
162 void **stream;
163 char **all_decomp_data;
164 unsigned int *decomp_produced_data_size;
165 uint16_t num_priv_xforms;
166};
167
168static struct comp_testsuite_params testsuite_params = { 0 };
169
170
171static void
172testsuite_teardown(void)
173{
174 struct comp_testsuite_params *ts_params = &testsuite_params;
175
176 if (rte_mempool_in_use_count(ts_params->large_mbuf_pool))
177 RTE_LOG(ERR, USER1, "Large mbuf pool still has unfreed bufs\n");
178 if (rte_mempool_in_use_count(ts_params->small_mbuf_pool))
179 RTE_LOG(ERR, USER1, "Small mbuf pool still has unfreed bufs\n");
180 if (rte_mempool_in_use_count(ts_params->big_mbuf_pool))
181 RTE_LOG(ERR, USER1, "Big mbuf pool still has unfreed bufs\n");
182 if (rte_mempool_in_use_count(ts_params->op_pool))
183 RTE_LOG(ERR, USER1, "op pool still has unfreed ops\n");
184
185 rte_mempool_free(ts_params->large_mbuf_pool);
186 rte_mempool_free(ts_params->small_mbuf_pool);
187 rte_mempool_free(ts_params->big_mbuf_pool);
188 rte_mempool_free(ts_params->op_pool);
189 rte_free(ts_params->def_comp_xform);
190 rte_free(ts_params->def_decomp_xform);
191}
192
193static int
194testsuite_setup(void)
195{
196 struct comp_testsuite_params *ts_params = &testsuite_params;
197 uint32_t max_buf_size = 0;
198 unsigned int i;
199
200 if (rte_compressdev_count() == 0) {
201 RTE_LOG(WARNING, USER1, "Need at least one compress device\n");
202 return TEST_SKIPPED;
203 }
204
205 RTE_LOG(NOTICE, USER1, "Running tests on device %s\n",
206 rte_compressdev_name_get(0));
207
208 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
209 max_buf_size = RTE_MAX(max_buf_size,
210 strlen(compress_test_bufs[i]) + 1);
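
	/*
	 * Destination buffers can end up bigger than the source data, so size
	 * the large mbuf pool for the largest test buffer scaled by
	 * COMPRESS_BUF_SIZE_RATIO, plus mbuf headroom.
	 */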
218 max_buf_size *= COMPRESS_BUF_SIZE_RATIO;
219 ts_params->large_mbuf_pool = rte_pktmbuf_pool_create("large_mbuf_pool",
220 NUM_LARGE_MBUFS,
221 CACHE_SIZE, 0,
222 max_buf_size + RTE_PKTMBUF_HEADROOM,
223 rte_socket_id());
224 if (ts_params->large_mbuf_pool == NULL) {
225 RTE_LOG(ERR, USER1, "Large mbuf pool could not be created\n");
226 return TEST_FAILED;
227 }
228
229
230 ts_params->small_mbuf_pool = rte_pktmbuf_pool_create("small_mbuf_pool",
231 NUM_LARGE_MBUFS * MAX_SEGS,
232 CACHE_SIZE, 0,
233 SMALL_SEG_SIZE + RTE_PKTMBUF_HEADROOM,
234 rte_socket_id());
235 if (ts_params->small_mbuf_pool == NULL) {
236 RTE_LOG(ERR, USER1, "Small mbuf pool could not be created\n");
237 goto exit;
238 }
239
240
241 ts_params->big_mbuf_pool = rte_pktmbuf_pool_create("big_mbuf_pool",
242 NUM_BIG_MBUFS + 1,
243 CACHE_SIZE, 0,
244 MAX_MBUF_SEGMENT_SIZE,
245 rte_socket_id());
246 if (ts_params->big_mbuf_pool == NULL) {
247 RTE_LOG(ERR, USER1, "Big mbuf pool could not be created\n");
248 goto exit;
249 }
250
251 ts_params->op_pool = rte_comp_op_pool_create("op_pool", NUM_OPS,
252 0, sizeof(struct priv_op_data),
253 rte_socket_id());
254 if (ts_params->op_pool == NULL) {
255 RTE_LOG(ERR, USER1, "Operation pool could not be created\n");
256 goto exit;
257 }
258
259 ts_params->def_comp_xform =
260 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
261 if (ts_params->def_comp_xform == NULL) {
262 RTE_LOG(ERR, USER1,
263 "Default compress xform could not be created\n");
264 goto exit;
265 }
266 ts_params->def_decomp_xform =
267 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
268 if (ts_params->def_decomp_xform == NULL) {
269 RTE_LOG(ERR, USER1,
270 "Default decompress xform could not be created\n");
271 goto exit;
272 }
273
274
275 ts_params->def_comp_xform->type = RTE_COMP_COMPRESS;
	ts_params->def_comp_xform->compress.algo = RTE_COMP_ALGO_DEFLATE;
277 ts_params->def_comp_xform->compress.deflate.huffman =
278 RTE_COMP_HUFFMAN_DEFAULT;
279 ts_params->def_comp_xform->compress.level = RTE_COMP_LEVEL_PMD_DEFAULT;
280 ts_params->def_comp_xform->compress.chksum = RTE_COMP_CHECKSUM_NONE;
281 ts_params->def_comp_xform->compress.window_size = DEFAULT_WINDOW_SIZE;
282
283 ts_params->def_decomp_xform->type = RTE_COMP_DECOMPRESS;
	ts_params->def_decomp_xform->decompress.algo = RTE_COMP_ALGO_DEFLATE;
285 ts_params->def_decomp_xform->decompress.chksum = RTE_COMP_CHECKSUM_NONE;
286 ts_params->def_decomp_xform->decompress.window_size = DEFAULT_WINDOW_SIZE;
287
288 return TEST_SUCCESS;
289
290exit:
291 testsuite_teardown();
292
293 return TEST_FAILED;
294}
295
296static int
297generic_ut_setup(void)
298{
299
300 struct rte_compressdev_config config = {
301 .socket_id = rte_socket_id(),
302 .nb_queue_pairs = 1,
303 .max_nb_priv_xforms = NUM_MAX_XFORMS,
304 .max_nb_streams = 1
305 };
306
307 if (rte_compressdev_configure(0, &config) < 0) {
308 RTE_LOG(ERR, USER1, "Device configuration failed\n");
309 return -1;
310 }
311
312 if (rte_compressdev_queue_pair_setup(0, 0, NUM_MAX_INFLIGHT_OPS,
313 rte_socket_id()) < 0) {
314 RTE_LOG(ERR, USER1, "Queue pair setup failed\n");
315 return -1;
316 }
317
318 if (rte_compressdev_start(0) < 0) {
319 RTE_LOG(ERR, USER1, "Device could not be started\n");
320 return -1;
321 }
322
323 return 0;
324}
325
326static void
327generic_ut_teardown(void)
328{
329 rte_compressdev_stop(0);
330 if (rte_compressdev_close(0) < 0)
331 RTE_LOG(ERR, USER1, "Device could not be closed\n");
332}
333
334static int
335test_compressdev_invalid_configuration(void)
336{
337 struct rte_compressdev_config invalid_config;
338 struct rte_compressdev_config valid_config = {
339 .socket_id = rte_socket_id(),
340 .nb_queue_pairs = 1,
341 .max_nb_priv_xforms = NUM_MAX_XFORMS,
342 .max_nb_streams = 1
343 };
344 struct rte_compressdev_info dev_info;
345
346 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
347
348
349 memcpy(&invalid_config, &valid_config,
350 sizeof(struct rte_compressdev_config));
351 invalid_config.nb_queue_pairs = 0;
352
353 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
354 "Device configuration was successful "
355 "with no queue pairs (invalid)\n");
356
357
358
359
360
361 rte_compressdev_info_get(0, &dev_info);
362 if (dev_info.max_nb_queue_pairs != 0) {
363 memcpy(&invalid_config, &valid_config,
364 sizeof(struct rte_compressdev_config));
365 invalid_config.nb_queue_pairs = dev_info.max_nb_queue_pairs + 1;
366
367 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
368 "Device configuration was successful "
369 "with too many queue pairs (invalid)\n");
370 }
371
372
373 TEST_ASSERT_FAIL(rte_compressdev_queue_pair_setup(0, 0,
374 NUM_MAX_INFLIGHT_OPS, rte_socket_id()),
375 "Queue pair setup was successful "
376 "with no queue pairs set (invalid)\n");
377
378 return TEST_SUCCESS;
379}
380
381static int
382compare_buffers(const char *buffer1, uint32_t buffer1_len,
383 const char *buffer2, uint32_t buffer2_len)
384{
385 if (buffer1_len != buffer2_len) {
386 RTE_LOG(ERR, USER1, "Buffer lengths are different\n");
387 return -1;
388 }
389
390 if (memcmp(buffer1, buffer2, buffer1_len) != 0) {
391 RTE_LOG(ERR, USER1, "Buffers are different\n");
392 return -1;
393 }
394
395 return 0;
396}
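
/*
 * Translate an rte_comp flush flag into the corresponding zlib flush flag.
 * Returns -1 for an unknown flag.
 */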
401static int
402map_zlib_flush_flag(enum rte_comp_flush_flag flag)
403{
404 switch (flag) {
405 case RTE_COMP_FLUSH_NONE:
406 return Z_NO_FLUSH;
407 case RTE_COMP_FLUSH_SYNC:
408 return Z_SYNC_FLUSH;
409 case RTE_COMP_FLUSH_FULL:
410 return Z_FULL_FLUSH;
411 case RTE_COMP_FLUSH_FINAL:
412 return Z_FINISH;
413
414
415
416
417 default:
418 return -1;
419 }
420}
421
422static int
423compress_zlib(struct rte_comp_op *op,
424 const struct rte_comp_xform *xform, int mem_level)
425{
426 z_stream stream;
427 int zlib_flush;
428 int strategy, window_bits, comp_level;
429 int ret = TEST_FAILED;
430 uint8_t *single_src_buf = NULL;
431 uint8_t *single_dst_buf = NULL;
432
433
434 stream.zalloc = Z_NULL;
435 stream.zfree = Z_NULL;
436 stream.opaque = Z_NULL;
437
438 if (xform->compress.deflate.huffman == RTE_COMP_HUFFMAN_FIXED)
439 strategy = Z_FIXED;
440 else
441 strategy = Z_DEFAULT_STRATEGY;
442
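	/*
	 * zlib windowBits convention: a negative value selects a raw DEFLATE
	 * stream with no wrapper, a positive value selects the zlib wrapper
	 * (which carries an Adler-32 checksum), and 15 + 16 = 31 selects the
	 * gzip wrapper (which carries a CRC-32).
	 */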
447 window_bits = -(xform->compress.window_size);
448 if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32)
449 window_bits *= -1;
450 else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32)
451 window_bits = ZLIB_CRC_CHECKSUM_WINDOW_BITS;
452
453 comp_level = xform->compress.level;
454
455 if (comp_level != RTE_COMP_LEVEL_NONE)
456 ret = deflateInit2(&stream, comp_level, Z_DEFLATED,
457 window_bits, mem_level, strategy);
458 else
459 ret = deflateInit(&stream, Z_NO_COMPRESSION);
460
461 if (ret != Z_OK) {
462 printf("Zlib deflate could not be initialized\n");
463 goto exit;
464 }
465
466
467
468 if (op->m_src->nb_segs > 1) {
469 single_src_buf = rte_malloc(NULL,
470 rte_pktmbuf_pkt_len(op->m_src), 0);
471 if (single_src_buf == NULL) {
472 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
473 goto exit;
474 }
475
476 if (rte_pktmbuf_read(op->m_src, op->src.offset,
477 rte_pktmbuf_pkt_len(op->m_src) -
478 op->src.offset,
479 single_src_buf) == NULL) {
480 RTE_LOG(ERR, USER1,
481 "Buffer could not be read entirely\n");
482 goto exit;
483 }
484
485 stream.avail_in = op->src.length;
486 stream.next_in = single_src_buf;
487
488 } else {
489 stream.avail_in = op->src.length;
490 stream.next_in = rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
491 op->src.offset);
492 }
493
494 if (op->m_dst->nb_segs > 1) {
495
496 single_dst_buf = rte_malloc(NULL,
497 rte_pktmbuf_pkt_len(op->m_dst), 0);
498 if (single_dst_buf == NULL) {
499 RTE_LOG(ERR, USER1,
500 "Buffer could not be allocated\n");
501 goto exit;
502 }
503
504 stream.avail_out = op->m_dst->pkt_len;
505 stream.next_out = single_dst_buf;
506
507 } else {
508 stream.avail_out = op->m_dst->data_len;
509 stream.next_out = rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
510 op->dst.offset);
511 }
512
513
514 zlib_flush = map_zlib_flush_flag(op->flush_flag);
515 ret = deflate(&stream, zlib_flush);
516
517 if (stream.avail_in != 0) {
518 RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
519 goto exit;
520 }
521
522 if (ret != Z_STREAM_END)
523 goto exit;
524
525
526 if (op->m_dst->nb_segs > 1) {
527 uint32_t remaining_data = stream.total_out;
528 uint8_t *src_data = single_dst_buf;
529 struct rte_mbuf *dst_buf = op->m_dst;
530
531 while (remaining_data > 0) {
532 uint8_t *dst_data = rte_pktmbuf_mtod_offset(dst_buf,
533 uint8_t *, op->dst.offset);
534
535 if (remaining_data < dst_buf->data_len) {
536 memcpy(dst_data, src_data, remaining_data);
537 remaining_data = 0;
538 } else {
539 memcpy(dst_data, src_data, dst_buf->data_len);
540 remaining_data -= dst_buf->data_len;
541 src_data += dst_buf->data_len;
542 dst_buf = dst_buf->next;
543 }
544 }
545 }
546
547 op->consumed = stream.total_in;
548 if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32) {
549 rte_pktmbuf_adj(op->m_dst, ZLIB_HEADER_SIZE);
550 rte_pktmbuf_trim(op->m_dst, ZLIB_TRAILER_SIZE);
551 op->produced = stream.total_out - (ZLIB_HEADER_SIZE +
552 ZLIB_TRAILER_SIZE);
553 } else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32) {
554 rte_pktmbuf_adj(op->m_dst, GZIP_HEADER_SIZE);
555 rte_pktmbuf_trim(op->m_dst, GZIP_TRAILER_SIZE);
556 op->produced = stream.total_out - (GZIP_HEADER_SIZE +
557 GZIP_TRAILER_SIZE);
558 } else
559 op->produced = stream.total_out;
560
561 op->status = RTE_COMP_OP_STATUS_SUCCESS;
562 op->output_chksum = stream.adler;
563
564 deflateReset(&stream);
565
566 ret = 0;
567exit:
568 deflateEnd(&stream);
569 rte_free(single_src_buf);
570 rte_free(single_dst_buf);
571
572 return ret;
573}
574
575static int
576decompress_zlib(struct rte_comp_op *op,
577 const struct rte_comp_xform *xform)
578{
579 z_stream stream;
580 int window_bits;
581 int zlib_flush;
582 int ret = TEST_FAILED;
583 uint8_t *single_src_buf = NULL;
584 uint8_t *single_dst_buf = NULL;
585
586
587 stream.zalloc = Z_NULL;
588 stream.zfree = Z_NULL;
589 stream.opaque = Z_NULL;
590
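	/* Negative windowBits: the input is a raw DEFLATE stream, no zlib/gzip header. */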
595 window_bits = -(xform->decompress.window_size);
596 ret = inflateInit2(&stream, window_bits);
597
598 if (ret != Z_OK) {
		printf("Zlib inflate could not be initialized\n");
600 goto exit;
601 }
602
603
604
605 if (op->m_src->nb_segs > 1) {
606 single_src_buf = rte_malloc(NULL,
607 rte_pktmbuf_pkt_len(op->m_src), 0);
608 if (single_src_buf == NULL) {
609 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
610 goto exit;
611 }
612 single_dst_buf = rte_malloc(NULL,
613 rte_pktmbuf_pkt_len(op->m_dst), 0);
614 if (single_dst_buf == NULL) {
615 RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
616 goto exit;
617 }
618 if (rte_pktmbuf_read(op->m_src, 0,
619 rte_pktmbuf_pkt_len(op->m_src),
620 single_src_buf) == NULL) {
621 RTE_LOG(ERR, USER1,
622 "Buffer could not be read entirely\n");
623 goto exit;
624 }
625
626 stream.avail_in = op->src.length;
627 stream.next_in = single_src_buf;
628 stream.avail_out = rte_pktmbuf_pkt_len(op->m_dst);
629 stream.next_out = single_dst_buf;
630
631 } else {
632 stream.avail_in = op->src.length;
633 stream.next_in = rte_pktmbuf_mtod(op->m_src, uint8_t *);
634 stream.avail_out = op->m_dst->data_len;
635 stream.next_out = rte_pktmbuf_mtod(op->m_dst, uint8_t *);
636 }
637
638
639 zlib_flush = map_zlib_flush_flag(op->flush_flag);
640 ret = inflate(&stream, zlib_flush);
641
642 if (stream.avail_in != 0) {
643 RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
644 goto exit;
645 }
646
647 if (ret != Z_STREAM_END)
648 goto exit;
649
650 if (op->m_src->nb_segs > 1) {
651 uint32_t remaining_data = stream.total_out;
652 uint8_t *src_data = single_dst_buf;
653 struct rte_mbuf *dst_buf = op->m_dst;
654
655 while (remaining_data > 0) {
656 uint8_t *dst_data = rte_pktmbuf_mtod(dst_buf,
657 uint8_t *);
658
659 if (remaining_data < dst_buf->data_len) {
660 memcpy(dst_data, src_data, remaining_data);
661 remaining_data = 0;
662 } else {
663 memcpy(dst_data, src_data, dst_buf->data_len);
664 remaining_data -= dst_buf->data_len;
665 src_data += dst_buf->data_len;
666 dst_buf = dst_buf->next;
667 }
668 }
669 }
670
671 op->consumed = stream.total_in;
672 op->produced = stream.total_out;
673 op->status = RTE_COMP_OP_STATUS_SUCCESS;
674
675 inflateReset(&stream);
676
677 ret = 0;
678exit:
679 inflateEnd(&stream);
680
681 return ret;
682}
683
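/*
 * Build a multi-segment (SGL) mbuf chain of total_data_size bytes on top of
 * head_buf, copying from test_buf when it is not NULL. Segments of seg_size
 * bytes are taken from small_mbuf_pool; the final segment may come from
 * large_mbuf_pool when the remaining data does not fit in a small segment,
 * and limit_segs_in_sgl caps the number of segments used.
 * Returns 0 on success, -1 on failure.
 */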
684static int
685prepare_sgl_bufs(const char *test_buf, struct rte_mbuf *head_buf,
686 uint32_t total_data_size,
687 struct rte_mempool *small_mbuf_pool,
688 struct rte_mempool *large_mbuf_pool,
689 uint8_t limit_segs_in_sgl,
690 uint16_t seg_size)
691{
692 uint32_t remaining_data = total_data_size;
693 uint16_t num_remaining_segs = DIV_CEIL(remaining_data, seg_size);
694 struct rte_mempool *pool;
695 struct rte_mbuf *next_seg;
696 uint32_t data_size;
697 char *buf_ptr;
698 const char *data_ptr = test_buf;
699 uint16_t i;
700 int ret;
701
702 if (limit_segs_in_sgl != 0 && num_remaining_segs > limit_segs_in_sgl)
703 num_remaining_segs = limit_segs_in_sgl - 1;
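
	/* Fill the first segment (head_buf itself) before chaining more segments. */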
709 if (remaining_data < seg_size)
710 data_size = remaining_data;
711 else
712 data_size = seg_size;
713
714 buf_ptr = rte_pktmbuf_append(head_buf, data_size);
715 if (buf_ptr == NULL) {
716 RTE_LOG(ERR, USER1,
717 "Not enough space in the 1st buffer\n");
718 return -1;
719 }
720
721 if (data_ptr != NULL) {
722
723 memcpy(buf_ptr, data_ptr, data_size);
724 data_ptr += data_size;
725 }
726 remaining_data -= data_size;
727 num_remaining_segs--;
728
729
730
731
732
733 for (i = 0; i < num_remaining_segs; i++) {
734
735 if (i == (num_remaining_segs - 1)) {
736
737 if (remaining_data > seg_size)
738 pool = large_mbuf_pool;
739 else
740 pool = small_mbuf_pool;
741 data_size = remaining_data;
742 } else {
743 data_size = seg_size;
744 pool = small_mbuf_pool;
745 }
746
747 next_seg = rte_pktmbuf_alloc(pool);
748 if (next_seg == NULL) {
749 RTE_LOG(ERR, USER1,
750 "New segment could not be allocated "
751 "from the mempool\n");
752 return -1;
753 }
754 buf_ptr = rte_pktmbuf_append(next_seg, data_size);
755 if (buf_ptr == NULL) {
756 RTE_LOG(ERR, USER1,
757 "Not enough space in the buffer\n");
758 rte_pktmbuf_free(next_seg);
759 return -1;
760 }
761 if (data_ptr != NULL) {
762
763 memcpy(buf_ptr, data_ptr, data_size);
764 data_ptr += data_size;
765 }
766 remaining_data -= data_size;
767
768 ret = rte_pktmbuf_chain(head_buf, next_seg);
769 if (ret != 0) {
770 rte_pktmbuf_free(next_seg);
			RTE_LOG(ERR, USER1,
				"Segment could not be chained\n");
773 return -1;
774 }
775 }
776
777 return 0;
778}
779
780static void
781extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
782{
783}
784
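/*
 * Enqueue all operations to the device and dequeue them again into
 * ops_processed, retrying the dequeue up to MAX_DEQD_RETRIES times with a
 * DEQUEUE_WAIT_TIME delay between attempts.
 * Returns 0 on success, -1 if not every operation could be enqueued or
 * dequeued.
 */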
785static int
786test_run_enqueue_dequeue(struct rte_comp_op **ops,
787 struct rte_comp_op **ops_processed,
788 unsigned int num_bufs)
789{
790 uint16_t num_enqd, num_deqd, num_total_deqd;
791 unsigned int deqd_retries = 0;
792 int res = 0;
793
794
795 num_enqd = rte_compressdev_enqueue_burst(0, 0, ops, num_bufs);
796 if (num_enqd < num_bufs) {
797 RTE_LOG(ERR, USER1,
798 "Some operations could not be enqueued\n");
799 res = -1;
800 }
801
802
803
804 num_total_deqd = 0;
805 while (num_total_deqd < num_enqd) {
806
807
808
809
810 if (deqd_retries != 0) {
811
812
813
814
815 if (deqd_retries == MAX_DEQD_RETRIES) {
816 RTE_LOG(ERR, USER1,
817 "Not all operations could be dequeued\n");
818 res = -1;
819 break;
820 }
821 usleep(DEQUEUE_WAIT_TIME);
822 }
823 num_deqd = rte_compressdev_dequeue_burst(0, 0,
824 &ops_processed[num_total_deqd], num_bufs);
825 num_total_deqd += num_deqd;
826 deqd_retries++;
827
828 }
829
830 return res;
831}
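
/*
 * Prepare the source mbufs for the compression stage: clear the private test
 * arrays, pick the mempool matching the test case (big data, SGL or linear),
 * allocate one source mbuf per test buffer and fill it with the test data or
 * attach the external memzone when external mbufs are requested. For stateful
 * decompression an accumulation buffer for the output is also allocated.
 * Returns 0 on success, -1 on failure.
 */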
849static int
850test_setup_com_bufs(const struct interim_data_params *int_data,
851 const struct test_data_params *test_data,
852 const struct test_private_arrays *test_priv_data)
853{
854
855 unsigned int i;
856 uint32_t data_size;
857 char *buf_ptr;
858 int ret;
859 char **all_decomp_data = test_priv_data->all_decomp_data;
860
861 struct comp_testsuite_params *ts_params = &testsuite_params;
862
863
864 const char * const *test_bufs = int_data->test_bufs;
865 unsigned int num_bufs = int_data->num_bufs;
866
867
868 unsigned int buff_type = test_data->buff_type;
869 unsigned int big_data = test_data->big_data;
870
871
872 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
873 struct rte_mempool *buf_pool;
874
875 static struct rte_mbuf_ext_shared_info inbuf_info;
876
877 size_t array_size = sizeof(void *) * num_bufs;
878
879
880 memset(test_priv_data->uncomp_bufs, 0, array_size);
881 memset(test_priv_data->comp_bufs, 0, array_size);
882 memset(test_priv_data->ops, 0, array_size);
883 memset(test_priv_data->ops_processed, 0, array_size);
884 memset(test_priv_data->priv_xforms, 0, array_size);
885 memset(test_priv_data->compressed_data_size,
886 0, sizeof(uint32_t) * num_bufs);
887
	if (test_data->decompress_state == RTE_COMP_OP_STATEFUL) {
		data_size = strlen(test_bufs[0]) + 1;
		*all_decomp_data = rte_malloc(NULL, data_size,
				RTE_CACHE_LINE_SIZE);
		if (*all_decomp_data == NULL) {
			RTE_LOG(ERR, USER1,
				"Buffer for all decompressed data could not be allocated\n");
			return -1;
		}
	}
893
894 if (big_data)
895 buf_pool = ts_params->big_mbuf_pool;
896 else if (buff_type == SGL_BOTH)
897 buf_pool = ts_params->small_mbuf_pool;
898 else
899 buf_pool = ts_params->large_mbuf_pool;
900
901
902
903 ret = rte_pktmbuf_alloc_bulk(buf_pool,
904 uncomp_bufs, num_bufs);
905 if (ret < 0) {
906 RTE_LOG(ERR, USER1,
907 "Source mbufs could not be allocated "
908 "from the mempool\n");
909 return -1;
910 }
911
912 if (test_data->use_external_mbufs) {
913 inbuf_info.free_cb = extbuf_free_callback;
914 inbuf_info.fcb_opaque = NULL;
915 rte_mbuf_ext_refcnt_set(&inbuf_info, 1);
916 for (i = 0; i < num_bufs; i++) {
917 rte_pktmbuf_attach_extbuf(uncomp_bufs[i],
918 test_data->inbuf_memzone->addr,
919 test_data->inbuf_memzone->iova,
920 test_data->inbuf_data_size,
921 &inbuf_info);
922 buf_ptr = rte_pktmbuf_append(uncomp_bufs[i],
923 test_data->inbuf_data_size);
924 if (buf_ptr == NULL) {
925 RTE_LOG(ERR, USER1,
926 "Append extra bytes to the source mbuf failed\n");
927 return -1;
928 }
929 }
930 } else if (buff_type == SGL_BOTH || buff_type == SGL_TO_LB) {
931 for (i = 0; i < num_bufs; i++) {
932 data_size = strlen(test_bufs[i]) + 1;
933 if (prepare_sgl_bufs(test_bufs[i], uncomp_bufs[i],
934 data_size,
935 big_data ? buf_pool : ts_params->small_mbuf_pool,
936 big_data ? buf_pool : ts_params->large_mbuf_pool,
937 big_data ? 0 : MAX_SEGS,
938 big_data ? MAX_DATA_MBUF_SIZE : SMALL_SEG_SIZE) < 0)
939 return -1;
940 }
941 } else {
942 for (i = 0; i < num_bufs; i++) {
943 data_size = strlen(test_bufs[i]) + 1;
944
945 buf_ptr = rte_pktmbuf_append(uncomp_bufs[i], data_size);
946 if (buf_ptr == NULL) {
947 RTE_LOG(ERR, USER1,
948 "Append extra bytes to the source mbuf failed\n");
949 return -1;
950 }
951 strlcpy(buf_ptr, test_bufs[i], data_size);
952 }
953 }
954
955 return 0;
956}
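
/*
 * Calculate how many bytes the destination mbuf of an operation needs:
 * a single byte when an out-of-space condition is being forced, the input
 * length scaled by the configured buffer size ratio for compression, or the
 * original (uncompressed) test buffer length for decompression.
 */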
980static inline uint32_t
981test_mbufs_calculate_data_size(
982 enum operation_type op_type,
983 unsigned int out_of_space_and_zlib,
984 const struct test_private_arrays *test_priv_data,
985 const struct interim_data_params *int_data,
986 const struct test_data_params *test_data,
987 unsigned int i)
988{
989
990 uint32_t data_size;
991 struct priv_op_data *priv_data;
992 float ratio_val;
993 enum ratio_switch ratio = test_data->ratio;
994
995 uint8_t not_zlib_compr;
996 enum overflow_test overflow = test_data->overflow;
997
998
999 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1000
1001
1002 const char * const *test_bufs = int_data->test_bufs;
1003
1004 if (out_of_space_and_zlib)
1005 data_size = OUT_OF_SPACE_BUF;
1006 else {
1007 if (op_type == OPERATION_COMPRESSION) {
1008 not_zlib_compr = (test_data->zlib_dir == ZLIB_DECOMPRESS
1009 || test_data->zlib_dir == ZLIB_NONE);
1010
1011 ratio_val = (ratio == RATIO_ENABLED) ?
1012 COMPRESS_BUF_SIZE_RATIO :
1013 COMPRESS_BUF_SIZE_RATIO_DISABLED;
1014
1015 ratio_val = (not_zlib_compr &&
1016 (overflow == OVERFLOW_ENABLED)) ?
1017 COMPRESS_BUF_SIZE_RATIO_OVERFLOW :
1018 ratio_val;
1019
1020 data_size = strlen(test_bufs[i]) * ratio_val;
1021 } else {
1022 priv_data = (struct priv_op_data *)
1023 (ops_processed[i] + 1);
1024 data_size = strlen(test_bufs[priv_data->orig_idx]) + 1;
1025 }
1026 }
1027
1028 return data_size;
1029}
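
/*
 * Allocate and size the destination mbufs for the compression or the
 * decompression stage: pick the right mempool, attach external buffers when
 * requested, and otherwise reserve the calculated data size per operation as
 * either a linear buffer or an SGL chain.
 * Returns 0 on success, -1 on failure.
 */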
1055static int
1056test_setup_output_bufs(
1057 enum operation_type op_type,
1058 unsigned int out_of_space_and_zlib,
1059 const struct test_private_arrays *test_priv_data,
1060 const struct interim_data_params *int_data,
1061 const struct test_data_params *test_data,
1062 struct rte_mbuf_ext_shared_info *current_extbuf_info)
1063{
1064
1065 unsigned int i;
1066 uint32_t data_size;
1067 int ret;
1068 char *buf_ptr;
1069
1070
1071 struct rte_mbuf **current_bufs;
1072
1073
1074 unsigned int num_bufs = int_data->num_bufs;
1075
1076
1077 unsigned int buff_type = test_data->buff_type;
1078 unsigned int big_data = test_data->big_data;
1079 const struct rte_memzone *current_memzone;
1080
1081 struct comp_testsuite_params *ts_params = &testsuite_params;
1082 struct rte_mempool *buf_pool;
1083
1084 if (big_data)
1085 buf_pool = ts_params->big_mbuf_pool;
1086 else if (buff_type == SGL_BOTH)
1087 buf_pool = ts_params->small_mbuf_pool;
1088 else
1089 buf_pool = ts_params->large_mbuf_pool;
1090
1091 if (op_type == OPERATION_COMPRESSION)
1092 current_bufs = test_priv_data->comp_bufs;
1093 else
1094 current_bufs = test_priv_data->uncomp_bufs;
1095
1096
1097 ret = rte_pktmbuf_alloc_bulk(buf_pool, current_bufs, num_bufs);
1098 if (ret < 0) {
1099 RTE_LOG(ERR, USER1,
1100 "Destination mbufs could not be allocated "
1101 "from the mempool\n");
1102 return -1;
1103 }
1104
1105 if (test_data->use_external_mbufs) {
1106 current_extbuf_info->free_cb = extbuf_free_callback;
1107 current_extbuf_info->fcb_opaque = NULL;
1108 rte_mbuf_ext_refcnt_set(current_extbuf_info, 1);
1109 if (op_type == OPERATION_COMPRESSION)
1110 current_memzone = test_data->compbuf_memzone;
1111 else
1112 current_memzone = test_data->uncompbuf_memzone;
1113
1114 for (i = 0; i < num_bufs; i++) {
1115 rte_pktmbuf_attach_extbuf(current_bufs[i],
1116 current_memzone->addr,
1117 current_memzone->iova,
1118 current_memzone->len,
1119 current_extbuf_info);
1120 rte_pktmbuf_append(current_bufs[i],
1121 current_memzone->len);
1122 }
1123 } else {
1124 for (i = 0; i < num_bufs; i++) {
1125
1126 enum rte_comp_huffman comp_huffman =
1127 ts_params->def_comp_xform->compress.deflate.huffman;
1128
1129
1130 data_size = test_mbufs_calculate_data_size(
1131 op_type,
1132 out_of_space_and_zlib,
1133 test_priv_data,
1134 int_data,
1135 test_data,
1136 i);
1137
1138 if (comp_huffman != RTE_COMP_HUFFMAN_DYNAMIC) {
1139 if (op_type == OPERATION_DECOMPRESSION)
1140 data_size *= COMPRESS_BUF_SIZE_RATIO;
1141 }
1142
1143
1144 if (buff_type == SGL_BOTH || buff_type == LB_TO_SGL) {
1145 ret = prepare_sgl_bufs(NULL, current_bufs[i],
1146 data_size,
1147 big_data ? buf_pool :
1148 ts_params->small_mbuf_pool,
1149 big_data ? buf_pool :
1150 ts_params->large_mbuf_pool,
1151 big_data ? 0 : MAX_SEGS,
1152 big_data ? MAX_DATA_MBUF_SIZE :
1153 SMALL_SEG_SIZE);
1154 if (ret < 0)
1155 return -1;
1156 } else {
1157 buf_ptr = rte_pktmbuf_append(current_bufs[i],
1158 data_size);
1159 if (buf_ptr == NULL) {
1160 RTE_LOG(ERR, USER1,
1161 "Append extra bytes to the destination mbuf failed\n");
1162 return -1;
1163 }
1164 }
1165 }
1166 }
1167
1168 return 0;
1169}
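
/*
 * Run the compression stage: allocate the operations, attach the source and
 * destination mbufs, then compress either directly through zlib or through
 * the PMD, creating shareable or per-operation private xforms depending on
 * the device capabilities, and retrying operations that report
 * OUT_OF_SPACE_RECOVERABLE after growing their destination mbuf.
 * Returns 0 on success, -1 on failure.
 */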
1188static int
1189test_deflate_comp_run(const struct interim_data_params *int_data,
1190 const struct test_data_params *test_data,
1191 const struct test_private_arrays *test_priv_data)
1192{
1193
1194 struct priv_op_data *priv_data;
1195 unsigned int i;
1196 uint16_t num_priv_xforms = 0;
1197 int ret;
1198 int ret_status = 0;
1199 char *buf_ptr;
1200
1201 struct comp_testsuite_params *ts_params = &testsuite_params;
1202
1203
1204 enum rte_comp_op_type operation_type = test_data->compress_state;
1205 unsigned int zlib_compress =
1206 (test_data->zlib_dir == ZLIB_ALL ||
1207 test_data->zlib_dir == ZLIB_COMPRESS);
1208
1209
1210 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1211 unsigned int num_xforms = int_data->num_xforms;
1212 unsigned int num_bufs = int_data->num_bufs;
1213
1214
1215 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1216 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1217 struct rte_comp_op **ops = test_priv_data->ops;
1218 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1219 void **priv_xforms = test_priv_data->priv_xforms;
1220
1221 const struct rte_compressdev_capabilities *capa =
1222 rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
1223
1224
1225 ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
1226 if (ret < 0) {
1227 RTE_LOG(ERR, USER1,
1228 "Compress operations could not be allocated "
1229 "from the mempool\n");
1230 ret_status = -1;
1231 goto exit;
1232 }
1233
1234 for (i = 0; i < num_bufs; i++) {
1235 ops[i]->m_src = uncomp_bufs[i];
1236 ops[i]->m_dst = comp_bufs[i];
1237 ops[i]->src.offset = 0;
1238 ops[i]->src.length = rte_pktmbuf_pkt_len(uncomp_bufs[i]);
1239 ops[i]->dst.offset = 0;
1240
1241 RTE_LOG(DEBUG, USER1,
			"Uncompressed buffer length = %u compressed buffer length = %u\n",
1243 rte_pktmbuf_pkt_len(uncomp_bufs[i]),
1244 rte_pktmbuf_pkt_len(comp_bufs[i]));
1245
1246 if (operation_type == RTE_COMP_OP_STATELESS) {
1247 ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
1248 } else {
1249 RTE_LOG(ERR, USER1,
1250 "Compression: stateful operations are not "
1251 "supported in these tests yet\n");
1252 ret_status = -1;
1253 goto exit;
1254 }
1255 ops[i]->input_chksum = 0;
1256
1257
1258
1259
1260
1261
1262 priv_data = (struct priv_op_data *) (ops[i] + 1);
1263 priv_data->orig_idx = i;
1264 }
1265
1266
1267 if (zlib_compress) {
1268 for (i = 0; i < num_bufs; i++) {
1269 const struct rte_comp_xform *compress_xform =
1270 compress_xforms[i % num_xforms];
1271 ret = compress_zlib(ops[i], compress_xform,
1272 DEFAULT_MEM_LEVEL);
1273 if (ret < 0) {
1274 ret_status = -1;
1275 goto exit;
1276 }
1277
1278 ops_processed[i] = ops[i];
1279 }
1280 } else {
1281
1282 for (i = 0; i < num_xforms; i++) {
1283 ret = rte_compressdev_private_xform_create(0,
1284 (const struct rte_comp_xform *)
1285 compress_xforms[i],
1286 &priv_xforms[i]);
1287 if (ret < 0) {
1288 RTE_LOG(ERR, USER1,
1289 "Compression private xform "
1290 "could not be created\n");
1291 ret_status = -1;
1292 goto exit;
1293 }
1294 num_priv_xforms++;
1295 }
1296 if (capa->comp_feature_flags &
1297 RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
1298
1299 for (i = 0; i < num_bufs; i++)
1300 ops[i]->private_xform =
1301 priv_xforms[i % num_xforms];
1302 } else {
1303
1304 for (i = num_xforms; i < num_bufs; i++) {
1305 ret = rte_compressdev_private_xform_create(0,
1306 compress_xforms[i % num_xforms],
1307 &priv_xforms[i]);
1308 if (ret < 0) {
1309 RTE_LOG(ERR, USER1,
1310 "Compression private xform "
1311 "could not be created\n");
1312 ret_status = -1;
1313 goto exit;
1314 }
1315 num_priv_xforms++;
1316 }
1317
1318 for (i = 0; i < num_bufs; i++)
1319 ops[i]->private_xform = priv_xforms[i];
1320 }
1321
1322recovery_lb:
1323 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
1324 if (ret < 0) {
1325 RTE_LOG(ERR, USER1,
1326 "Compression: enqueue/dequeue operation failed\n");
1327 ret_status = -1;
1328 goto exit;
1329 }
1330
1331 for (i = 0; i < num_bufs; i++) {
1332 test_priv_data->compressed_data_size[i] +=
1333 ops_processed[i]->produced;
1334
1335 if (ops_processed[i]->status ==
1336 RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
1337
1338 ops[i]->status =
1339 RTE_COMP_OP_STATUS_NOT_PROCESSED;
1340 ops[i]->src.offset +=
1341 ops_processed[i]->consumed;
1342 ops[i]->src.length -=
1343 ops_processed[i]->consumed;
1344 ops[i]->dst.offset +=
1345 ops_processed[i]->produced;
1346
1347 buf_ptr = rte_pktmbuf_append(
1348 ops[i]->m_dst,
1349 ops_processed[i]->produced);
1350
1351 if (buf_ptr == NULL) {
1352 RTE_LOG(ERR, USER1,
1353 "Data recovery: append extra bytes to the current mbuf failed\n");
1354 ret_status = -1;
1355 goto exit;
1356 }
1357 goto recovery_lb;
1358 }
1359 }
1360 }
1361
1362exit:
1363
1364 if (ret_status < 0)
1365 for (i = 0; i < num_bufs; i++) {
1366 rte_comp_op_free(ops[i]);
1367 ops[i] = NULL;
1368 ops_processed[i] = NULL;
1369 }
1370
1371
1372 for (i = 0; i < num_priv_xforms; i++) {
1373 if (priv_xforms[i] != NULL) {
1374 rte_compressdev_private_xform_free(0, priv_xforms[i]);
1375 priv_xforms[i] = NULL;
1376 }
1377 }
1378
1379 return ret_status;
1380}
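
/*
 * Check the outcome of the compression stage: log per-buffer statistics,
 * record checksums, verify the expected out-of-space status when that is
 * being tested, and free the source mbufs of successful operations.
 * Returns 0 on success, 1 when an expected out-of-space error ends the test
 * early, 2 when out-of-space recovery is not supported by the device and
 * -1 on failure.
 */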
1402static int
1403test_deflate_comp_finalize(const struct interim_data_params *int_data,
1404 const struct test_data_params *test_data,
1405 const struct test_private_arrays *test_priv_data)
1406{
1407
1408 unsigned int i;
1409 struct priv_op_data *priv_data;
1410
1411
1412 unsigned int num_xforms = int_data->num_xforms;
1413 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1414 uint16_t *buf_idx = int_data->buf_idx;
1415 unsigned int num_bufs = int_data->num_bufs;
1416
1417
1418 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1419 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1420 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1421 struct rte_comp_op **ops = test_priv_data->ops;
1422
1423
1424 unsigned int out_of_space = test_data->out_of_space;
1425 unsigned int zlib_compress =
1426 (test_data->zlib_dir == ZLIB_ALL ||
1427 test_data->zlib_dir == ZLIB_COMPRESS);
1428 unsigned int zlib_decompress =
1429 (test_data->zlib_dir == ZLIB_ALL ||
1430 test_data->zlib_dir == ZLIB_DECOMPRESS);
1431
1432 for (i = 0; i < num_bufs; i++) {
1433 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1434 uint16_t xform_idx = priv_data->orig_idx % num_xforms;
1435 const struct rte_comp_compress_xform *compress_xform =
1436 &compress_xforms[xform_idx]->compress;
1437 enum rte_comp_huffman huffman_type =
1438 compress_xform->deflate.huffman;
1439 char engine[] = "zlib (directly, not PMD)";
1440 if (zlib_decompress)
1441 strlcpy(engine, "PMD", sizeof(engine));
1442
1443 RTE_LOG(DEBUG, USER1, "Buffer %u compressed by %s from %u to"
1444 " %u bytes (level = %d, huffman = %s)\n",
1445 buf_idx[priv_data->orig_idx], engine,
1446 ops_processed[i]->consumed, ops_processed[i]->produced,
1447 compress_xform->level,
1448 huffman_type_strings[huffman_type]);
1449 RTE_LOG(DEBUG, USER1, "Compression ratio = %.2f\n",
1450 ops_processed[i]->consumed == 0 ? 0 :
1451 (float)ops_processed[i]->produced /
1452 ops_processed[i]->consumed * 100);
1453 if (compress_xform->chksum != RTE_COMP_CHECKSUM_NONE)
1454 compress_checksum[i] = ops_processed[i]->output_chksum;
1455 ops[i] = NULL;
1456 }
1457
1458
1459
1460
1461
1462 for (i = 0; i < num_bufs; i++) {
1463 if (out_of_space && !zlib_compress) {
1464 if (ops_processed[i]->status !=
1465 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1466 RTE_LOG(ERR, USER1,
1467 "Operation without expected out of "
1468 "space status error\n");
1469 return -1;
1470 } else
1471 continue;
1472 }
1473
1474 if (ops_processed[i]->status != RTE_COMP_OP_STATUS_SUCCESS) {
1475 if (test_data->overflow == OVERFLOW_ENABLED) {
1476 if (ops_processed[i]->status ==
1477 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1478 RTE_LOG(INFO, USER1,
1479 "Out-of-space-recoverable functionality"
1480 " is not supported on this device\n");
1481 return 2;
1482 }
1483 }
1484
1485 RTE_LOG(ERR, USER1,
1486 "Comp: Some operations were not successful\n");
1487 return -1;
1488 }
1489 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1490 rte_pktmbuf_free(uncomp_bufs[priv_data->orig_idx]);
1491 uncomp_bufs[priv_data->orig_idx] = NULL;
1492 }
1493
1494 if (out_of_space && !zlib_compress)
1495 return 1;
1496
1497 return 0;
1498}
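
/*
 * Set up the decompression stage: build the operations from the compressed
 * mbufs, then either decompress directly through zlib or create the private
 * xforms (stateless) or the stream (stateful) required to submit the
 * operations to the PMD.
 * Returns 0 on success, -1 on failure.
 */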
1517static int
1518test_deflate_decomp_run(const struct interim_data_params *int_data,
1519 const struct test_data_params *test_data,
1520 struct test_private_arrays *test_priv_data)
1521{
1522
1523
1524 struct priv_op_data *priv_data;
1525 unsigned int i;
1526 uint16_t num_priv_xforms = 0;
1527 int ret;
1528 int ret_status = 0;
1529
1530 struct comp_testsuite_params *ts_params = &testsuite_params;
1531
1532
1533 enum rte_comp_op_type operation_type = test_data->decompress_state;
1534 unsigned int zlib_decompress =
1535 (test_data->zlib_dir == ZLIB_ALL ||
1536 test_data->zlib_dir == ZLIB_DECOMPRESS);
1537
1538
1539 struct rte_comp_xform **decompress_xforms = int_data->decompress_xforms;
1540 unsigned int num_xforms = int_data->num_xforms;
1541 unsigned int num_bufs = int_data->num_bufs;
1542
1543
1544 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1545 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1546 struct rte_comp_op **ops = test_priv_data->ops;
1547 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1548 void **priv_xforms = test_priv_data->priv_xforms;
1549 uint32_t *compressed_data_size = test_priv_data->compressed_data_size;
1550 void **stream = test_priv_data->stream;
1551
1552 const struct rte_compressdev_capabilities *capa =
1553 rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
1554
1555 ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
1556 if (ret < 0) {
1557 RTE_LOG(ERR, USER1,
1558 "Decompress operations could not be allocated "
1559 "from the mempool\n");
1560 ret_status = -1;
1561 goto exit;
1562 }
1563
1564
1565 for (i = 0; i < num_bufs; i++) {
1566 ops[i]->m_src = comp_bufs[i];
1567 ops[i]->m_dst = uncomp_bufs[i];
1568 ops[i]->src.offset = 0;
1569
1570
1571
1572
1573
1574 if (compressed_data_size[i])
1575 ops[i]->src.length = compressed_data_size[i];
1576 else
1577 ops[i]->src.length = ops_processed[i]->produced;
1578
1579 ops[i]->dst.offset = 0;
1580
1581 if (operation_type == RTE_COMP_OP_STATELESS) {
1582 ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
1583 ops[i]->op_type = RTE_COMP_OP_STATELESS;
1584 } else if (!zlib_decompress) {
1585 ops[i]->flush_flag = RTE_COMP_FLUSH_SYNC;
1586 ops[i]->op_type = RTE_COMP_OP_STATEFUL;
1587 } else {
1588 RTE_LOG(ERR, USER1,
1589 "Decompression: stateful operations are"
1590 " not supported in these tests yet\n");
1591 ret_status = -1;
1592 goto exit;
1593 }
1594 ops[i]->input_chksum = 0;
1595
1596
1597
1598
1599 memcpy(ops[i] + 1, ops_processed[i] + 1,
1600 sizeof(struct priv_op_data));
1601 }
1602
1603
1604
1605
1606
1607 rte_comp_op_bulk_free(ops_processed, num_bufs);
1608
1609
1610 if (zlib_decompress) {
1611 for (i = 0; i < num_bufs; i++) {
1612 priv_data = (struct priv_op_data *)(ops[i] + 1);
1613 uint16_t xform_idx = priv_data->orig_idx % num_xforms;
1614 const struct rte_comp_xform *decompress_xform =
1615 decompress_xforms[xform_idx];
1616
1617 ret = decompress_zlib(ops[i], decompress_xform);
1618 if (ret < 0) {
1619 ret_status = -1;
1620 goto exit;
1621 }
1622
1623 ops_processed[i] = ops[i];
1624 }
1625 } else {
1626 if (operation_type == RTE_COMP_OP_STATELESS) {
1627
1628 for (i = 0; i < num_xforms; i++) {
1629 ret = rte_compressdev_private_xform_create(0,
1630 (const struct rte_comp_xform *)
1631 decompress_xforms[i],
1632 &priv_xforms[i]);
1633 if (ret < 0) {
1634 RTE_LOG(ERR, USER1,
1635 "Decompression private xform "
1636 "could not be created\n");
1637 ret_status = -1;
1638 goto exit;
1639 }
1640 num_priv_xforms++;
1641 }
1642
1643 if (capa->comp_feature_flags &
1644 RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
1645
1646 for (i = 0; i < num_bufs; i++) {
1647 priv_data = (struct priv_op_data *)
1648 (ops[i] + 1);
1649 uint16_t xform_idx =
1650 priv_data->orig_idx % num_xforms;
1651 ops[i]->private_xform =
1652 priv_xforms[xform_idx];
1653 }
1654 } else {
1655
1656
1657 for (i = num_xforms; i < num_bufs; i++) {
1658 ret =
1659 rte_compressdev_private_xform_create(0,
1660 decompress_xforms[i % num_xforms],
1661 &priv_xforms[i]);
1662 if (ret < 0) {
1663 RTE_LOG(ERR, USER1,
1664 "Decompression private xform"
1665 " could not be created\n");
1666 ret_status = -1;
1667 goto exit;
1668 }
1669 num_priv_xforms++;
1670 }
1671
1672
1673
1674 for (i = 0; i < num_bufs; i++) {
1675 priv_data = (struct priv_op_data *)
1676 (ops[i] + 1);
1677 uint16_t xform_idx =
1678 priv_data->orig_idx;
1679 ops[i]->private_xform =
1680 priv_xforms[xform_idx];
1681 }
1682 }
1683 } else {
1684
1685 ret = rte_compressdev_stream_create(0,
1686 decompress_xforms[0], stream);
1687 if (ret < 0) {
1688 RTE_LOG(ERR, USER1,
1689 "Decompression stream could not be created, error %d\n",
1690 ret);
1691 ret_status = -1;
1692 goto exit;
1693 }
1694
1695 for (i = 0; i < num_bufs; i++)
1696 ops[i]->stream = *stream;
1697 }
1698
1699 test_priv_data->num_priv_xforms = num_priv_xforms;
1700 }
1701
1702exit:
1703 return ret_status;
1704}
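
/*
 * Check the outcome of the decompression stage. For stateful decompression
 * the produced data is accumulated across steps and, while input remains,
 * the operation is rearmed for another enqueue round; once complete, the
 * accumulated output and checksum are compared against the original test
 * buffer. Stateless operations are only checked for a successful status here.
 * Returns 0 on success, 1 when an expected out-of-space error ends the test
 * early, 2 when another stateful step is needed and -1 on failure.
 */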
1726static int
1727test_deflate_decomp_finalize(const struct interim_data_params *int_data,
1728 const struct test_data_params *test_data,
1729 const struct test_private_arrays *test_priv_data)
1730{
1731
1732 unsigned int i;
1733 struct priv_op_data *priv_data;
1734 static unsigned int step;
1735
1736
1737 uint16_t *buf_idx = int_data->buf_idx;
1738 unsigned int num_bufs = int_data->num_bufs;
1739 const char * const *test_bufs = int_data->test_bufs;
1740 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1741
1742
1743 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1744 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1745 struct rte_comp_op **ops = test_priv_data->ops;
1746 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1747 unsigned int *decomp_produced_data_size =
1748 test_priv_data->decomp_produced_data_size;
1749 char **all_decomp_data = test_priv_data->all_decomp_data;
1750
1751
1752 unsigned int out_of_space = test_data->out_of_space;
1753 enum rte_comp_op_type operation_type = test_data->decompress_state;
1754
1755 unsigned int zlib_compress =
1756 (test_data->zlib_dir == ZLIB_ALL ||
1757 test_data->zlib_dir == ZLIB_COMPRESS);
1758 unsigned int zlib_decompress =
1759 (test_data->zlib_dir == ZLIB_ALL ||
1760 test_data->zlib_dir == ZLIB_DECOMPRESS);
1761
1762 for (i = 0; i < num_bufs; i++) {
1763 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		char engine[] = "zlib (directly, not PMD)";
		if (zlib_compress)
			strlcpy(engine, "PMD", sizeof(engine));
1767 RTE_LOG(DEBUG, USER1,
1768 "Buffer %u decompressed by %s from %u to %u bytes\n",
1769 buf_idx[priv_data->orig_idx], engine,
1770 ops_processed[i]->consumed, ops_processed[i]->produced);
1771 ops[i] = NULL;
1772 }
1773
1774
1775
1776
1777
1778 for (i = 0; i < num_bufs; i++) {
1779 if (out_of_space && !zlib_decompress) {
1780 if (ops_processed[i]->status !=
1781 RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1782
1783 RTE_LOG(ERR, USER1,
1784 "Operation without expected out of "
1785 "space status error\n");
1786 return -1;
1787 } else
1788 continue;
1789 }
1790
1791 if (operation_type == RTE_COMP_OP_STATEFUL
1792 && (ops_processed[i]->status ==
1793 RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE
1794 || ops_processed[i]->status ==
1795 RTE_COMP_OP_STATUS_SUCCESS)) {
1796
1797 RTE_LOG(DEBUG, USER1,
1798 ".............RECOVERABLE\n");
1799
1800
1801 const void *ptr = rte_pktmbuf_read(
1802 ops_processed[i]->m_dst,
1803 ops_processed[i]->dst.offset,
1804 ops_processed[i]->produced,
1805 *all_decomp_data +
1806 *decomp_produced_data_size);
1807 if (ptr != *all_decomp_data +
1808 *decomp_produced_data_size)
1809 rte_memcpy(*all_decomp_data +
1810 *decomp_produced_data_size,
1811 ptr, ops_processed[i]->produced);
1812
1813 *decomp_produced_data_size +=
1814 ops_processed[i]->produced;
1815 if (ops_processed[i]->src.length >
1816 ops_processed[i]->consumed) {
1817 if (ops_processed[i]->status ==
1818 RTE_COMP_OP_STATUS_SUCCESS) {
1819 RTE_LOG(ERR, USER1,
1820 "Operation finished too early\n");
1821 return -1;
1822 }
1823 step++;
1824 if (step >= test_data->decompress_steps_max) {
1825 RTE_LOG(ERR, USER1,
1826 "Operation exceeded maximum steps\n");
1827 return -1;
1828 }
1829 ops[i] = ops_processed[i];
1830 ops[i]->status =
1831 RTE_COMP_OP_STATUS_NOT_PROCESSED;
1832 ops[i]->src.offset +=
1833 ops_processed[i]->consumed;
1834 ops[i]->src.length -=
1835 ops_processed[i]->consumed;
1836
1837 return 2;
1838 } else {
1839
1840
1841 priv_data = (struct priv_op_data *)
1842 (ops_processed[i] + 1);
1843 const char *buf1 =
1844 test_bufs[priv_data->orig_idx];
1845 const char *buf2 = *all_decomp_data;
1846
1847 if (compare_buffers(buf1, strlen(buf1) + 1,
1848 buf2, *decomp_produced_data_size) < 0)
1849 return -1;
1850
1851 if (compress_xforms[0]->compress.chksum
1852 != RTE_COMP_CHECKSUM_NONE) {
1853 if (ops_processed[i]->output_chksum
1854 != compress_checksum[i]) {
1855 RTE_LOG(ERR, USER1,
1856 "The checksums differ\n"
1857 "Compression Checksum: %" PRIu64 "\tDecompression "
1858 "Checksum: %" PRIu64 "\n", compress_checksum[i],
1859 ops_processed[i]->output_chksum);
1860 return -1;
1861 }
1862 }
1863 }
1864 } else if (ops_processed[i]->status !=
1865 RTE_COMP_OP_STATUS_SUCCESS) {
1866 RTE_LOG(ERR, USER1,
1867 "Decomp: Some operations were not successful, status = %u\n",
1868 ops_processed[i]->status);
1869 return -1;
1870 }
1871 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1872 rte_pktmbuf_free(comp_bufs[priv_data->orig_idx]);
1873 comp_bufs[priv_data->orig_idx] = NULL;
1874 }
1875
1876 if (out_of_space && !zlib_decompress)
1877 return 1;
1878
1879 return 0;
1880}
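
/*
 * Compare each decompressed buffer, and its checksum when checksums are
 * enabled, against the original input data.
 * Returns 0 on success, -1 on any mismatch or allocation failure.
 */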
1900static int
1901test_results_validation(const struct interim_data_params *int_data,
1902 const struct test_data_params *test_data,
1903 const struct test_private_arrays *test_priv_data)
1904{
1905
1906 unsigned int i;
1907 struct priv_op_data *priv_data;
1908 const char *buf1;
1909 const char *buf2;
1910 char *contig_buf = NULL;
1911 uint32_t data_size;
1912
1913
1914 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1915 unsigned int num_bufs = int_data->num_bufs;
1916 const char * const *test_bufs = int_data->test_bufs;
1917
1918
1919 uint64_t *compress_checksum = test_priv_data->compress_checksum;
1920 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1921
1922
1923
1924
1925
1926 for (i = 0; i < num_bufs; i++) {
1927 priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1928 buf1 = test_data->use_external_mbufs ?
1929 test_data->inbuf_memzone->addr :
1930 test_bufs[priv_data->orig_idx];
1931 data_size = test_data->use_external_mbufs ?
1932 test_data->inbuf_data_size :
1933 strlen(buf1) + 1;
1934
1935 contig_buf = rte_malloc(NULL, ops_processed[i]->produced, 0);
1936 if (contig_buf == NULL) {
1937 RTE_LOG(ERR, USER1, "Contiguous buffer could not "
1938 "be allocated\n");
1939 goto exit;
1940 }
1941
1942 buf2 = rte_pktmbuf_read(ops_processed[i]->m_dst, 0,
1943 ops_processed[i]->produced, contig_buf);
1944 if (compare_buffers(buf1, data_size,
1945 buf2, ops_processed[i]->produced) < 0)
1946 goto exit;
1947
1948
1949 if (compress_xforms[0]->compress.chksum !=
1950 RTE_COMP_CHECKSUM_NONE) {
1951 if (ops_processed[i]->output_chksum !=
1952 compress_checksum[i]) {
1953 RTE_LOG(ERR, USER1, "The checksums differ\n"
1954 "Compression Checksum: %" PRIu64 "\tDecompression "
1955 "Checksum: %" PRIu64 "\n", compress_checksum[i],
1956 ops_processed[i]->output_chksum);
1957 goto exit;
1958 }
1959 }
1960
1961 rte_free(contig_buf);
1962 contig_buf = NULL;
1963 }
1964 return 0;
1965
1966exit:
1967 rte_free(contig_buf);
1968 return -1;
1969}
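
/*
 * Main test helper: compress every test buffer and decompress it again using
 * the combination of PMD/zlib direction, buffer layout and stateless or
 * stateful operation described by test_data, then validate the output.
 * Returns 0 on success, 1 for an expected early termination and -1 on
 * failure.
 */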
1987static int
1988test_deflate_comp_decomp(const struct interim_data_params *int_data,
1989 const struct test_data_params *test_data)
1990{
1991 unsigned int num_bufs = int_data->num_bufs;
1992 unsigned int out_of_space = test_data->out_of_space;
1993
1994 void *stream = NULL;
1995 char *all_decomp_data = NULL;
1996 unsigned int decomp_produced_data_size = 0;
1997
1998 int ret_status = -1;
1999 int ret;
2000 struct rte_mbuf *uncomp_bufs[num_bufs];
2001 struct rte_mbuf *comp_bufs[num_bufs];
2002 struct rte_comp_op *ops[num_bufs];
2003 struct rte_comp_op *ops_processed[num_bufs];
2004 void *priv_xforms[num_bufs];
2005 unsigned int i;
2006
2007 uint64_t compress_checksum[num_bufs];
2008 uint32_t compressed_data_size[num_bufs];
2009 char *contig_buf = NULL;
2010
2011 struct rte_mbuf_ext_shared_info compbuf_info;
2012 struct rte_mbuf_ext_shared_info decompbuf_info;
2013
2014 const struct rte_compressdev_capabilities *capa;
2015
2016
2017 unsigned int zlib_compress =
2018 (test_data->zlib_dir == ZLIB_ALL ||
2019 test_data->zlib_dir == ZLIB_COMPRESS);
2020 unsigned int zlib_decompress =
2021 (test_data->zlib_dir == ZLIB_ALL ||
2022 test_data->zlib_dir == ZLIB_DECOMPRESS);
2023
2024 struct test_private_arrays test_priv_data;
2025
2026 test_priv_data.uncomp_bufs = uncomp_bufs;
2027 test_priv_data.comp_bufs = comp_bufs;
2028 test_priv_data.ops = ops;
2029 test_priv_data.ops_processed = ops_processed;
2030 test_priv_data.priv_xforms = priv_xforms;
2031 test_priv_data.compress_checksum = compress_checksum;
2032 test_priv_data.compressed_data_size = compressed_data_size;
2033
2034 test_priv_data.stream = &stream;
2035 test_priv_data.all_decomp_data = &all_decomp_data;
2036 test_priv_data.decomp_produced_data_size = &decomp_produced_data_size;
2037
2038 test_priv_data.num_priv_xforms = 0;
2039
2040 capa = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2041 if (capa == NULL) {
2042 RTE_LOG(ERR, USER1,
2043 "Compress device does not support DEFLATE\n");
2044 return -1;
2045 }
2046
2047
2048 ret = test_setup_com_bufs(int_data, test_data, &test_priv_data);
2049 if (ret < 0) {
2050 ret_status = -1;
2051 goto exit;
2052 }
2053
2054 RTE_LOG(DEBUG, USER1, "<<< COMPRESSION >>>\n");
2055
2056
2057
2058
2059 ret = test_setup_output_bufs(
2060 OPERATION_COMPRESSION,
2061 out_of_space == 1 && !zlib_compress,
2062 &test_priv_data,
2063 int_data,
2064 test_data,
2065 &compbuf_info);
2066 if (ret < 0) {
2067 ret_status = -1;
2068 goto exit;
2069 }
2070
2071
2072 ret = test_deflate_comp_run(int_data, test_data, &test_priv_data);
2073 if (ret < 0) {
2074 ret_status = -1;
2075 goto exit;
2076 }
2077
2078 ret = test_deflate_comp_finalize(int_data, test_data, &test_priv_data);
2079 if (ret < 0) {
2080 ret_status = -1;
2081 goto exit;
2082 } else if (ret == 1) {
2083 ret_status = 0;
2084 goto exit;
2085 } else if (ret == 2) {
2086 ret_status = 1;
2087 goto exit;
2088 }
2089
2090
2091
2092 RTE_LOG(DEBUG, USER1, "<<< DECOMPRESSION >>>\n");
2093
2094
2095 ret = test_setup_output_bufs(
2096 OPERATION_DECOMPRESSION,
2097 out_of_space == 1 && !zlib_decompress,
2098 &test_priv_data,
2099 int_data,
2100 test_data,
2101 &decompbuf_info);
2102 if (ret < 0) {
2103 ret_status = -1;
2104 goto exit;
2105 }
2106
2107
2108 ret = test_deflate_decomp_run(int_data, test_data, &test_priv_data);
2109 if (ret < 0) {
2110 ret_status = -1;
2111 goto exit;
2112 }
2113
2114 if (!zlib_decompress) {
2115next_step:
2116 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
2117 if (ret < 0) {
2118 ret_status = -1;
2119 RTE_LOG(ERR, USER1,
2120 "Decompression: enqueue/dequeue operation failed\n");
2121 }
2122 }
2123
2124 ret = test_deflate_decomp_finalize(int_data, test_data, &test_priv_data);
2125 if (ret < 0) {
2126 ret_status = -1;
2127 goto exit;
2128 } else if (ret == 1) {
2129 ret_status = 0;
2130 goto exit;
2131 } else if (ret == 2) {
2132 goto next_step;
2133 }
2134
2135
2136
2137 ret = test_results_validation(int_data, test_data, &test_priv_data);
2138 if (ret < 0) {
2139 ret_status = -1;
2140 goto exit;
2141 }
2142 ret_status = 0;
2143
2144exit:
2145
2146
2147 if (stream != NULL)
2148 rte_compressdev_stream_free(0, stream);
2149 if (all_decomp_data != NULL)
2150 rte_free(all_decomp_data);
2151
2152
2153 for (i = 0; i < test_priv_data.num_priv_xforms; i++) {
2154 if (priv_xforms[i] != NULL) {
2155 rte_compressdev_private_xform_free(0, priv_xforms[i]);
2156 priv_xforms[i] = NULL;
2157 }
2158 }
2159 for (i = 0; i < num_bufs; i++) {
2160 rte_pktmbuf_free(uncomp_bufs[i]);
2161 rte_pktmbuf_free(comp_bufs[i]);
2162 rte_comp_op_free(ops[i]);
2163 rte_comp_op_free(ops_processed[i]);
2164 }
2165 rte_free(contig_buf);
2166
2167 return ret_status;
2168}
2169
2170static int
2171test_compressdev_deflate_stateless_fixed(void)
2172{
2173 struct comp_testsuite_params *ts_params = &testsuite_params;
2174 uint16_t i;
2175 int ret;
2176 const struct rte_compressdev_capabilities *capab;
2177
2178 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2179 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2180
2181 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2182 return -ENOTSUP;
2183
2184 struct rte_comp_xform *compress_xform =
2185 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2186
2187 if (compress_xform == NULL) {
2188 RTE_LOG(ERR, USER1,
2189 "Compress xform could not be created\n");
2190 ret = TEST_FAILED;
2191 goto exit;
2192 }
2193
2194 memcpy(compress_xform, ts_params->def_comp_xform,
2195 sizeof(struct rte_comp_xform));
2196 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
2197
2198 struct interim_data_params int_data = {
2199 NULL,
2200 1,
2201 NULL,
2202 &compress_xform,
2203 &ts_params->def_decomp_xform,
2204 1
2205 };
2206
2207 struct test_data_params test_data = {
2208 .compress_state = RTE_COMP_OP_STATELESS,
2209 .decompress_state = RTE_COMP_OP_STATELESS,
2210 .buff_type = LB_BOTH,
2211 .zlib_dir = ZLIB_DECOMPRESS,
2212 .out_of_space = 0,
2213 .big_data = 0,
2214 .overflow = OVERFLOW_DISABLED,
2215 .ratio = RATIO_ENABLED
2216 };
2217
2218 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2219 int_data.test_bufs = &compress_test_bufs[i];
2220 int_data.buf_idx = &i;
2221
2222
2223 test_data.zlib_dir = ZLIB_DECOMPRESS;
2224 ret = test_deflate_comp_decomp(&int_data, &test_data);
2225 if (ret < 0)
2226 goto exit;
2227
2228
2229 test_data.zlib_dir = ZLIB_COMPRESS;
2230 ret = test_deflate_comp_decomp(&int_data, &test_data);
2231 if (ret < 0)
2232 goto exit;
2233 }
2234
2235 ret = TEST_SUCCESS;
2236
2237exit:
2238 rte_free(compress_xform);
2239 return ret;
2240}
2241
2242static int
2243test_compressdev_deflate_stateless_dynamic(void)
2244{
2245 struct comp_testsuite_params *ts_params = &testsuite_params;
2246 uint16_t i;
2247 int ret;
2248 struct rte_comp_xform *compress_xform =
2249 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2250
2251 const struct rte_compressdev_capabilities *capab;
2252
2253 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2254 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2255
2256 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2257 return -ENOTSUP;
2258
2259 if (compress_xform == NULL) {
2260 RTE_LOG(ERR, USER1,
2261 "Compress xform could not be created\n");
2262 ret = TEST_FAILED;
2263 goto exit;
2264 }
2265
2266 memcpy(compress_xform, ts_params->def_comp_xform,
2267 sizeof(struct rte_comp_xform));
2268 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_DYNAMIC;
2269
2270 struct interim_data_params int_data = {
2271 NULL,
2272 1,
2273 NULL,
2274 &compress_xform,
2275 &ts_params->def_decomp_xform,
2276 1
2277 };
2278
2279 struct test_data_params test_data = {
2280 .compress_state = RTE_COMP_OP_STATELESS,
2281 .decompress_state = RTE_COMP_OP_STATELESS,
2282 .buff_type = LB_BOTH,
2283 .zlib_dir = ZLIB_DECOMPRESS,
2284 .out_of_space = 0,
2285 .big_data = 0,
2286 .overflow = OVERFLOW_DISABLED,
2287 .ratio = RATIO_ENABLED
2288 };
2289
2290 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2291 int_data.test_bufs = &compress_test_bufs[i];
2292 int_data.buf_idx = &i;
2293
2294
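		/* Compress with compressdev, decompress with Zlib */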
2295 test_data.zlib_dir = ZLIB_DECOMPRESS;
2296 ret = test_deflate_comp_decomp(&int_data, &test_data);
2297 if (ret < 0)
2298 goto exit;
2299
2300
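		/* Compress with Zlib, decompress with compressdev */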
2301 test_data.zlib_dir = ZLIB_COMPRESS;
2302 ret = test_deflate_comp_decomp(&int_data, &test_data);
2303 if (ret < 0)
2304 goto exit;
2305 }
2306
2307 ret = TEST_SUCCESS;
2308
2309exit:
2310 rte_free(compress_xform);
2311 return ret;
2312}
2313
2314static int
2315test_compressdev_deflate_stateless_multi_op(void)
2316{
2317 struct comp_testsuite_params *ts_params = &testsuite_params;
2318 uint16_t num_bufs = RTE_DIM(compress_test_bufs);
2319 uint16_t buf_idx[num_bufs];
2320 uint16_t i;
2321 int ret;
2322
2323 for (i = 0; i < num_bufs; i++)
2324 buf_idx[i] = i;
2325
2326 struct interim_data_params int_data = {
2327 compress_test_bufs,
2328 num_bufs,
2329 buf_idx,
2330 &ts_params->def_comp_xform,
2331 &ts_params->def_decomp_xform,
2332 1
2333 };
2334
2335 struct test_data_params test_data = {
2336 .compress_state = RTE_COMP_OP_STATELESS,
2337 .decompress_state = RTE_COMP_OP_STATELESS,
2338 .buff_type = LB_BOTH,
2339 .zlib_dir = ZLIB_DECOMPRESS,
2340 .out_of_space = 0,
2341 .big_data = 0,
2342 .overflow = OVERFLOW_DISABLED,
2343 .ratio = RATIO_ENABLED
2344 };
2345
2346
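	/* Compress with compressdev, decompress with Zlib */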
2347 test_data.zlib_dir = ZLIB_DECOMPRESS;
2348 ret = test_deflate_comp_decomp(&int_data, &test_data);
2349 if (ret < 0)
2350 return ret;
2351
2352
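	/* Compress with Zlib, decompress with compressdev */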
2353 test_data.zlib_dir = ZLIB_COMPRESS;
2354 ret = test_deflate_comp_decomp(&int_data, &test_data);
2355 if (ret < 0)
2356 return ret;
2357
2358 return TEST_SUCCESS;
2359}
2360
2361static int
2362test_compressdev_deflate_stateless_multi_level(void)
2363{
2364 struct comp_testsuite_params *ts_params = &testsuite_params;
2365 unsigned int level;
2366 uint16_t i;
2367 int ret;
2368 struct rte_comp_xform *compress_xform =
2369 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2370
2371 if (compress_xform == NULL) {
2372 RTE_LOG(ERR, USER1,
2373 "Compress xform could not be created\n");
2374 ret = TEST_FAILED;
2375 goto exit;
2376 }
2377
2378 memcpy(compress_xform, ts_params->def_comp_xform,
2379 sizeof(struct rte_comp_xform));
2380
2381 struct interim_data_params int_data = {
2382 NULL,
2383 1,
2384 NULL,
2385 &compress_xform,
2386 &ts_params->def_decomp_xform,
2387 1
2388 };
2389
2390 struct test_data_params test_data = {
2391 .compress_state = RTE_COMP_OP_STATELESS,
2392 .decompress_state = RTE_COMP_OP_STATELESS,
2393 .buff_type = LB_BOTH,
2394 .zlib_dir = ZLIB_DECOMPRESS,
2395 .out_of_space = 0,
2396 .big_data = 0,
2397 .overflow = OVERFLOW_DISABLED,
2398 .ratio = RATIO_ENABLED
2399 };
2400
2401 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2402 int_data.test_bufs = &compress_test_bufs[i];
2403 int_data.buf_idx = &i;
2404
2405 for (level = RTE_COMP_LEVEL_MIN; level <= RTE_COMP_LEVEL_MAX;
2406 level++) {
2407 compress_xform->compress.level = level;
2408
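			/* Compress with compressdev, decompress with Zlib */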
2409 test_data.zlib_dir = ZLIB_DECOMPRESS;
2410 ret = test_deflate_comp_decomp(&int_data, &test_data);
2411 if (ret < 0)
2412 goto exit;
2413 }
2414 }
2415
2416 ret = TEST_SUCCESS;
2417
2418exit:
2419 rte_free(compress_xform);
2420 return ret;
2421}
2422
2423#define NUM_XFORMS 3
2424static int
2425test_compressdev_deflate_stateless_multi_xform(void)
2426{
2427 struct comp_testsuite_params *ts_params = &testsuite_params;
2428 uint16_t num_bufs = NUM_XFORMS;
2429 struct rte_comp_xform *compress_xforms[NUM_XFORMS] = {NULL};
2430 struct rte_comp_xform *decompress_xforms[NUM_XFORMS] = {NULL};
2431 const char *test_buffers[NUM_XFORMS];
2432 uint16_t i;
2433 unsigned int level = RTE_COMP_LEVEL_MIN;
2434 uint16_t buf_idx[num_bufs];
2435 int ret;
2436
2437
2438 for (i = 0; i < NUM_XFORMS; i++) {
2439 compress_xforms[i] = rte_malloc(NULL,
2440 sizeof(struct rte_comp_xform), 0);
2441 if (compress_xforms[i] == NULL) {
2442 RTE_LOG(ERR, USER1,
2443 "Compress xform could not be created\n");
2444 ret = TEST_FAILED;
2445 goto exit;
2446 }
2447
2448 memcpy(compress_xforms[i], ts_params->def_comp_xform,
2449 sizeof(struct rte_comp_xform));
2450 compress_xforms[i]->compress.level = level;
2451 level++;
2452
2453 decompress_xforms[i] = rte_malloc(NULL,
2454 sizeof(struct rte_comp_xform), 0);
2455 if (decompress_xforms[i] == NULL) {
2456 RTE_LOG(ERR, USER1,
2457 "Decompress xform could not be created\n");
2458 ret = TEST_FAILED;
2459 goto exit;
2460 }
2461
2462 memcpy(decompress_xforms[i], ts_params->def_decomp_xform,
2463 sizeof(struct rte_comp_xform));
2464 }
2465
2466 for (i = 0; i < NUM_XFORMS; i++) {
2467 buf_idx[i] = 0;
2468
2469 test_buffers[i] = compress_test_bufs[0];
2470 }
2471
2472 struct interim_data_params int_data = {
2473 test_buffers,
2474 num_bufs,
2475 buf_idx,
2476 compress_xforms,
2477 decompress_xforms,
2478 NUM_XFORMS
2479 };
2480
2481 struct test_data_params test_data = {
2482 .compress_state = RTE_COMP_OP_STATELESS,
2483 .decompress_state = RTE_COMP_OP_STATELESS,
2484 .buff_type = LB_BOTH,
2485 .zlib_dir = ZLIB_DECOMPRESS,
2486 .out_of_space = 0,
2487 .big_data = 0,
2488 .overflow = OVERFLOW_DISABLED,
2489 .ratio = RATIO_ENABLED
2490 };
2491
2492
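	/* Compress with compressdev, decompress with Zlib */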
2493 ret = test_deflate_comp_decomp(&int_data, &test_data);
2494 if (ret < 0)
2495 goto exit;
2496
2497 ret = TEST_SUCCESS;
2498
2499exit:
2500 for (i = 0; i < NUM_XFORMS; i++) {
2501 rte_free(compress_xforms[i]);
2502 rte_free(decompress_xforms[i]);
2503 }
2504
2505 return ret;
2506}
2507
2508static int
2509test_compressdev_deflate_stateless_sgl(void)
2510{
2511 struct comp_testsuite_params *ts_params = &testsuite_params;
2512 uint16_t i;
2513 int ret;
2514 const struct rte_compressdev_capabilities *capab;
2515
2516 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2517 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2518
2519 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2520 return -ENOTSUP;
2521
2522 struct interim_data_params int_data = {
2523 NULL,
2524 1,
2525 NULL,
2526 &ts_params->def_comp_xform,
2527 &ts_params->def_decomp_xform,
2528 1
2529 };
2530
2531 struct test_data_params test_data = {
2532 .compress_state = RTE_COMP_OP_STATELESS,
2533 .decompress_state = RTE_COMP_OP_STATELESS,
2534 .buff_type = SGL_BOTH,
2535 .zlib_dir = ZLIB_DECOMPRESS,
2536 .out_of_space = 0,
2537 .big_data = 0,
2538 .overflow = OVERFLOW_DISABLED,
2539 .ratio = RATIO_ENABLED
2540 };
2541
2542 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2543 int_data.test_bufs = &compress_test_bufs[i];
2544 int_data.buf_idx = &i;
2545
2546
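		/* Compress with compressdev, decompress with Zlib */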
2547 test_data.zlib_dir = ZLIB_DECOMPRESS;
2548 ret = test_deflate_comp_decomp(&int_data, &test_data);
2549 if (ret < 0)
2550 return ret;
2551
2552
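		/* Compress with Zlib, decompress with compressdev */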
2553 test_data.zlib_dir = ZLIB_COMPRESS;
2554 ret = test_deflate_comp_decomp(&int_data, &test_data);
2555 if (ret < 0)
2556 return ret;
2557
2558 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_LB_OUT) {
2559
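			/* Scatter-gather input, linear output buffers */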
2560 test_data.zlib_dir = ZLIB_DECOMPRESS;
2561 test_data.buff_type = SGL_TO_LB;
2562 ret = test_deflate_comp_decomp(&int_data, &test_data);
2563 if (ret < 0)
2564 return ret;
2565
2566
2567 test_data.zlib_dir = ZLIB_COMPRESS;
2568 test_data.buff_type = SGL_TO_LB;
2569 ret = test_deflate_comp_decomp(&int_data, &test_data);
2570 if (ret < 0)
2571 return ret;
2572 }
2573
2574 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_LB_IN_SGL_OUT) {
2575
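			/* Linear input, scatter-gather output buffers */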
2576 test_data.zlib_dir = ZLIB_DECOMPRESS;
2577 test_data.buff_type = LB_TO_SGL;
2578 ret = test_deflate_comp_decomp(&int_data, &test_data);
2579 if (ret < 0)
2580 return ret;
2581
2582
2583 test_data.zlib_dir = ZLIB_COMPRESS;
2584 test_data.buff_type = LB_TO_SGL;
2585 ret = test_deflate_comp_decomp(&int_data, &test_data);
2586 if (ret < 0)
2587 return ret;
2588 }
2589 }
2590
2591 return TEST_SUCCESS;
2592}
2593
2594static int
2595test_compressdev_deflate_stateless_checksum(void)
2596{
2597 struct comp_testsuite_params *ts_params = &testsuite_params;
2598 uint16_t i;
2599 int ret;
2600 const struct rte_compressdev_capabilities *capab;
2601
2602 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2603 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2604
2605
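	/* Bail out if the device supports none of the checksum types */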
2606 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) == 0 &&
2607 (capab->comp_feature_flags &
2608 RTE_COMP_FF_ADLER32_CHECKSUM) == 0 &&
2609 (capab->comp_feature_flags &
2610 RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) == 0)
2611 return -ENOTSUP;
2612
2613 struct rte_comp_xform *compress_xform =
2614 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2615 if (compress_xform == NULL) {
2616 RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2617 return TEST_FAILED;
2618 }
2619
2620 memcpy(compress_xform, ts_params->def_comp_xform,
2621 sizeof(struct rte_comp_xform));
2622
2623 struct rte_comp_xform *decompress_xform =
2624 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2625 if (decompress_xform == NULL) {
2626 RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2627 rte_free(compress_xform);
2628 return TEST_FAILED;
2629 }
2630
2631 memcpy(decompress_xform, ts_params->def_decomp_xform,
2632 sizeof(struct rte_comp_xform));
2633
2634 struct interim_data_params int_data = {
2635 NULL,
2636 1,
2637 NULL,
2638 &compress_xform,
2639 &decompress_xform,
2640 1
2641 };
2642
2643 struct test_data_params test_data = {
2644 .compress_state = RTE_COMP_OP_STATELESS,
2645 .decompress_state = RTE_COMP_OP_STATELESS,
2646 .buff_type = LB_BOTH,
2647 .zlib_dir = ZLIB_DECOMPRESS,
2648 .out_of_space = 0,
2649 .big_data = 0,
2650 .overflow = OVERFLOW_DISABLED,
2651 .ratio = RATIO_ENABLED
2652 };
2653
2654
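	/* Test with CRC32 checksum */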
2655 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM)) {
2656 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
2657 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
2658
2659 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2660
2661 int_data.test_bufs = &compress_test_bufs[i];
2662 int_data.buf_idx = &i;
2663
2664
2665
2666
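			/* Compress with Zlib, decompress with compressdev and
			 * verify the checksum reported by the PMD
			 */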
2667 test_data.zlib_dir = ZLIB_COMPRESS;
2668 ret = test_deflate_comp_decomp(&int_data, &test_data);
2669 if (ret < 0)
2670 goto exit;
2671
2672
2673
2674
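			/* Compress and decompress with compressdev only, so
			 * both checksums are generated by the PMD
			 */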
2675 test_data.zlib_dir = ZLIB_NONE;
2676 ret = test_deflate_comp_decomp(&int_data, &test_data);
2677 if (ret < 0)
2678 goto exit;
2679 }
2680 }
2681
2682
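	/* Test with Adler32 checksum */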
2683 if ((capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM)) {
2684 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
2685 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
2686
2687 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2688 int_data.test_bufs = &compress_test_bufs[i];
2689 int_data.buf_idx = &i;
2690
2691
2692
2693
2694 test_data.zlib_dir = ZLIB_COMPRESS;
2695 ret = test_deflate_comp_decomp(&int_data, &test_data);
2696 if (ret < 0)
2697 goto exit;
2698
2699
2700
2701 test_data.zlib_dir = ZLIB_NONE;
2702 ret = test_deflate_comp_decomp(&int_data, &test_data);
2703 if (ret < 0)
2704 goto exit;
2705 }
2706 }
2707
2708
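	/* Test with combined CRC32 and Adler32 checksum */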
2709 if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)) {
2710 compress_xform->compress.chksum =
2711 RTE_COMP_CHECKSUM_CRC32_ADLER32;
2712 decompress_xform->decompress.chksum =
2713 RTE_COMP_CHECKSUM_CRC32_ADLER32;
2714
2715 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2716 int_data.test_bufs = &compress_test_bufs[i];
2717 int_data.buf_idx = &i;
2718
2719
2720
2721
2722 test_data.zlib_dir = ZLIB_NONE;
2723 ret = test_deflate_comp_decomp(&int_data, &test_data);
2724 if (ret < 0)
2725 goto exit;
2726 }
2727 }
2728
2729 ret = TEST_SUCCESS;
2730
2731exit:
2732 rte_free(compress_xform);
2733 rte_free(decompress_xform);
2734 return ret;
2735}
2736
2737static int
2738test_compressdev_out_of_space_buffer(void)
2739{
2740 struct comp_testsuite_params *ts_params = &testsuite_params;
2741 int ret;
	uint16_t i = 0;
2743 const struct rte_compressdev_capabilities *capab;
2744
2745 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
2746
2747 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2748 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2749
2750 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2751 return -ENOTSUP;
2752
2753 struct interim_data_params int_data = {
2754 &compress_test_bufs[0],
2755 1,
2756 &i,
2757 &ts_params->def_comp_xform,
2758 &ts_params->def_decomp_xform,
2759 1
2760 };
2761
2762 struct test_data_params test_data = {
2763 .compress_state = RTE_COMP_OP_STATELESS,
2764 .decompress_state = RTE_COMP_OP_STATELESS,
2765 .buff_type = LB_BOTH,
2766 .zlib_dir = ZLIB_DECOMPRESS,
2767 .out_of_space = 1,
2768 .big_data = 0,
2769 .overflow = OVERFLOW_DISABLED,
2770 .ratio = RATIO_ENABLED
2771 };
2772
2773 test_data.zlib_dir = ZLIB_DECOMPRESS;
2774 ret = test_deflate_comp_decomp(&int_data, &test_data);
2775 if (ret < 0)
2776 goto exit;
2777
2778
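	/* Compress with Zlib, decompress with compressdev */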
2779 test_data.zlib_dir = ZLIB_COMPRESS;
2780 ret = test_deflate_comp_decomp(&int_data, &test_data);
2781 if (ret < 0)
2782 goto exit;
2783
2784 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2785
2786 test_data.zlib_dir = ZLIB_DECOMPRESS;
2787 test_data.buff_type = SGL_BOTH;
2788 ret = test_deflate_comp_decomp(&int_data, &test_data);
2789 if (ret < 0)
2790 goto exit;
2791
2792
2793 test_data.zlib_dir = ZLIB_COMPRESS;
2794 test_data.buff_type = SGL_BOTH;
2795 ret = test_deflate_comp_decomp(&int_data, &test_data);
2796 if (ret < 0)
2797 goto exit;
2798 }
2799
2800 ret = TEST_SUCCESS;
2801
2802exit:
2803 return ret;
2804}
2805
2806static int
2807test_compressdev_deflate_stateless_dynamic_big(void)
2808{
2809 struct comp_testsuite_params *ts_params = &testsuite_params;
2810 uint16_t i = 0;
2811 int ret;
2812 unsigned int j;
2813 const struct rte_compressdev_capabilities *capab;
2814 char *test_buffer = NULL;
2815
2816 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2817 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2818
2819 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2820 return -ENOTSUP;
2821
2822 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2823 return -ENOTSUP;
2824
2825 test_buffer = rte_malloc(NULL, BIG_DATA_TEST_SIZE, 0);
2826 if (test_buffer == NULL) {
2827 RTE_LOG(ERR, USER1,
2828 "Can't allocate buffer for big-data\n");
2829 return TEST_FAILED;
2830 }
2831
2832 struct interim_data_params int_data = {
2833 (const char * const *)&test_buffer,
2834 1,
2835 &i,
2836 &ts_params->def_comp_xform,
2837 &ts_params->def_decomp_xform,
2838 1
2839 };
2840
2841 struct test_data_params test_data = {
2842 .compress_state = RTE_COMP_OP_STATELESS,
2843 .decompress_state = RTE_COMP_OP_STATELESS,
2844 .buff_type = SGL_BOTH,
2845 .zlib_dir = ZLIB_DECOMPRESS,
2846 .out_of_space = 0,
2847 .big_data = 1,
2848 .overflow = OVERFLOW_DISABLED,
2849 .ratio = RATIO_DISABLED
2850 };
2851
2852 ts_params->def_comp_xform->compress.deflate.huffman =
2853 RTE_COMP_HUFFMAN_DYNAMIC;
2854
2855
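	/* Fill the buffer with poorly-compressible, non-zero pseudo-random
	 * bytes and NUL-terminate it
	 */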
2856 srand(BIG_DATA_TEST_SIZE);
2857 for (j = 0; j < BIG_DATA_TEST_SIZE - 1; ++j)
2858 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
2859 test_buffer[BIG_DATA_TEST_SIZE - 1] = 0;
2860
2861
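	/* Compress with compressdev, decompress with Zlib */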
2862 test_data.zlib_dir = ZLIB_DECOMPRESS;
2863 ret = test_deflate_comp_decomp(&int_data, &test_data);
2864 if (ret < 0)
2865 goto exit;
2866
2867
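	/* Compress with Zlib, decompress with compressdev */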
2868 test_data.zlib_dir = ZLIB_COMPRESS;
2869 ret = test_deflate_comp_decomp(&int_data, &test_data);
2870 if (ret < 0)
2871 goto exit;
2872
2873 ret = TEST_SUCCESS;
2874
2875exit:
2876 ts_params->def_comp_xform->compress.deflate.huffman =
2877 RTE_COMP_HUFFMAN_DEFAULT;
2878 rte_free(test_buffer);
2879 return ret;
2880}
2881
2882static int
2883test_compressdev_deflate_stateful_decomp(void)
2884{
2885 struct comp_testsuite_params *ts_params = &testsuite_params;
2886 int ret;
	uint16_t i = 0;
2888 const struct rte_compressdev_capabilities *capab;
2889
2890 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2891 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2892
2893 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2894 return -ENOTSUP;
2895
2896 struct interim_data_params int_data = {
2897 &compress_test_bufs[0],
2898 1,
2899 &i,
2900 &ts_params->def_comp_xform,
2901 &ts_params->def_decomp_xform,
2902 1
2903 };
2904
2905 struct test_data_params test_data = {
2906 .compress_state = RTE_COMP_OP_STATELESS,
2907 .decompress_state = RTE_COMP_OP_STATEFUL,
2908 .buff_type = LB_BOTH,
2909 .zlib_dir = ZLIB_COMPRESS,
2910 .out_of_space = 0,
2911 .big_data = 0,
2912 .decompress_output_block_size = 2000,
2913 .decompress_steps_max = 4,
2914 .overflow = OVERFLOW_DISABLED,
2915 .ratio = RATIO_ENABLED
2916 };
2917
2918
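	/* Compress with Zlib, decompress statefully with compressdev */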
2919 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2920 ret = TEST_FAILED;
2921 goto exit;
2922 }
2923
2924 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2925
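		/* Repeat the stateful decompression with scatter-gather mbufs */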
2926 test_data.buff_type = SGL_BOTH;
2927 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2928 ret = TEST_FAILED;
2929 goto exit;
2930 }
2931 }
2932
2933 ret = TEST_SUCCESS;
2934
2935exit:
2936 return ret;
2937}
2938
2939static int
2940test_compressdev_deflate_stateful_decomp_checksum(void)
2941{
2942 struct comp_testsuite_params *ts_params = &testsuite_params;
2943 int ret;
	uint16_t i = 0;
2945 const struct rte_compressdev_capabilities *capab;
2946
2947 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2948 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2949
2950 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2951 return -ENOTSUP;
2952
2953
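	/* Bail out if the device supports none of the checksum types */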
2954 if (!(capab->comp_feature_flags &
2955 (RTE_COMP_FF_CRC32_CHECKSUM | RTE_COMP_FF_ADLER32_CHECKSUM |
2956 RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)))
2957 return -ENOTSUP;
2958
2959 struct rte_comp_xform *compress_xform =
2960 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2961 if (compress_xform == NULL) {
2962 RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2963 return TEST_FAILED;
2964 }
2965
2966 memcpy(compress_xform, ts_params->def_comp_xform,
2967 sizeof(struct rte_comp_xform));
2968
2969 struct rte_comp_xform *decompress_xform =
2970 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2971 if (decompress_xform == NULL) {
2972 RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2973 rte_free(compress_xform);
2974 return TEST_FAILED;
2975 }
2976
2977 memcpy(decompress_xform, ts_params->def_decomp_xform,
2978 sizeof(struct rte_comp_xform));
2979
2980 struct interim_data_params int_data = {
2981 &compress_test_bufs[0],
2982 1,
2983 &i,
2984 &compress_xform,
2985 &decompress_xform,
2986 1
2987 };
2988
2989 struct test_data_params test_data = {
2990 .compress_state = RTE_COMP_OP_STATELESS,
2991 .decompress_state = RTE_COMP_OP_STATEFUL,
2992 .buff_type = LB_BOTH,
2993 .zlib_dir = ZLIB_COMPRESS,
2994 .out_of_space = 0,
2995 .big_data = 0,
2996 .decompress_output_block_size = 2000,
2997 .decompress_steps_max = 4,
2998 .overflow = OVERFLOW_DISABLED,
2999 .ratio = RATIO_ENABLED
3000 };
3001
3002
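	/* Test with CRC32 checksum */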
3003 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) {
3004 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
3005 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
3006
3007 test_data.buff_type = LB_BOTH;
3008 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3009 ret = TEST_FAILED;
3010 goto exit;
3011 }
3012 if (capab->comp_feature_flags &
3013 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3014
3015 test_data.buff_type = SGL_BOTH;
3016 if (test_deflate_comp_decomp(&int_data,
3017 &test_data) < 0) {
3018 ret = TEST_FAILED;
3019 goto exit;
3020 }
3021 }
3022 }
3023
3024
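	/* Test with Adler32 checksum */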
3025 if (capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM) {
3026 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3027 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3028
3029 test_data.buff_type = LB_BOTH;
3030 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3031 ret = TEST_FAILED;
3032 goto exit;
3033 }
3034 if (capab->comp_feature_flags &
3035 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3036
3037 test_data.buff_type = SGL_BOTH;
3038 if (test_deflate_comp_decomp(&int_data,
3039 &test_data) < 0) {
3040 ret = TEST_FAILED;
3041 goto exit;
3042 }
3043 }
3044 }
3045
3046
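	/* Test with combined CRC32 and Adler32 checksum */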
3047 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) {
3048 compress_xform->compress.chksum =
3049 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3050 decompress_xform->decompress.chksum =
3051 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3052
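		/* Zlib cannot generate the combined checksum, so run both
		 * directions through compressdev
		 */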
3053 test_data.zlib_dir = ZLIB_NONE;
3054
3055 test_data.buff_type = LB_BOTH;
3056 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3057 ret = TEST_FAILED;
3058 goto exit;
3059 }
3060 if (capab->comp_feature_flags &
3061 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3062
3063 test_data.buff_type = SGL_BOTH;
3064 if (test_deflate_comp_decomp(&int_data,
3065 &test_data) < 0) {
3066 ret = TEST_FAILED;
3067 goto exit;
3068 }
3069 }
3070 }
3071
3072 ret = TEST_SUCCESS;
3073
3074exit:
3075 rte_free(compress_xform);
3076 rte_free(decompress_xform);
3077 return ret;
3078}
3079
3080static const struct rte_memzone *
3081make_memzone(const char *name, size_t size)
3082{
3083 unsigned int socket_id = rte_socket_id();
3084 char mz_name[RTE_MEMZONE_NAMESIZE];
3085 const struct rte_memzone *memzone;
3086
3087 snprintf(mz_name, RTE_MEMZONE_NAMESIZE, "%s_%u", name, socket_id);
3088 memzone = rte_memzone_lookup(mz_name);
3089 if (memzone != NULL && memzone->len != size) {
3090 rte_memzone_free(memzone);
3091 memzone = NULL;
3092 }
3093 if (memzone == NULL) {
3094 memzone = rte_memzone_reserve_aligned(mz_name, size, socket_id,
3095 RTE_MEMZONE_IOVA_CONTIG, RTE_CACHE_LINE_SIZE);
3096 if (memzone == NULL)
3097 RTE_LOG(ERR, USER1, "Can't allocate memory zone %s",
3098 mz_name);
3099 }
3100 return memzone;
3101}
3102
3103static int
3104test_compressdev_external_mbufs(void)
3105{
3106 struct comp_testsuite_params *ts_params = &testsuite_params;
3107 size_t data_len = 0;
3108 uint16_t i;
3109 int ret = TEST_FAILED;
3110
3111 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
3112 data_len = RTE_MAX(data_len, strlen(compress_test_bufs[i]) + 1);
3113
3114 struct interim_data_params int_data = {
3115 NULL,
3116 1,
3117 NULL,
3118 &ts_params->def_comp_xform,
3119 &ts_params->def_decomp_xform,
3120 1
3121 };
3122
3123 struct test_data_params test_data = {
3124 .compress_state = RTE_COMP_OP_STATELESS,
3125 .decompress_state = RTE_COMP_OP_STATELESS,
3126 .buff_type = LB_BOTH,
3127 .zlib_dir = ZLIB_DECOMPRESS,
3128 .out_of_space = 0,
3129 .big_data = 0,
3130 .use_external_mbufs = 1,
3131 .inbuf_data_size = data_len,
3132 .inbuf_memzone = make_memzone("inbuf", data_len),
3133 .compbuf_memzone = make_memzone("compbuf", data_len *
3134 COMPRESS_BUF_SIZE_RATIO),
3135 .uncompbuf_memzone = make_memzone("decompbuf", data_len),
3136 .overflow = OVERFLOW_DISABLED
3137 };
3138
3139 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3140
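		/* Copy the test string into the external input memzone */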
3141 data_len = strlen(compress_test_bufs[i]) + 1;
3142 rte_memcpy(test_data.inbuf_memzone->addr, compress_test_bufs[i],
3143 data_len);
3144 test_data.inbuf_data_size = data_len;
3145 int_data.buf_idx = &i;
3146
3147
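		/* Compress with compressdev, decompress with Zlib */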
3148 test_data.zlib_dir = ZLIB_DECOMPRESS;
3149 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3150 goto exit;
3151
3152
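		/* Compress with Zlib, decompress with compressdev */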
3153 test_data.zlib_dir = ZLIB_COMPRESS;
3154 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3155 goto exit;
3156 }
3157
3158 ret = TEST_SUCCESS;
3159
3160exit:
3161 rte_memzone_free(test_data.inbuf_memzone);
3162 rte_memzone_free(test_data.compbuf_memzone);
3163 rte_memzone_free(test_data.uncompbuf_memzone);
3164 return ret;
3165}
3166
3167static int
3168test_compressdev_deflate_stateless_fixed_oos_recoverable(void)
3169{
3170 struct comp_testsuite_params *ts_params = &testsuite_params;
3171 uint16_t i;
3172 int ret;
3173 int comp_result;
3174 const struct rte_compressdev_capabilities *capab;
3175
3176 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3177 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3178
3179 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
3180 return -ENOTSUP;
3181
3182 struct rte_comp_xform *compress_xform =
3183 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
3184
3185 if (compress_xform == NULL) {
3186 RTE_LOG(ERR, USER1,
3187 "Compress xform could not be created\n");
3188 ret = TEST_FAILED;
3189 goto exit;
3190 }
3191
3192 memcpy(compress_xform, ts_params->def_comp_xform,
3193 sizeof(struct rte_comp_xform));
3194 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
3195
3196 struct interim_data_params int_data = {
3197 NULL,
3198 1,
3199 NULL,
3200 &compress_xform,
3201 &ts_params->def_decomp_xform,
3202 1
3203 };
3204
3205 struct test_data_params test_data = {
3206 .compress_state = RTE_COMP_OP_STATELESS,
3207 .decompress_state = RTE_COMP_OP_STATELESS,
3208 .buff_type = LB_BOTH,
3209 .zlib_dir = ZLIB_DECOMPRESS,
3210 .out_of_space = 0,
3211 .big_data = 0,
3212 .overflow = OVERFLOW_ENABLED,
3213 .ratio = RATIO_ENABLED
3214 };
3215
3216 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3217 int_data.test_bufs = &compress_test_bufs[i];
3218 int_data.buf_idx = &i;
3219
3220
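		/* Compress with compressdev, decompress with Zlib */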
3221 test_data.zlib_dir = ZLIB_DECOMPRESS;
3222 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3223 if (comp_result < 0) {
3224 ret = TEST_FAILED;
3225 goto exit;
3226 } else if (comp_result > 0) {
3227 ret = -ENOTSUP;
3228 goto exit;
3229 }
3230
3231
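		/* Compress with Zlib, decompress with compressdev */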
3232 test_data.zlib_dir = ZLIB_COMPRESS;
3233 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3234 if (comp_result < 0) {
3235 ret = TEST_FAILED;
3236 goto exit;
3237 } else if (comp_result > 0) {
3238 ret = -ENOTSUP;
3239 goto exit;
3240 }
3241 }
3242
3243 ret = TEST_SUCCESS;
3244
3245exit:
3246 rte_free(compress_xform);
3247 return ret;
3248}
3249
3250static int
3251test_compressdev_deflate_im_buffers_LB_1op(void)
3252{
3253 struct comp_testsuite_params *ts_params = &testsuite_params;
3254 uint16_t i = 0;
3255 int ret = TEST_SUCCESS;
3256 int j;
3257 const struct rte_compressdev_capabilities *capab;
3258 char *test_buffer = NULL;
3259
3260 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3261 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3262
3263 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3264 return -ENOTSUP;
3265
3266 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3267 return -ENOTSUP;
3268
3269 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3270 if (test_buffer == NULL) {
3271 RTE_LOG(ERR, USER1,
3272 "Can't allocate buffer for 'im buffer' test\n");
3273 return TEST_FAILED;
3274 }
3275
3276 struct interim_data_params int_data = {
3277 (const char * const *)&test_buffer,
3278 1,
3279 &i,
3280 &ts_params->def_comp_xform,
3281 &ts_params->def_decomp_xform,
3282 1
3283 };
3284
3285 struct test_data_params test_data = {
3286 .compress_state = RTE_COMP_OP_STATELESS,
3287 .decompress_state = RTE_COMP_OP_STATELESS,
		/* Compress and decompress with the PMD only, using linear
		 * buffers large enough to exercise the driver's internal
		 * intermediate buffers.
		 */
3293 .buff_type = LB_BOTH,
3294 .zlib_dir = ZLIB_NONE,
3295 .out_of_space = 0,
3296 .big_data = 1,
3297 .overflow = OVERFLOW_DISABLED,
3298 .ratio = RATIO_DISABLED
3299 };
3300
3301 ts_params->def_comp_xform->compress.deflate.huffman =
3302 RTE_COMP_HUFFMAN_DYNAMIC;
3303
3304
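	/* Fill the buffer with non-zero pseudo-random bytes */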
3305 srand(IM_BUF_DATA_TEST_SIZE_LB);
3306 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3307 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3308
3309
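	/* Compress and decompress with compressdev only */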
3310 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3311 ret = TEST_FAILED;
3312 goto end;
3313 }
3314
3315end:
3316 ts_params->def_comp_xform->compress.deflate.huffman =
3317 RTE_COMP_HUFFMAN_DEFAULT;
3318 rte_free(test_buffer);
3319 return ret;
3320}
3321
3322static int
3323test_compressdev_deflate_im_buffers_LB_2ops_first(void)
3324{
3325 struct comp_testsuite_params *ts_params = &testsuite_params;
3326 uint16_t i = 0;
3327 int ret = TEST_SUCCESS;
3328 int j;
3329 const struct rte_compressdev_capabilities *capab;
3330 char *test_buffer = NULL;
3331 const char *test_buffers[2];
3332
3333 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3334 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3335
3336 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3337 return -ENOTSUP;
3338
3339 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3340 return -ENOTSUP;
3341
3342 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3343 if (test_buffer == NULL) {
3344 RTE_LOG(ERR, USER1,
3345 "Can't allocate buffer for 'im buffer' test\n");
3346 return TEST_FAILED;
3347 }
3348
3349 test_buffers[0] = test_buffer;
3350 test_buffers[1] = compress_test_bufs[0];
3351
3352 struct interim_data_params int_data = {
3353 (const char * const *)test_buffers,
3354 2,
3355 &i,
3356 &ts_params->def_comp_xform,
3357 &ts_params->def_decomp_xform,
3358 1
3359 };
3360
3361 struct test_data_params test_data = {
3362 .compress_state = RTE_COMP_OP_STATELESS,
3363 .decompress_state = RTE_COMP_OP_STATELESS,
3364 .buff_type = LB_BOTH,
3365 .zlib_dir = ZLIB_NONE,
3366 .out_of_space = 0,
3367 .big_data = 1,
3368 .overflow = OVERFLOW_DISABLED,
3369 .ratio = RATIO_DISABLED
3370 };
3371
3372 ts_params->def_comp_xform->compress.deflate.huffman =
3373 RTE_COMP_HUFFMAN_DYNAMIC;
3374
3375
3376 srand(IM_BUF_DATA_TEST_SIZE_LB);
3377 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3378 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3379
3380
3381 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3382 ret = TEST_FAILED;
3383 goto end;
3384 }
3385
3386end:
3387 ts_params->def_comp_xform->compress.deflate.huffman =
3388 RTE_COMP_HUFFMAN_DEFAULT;
3389 rte_free(test_buffer);
3390 return ret;
3391}
3392
3393static int
3394test_compressdev_deflate_im_buffers_LB_2ops_second(void)
3395{
3396 struct comp_testsuite_params *ts_params = &testsuite_params;
3397 uint16_t i = 0;
3398 int ret = TEST_SUCCESS;
3399 int j;
3400 const struct rte_compressdev_capabilities *capab;
3401 char *test_buffer = NULL;
3402 const char *test_buffers[2];
3403
3404 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3405 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3406
3407 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3408 return -ENOTSUP;
3409
3410 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3411 return -ENOTSUP;
3412
3413 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3414 if (test_buffer == NULL) {
3415 RTE_LOG(ERR, USER1,
3416 "Can't allocate buffer for 'im buffer' test\n");
3417 return TEST_FAILED;
3418 }
3419
3420 test_buffers[0] = compress_test_bufs[0];
3421 test_buffers[1] = test_buffer;
3422
3423 struct interim_data_params int_data = {
3424 (const char * const *)test_buffers,
3425 2,
3426 &i,
3427 &ts_params->def_comp_xform,
3428 &ts_params->def_decomp_xform,
3429 1
3430 };
3431
3432 struct test_data_params test_data = {
3433 .compress_state = RTE_COMP_OP_STATELESS,
3434 .decompress_state = RTE_COMP_OP_STATELESS,
3435 .buff_type = LB_BOTH,
3436 .zlib_dir = ZLIB_NONE,
3437 .out_of_space = 0,
3438 .big_data = 1,
3439 .overflow = OVERFLOW_DISABLED,
3440 .ratio = RATIO_DISABLED
3441 };
3442
3443 ts_params->def_comp_xform->compress.deflate.huffman =
3444 RTE_COMP_HUFFMAN_DYNAMIC;
3445
3446
3447 srand(IM_BUF_DATA_TEST_SIZE_LB);
3448 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3449 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3450
3451
3452 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3453 ret = TEST_FAILED;
3454 goto end;
3455 }
3456
3457end:
3458 ts_params->def_comp_xform->compress.deflate.huffman =
3459 RTE_COMP_HUFFMAN_DEFAULT;
3460 rte_free(test_buffer);
3461 return ret;
3462}
3463
3464static int
3465test_compressdev_deflate_im_buffers_LB_3ops(void)
3466{
3467 struct comp_testsuite_params *ts_params = &testsuite_params;
3468 uint16_t i = 0;
3469 int ret = TEST_SUCCESS;
3470 int j;
3471 const struct rte_compressdev_capabilities *capab;
3472 char *test_buffer = NULL;
3473 const char *test_buffers[3];
3474
3475 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3476 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3477
3478 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3479 return -ENOTSUP;
3480
3481 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3482 return -ENOTSUP;
3483
3484 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3485 if (test_buffer == NULL) {
3486 RTE_LOG(ERR, USER1,
3487 "Can't allocate buffer for 'im buffer' test\n");
3488 return TEST_FAILED;
3489 }
3490
3491 test_buffers[0] = compress_test_bufs[0];
3492 test_buffers[1] = test_buffer;
3493 test_buffers[2] = compress_test_bufs[1];
3494
3495 struct interim_data_params int_data = {
3496 (const char * const *)test_buffers,
3497 3,
3498 &i,
3499 &ts_params->def_comp_xform,
3500 &ts_params->def_decomp_xform,
3501 1
3502 };
3503
3504 struct test_data_params test_data = {
3505 .compress_state = RTE_COMP_OP_STATELESS,
3506 .decompress_state = RTE_COMP_OP_STATELESS,
3507 .buff_type = LB_BOTH,
3508 .zlib_dir = ZLIB_NONE,
3509 .out_of_space = 0,
3510 .big_data = 1,
3511 .overflow = OVERFLOW_DISABLED,
3512 .ratio = RATIO_DISABLED
3513 };
3514
3515 ts_params->def_comp_xform->compress.deflate.huffman =
3516 RTE_COMP_HUFFMAN_DYNAMIC;
3517
3518
3519 srand(IM_BUF_DATA_TEST_SIZE_LB);
3520 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3521 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3522
3523
3524 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3525 ret = TEST_FAILED;
3526 goto end;
3527 }
3528
3529end:
3530 ts_params->def_comp_xform->compress.deflate.huffman =
3531 RTE_COMP_HUFFMAN_DEFAULT;
3532 rte_free(test_buffer);
3533 return ret;
3534}
3535
3536static int
3537test_compressdev_deflate_im_buffers_LB_4ops(void)
3538{
3539 struct comp_testsuite_params *ts_params = &testsuite_params;
3540 uint16_t i = 0;
3541 int ret = TEST_SUCCESS;
3542 int j;
3543 const struct rte_compressdev_capabilities *capab;
3544 char *test_buffer = NULL;
3545 const char *test_buffers[4];
3546
3547 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3548 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3549
3550 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3551 return -ENOTSUP;
3552
3553 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3554 return -ENOTSUP;
3555
3556 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3557 if (test_buffer == NULL) {
3558 RTE_LOG(ERR, USER1,
3559 "Can't allocate buffer for 'im buffer' test\n");
3560 return TEST_FAILED;
3561 }
3562
3563 test_buffers[0] = compress_test_bufs[0];
3564 test_buffers[1] = test_buffer;
3565 test_buffers[2] = compress_test_bufs[1];
3566 test_buffers[3] = test_buffer;
3567
3568 struct interim_data_params int_data = {
3569 (const char * const *)test_buffers,
3570 4,
3571 &i,
3572 &ts_params->def_comp_xform,
3573 &ts_params->def_decomp_xform,
3574 1
3575 };
3576
3577 struct test_data_params test_data = {
3578 .compress_state = RTE_COMP_OP_STATELESS,
3579 .decompress_state = RTE_COMP_OP_STATELESS,
3580 .buff_type = LB_BOTH,
3581 .zlib_dir = ZLIB_NONE,
3582 .out_of_space = 0,
3583 .big_data = 1,
3584 .overflow = OVERFLOW_DISABLED,
3585 .ratio = RATIO_DISABLED
3586 };
3587
3588 ts_params->def_comp_xform->compress.deflate.huffman =
3589 RTE_COMP_HUFFMAN_DYNAMIC;
3590
3591
3592 srand(IM_BUF_DATA_TEST_SIZE_LB);
3593 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3594 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3595
3596
3597 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3598 ret = TEST_FAILED;
3599 goto end;
3600 }
3601
3602end:
3603 ts_params->def_comp_xform->compress.deflate.huffman =
3604 RTE_COMP_HUFFMAN_DEFAULT;
3605 rte_free(test_buffer);
3606 return ret;
3607}
3608
3609
3610static int
3611test_compressdev_deflate_im_buffers_SGL_1op(void)
3612{
3613 struct comp_testsuite_params *ts_params = &testsuite_params;
3614 uint16_t i = 0;
3615 int ret = TEST_SUCCESS;
3616 int j;
3617 const struct rte_compressdev_capabilities *capab;
3618 char *test_buffer = NULL;
3619
3620 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3621 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3622
3623 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3624 return -ENOTSUP;
3625
3626 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3627 return -ENOTSUP;
3628
3629 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3630 if (test_buffer == NULL) {
3631 RTE_LOG(ERR, USER1,
3632 "Can't allocate buffer for big-data\n");
3633 return TEST_FAILED;
3634 }
3635
3636 struct interim_data_params int_data = {
3637 (const char * const *)&test_buffer,
3638 1,
3639 &i,
3640 &ts_params->def_comp_xform,
3641 &ts_params->def_decomp_xform,
3642 1
3643 };
3644
3645 struct test_data_params test_data = {
3646 .compress_state = RTE_COMP_OP_STATELESS,
3647 .decompress_state = RTE_COMP_OP_STATELESS,
3648 .buff_type = SGL_BOTH,
3649 .zlib_dir = ZLIB_NONE,
3650 .out_of_space = 0,
3651 .big_data = 1,
3652 .overflow = OVERFLOW_DISABLED,
3653 .ratio = RATIO_DISABLED
3654 };
3655
3656 ts_params->def_comp_xform->compress.deflate.huffman =
3657 RTE_COMP_HUFFMAN_DYNAMIC;
3658
3659
3660 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3661 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3662 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3663
3664
3665 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3666 ret = TEST_FAILED;
3667 goto end;
3668 }
3669
3670end:
3671 ts_params->def_comp_xform->compress.deflate.huffman =
3672 RTE_COMP_HUFFMAN_DEFAULT;
3673 rte_free(test_buffer);
3674 return ret;
3675}
3676
3677static int
3678test_compressdev_deflate_im_buffers_SGL_2ops_first(void)
3679{
3680 struct comp_testsuite_params *ts_params = &testsuite_params;
3681 uint16_t i = 0;
3682 int ret = TEST_SUCCESS;
3683 int j;
3684 const struct rte_compressdev_capabilities *capab;
3685 char *test_buffer = NULL;
3686 const char *test_buffers[2];
3687
3688 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3689 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3690
3691 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3692 return -ENOTSUP;
3693
3694 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3695 return -ENOTSUP;
3696
3697 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3698 if (test_buffer == NULL) {
3699 RTE_LOG(ERR, USER1,
3700 "Can't allocate buffer for big-data\n");
3701 return TEST_FAILED;
3702 }
3703
3704 test_buffers[0] = test_buffer;
3705 test_buffers[1] = compress_test_bufs[0];
3706
3707 struct interim_data_params int_data = {
3708 (const char * const *)test_buffers,
3709 2,
3710 &i,
3711 &ts_params->def_comp_xform,
3712 &ts_params->def_decomp_xform,
3713 1
3714 };
3715
3716 struct test_data_params test_data = {
3717 .compress_state = RTE_COMP_OP_STATELESS,
3718 .decompress_state = RTE_COMP_OP_STATELESS,
3719 .buff_type = SGL_BOTH,
3720 .zlib_dir = ZLIB_NONE,
3721 .out_of_space = 0,
3722 .big_data = 1,
3723 .overflow = OVERFLOW_DISABLED,
3724 .ratio = RATIO_DISABLED
3725 };
3726
3727 ts_params->def_comp_xform->compress.deflate.huffman =
3728 RTE_COMP_HUFFMAN_DYNAMIC;
3729
3730
3731 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3732 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3733 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3734
3735
3736 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3737 ret = TEST_FAILED;
3738 goto end;
3739 }
3740
3741end:
3742 ts_params->def_comp_xform->compress.deflate.huffman =
3743 RTE_COMP_HUFFMAN_DEFAULT;
3744 rte_free(test_buffer);
3745 return ret;
3746}
3747
3748static int
3749test_compressdev_deflate_im_buffers_SGL_2ops_second(void)
3750{
3751 struct comp_testsuite_params *ts_params = &testsuite_params;
3752 uint16_t i = 0;
3753 int ret = TEST_SUCCESS;
3754 int j;
3755 const struct rte_compressdev_capabilities *capab;
3756 char *test_buffer = NULL;
3757 const char *test_buffers[2];
3758
3759 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3760 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3761
3762 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3763 return -ENOTSUP;
3764
3765 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3766 return -ENOTSUP;
3767
3768 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3769 if (test_buffer == NULL) {
3770 RTE_LOG(ERR, USER1,
3771 "Can't allocate buffer for big-data\n");
3772 return TEST_FAILED;
3773 }
3774
3775 test_buffers[0] = compress_test_bufs[0];
3776 test_buffers[1] = test_buffer;
3777
3778 struct interim_data_params int_data = {
3779 (const char * const *)test_buffers,
3780 2,
3781 &i,
3782 &ts_params->def_comp_xform,
3783 &ts_params->def_decomp_xform,
3784 1
3785 };
3786
3787 struct test_data_params test_data = {
3788 .compress_state = RTE_COMP_OP_STATELESS,
3789 .decompress_state = RTE_COMP_OP_STATELESS,
3790 .buff_type = SGL_BOTH,
3791 .zlib_dir = ZLIB_NONE,
3792 .out_of_space = 0,
3793 .big_data = 1,
3794 .overflow = OVERFLOW_DISABLED,
3795 .ratio = RATIO_DISABLED
3796 };
3797
3798 ts_params->def_comp_xform->compress.deflate.huffman =
3799 RTE_COMP_HUFFMAN_DYNAMIC;
3800
3801
3802 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3803 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3804 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3805
3806
3807 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3808 ret = TEST_FAILED;
3809 goto end;
3810 }
3811
3812end:
3813 ts_params->def_comp_xform->compress.deflate.huffman =
3814 RTE_COMP_HUFFMAN_DEFAULT;
3815 rte_free(test_buffer);
3816 return ret;
3817}
3818
3819static int
3820test_compressdev_deflate_im_buffers_SGL_3ops(void)
3821{
3822 struct comp_testsuite_params *ts_params = &testsuite_params;
3823 uint16_t i = 0;
3824 int ret = TEST_SUCCESS;
3825 int j;
3826 const struct rte_compressdev_capabilities *capab;
3827 char *test_buffer = NULL;
3828 const char *test_buffers[3];
3829
3830 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3831 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3832
3833 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3834 return -ENOTSUP;
3835
3836 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3837 return -ENOTSUP;
3838
3839 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3840 if (test_buffer == NULL) {
3841 RTE_LOG(ERR, USER1,
3842 "Can't allocate buffer for big-data\n");
3843 return TEST_FAILED;
3844 }
3845
3846 test_buffers[0] = compress_test_bufs[0];
3847 test_buffers[1] = test_buffer;
3848 test_buffers[2] = compress_test_bufs[1];
3849
3850 struct interim_data_params int_data = {
3851 (const char * const *)test_buffers,
3852 3,
3853 &i,
3854 &ts_params->def_comp_xform,
3855 &ts_params->def_decomp_xform,
3856 1
3857 };
3858
3859 struct test_data_params test_data = {
3860 .compress_state = RTE_COMP_OP_STATELESS,
3861 .decompress_state = RTE_COMP_OP_STATELESS,
3862 .buff_type = SGL_BOTH,
3863 .zlib_dir = ZLIB_NONE,
3864 .out_of_space = 0,
3865 .big_data = 1,
3866 .overflow = OVERFLOW_DISABLED,
3867 .ratio = RATIO_DISABLED
3868 };
3869
3870 ts_params->def_comp_xform->compress.deflate.huffman =
3871 RTE_COMP_HUFFMAN_DYNAMIC;
3872
3873
3874 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3875 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3876 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3877
3878
3879 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3880 ret = TEST_FAILED;
3881 goto end;
3882 }
3883
3884end:
3885 ts_params->def_comp_xform->compress.deflate.huffman =
3886 RTE_COMP_HUFFMAN_DEFAULT;
3887 rte_free(test_buffer);
3888 return ret;
3889}
3890
3891
3892static int
3893test_compressdev_deflate_im_buffers_SGL_4ops(void)
3894{
3895 struct comp_testsuite_params *ts_params = &testsuite_params;
3896 uint16_t i = 0;
3897 int ret = TEST_SUCCESS;
3898 int j;
3899 const struct rte_compressdev_capabilities *capab;
3900 char *test_buffer = NULL;
3901 const char *test_buffers[4];
3902
3903 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3904 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3905
3906 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3907 return -ENOTSUP;
3908
3909 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3910 return -ENOTSUP;
3911
3912 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3913 if (test_buffer == NULL) {
3914 RTE_LOG(ERR, USER1,
3915 "Can't allocate buffer for big-data\n");
3916 return TEST_FAILED;
3917 }
3918
3919 test_buffers[0] = compress_test_bufs[0];
3920 test_buffers[1] = test_buffer;
3921 test_buffers[2] = compress_test_bufs[1];
3922 test_buffers[3] = test_buffer;
3923
3924 struct interim_data_params int_data = {
3925 (const char * const *)test_buffers,
3926 4,
3927 &i,
3928 &ts_params->def_comp_xform,
3929 &ts_params->def_decomp_xform,
3930 1
3931 };
3932
3933 struct test_data_params test_data = {
3934 .compress_state = RTE_COMP_OP_STATELESS,
3935 .decompress_state = RTE_COMP_OP_STATELESS,
3936 .buff_type = SGL_BOTH,
3937 .zlib_dir = ZLIB_NONE,
3938 .out_of_space = 0,
3939 .big_data = 1,
3940 .overflow = OVERFLOW_DISABLED,
3941 .ratio = RATIO_DISABLED
3942 };
3943
3944 ts_params->def_comp_xform->compress.deflate.huffman =
3945 RTE_COMP_HUFFMAN_DYNAMIC;
3946
3947
3948 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3949 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3950 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3951
3952
3953 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3954 ret = TEST_FAILED;
3955 goto end;
3956 }
3957
3958end:
3959 ts_params->def_comp_xform->compress.deflate.huffman =
3960 RTE_COMP_HUFFMAN_DEFAULT;
3961 rte_free(test_buffer);
3962 return ret;
3963}
3964
3965static int
3966test_compressdev_deflate_im_buffers_SGL_over_1op(void)
3967{
3968 struct comp_testsuite_params *ts_params = &testsuite_params;
3969 uint16_t i = 0;
3970 int ret = TEST_SUCCESS;
3971 int j;
3972 const struct rte_compressdev_capabilities *capab;
3973 char *test_buffer = NULL;
3974
3975 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
3976
3977 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3978 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3979
3980 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3981 return -ENOTSUP;
3982
3983 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3984 return -ENOTSUP;
3985
3986 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
3987 if (test_buffer == NULL) {
3988 RTE_LOG(ERR, USER1,
3989 "Can't allocate buffer for big-data\n");
3990 return TEST_FAILED;
3991 }
3992
3993 struct interim_data_params int_data = {
3994 (const char * const *)&test_buffer,
3995 1,
3996 &i,
3997 &ts_params->def_comp_xform,
3998 &ts_params->def_decomp_xform,
3999 1
4000 };
4001
4002 struct test_data_params test_data = {
4003 .compress_state = RTE_COMP_OP_STATELESS,
4004 .decompress_state = RTE_COMP_OP_STATELESS,
4005 .buff_type = SGL_BOTH,
4006 .zlib_dir = ZLIB_NONE,
4007 .out_of_space = 0,
4008 .big_data = 1,
4009 .overflow = OVERFLOW_DISABLED,
4010 .ratio = RATIO_DISABLED
4011 };
4012
4013 ts_params->def_comp_xform->compress.deflate.huffman =
4014 RTE_COMP_HUFFMAN_DYNAMIC;
4015
4016
4017 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4018 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4019 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4020
4021
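	/* Negative case: the oversized input is expected to fail, and
	 * that failure is treated as success
	 */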
4022 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4023 ret = TEST_SUCCESS;
4024 goto end;
4025 }
4026
4027end:
4028 ts_params->def_comp_xform->compress.deflate.huffman =
4029 RTE_COMP_HUFFMAN_DEFAULT;
4030 rte_free(test_buffer);
4031
4032 return ret;
4033}
4034
4035
4036static int
4037test_compressdev_deflate_im_buffers_SGL_over_2ops_first(void)
4038{
4039 struct comp_testsuite_params *ts_params = &testsuite_params;
4040 uint16_t i = 0;
4041 int ret = TEST_SUCCESS;
4042 int j;
4043 const struct rte_compressdev_capabilities *capab;
4044 char *test_buffer = NULL;
4045 const char *test_buffers[2];
4046
4047 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4048
4049 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4050 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4051
4052 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4053 return -ENOTSUP;
4054
4055 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4056 return -ENOTSUP;
4057
4058 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4059 if (test_buffer == NULL) {
4060 RTE_LOG(ERR, USER1,
4061 "Can't allocate buffer for big-data\n");
4062 return TEST_FAILED;
4063 }
4064
4065 test_buffers[0] = test_buffer;
4066 test_buffers[1] = compress_test_bufs[0];
4067
4068 struct interim_data_params int_data = {
4069 (const char * const *)test_buffers,
4070 2,
4071 &i,
4072 &ts_params->def_comp_xform,
4073 &ts_params->def_decomp_xform,
4074 1
4075 };
4076
4077 struct test_data_params test_data = {
4078 .compress_state = RTE_COMP_OP_STATELESS,
4079 .decompress_state = RTE_COMP_OP_STATELESS,
4080 .buff_type = SGL_BOTH,
4081 .zlib_dir = ZLIB_NONE,
4082 .out_of_space = 0,
4083 .big_data = 1,
4084 .overflow = OVERFLOW_DISABLED,
4085 .ratio = RATIO_DISABLED
4086 };
4087
4088 ts_params->def_comp_xform->compress.deflate.huffman =
4089 RTE_COMP_HUFFMAN_DYNAMIC;
4090
4091
4092 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4093 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4094 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4095
4096
4097 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4098 ret = TEST_SUCCESS;
4099 goto end;
4100 }
4101
4102end:
4103 ts_params->def_comp_xform->compress.deflate.huffman =
4104 RTE_COMP_HUFFMAN_DEFAULT;
4105 rte_free(test_buffer);
4106 return ret;
4107}
4108
4109static int
4110test_compressdev_deflate_im_buffers_SGL_over_2ops_second(void)
4111{
4112 struct comp_testsuite_params *ts_params = &testsuite_params;
4113 uint16_t i = 0;
4114 int ret = TEST_SUCCESS;
4115 int j;
4116 const struct rte_compressdev_capabilities *capab;
4117 char *test_buffer = NULL;
4118 const char *test_buffers[2];
4119
4120 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4121
4122 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4123 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4124
4125 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4126 return -ENOTSUP;
4127
4128 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4129 return -ENOTSUP;
4130
4131 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4132 if (test_buffer == NULL) {
4133 RTE_LOG(ERR, USER1,
4134 "Can't allocate buffer for big-data\n");
4135 return TEST_FAILED;
4136 }
4137
4138 test_buffers[0] = compress_test_bufs[0];
4139 test_buffers[1] = test_buffer;
4140
4141 struct interim_data_params int_data = {
4142 (const char * const *)test_buffers,
4143 2,
4144 &i,
4145 &ts_params->def_comp_xform,
4146 &ts_params->def_decomp_xform,
4147 1
4148 };
4149
4150 struct test_data_params test_data = {
4151 .compress_state = RTE_COMP_OP_STATELESS,
4152 .decompress_state = RTE_COMP_OP_STATELESS,
4153 .buff_type = SGL_BOTH,
4154 .zlib_dir = ZLIB_NONE,
4155 .out_of_space = 0,
4156 .big_data = 1,
4157 .overflow = OVERFLOW_DISABLED,
4158 .ratio = RATIO_DISABLED
4159 };
4160
4161 ts_params->def_comp_xform->compress.deflate.huffman =
4162 RTE_COMP_HUFFMAN_DYNAMIC;
4163
4164
4165 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4166 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4167 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4168
4169
4170 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4171 ret = TEST_SUCCESS;
4172 goto end;
4173 }
4174
4175end:
4176 ts_params->def_comp_xform->compress.deflate.huffman =
4177 RTE_COMP_HUFFMAN_DEFAULT;
4178 rte_free(test_buffer);
4179 return ret;
4180}
4181
4182static struct unit_test_suite compressdev_testsuite = {
4183 .suite_name = "compressdev unit test suite",
4184 .setup = testsuite_setup,
4185 .teardown = testsuite_teardown,
4186 .unit_test_cases = {
4187 TEST_CASE_ST(NULL, NULL,
4188 test_compressdev_invalid_configuration),
4189 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4190 test_compressdev_deflate_stateless_fixed),
4191 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4192 test_compressdev_deflate_stateless_dynamic),
4193 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4194 test_compressdev_deflate_stateless_dynamic_big),
4195 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4196 test_compressdev_deflate_stateless_multi_op),
4197 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4198 test_compressdev_deflate_stateless_multi_level),
4199 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4200 test_compressdev_deflate_stateless_multi_xform),
4201 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4202 test_compressdev_deflate_stateless_sgl),
4203 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4204 test_compressdev_deflate_stateless_checksum),
4205 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4206 test_compressdev_out_of_space_buffer),
4207 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4208 test_compressdev_deflate_stateful_decomp),
4209 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4210 test_compressdev_deflate_stateful_decomp_checksum),
4211 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4212 test_compressdev_external_mbufs),
4213 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4214 test_compressdev_deflate_stateless_fixed_oos_recoverable),
4215
4216
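		/* Tests that exercise the PMD's internal intermediate buffers */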
4217 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4218 test_compressdev_deflate_im_buffers_LB_1op),
4219 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4220 test_compressdev_deflate_im_buffers_LB_2ops_first),
4221 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4222 test_compressdev_deflate_im_buffers_LB_2ops_second),
4223 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4224 test_compressdev_deflate_im_buffers_LB_3ops),
4225
4226 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4227 test_compressdev_deflate_im_buffers_LB_4ops),
4228 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4229 test_compressdev_deflate_im_buffers_SGL_1op),
4230
4231 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4232 test_compressdev_deflate_im_buffers_SGL_2ops_first),
4233 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4234 test_compressdev_deflate_im_buffers_SGL_2ops_second),
4235 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4236 test_compressdev_deflate_im_buffers_SGL_3ops),
4237 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4238 test_compressdev_deflate_im_buffers_SGL_4ops),
4239
4248 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4249 test_compressdev_deflate_im_buffers_SGL_over_1op),
4250
4257 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4258 test_compressdev_deflate_im_buffers_SGL_over_2ops_first),
4259
4260 TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
4261 test_compressdev_deflate_im_buffers_SGL_over_2ops_second),
4262
4263 TEST_CASES_END()
4264 }
4265};
4266
4267static int
4268test_compressdev(void)
4269{
4270 return unit_test_suite_runner(&compressdev_testsuite);
4271}
4272
4273REGISTER_TEST_COMMAND(compressdev_autotest, test_compressdev);
4274