1
2
3
4#include <string.h>
5#include <zlib.h>
6#include <math.h>
7#include <stdlib.h>
8#include <unistd.h>
9#include <stdio.h>
10
11#include <rte_cycles.h>
12#include <rte_malloc.h>
13#include <rte_mempool.h>
14#include <rte_mbuf.h>
15#include <rte_compressdev.h>
16#include <rte_string_fns.h>
17
18#include "test_compressdev_test_buffer.h"
19#include "test.h"
20
/* Integer ceiling division for non-negative operands: DIV_CEIL(7, 2) == 4. */
#define DIV_CEIL(a, b) ((a) / (b) + ((a) % (b) != 0))

/* Default deflate parameters shared by the zlib reference path and the PMD. */
#define DEFAULT_WINDOW_SIZE 15
#define DEFAULT_MEM_LEVEL 8
/* Dequeue polling: up to MAX_DEQD_RETRIES attempts, sleeping
 * DEQUEUE_WAIT_TIME microseconds between them.
 */
#define MAX_DEQD_RETRIES 10
#define DEQUEUE_WAIT_TIME 10000

/*
 * Destination buffer sizing ratios relative to the source length:
 * 1.3 leaves headroom for incompressible data, 1.0 disables the headroom,
 * and 0.2 deliberately undersizes the buffer to provoke overflow handling.
 */
#define COMPRESS_BUF_SIZE_RATIO 1.3
#define COMPRESS_BUF_SIZE_RATIO_DISABLED 1.0
#define COMPRESS_BUF_SIZE_RATIO_OVERFLOW 0.2
#define NUM_LARGE_MBUFS 16
#define SMALL_SEG_SIZE 256
#define MAX_SEGS 16
#define NUM_OPS 16
#define NUM_MAX_XFORMS 16
#define NUM_MAX_INFLIGHT_OPS 128
#define CACHE_SIZE 0

/* zlib windowBits value selecting the gzip wrapper (CRC-32 checksum). */
#define ZLIB_CRC_CHECKSUM_WINDOW_BITS 31
/* Sizes (bytes) of the zlib / gzip framing stripped from raw deflate data. */
#define ZLIB_HEADER_SIZE 2
#define ZLIB_TRAILER_SIZE 4
#define GZIP_HEADER_SIZE 10
#define GZIP_TRAILER_SIZE 8

/* 1-byte destination used to force the out-of-space paths. */
#define OUT_OF_SPACE_BUF 1

#define MAX_MBUF_SEGMENT_SIZE 65535
#define MAX_DATA_MBUF_SIZE (MAX_MBUF_SEGMENT_SIZE - RTE_PKTMBUF_HEADROOM)
#define NUM_BIG_MBUFS (512 + 1)
#define BIG_DATA_TEST_SIZE (MAX_DATA_MBUF_SIZE * 2)

/* IM_BUF_* sizes — presumably exercising PMD intermediate ("IM") buffer
 * handling at various inflight depths; TODO confirm against the test plan. */
#define IM_BUF_NUM_MBUFS 3

#define IM_BUF_DATA_TEST_SIZE_LB 59600

#define IM_BUF_DATA_TEST_SIZE_SGL (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS)

#define IM_BUF_NUM_MBUFS_OVER (NUM_MAX_INFLIGHT_OPS + 1)

#define IM_BUF_DATA_TEST_SIZE_OVER (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_OVER)

#define IM_BUF_NUM_MBUFS_MID ((NUM_MAX_INFLIGHT_OPS / 3) + 1)

#define IM_BUF_DATA_TEST_SIZE_MID (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_MID)
73
74
/* Human-readable names indexed by enum rte_comp_huffman value. */
const char *
huffman_type_strings[] = {
	[RTE_COMP_HUFFMAN_DEFAULT] = "PMD default",
	[RTE_COMP_HUFFMAN_FIXED] = "Fixed",
	[RTE_COMP_HUFFMAN_DYNAMIC] = "Dynamic"
};
81
/*
 * Which direction(s) of a test case run through the zlib reference
 * implementation instead of the compressdev PMD.
 */
enum zlib_direction {
	ZLIB_NONE,	/* PMD does both directions */
	ZLIB_COMPRESS,	/* zlib compresses, PMD decompresses */
	ZLIB_DECOMPRESS,	/* PMD compresses, zlib decompresses */
	ZLIB_ALL	/* zlib does both directions */
};
88
/* Buffer layout combinations: LB = linear buffer, SGL = scatter-gather. */
enum varied_buff {
	LB_BOTH = 0,	/* both source and destination linear */
	SGL_BOTH,	/* both source and destination scatter-gather */
	SGL_TO_LB,	/* scatter-gather source, linear destination */
	LB_TO_SGL	/* linear source, scatter-gather destination */
};
95
/* Whether the test deliberately undersizes the compression destination
 * buffer (see COMPRESS_BUF_SIZE_RATIO_OVERFLOW). */
enum overflow_test {
	OVERFLOW_DISABLED,
	OVERFLOW_ENABLED
};
100
/* Whether the destination buffer gets the COMPRESS_BUF_SIZE_RATIO headroom
 * or is sized 1:1 with the source. */
enum ratio_switch {
	RATIO_DISABLED,
	RATIO_ENABLED
};
105
/* Which stage of a test run a helper is preparing buffers for. */
enum operation_type {
	OPERATION_COMPRESSION,
	OPERATION_DECOMPRESSION
};
110
/* Per-op private area (reserved via rte_comp_op_pool_create): stores the
 * index of the source test buffer so a processed op can be mapped back. */
struct priv_op_data {
	uint16_t orig_idx;
};
114
/* Suite-wide resources created once in testsuite_setup(). */
struct comp_testsuite_params {
	struct rte_mempool *large_mbuf_pool;	/* linear bufs, ratio-sized */
	struct rte_mempool *small_mbuf_pool;	/* SMALL_SEG_SIZE SGL segments */
	struct rte_mempool *big_mbuf_pool;	/* MAX_MBUF_SEGMENT_SIZE bufs */
	struct rte_mempool *op_pool;	/* rte_comp_op pool w/ priv_op_data */
	struct rte_comp_xform *def_comp_xform;	/* default compress xform */
	struct rte_comp_xform *def_decomp_xform;	/* default decompress xform */
};
123
/* Test vectors and transforms handed to the generic test driver. */
struct interim_data_params {
	const char * const *test_bufs;	/* NUL-terminated input strings */
	unsigned int num_bufs;
	uint16_t *buf_idx;
	struct rte_comp_xform **compress_xforms;
	struct rte_comp_xform **decompress_xforms;
	unsigned int num_xforms;
};
132
/* Per-test configuration knobs consumed by the generic test driver. */
struct test_data_params {
	enum rte_comp_op_type compress_state;	/* stateless or stateful */
	enum rte_comp_op_type decompress_state;	/* stateless or stateful */
	enum varied_buff buff_type;	/* linear vs scatter-gather layout */
	enum zlib_direction zlib_dir;	/* which side(s) use zlib */
	unsigned int out_of_space;	/* force tiny destination buffers */
	unsigned int big_data;	/* use the big mbuf pool */
	/* stateful decompression only: */
	unsigned int decompress_output_block_size;
	unsigned int decompress_steps_max;
	/* external mbuf (memzone-backed) testing only: */
	unsigned int use_external_mbufs;
	unsigned int inbuf_data_size;
	const struct rte_memzone *inbuf_memzone;
	const struct rte_memzone *compbuf_memzone;
	const struct rte_memzone *uncompbuf_memzone;
	/* overflow and ratio testing: */
	enum overflow_test overflow;
	enum ratio_switch ratio;
};
153
/* Per-run working arrays, each sized num_bufs, owned by the test driver. */
struct test_private_arrays {
	struct rte_mbuf **uncomp_bufs;	/* source / decompressed mbufs */
	struct rte_mbuf **comp_bufs;	/* compressed mbufs */
	struct rte_comp_op **ops;	/* ops submitted to the device */
	struct rte_comp_op **ops_processed;	/* ops returned by dequeue */
	void **priv_xforms;	/* PMD private xform handles */
	uint64_t *compress_checksum;
	uint32_t *compressed_data_size;	/* accumulated produced bytes */
	void **stream;	/* stateful op streams */
	char **all_decomp_data;	/* stateful decompression accumulator */
	unsigned int *decomp_produced_data_size;
	uint16_t num_priv_xforms;
};
167
168static struct comp_testsuite_params testsuite_params = { 0 };
169
170
171static void
172testsuite_teardown(void)
173{
174 struct comp_testsuite_params *ts_params = &testsuite_params;
175
176 if (rte_mempool_in_use_count(ts_params->large_mbuf_pool))
177 RTE_LOG(ERR, USER1, "Large mbuf pool still has unfreed bufs\n");
178 if (rte_mempool_in_use_count(ts_params->small_mbuf_pool))
179 RTE_LOG(ERR, USER1, "Small mbuf pool still has unfreed bufs\n");
180 if (rte_mempool_in_use_count(ts_params->big_mbuf_pool))
181 RTE_LOG(ERR, USER1, "Big mbuf pool still has unfreed bufs\n");
182 if (rte_mempool_in_use_count(ts_params->op_pool))
183 RTE_LOG(ERR, USER1, "op pool still has unfreed ops\n");
184
185 rte_mempool_free(ts_params->large_mbuf_pool);
186 rte_mempool_free(ts_params->small_mbuf_pool);
187 rte_mempool_free(ts_params->big_mbuf_pool);
188 rte_mempool_free(ts_params->op_pool);
189 rte_free(ts_params->def_comp_xform);
190 rte_free(ts_params->def_decomp_xform);
191}
192
193static int
194testsuite_setup(void)
195{
196 struct comp_testsuite_params *ts_params = &testsuite_params;
197 uint32_t max_buf_size = 0;
198 unsigned int i;
199
200 if (rte_compressdev_count() == 0) {
201 RTE_LOG(WARNING, USER1, "Need at least one compress device\n");
202 return TEST_SKIPPED;
203 }
204
205 RTE_LOG(NOTICE, USER1, "Running tests on device %s\n",
206 rte_compressdev_name_get(0));
207
208 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
209 max_buf_size = RTE_MAX(max_buf_size,
210 strlen(compress_test_bufs[i]) + 1);
211
212
213
214
215
216
217
218 max_buf_size *= COMPRESS_BUF_SIZE_RATIO;
219 ts_params->large_mbuf_pool = rte_pktmbuf_pool_create("large_mbuf_pool",
220 NUM_LARGE_MBUFS,
221 CACHE_SIZE, 0,
222 max_buf_size + RTE_PKTMBUF_HEADROOM,
223 rte_socket_id());
224 if (ts_params->large_mbuf_pool == NULL) {
225 RTE_LOG(ERR, USER1, "Large mbuf pool could not be created\n");
226 return TEST_FAILED;
227 }
228
229
230 ts_params->small_mbuf_pool = rte_pktmbuf_pool_create("small_mbuf_pool",
231 NUM_LARGE_MBUFS * MAX_SEGS,
232 CACHE_SIZE, 0,
233 SMALL_SEG_SIZE + RTE_PKTMBUF_HEADROOM,
234 rte_socket_id());
235 if (ts_params->small_mbuf_pool == NULL) {
236 RTE_LOG(ERR, USER1, "Small mbuf pool could not be created\n");
237 goto exit;
238 }
239
240
241 ts_params->big_mbuf_pool = rte_pktmbuf_pool_create("big_mbuf_pool",
242 NUM_BIG_MBUFS + 1,
243 CACHE_SIZE, 0,
244 MAX_MBUF_SEGMENT_SIZE,
245 rte_socket_id());
246 if (ts_params->big_mbuf_pool == NULL) {
247 RTE_LOG(ERR, USER1, "Big mbuf pool could not be created\n");
248 goto exit;
249 }
250
251 ts_params->op_pool = rte_comp_op_pool_create("op_pool", NUM_OPS,
252 0, sizeof(struct priv_op_data),
253 rte_socket_id());
254 if (ts_params->op_pool == NULL) {
255 RTE_LOG(ERR, USER1, "Operation pool could not be created\n");
256 goto exit;
257 }
258
259 ts_params->def_comp_xform =
260 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
261 if (ts_params->def_comp_xform == NULL) {
262 RTE_LOG(ERR, USER1,
263 "Default compress xform could not be created\n");
264 goto exit;
265 }
266 ts_params->def_decomp_xform =
267 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
268 if (ts_params->def_decomp_xform == NULL) {
269 RTE_LOG(ERR, USER1,
270 "Default decompress xform could not be created\n");
271 goto exit;
272 }
273
274
275 ts_params->def_comp_xform->type = RTE_COMP_COMPRESS;
276 ts_params->def_comp_xform->compress.algo = RTE_COMP_ALGO_DEFLATE,
277 ts_params->def_comp_xform->compress.deflate.huffman =
278 RTE_COMP_HUFFMAN_DEFAULT;
279 ts_params->def_comp_xform->compress.level = RTE_COMP_LEVEL_PMD_DEFAULT;
280 ts_params->def_comp_xform->compress.chksum = RTE_COMP_CHECKSUM_NONE;
281 ts_params->def_comp_xform->compress.window_size = DEFAULT_WINDOW_SIZE;
282
283 ts_params->def_decomp_xform->type = RTE_COMP_DECOMPRESS;
284 ts_params->def_decomp_xform->decompress.algo = RTE_COMP_ALGO_DEFLATE,
285 ts_params->def_decomp_xform->decompress.chksum = RTE_COMP_CHECKSUM_NONE;
286 ts_params->def_decomp_xform->decompress.window_size = DEFAULT_WINDOW_SIZE;
287
288 return TEST_SUCCESS;
289
290exit:
291 testsuite_teardown();
292
293 return TEST_FAILED;
294}
295
296static int
297generic_ut_setup(void)
298{
299
300 struct rte_compressdev_config config = {
301 .socket_id = rte_socket_id(),
302 .nb_queue_pairs = 1,
303 .max_nb_priv_xforms = NUM_MAX_XFORMS,
304 .max_nb_streams = 1
305 };
306
307 if (rte_compressdev_configure(0, &config) < 0) {
308 RTE_LOG(ERR, USER1, "Device configuration failed\n");
309 return -1;
310 }
311
312 if (rte_compressdev_queue_pair_setup(0, 0, NUM_MAX_INFLIGHT_OPS,
313 rte_socket_id()) < 0) {
314 RTE_LOG(ERR, USER1, "Queue pair setup failed\n");
315 return -1;
316 }
317
318 if (rte_compressdev_start(0) < 0) {
319 RTE_LOG(ERR, USER1, "Device could not be started\n");
320 return -1;
321 }
322
323 return 0;
324}
325
326static void
327generic_ut_teardown(void)
328{
329 rte_compressdev_stop(0);
330 if (rte_compressdev_close(0) < 0)
331 RTE_LOG(ERR, USER1, "Device could not be closed\n");
332}
333
334static int
335test_compressdev_invalid_configuration(void)
336{
337 struct rte_compressdev_config invalid_config;
338 struct rte_compressdev_config valid_config = {
339 .socket_id = rte_socket_id(),
340 .nb_queue_pairs = 1,
341 .max_nb_priv_xforms = NUM_MAX_XFORMS,
342 .max_nb_streams = 1
343 };
344 struct rte_compressdev_info dev_info;
345
346 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
347
348
349 memcpy(&invalid_config, &valid_config,
350 sizeof(struct rte_compressdev_config));
351 invalid_config.nb_queue_pairs = 0;
352
353 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
354 "Device configuration was successful "
355 "with no queue pairs (invalid)\n");
356
357
358
359
360
361 rte_compressdev_info_get(0, &dev_info);
362 if (dev_info.max_nb_queue_pairs != 0) {
363 memcpy(&invalid_config, &valid_config,
364 sizeof(struct rte_compressdev_config));
365 invalid_config.nb_queue_pairs = dev_info.max_nb_queue_pairs + 1;
366
367 TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
368 "Device configuration was successful "
369 "with too many queue pairs (invalid)\n");
370 }
371
372
373 TEST_ASSERT_FAIL(rte_compressdev_queue_pair_setup(0, 0,
374 NUM_MAX_INFLIGHT_OPS, rte_socket_id()),
375 "Queue pair setup was successful "
376 "with no queue pairs set (invalid)\n");
377
378 return TEST_SUCCESS;
379}
380
381static int
382compare_buffers(const char *buffer1, uint32_t buffer1_len,
383 const char *buffer2, uint32_t buffer2_len)
384{
385 if (buffer1_len != buffer2_len) {
386 RTE_LOG(ERR, USER1, "Buffer lengths are different\n");
387 return -1;
388 }
389
390 if (memcmp(buffer1, buffer2, buffer1_len) != 0) {
391 RTE_LOG(ERR, USER1, "Buffers are different\n");
392 return -1;
393 }
394
395 return 0;
396}
397
398
399
400
/*
 * Map a compressdev flush flag to the equivalent zlib flush value.
 * Returns -1 for any flag with no zlib equivalent.
 */
static int
map_zlib_flush_flag(enum rte_comp_flush_flag flag)
{
	int zlib_flag = -1;

	if (flag == RTE_COMP_FLUSH_NONE)
		zlib_flag = Z_NO_FLUSH;
	else if (flag == RTE_COMP_FLUSH_SYNC)
		zlib_flag = Z_SYNC_FLUSH;
	else if (flag == RTE_COMP_FLUSH_FULL)
		zlib_flag = Z_FULL_FLUSH;
	else if (flag == RTE_COMP_FLUSH_FINAL)
		zlib_flag = Z_FINISH;

	return zlib_flag;
}
421
/*
 * Reference compression: runs op->m_src through zlib's deflate() into
 * op->m_dst and fills op->consumed/produced/status/output_chksum the way
 * a PMD would, so PMD output can be validated against it.
 *
 * Multi-segment (SGL) mbufs are linearised into temporary flat buffers
 * first, since deflate() operates on contiguous memory.
 *
 * Returns 0 on success, non-zero on failure.
 */
static int
compress_zlib(struct rte_comp_op *op,
		const struct rte_comp_xform *xform, int mem_level)
{
	z_stream stream;
	int zlib_flush;
	int strategy, window_bits, comp_level;
	int ret = TEST_FAILED;
	uint8_t *single_src_buf = NULL;
	uint8_t *single_dst_buf = NULL;

	/* Initialize zlib stream */
	stream.zalloc = Z_NULL;
	stream.zfree = Z_NULL;
	stream.opaque = Z_NULL;

	if (xform->compress.deflate.huffman == RTE_COMP_HUFFMAN_FIXED)
		strategy = Z_FIXED;
	else
		strategy = Z_DEFAULT_STRATEGY;

	/*
	 * windowBits selects the framing (per the zlib manual):
	 * negative = raw deflate (no checksum), positive = zlib wrapper
	 * (Adler-32), 31 (ZLIB_CRC_CHECKSUM_WINDOW_BITS) = gzip (CRC-32).
	 */
	window_bits = -(xform->compress.window_size);
	if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32)
		window_bits *= -1;
	else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32)
		window_bits = ZLIB_CRC_CHECKSUM_WINDOW_BITS;

	comp_level = xform->compress.level;

	if (comp_level != RTE_COMP_LEVEL_NONE)
		ret = deflateInit2(&stream, comp_level, Z_DEFLATED,
			window_bits, mem_level, strategy);
	else
		ret = deflateInit(&stream, Z_NO_COMPRESSION);

	if (ret != Z_OK) {
		printf("Zlib deflate could not be initialized\n");
		goto exit;
	}

	/* Linearise a multi-segment source into a flat buffer. */
	if (op->m_src->nb_segs > 1) {
		single_src_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_src), 0);
		if (single_src_buf == NULL) {
			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
			goto exit;
		}

		if (rte_pktmbuf_read(op->m_src, op->src.offset,
					rte_pktmbuf_pkt_len(op->m_src) -
					op->src.offset,
					single_src_buf) == NULL) {
			RTE_LOG(ERR, USER1,
				"Buffer could not be read entirely\n");
			goto exit;
		}

		stream.avail_in = op->src.length;
		stream.next_in = single_src_buf;

	} else {
		stream.avail_in = op->src.length;
		stream.next_in = rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
				op->src.offset);
	}

	/* Deflate into a flat buffer when the destination is multi-segment;
	 * the result is scattered back into the chain after deflate(). */
	if (op->m_dst->nb_segs > 1) {

		single_dst_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_dst), 0);
		if (single_dst_buf == NULL) {
			RTE_LOG(ERR, USER1,
				"Buffer could not be allocated\n");
			goto exit;
		}

		stream.avail_out = op->m_dst->pkt_len;
		stream.next_out = single_dst_buf;

	} else {
		stream.avail_out = op->m_dst->data_len;
		stream.next_out = rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
				op->dst.offset);
	}

	/* Stateless operation: the whole buffer is compressed in one call. */
	zlib_flush = map_zlib_flush_flag(op->flush_flag);
	ret = deflate(&stream, zlib_flush);

	if (stream.avail_in != 0) {
		RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
		goto exit;
	}

	if (ret != Z_STREAM_END)
		goto exit;

	/* Scatter the flat output back into the destination SGL chain. */
	if (op->m_dst->nb_segs > 1) {
		uint32_t remaining_data = stream.total_out;
		uint8_t *src_data = single_dst_buf;
		struct rte_mbuf *dst_buf = op->m_dst;

		while (remaining_data > 0) {
			uint8_t *dst_data = rte_pktmbuf_mtod_offset(dst_buf,
						uint8_t *, op->dst.offset);
			/* Last (partial) segment */
			if (remaining_data < dst_buf->data_len) {
				memcpy(dst_data, src_data, remaining_data);
				remaining_data = 0;
			} else {
				memcpy(dst_data, src_data, dst_buf->data_len);
				remaining_data -= dst_buf->data_len;
				src_data += dst_buf->data_len;
				dst_buf = dst_buf->next;
			}
		}
	}

	/* Strip the zlib/gzip framing so op->produced reflects only the
	 * deflate payload. */
	op->consumed = stream.total_in;
	if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32) {
		rte_pktmbuf_adj(op->m_dst, ZLIB_HEADER_SIZE);
		rte_pktmbuf_trim(op->m_dst, ZLIB_TRAILER_SIZE);
		op->produced = stream.total_out - (ZLIB_HEADER_SIZE +
				ZLIB_TRAILER_SIZE);
	} else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32) {
		rte_pktmbuf_adj(op->m_dst, GZIP_HEADER_SIZE);
		rte_pktmbuf_trim(op->m_dst, GZIP_TRAILER_SIZE);
		op->produced = stream.total_out - (GZIP_HEADER_SIZE +
				GZIP_TRAILER_SIZE);
	} else
		op->produced = stream.total_out;

	op->status = RTE_COMP_OP_STATUS_SUCCESS;
	op->output_chksum = stream.adler;

	deflateReset(&stream);

	ret = 0;
exit:
	deflateEnd(&stream);
	rte_free(single_src_buf);
	rte_free(single_dst_buf);

	return ret;
}
574
/*
 * Reference decompression: runs op->m_src through zlib's inflate() into
 * op->m_dst and fills op->consumed/produced/status, so PMD-compressed
 * output can be validated.
 *
 * Multi-segment (SGL) mbufs are linearised into temporary flat buffers,
 * since inflate() operates on contiguous memory.
 *
 * Fix: the temporary linearisation buffers were allocated with
 * rte_malloc() but never released (compare the rte_free() calls on
 * compress_zlib()'s exit path); they are now freed on every exit path.
 *
 * Returns 0 on success, non-zero on failure.
 */
static int
decompress_zlib(struct rte_comp_op *op,
		const struct rte_comp_xform *xform)
{
	z_stream stream;
	int window_bits;
	int zlib_flush;
	int ret = TEST_FAILED;
	uint8_t *single_src_buf = NULL;
	uint8_t *single_dst_buf = NULL;

	/* Initialize zlib stream */
	stream.zalloc = Z_NULL;
	stream.zfree = Z_NULL;
	stream.opaque = Z_NULL;

	/*
	 * Negative windowBits selects raw deflate (no zlib header/trailer),
	 * per the zlib manual.
	 */
	window_bits = -(xform->decompress.window_size);
	ret = inflateInit2(&stream, window_bits);

	if (ret != Z_OK) {
		printf("Zlib deflate could not be initialized\n");
		goto exit;
	}

	/* Linearise multi-segment source/destination into flat buffers. */
	if (op->m_src->nb_segs > 1) {
		single_src_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_src), 0);
		if (single_src_buf == NULL) {
			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
			goto exit;
		}
		single_dst_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_dst), 0);
		if (single_dst_buf == NULL) {
			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
			goto exit;
		}
		if (rte_pktmbuf_read(op->m_src, 0,
					rte_pktmbuf_pkt_len(op->m_src),
					single_src_buf) == NULL) {
			RTE_LOG(ERR, USER1,
				"Buffer could not be read entirely\n");
			goto exit;
		}

		stream.avail_in = op->src.length;
		stream.next_in = single_src_buf;
		stream.avail_out = rte_pktmbuf_pkt_len(op->m_dst);
		stream.next_out = single_dst_buf;

	} else {
		stream.avail_in = op->src.length;
		stream.next_in = rte_pktmbuf_mtod(op->m_src, uint8_t *);
		stream.avail_out = op->m_dst->data_len;
		stream.next_out = rte_pktmbuf_mtod(op->m_dst, uint8_t *);
	}

	/* Stateless operation: the whole buffer is inflated in one call. */
	zlib_flush = map_zlib_flush_flag(op->flush_flag);
	ret = inflate(&stream, zlib_flush);

	if (stream.avail_in != 0) {
		RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
		goto exit;
	}

	if (ret != Z_STREAM_END)
		goto exit;

	/* Scatter the flat output back into the destination SGL chain. */
	if (op->m_src->nb_segs > 1) {
		uint32_t remaining_data = stream.total_out;
		uint8_t *src_data = single_dst_buf;
		struct rte_mbuf *dst_buf = op->m_dst;

		while (remaining_data > 0) {
			uint8_t *dst_data = rte_pktmbuf_mtod(dst_buf,
					uint8_t *);
			/* Last (partial) segment */
			if (remaining_data < dst_buf->data_len) {
				memcpy(dst_data, src_data, remaining_data);
				remaining_data = 0;
			} else {
				memcpy(dst_data, src_data, dst_buf->data_len);
				remaining_data -= dst_buf->data_len;
				src_data += dst_buf->data_len;
				dst_buf = dst_buf->next;
			}
		}
	}

	op->consumed = stream.total_in;
	op->produced = stream.total_out;
	op->status = RTE_COMP_OP_STATUS_SUCCESS;

	inflateReset(&stream);

	ret = 0;
exit:
	inflateEnd(&stream);
	/* Release the temporary linearisation buffers (leak fix). */
	rte_free(single_src_buf);
	rte_free(single_dst_buf);

	return ret;
}
683
/*
 * Builds a scatter-gather chain of total_data_size bytes on head_buf,
 * appending at most seg_size bytes per segment.
 *
 * If test_buf is non-NULL its contents are copied into the chain (source
 * buffers); if NULL, only space is reserved (destination buffers).
 * limit_segs_in_sgl (0 = unlimited) caps the segment count; once capped,
 * the final segment absorbs all leftover data and is drawn from
 * large_mbuf_pool when it exceeds seg_size.
 *
 * Returns 0 on success, -1 on allocation/space/chaining failure.
 */
static int
prepare_sgl_bufs(const char *test_buf, struct rte_mbuf *head_buf,
		uint32_t total_data_size,
		struct rte_mempool *small_mbuf_pool,
		struct rte_mempool *large_mbuf_pool,
		uint8_t limit_segs_in_sgl,
		uint16_t seg_size)
{
	uint32_t remaining_data = total_data_size;
	uint16_t num_remaining_segs = DIV_CEIL(remaining_data, seg_size);
	struct rte_mempool *pool;
	struct rte_mbuf *next_seg;
	uint32_t data_size;
	char *buf_ptr;
	const char *data_ptr = test_buf;
	uint16_t i;
	int ret;

	/*
	 * Cap the segment count.
	 * NOTE(review): the cap is limit_segs_in_sgl - 1; presumably this
	 * accounts for the head segment being handled separately below —
	 * confirm the intended total never exceeds limit_segs_in_sgl.
	 */
	if (limit_segs_in_sgl != 0 && num_remaining_segs > limit_segs_in_sgl)
		num_remaining_segs = limit_segs_in_sgl - 1;

	/*
	 * First segment: append into head_buf directly, copying data only
	 * when a source buffer was provided.
	 */
	if (remaining_data < seg_size)
		data_size = remaining_data;
	else
		data_size = seg_size;

	buf_ptr = rte_pktmbuf_append(head_buf, data_size);
	if (buf_ptr == NULL) {
		RTE_LOG(ERR, USER1,
			"Not enough space in the 1st buffer\n");
		return -1;
	}

	if (data_ptr != NULL) {
		/* Copy characters without NULL terminator */
		memcpy(buf_ptr, data_ptr, data_size);
		data_ptr += data_size;
	}
	remaining_data -= data_size;
	num_remaining_segs--;

	/*
	 * Allocate the remaining segments, copy the rest of the data
	 * and chain each one onto head_buf.
	 */
	for (i = 0; i < num_remaining_segs; i++) {

		if (i == (num_remaining_segs - 1)) {
			/* Last segment carries all leftover data; take it
			 * from the large pool if it exceeds seg_size. */
			if (remaining_data > seg_size)
				pool = large_mbuf_pool;
			else
				pool = small_mbuf_pool;
			data_size = remaining_data;
		} else {
			data_size = seg_size;
			pool = small_mbuf_pool;
		}

		next_seg = rte_pktmbuf_alloc(pool);
		if (next_seg == NULL) {
			RTE_LOG(ERR, USER1,
				"New segment could not be allocated "
				"from the mempool\n");
			return -1;
		}
		buf_ptr = rte_pktmbuf_append(next_seg, data_size);
		if (buf_ptr == NULL) {
			RTE_LOG(ERR, USER1,
				"Not enough space in the buffer\n");
			rte_pktmbuf_free(next_seg);
			return -1;
		}
		if (data_ptr != NULL) {
			/* Copy characters without NULL terminator */
			memcpy(buf_ptr, data_ptr, data_size);
			data_ptr += data_size;
		}
		remaining_data -= data_size;

		ret = rte_pktmbuf_chain(head_buf, next_seg);
		if (ret != 0) {
			rte_pktmbuf_free(next_seg);
			RTE_LOG(ERR, USER1,
				"Segment could not chained\n");
			return -1;
		}
	}

	return 0;
}
779
/*
 * No-op free callback for externally attached mbuf buffers: the backing
 * memory is a caller-supplied memzone, so nothing is released here.
 */
static void
extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
{
}
784
785static int
786test_run_enqueue_dequeue(struct rte_comp_op **ops,
787 struct rte_comp_op **ops_processed,
788 unsigned int num_bufs)
789{
790 uint16_t num_enqd, num_deqd, num_total_deqd;
791 unsigned int deqd_retries = 0;
792 int res = 0;
793
794
795 num_enqd = rte_compressdev_enqueue_burst(0, 0, ops, num_bufs);
796 if (num_enqd < num_bufs) {
797 RTE_LOG(ERR, USER1,
798 "Some operations could not be enqueued\n");
799 res = -1;
800 }
801
802
803
804 num_total_deqd = 0;
805 while (num_total_deqd < num_enqd) {
806
807
808
809
810 if (deqd_retries != 0) {
811
812
813
814
815 if (deqd_retries == MAX_DEQD_RETRIES) {
816 RTE_LOG(ERR, USER1,
817 "Not all operations could be dequeued\n");
818 res = -1;
819 break;
820 }
821 usleep(DEQUEUE_WAIT_TIME);
822 }
823 num_deqd = rte_compressdev_dequeue_burst(0, 0,
824 &ops_processed[num_total_deqd], num_bufs);
825 num_total_deqd += num_deqd;
826 deqd_retries++;
827
828 }
829
830 return res;
831}
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
/*
 * Allocates and fills the source ("uncompressed") mbufs for one test run,
 * after zeroing all per-run private arrays.
 *
 * Depending on test_data the sources are: external memzone-backed mbufs,
 * scatter-gather chains built from the test strings, or single linear
 * mbufs holding each test string (including the NUL terminator).
 *
 * Returns 0 on success, -1 on failure.
 */
static int
test_setup_com_bufs(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{
	/* local variables: */
	unsigned int i;
	uint32_t data_size;
	char *buf_ptr;
	int ret;
	char **all_decomp_data = test_priv_data->all_decomp_data;

	struct comp_testsuite_params *ts_params = &testsuite_params;

	/* from int_data: */
	const char * const *test_bufs = int_data->test_bufs;
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_data: */
	unsigned int buff_type = test_data->buff_type;
	unsigned int big_data = test_data->big_data;

	/* from test_priv_data: */
	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
	struct rte_mempool *buf_pool;

	static struct rte_mbuf_ext_shared_info inbuf_info;

	size_t array_size = sizeof(void *) * num_bufs;

	/* Reset all per-run arrays so stale pointers from a previous run
	 * can never be freed/used again. */
	memset(test_priv_data->uncomp_bufs, 0, array_size);
	memset(test_priv_data->comp_bufs, 0, array_size);
	memset(test_priv_data->ops, 0, array_size);
	memset(test_priv_data->ops_processed, 0, array_size);
	memset(test_priv_data->priv_xforms, 0, array_size);
	memset(test_priv_data->compressed_data_size,
	       0, sizeof(uint32_t) * num_bufs);

	/* Stateful decompression accumulates its output here, sized after
	 * the first test vector.
	 * NOTE(review): the rte_malloc() result is not NULL-checked here;
	 * a failure would only surface at first use — consider checking. */
	if (test_data->decompress_state == RTE_COMP_OP_STATEFUL) {
		data_size = strlen(test_bufs[0]) + 1;
		*all_decomp_data = rte_malloc(NULL, data_size,
					RTE_CACHE_LINE_SIZE);
	}

	/* Pick the pool matching the requested buffer shape/size. */
	if (big_data)
		buf_pool = ts_params->big_mbuf_pool;
	else if (buff_type == SGL_BOTH)
		buf_pool = ts_params->small_mbuf_pool;
	else
		buf_pool = ts_params->large_mbuf_pool;

	/* Allocate one source mbuf per test buffer. */
	ret = rte_pktmbuf_alloc_bulk(buf_pool,
				uncomp_bufs, num_bufs);
	if (ret < 0) {
		RTE_LOG(ERR, USER1,
			"Source mbufs could not be allocated "
			"from the mempool\n");
		return -1;
	}

	if (test_data->use_external_mbufs) {
		/* Attach the caller-provided memzone as external storage. */
		inbuf_info.free_cb = extbuf_free_callback;
		inbuf_info.fcb_opaque = NULL;
		rte_mbuf_ext_refcnt_set(&inbuf_info, 1);
		for (i = 0; i < num_bufs; i++) {
			rte_pktmbuf_attach_extbuf(uncomp_bufs[i],
					test_data->inbuf_memzone->addr,
					test_data->inbuf_memzone->iova,
					test_data->inbuf_data_size,
					&inbuf_info);
			buf_ptr = rte_pktmbuf_append(uncomp_bufs[i],
					test_data->inbuf_data_size);
			if (buf_ptr == NULL) {
				RTE_LOG(ERR, USER1,
					"Append extra bytes to the source mbuf failed\n");
				return -1;
			}
		}
	} else if (buff_type == SGL_BOTH || buff_type == SGL_TO_LB) {
		/* Scatter-gather source: chain segments per test string. */
		for (i = 0; i < num_bufs; i++) {
			data_size = strlen(test_bufs[i]) + 1;
			if (prepare_sgl_bufs(test_bufs[i], uncomp_bufs[i],
			    data_size,
			    big_data ? buf_pool : ts_params->small_mbuf_pool,
			    big_data ? buf_pool : ts_params->large_mbuf_pool,
			    big_data ? 0 : MAX_SEGS,
			    big_data ? MAX_DATA_MBUF_SIZE : SMALL_SEG_SIZE) < 0)
				return -1;
		}
	} else {
		/* Linear source: copy each test string into one mbuf. */
		for (i = 0; i < num_bufs; i++) {
			data_size = strlen(test_bufs[i]) + 1;
			/* data_size includes the NUL terminator */
			buf_ptr = rte_pktmbuf_append(uncomp_bufs[i], data_size);
			if (buf_ptr == NULL) {
				RTE_LOG(ERR, USER1,
					"Append extra bytes to the source mbuf failed\n");
				return -1;
			}
			strlcpy(buf_ptr, test_bufs[i], data_size);
		}
	}

	return 0;
}
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980static inline uint32_t
981test_mbufs_calculate_data_size(
982 enum operation_type op_type,
983 unsigned int out_of_space_and_zlib,
984 const struct test_private_arrays *test_priv_data,
985 const struct interim_data_params *int_data,
986 const struct test_data_params *test_data,
987 unsigned int i)
988{
989
990 uint32_t data_size;
991 struct priv_op_data *priv_data;
992 float ratio_val;
993 enum ratio_switch ratio = test_data->ratio;
994
995 uint8_t not_zlib_compr;
996 enum overflow_test overflow = test_data->overflow;
997
998
999 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1000
1001
1002 const char * const *test_bufs = int_data->test_bufs;
1003
1004 if (out_of_space_and_zlib)
1005 data_size = OUT_OF_SPACE_BUF;
1006 else {
1007 if (op_type == OPERATION_COMPRESSION) {
1008 not_zlib_compr = (test_data->zlib_dir == ZLIB_DECOMPRESS
1009 || test_data->zlib_dir == ZLIB_NONE);
1010
1011 ratio_val = (ratio == RATIO_ENABLED) ?
1012 COMPRESS_BUF_SIZE_RATIO :
1013 COMPRESS_BUF_SIZE_RATIO_DISABLED;
1014
1015 ratio_val = (not_zlib_compr &&
1016 (overflow == OVERFLOW_ENABLED)) ?
1017 COMPRESS_BUF_SIZE_RATIO_OVERFLOW :
1018 ratio_val;
1019
1020 data_size = strlen(test_bufs[i]) * ratio_val;
1021 } else {
1022 priv_data = (struct priv_op_data *)
1023 (ops_processed[i] + 1);
1024 data_size = strlen(test_bufs[priv_data->orig_idx]) + 1;
1025 }
1026 }
1027
1028 return data_size;
1029}
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
/*
 * Allocates and sizes the destination mbufs for one stage of a test run:
 * comp_bufs for OPERATION_COMPRESSION, uncomp_bufs (reused as output) for
 * decompression.
 *
 * Destinations are external memzone-backed mbufs, scatter-gather chains,
 * or plain linear mbufs, mirroring the source setup in
 * test_setup_com_bufs(). Returns 0 on success, -1 on failure.
 */
static int
test_setup_output_bufs(
		enum operation_type op_type,
		unsigned int out_of_space_and_zlib,
		const struct test_private_arrays *test_priv_data,
		const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		struct rte_mbuf_ext_shared_info *current_extbuf_info)
{
	/* local variables: */
	unsigned int i;
	uint32_t data_size;
	int ret;
	char *buf_ptr;

	/* output mbuf array being populated */
	struct rte_mbuf **current_bufs;

	/* from int_data: */
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_data: */
	unsigned int buff_type = test_data->buff_type;
	unsigned int big_data = test_data->big_data;
	const struct rte_memzone *current_memzone;

	struct comp_testsuite_params *ts_params = &testsuite_params;
	struct rte_mempool *buf_pool;

	/* Pick the pool matching the requested buffer shape/size. */
	if (big_data)
		buf_pool = ts_params->big_mbuf_pool;
	else if (buff_type == SGL_BOTH)
		buf_pool = ts_params->small_mbuf_pool;
	else
		buf_pool = ts_params->large_mbuf_pool;

	if (op_type == OPERATION_COMPRESSION)
		current_bufs = test_priv_data->comp_bufs;
	else
		current_bufs = test_priv_data->uncomp_bufs;

	/* Allocate one destination mbuf per operation. */
	ret = rte_pktmbuf_alloc_bulk(buf_pool, current_bufs, num_bufs);
	if (ret < 0) {
		RTE_LOG(ERR, USER1,
			"Destination mbufs could not be allocated "
			"from the mempool\n");
		return -1;
	}

	if (test_data->use_external_mbufs) {
		/* Attach the caller-provided memzone as external storage. */
		current_extbuf_info->free_cb = extbuf_free_callback;
		current_extbuf_info->fcb_opaque = NULL;
		rte_mbuf_ext_refcnt_set(current_extbuf_info, 1);
		if (op_type == OPERATION_COMPRESSION)
			current_memzone = test_data->compbuf_memzone;
		else
			current_memzone = test_data->uncompbuf_memzone;

		for (i = 0; i < num_bufs; i++) {
			rte_pktmbuf_attach_extbuf(current_bufs[i],
					current_memzone->addr,
					current_memzone->iova,
					current_memzone->len,
					current_extbuf_info);
			rte_pktmbuf_append(current_bufs[i],
					current_memzone->len);
		}
	} else {
		for (i = 0; i < num_bufs; i++) {

			enum rte_comp_huffman comp_huffman =
			ts_params->def_comp_xform->compress.deflate.huffman;

			/* Compute this operation's destination size. */
			data_size = test_mbufs_calculate_data_size(
					op_type,
					out_of_space_and_zlib,
					test_priv_data,
					int_data,
					test_data,
					i);

			/* NOTE(review): extra headroom is added to the
			 * decompression output when Huffman is not dynamic;
			 * the rationale is not visible here — confirm
			 * against the test plan. */
			if (comp_huffman != RTE_COMP_HUFFMAN_DYNAMIC) {
				if (op_type == OPERATION_DECOMPRESSION)
					data_size *= COMPRESS_BUF_SIZE_RATIO;
			}

			/* Reserve the space (SGL chain or linear append). */
			if (buff_type == SGL_BOTH || buff_type == LB_TO_SGL) {
				ret = prepare_sgl_bufs(NULL, current_bufs[i],
				      data_size,
				      big_data ? buf_pool :
						ts_params->small_mbuf_pool,
				      big_data ? buf_pool :
						ts_params->large_mbuf_pool,
				      big_data ? 0 : MAX_SEGS,
				      big_data ? MAX_DATA_MBUF_SIZE :
						SMALL_SEG_SIZE);
				if (ret < 0)
					return -1;
			} else {
				buf_ptr = rte_pktmbuf_append(current_bufs[i],
						data_size);
				if (buf_ptr == NULL) {
					RTE_LOG(ERR, USER1,
						"Append extra bytes to the destination mbuf failed\n");
					return -1;
				}
			}
		}
	}

	return 0;
}
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188static int
1189test_deflate_comp_run(const struct interim_data_params *int_data,
1190 const struct test_data_params *test_data,
1191 const struct test_private_arrays *test_priv_data)
1192{
1193
1194 struct priv_op_data *priv_data;
1195 unsigned int i;
1196 uint16_t num_priv_xforms = 0;
1197 int ret;
1198 int ret_status = 0;
1199 char *buf_ptr;
1200
1201 struct comp_testsuite_params *ts_params = &testsuite_params;
1202
1203
1204 enum rte_comp_op_type operation_type = test_data->compress_state;
1205 unsigned int zlib_compress =
1206 (test_data->zlib_dir == ZLIB_ALL ||
1207 test_data->zlib_dir == ZLIB_COMPRESS);
1208
1209
1210 struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1211 unsigned int num_xforms = int_data->num_xforms;
1212 unsigned int num_bufs = int_data->num_bufs;
1213
1214
1215 struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
1216 struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1217 struct rte_comp_op **ops = test_priv_data->ops;
1218 struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1219 void **priv_xforms = test_priv_data->priv_xforms;
1220
1221 const struct rte_compressdev_capabilities *capa =
1222 rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
1223
1224
1225 ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
1226 if (ret < 0) {
1227 RTE_LOG(ERR, USER1,
1228 "Compress operations could not be allocated "
1229 "from the mempool\n");
1230 ret_status = -1;
1231 goto exit;
1232 }
1233
1234 for (i = 0; i < num_bufs; i++) {
1235 ops[i]->m_src = uncomp_bufs[i];
1236 ops[i]->m_dst = comp_bufs[i];
1237 ops[i]->src.offset = 0;
1238 ops[i]->src.length = rte_pktmbuf_pkt_len(uncomp_bufs[i]);
1239 ops[i]->dst.offset = 0;
1240
1241 RTE_LOG(DEBUG, USER1,
1242 "Uncompressed buffer length = %u compressed buffer length = %u",
1243 rte_pktmbuf_pkt_len(uncomp_bufs[i]),
1244 rte_pktmbuf_pkt_len(comp_bufs[i]));
1245
1246 if (operation_type == RTE_COMP_OP_STATELESS) {
1247 ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
1248 } else {
1249 RTE_LOG(ERR, USER1,
1250 "Compression: stateful operations are not "
1251 "supported in these tests yet\n");
1252 ret_status = -1;
1253 goto exit;
1254 }
1255 ops[i]->input_chksum = 0;
1256
1257
1258
1259
1260
1261
1262 priv_data = (struct priv_op_data *) (ops[i] + 1);
1263 priv_data->orig_idx = i;
1264 }
1265
1266
1267 if (zlib_compress) {
1268 for (i = 0; i < num_bufs; i++) {
1269 const struct rte_comp_xform *compress_xform =
1270 compress_xforms[i % num_xforms];
1271 ret = compress_zlib(ops[i], compress_xform,
1272 DEFAULT_MEM_LEVEL);
1273 if (ret < 0) {
1274 ret_status = -1;
1275 goto exit;
1276 }
1277
1278 ops_processed[i] = ops[i];
1279 }
1280 } else {
1281
1282 for (i = 0; i < num_xforms; i++) {
1283 ret = rte_compressdev_private_xform_create(0,
1284 (const struct rte_comp_xform *)
1285 compress_xforms[i],
1286 &priv_xforms[i]);
1287 if (ret < 0) {
1288 RTE_LOG(ERR, USER1,
1289 "Compression private xform "
1290 "could not be created\n");
1291 ret_status = -1;
1292 goto exit;
1293 }
1294 num_priv_xforms++;
1295 }
1296 if (capa->comp_feature_flags &
1297 RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
1298
1299 for (i = 0; i < num_bufs; i++)
1300 ops[i]->private_xform =
1301 priv_xforms[i % num_xforms];
1302 } else {
1303
1304 for (i = num_xforms; i < num_bufs; i++) {
1305 ret = rte_compressdev_private_xform_create(0,
1306 compress_xforms[i % num_xforms],
1307 &priv_xforms[i]);
1308 if (ret < 0) {
1309 RTE_LOG(ERR, USER1,
1310 "Compression private xform "
1311 "could not be created\n");
1312 ret_status = -1;
1313 goto exit;
1314 }
1315 num_priv_xforms++;
1316 }
1317
1318 for (i = 0; i < num_bufs; i++)
1319 ops[i]->private_xform = priv_xforms[i];
1320 }
1321
1322recovery_lb:
1323 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
1324 if (ret < 0) {
1325 RTE_LOG(ERR, USER1,
1326 "Compression: enqueue/dequeue operation failed\n");
1327 ret_status = -1;
1328 goto exit;
1329 }
1330
1331 for (i = 0; i < num_bufs; i++) {
1332 test_priv_data->compressed_data_size[i] +=
1333 ops_processed[i]->produced;
1334
1335 if (ops_processed[i]->status ==
1336 RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
1337
1338 ops[i]->status =
1339 RTE_COMP_OP_STATUS_NOT_PROCESSED;
1340 ops[i]->src.offset +=
1341 ops_processed[i]->consumed;
1342 ops[i]->src.length -=
1343 ops_processed[i]->consumed;
1344 ops[i]->dst.offset +=
1345 ops_processed[i]->produced;
1346
1347 buf_ptr = rte_pktmbuf_append(
1348 ops[i]->m_dst,
1349 ops_processed[i]->produced);
1350
1351 if (buf_ptr == NULL) {
1352 RTE_LOG(ERR, USER1,
1353 "Data recovery: append extra bytes to the current mbuf failed\n");
1354 ret_status = -1;
1355 goto exit;
1356 }
1357 goto recovery_lb;
1358 }
1359 }
1360 }
1361
1362exit:
1363
1364 if (ret_status < 0)
1365 for (i = 0; i < num_bufs; i++) {
1366 rte_comp_op_free(ops[i]);
1367 ops[i] = NULL;
1368 ops_processed[i] = NULL;
1369 }
1370
1371
1372 for (i = 0; i < num_priv_xforms; i++) {
1373 if (priv_xforms[i] != NULL) {
1374 rte_compressdev_private_xform_free(0, priv_xforms[i]);
1375 priv_xforms[i] = NULL;
1376 }
1377 }
1378
1379 return ret_status;
1380}
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
/**
 * Check the output of a compression stage (PMD or Zlib).
 *
 * For each buffer: log the compression statistics, record the output
 * checksum (when the xform requested one) so the decompression stage
 * can later compare against it, verify the operation status, and free
 * the source mbuf of every successful operation.
 *
 * @return
 *   - 0 on success
 *   - 1 when the expected out-of-space status was seen (test ends here)
 *   - 2 when out-of-space-recoverable is unsupported (overflow test)
 *   - -1 on any unexpected operation status
 */
static int
test_deflate_comp_finalize(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{

	unsigned int i;
	struct priv_op_data *priv_data;

	/* from int_data: */
	unsigned int num_xforms = int_data->num_xforms;
	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_priv_data: */
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	uint64_t *compress_checksum = test_priv_data->compress_checksum;
	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;

	/* from test_data: */
	unsigned int out_of_space = test_data->out_of_space;
	unsigned int zlib_compress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_COMPRESS);
	unsigned int zlib_decompress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_DECOMPRESS);

	for (i = 0; i < num_bufs; i++) {
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		uint16_t xform_idx = priv_data->orig_idx % num_xforms;
		const struct rte_comp_compress_xform *compress_xform =
				&compress_xforms[xform_idx]->compress;
		enum rte_comp_huffman huffman_type =
				compress_xform->deflate.huffman;
		/* If zlib will decompress, the PMD did this compression */
		char engine[] = "zlib (directly, not PMD)";
		if (zlib_decompress)
			strlcpy(engine, "PMD", sizeof(engine));

		RTE_LOG(DEBUG, USER1, "Buffer %u compressed by %s from %u to"
			" %u bytes (level = %d, huffman = %s)\n",
			i, engine,
			ops_processed[i]->consumed, ops_processed[i]->produced,
			compress_xform->level,
			huffman_type_strings[huffman_type]);
		RTE_LOG(DEBUG, USER1, "Compression ratio = %.2f\n",
			ops_processed[i]->consumed == 0 ? 0 :
			(float)ops_processed[i]->produced /
			ops_processed[i]->consumed * 100);
		/* Save the checksum for the decompression stage to verify */
		if (compress_xform->chksum != RTE_COMP_CHECKSUM_NONE)
			compress_checksum[i] = ops_processed[i]->output_chksum;
		ops[i] = NULL;
	}

	/*
	 * Check operation status and free source mbufs (destination mbuf and
	 * compress operation information is needed for the decompression).
	 */
	for (i = 0; i < num_bufs; i++) {
		/* Out-of-space test expects every op to fail with that code */
		if (out_of_space && !zlib_compress) {
			if (ops_processed[i]->status !=
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
				RTE_LOG(ERR, USER1,
					"Operation without expected out of "
					"space status error\n");
				return -1;
			} else
				continue;
		}

		if (ops_processed[i]->status != RTE_COMP_OP_STATUS_SUCCESS) {
			if (test_data->overflow == OVERFLOW_ENABLED) {
				if (ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
					RTE_LOG(INFO, USER1,
					"Out-of-space-recoverable functionality"
					" is not supported on this device\n");
					return 2;
				}
			}

			RTE_LOG(ERR, USER1,
				"Comp: Some operations were not successful\n");
			return -1;
		}
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		rte_pktmbuf_free(uncomp_bufs[priv_data->orig_idx]);
		uncomp_bufs[priv_data->orig_idx] = NULL;
	}

	if (out_of_space && !zlib_compress)
		return 1;

	return 0;
}
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
/**
 * Prepare and start the decompression stage.
 *
 * Builds one decompress operation per buffer (src = compressed mbuf,
 * dst = the uncompressed-side mbuf), carries per-op private data over
 * from the compression stage, then either decompresses directly with
 * Zlib or sets up PMD resources: shared or per-op private xforms for
 * stateless operation, or a single stream for stateful operation.
 * The actual PMD enqueue/dequeue is done by the caller.
 *
 * @return 0 on success, -1 on failure (resources recorded in
 *   test_priv_data are released by the caller).
 */
static int
test_deflate_decomp_run(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		struct test_private_arrays *test_priv_data)
{

	struct priv_op_data *priv_data;
	unsigned int i;
	uint16_t num_priv_xforms = 0;
	int ret;
	int ret_status = 0;

	struct comp_testsuite_params *ts_params = &testsuite_params;

	/* from test_data: */
	enum rte_comp_op_type operation_type = test_data->decompress_state;
	unsigned int zlib_decompress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_DECOMPRESS);

	/* from int_data: */
	struct rte_comp_xform **decompress_xforms = int_data->decompress_xforms;
	unsigned int num_xforms = int_data->num_xforms;
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_priv_data: */
	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
	struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	void **priv_xforms = test_priv_data->priv_xforms;
	uint32_t *compressed_data_size = test_priv_data->compressed_data_size;
	void **stream = test_priv_data->stream;

	const struct rte_compressdev_capabilities *capa =
			rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);

	ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
	if (ret < 0) {
		RTE_LOG(ERR, USER1,
			"Decompress operations could not be allocated "
			"from the mempool\n");
		ret_status = -1;
		goto exit;
	}

	/* Source buffers are the compressed destinations of the previous
	 * stage; destinations reuse the uncompressed-side mbufs. */
	for (i = 0; i < num_bufs; i++) {
		ops[i]->m_src = comp_bufs[i];
		ops[i]->m_dst = uncomp_bufs[i];
		ops[i]->src.offset = 0;

		/*
		 * Set the length of the compressed data to decompress:
		 * an explicit per-buffer size (recovery tests) takes
		 * precedence over what the compression op produced.
		 */
		if (compressed_data_size[i])
			ops[i]->src.length = compressed_data_size[i];
		else
			ops[i]->src.length = ops_processed[i]->produced;

		ops[i]->dst.offset = 0;

		if (operation_type == RTE_COMP_OP_STATELESS) {
			ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
			ops[i]->op_type = RTE_COMP_OP_STATELESS;
		} else if (!zlib_decompress) {
			/* Stateful decompression steps with SYNC flushes */
			ops[i]->flush_flag = RTE_COMP_FLUSH_SYNC;
			ops[i]->op_type = RTE_COMP_OP_STATEFUL;
		} else {
			RTE_LOG(ERR, USER1,
				"Decompression: stateful operations are"
				" not supported in these tests yet\n");
			ret_status = -1;
			goto exit;
		}
		ops[i]->input_chksum = 0;

		/*
		 * Copy private data (original buffer index) from the previous
		 * stage's operations into the new operations.
		 */
		memcpy(ops[i] + 1, ops_processed[i] + 1,
				sizeof(struct priv_op_data));
	}

	/*
	 * The compression ops are no longer needed: their produced lengths
	 * and private data have been captured above.
	 */
	rte_comp_op_bulk_free(ops_processed, num_bufs);

	/* Decompress */
	if (zlib_decompress) {
		for (i = 0; i < num_bufs; i++) {
			priv_data = (struct priv_op_data *)(ops[i] + 1);
			uint16_t xform_idx = priv_data->orig_idx % num_xforms;
			const struct rte_comp_xform *decompress_xform =
				decompress_xforms[xform_idx];

			ret = decompress_zlib(ops[i], decompress_xform);
			if (ret < 0) {
				ret_status = -1;
				goto exit;
			}

			ops_processed[i] = ops[i];
		}
	} else {
		if (operation_type == RTE_COMP_OP_STATELESS) {
			/* Create one private xform per configured xform */
			for (i = 0; i < num_xforms; i++) {
				ret = rte_compressdev_private_xform_create(0,
					(const struct rte_comp_xform *)
					decompress_xforms[i],
					&priv_xforms[i]);
				if (ret < 0) {
					RTE_LOG(ERR, USER1,
						"Decompression private xform "
						"could not be created\n");
					ret_status = -1;
					goto exit;
				}
				num_priv_xforms++;
			}

			if (capa->comp_feature_flags &
					RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
				/* Shareable: ops can reuse the same xforms */
				for (i = 0; i < num_bufs; i++) {
					priv_data = (struct priv_op_data *)
							(ops[i] + 1);
					uint16_t xform_idx =
					       priv_data->orig_idx % num_xforms;
					ops[i]->private_xform =
							priv_xforms[xform_idx];
				}
			} else {
				/* Non-shareable: each op needs its own xform;
				 * indices 0..num_xforms-1 already exist. */
				for (i = num_xforms; i < num_bufs; i++) {
					ret =
					 rte_compressdev_private_xform_create(0,
					      decompress_xforms[i % num_xforms],
					      &priv_xforms[i]);
					if (ret < 0) {
						RTE_LOG(ERR, USER1,
							"Decompression private xform"
							" could not be created\n");
						ret_status = -1;
						goto exit;
					}
					num_priv_xforms++;
				}

				/* Attach a distinct xform per buffer, indexed
				 * by the buffer's original position. */
				for (i = 0; i < num_bufs; i++) {
					priv_data = (struct priv_op_data *)
							(ops[i] + 1);
					uint16_t xform_idx =
							priv_data->orig_idx;
					ops[i]->private_xform =
							priv_xforms[xform_idx];
				}
			}
		} else {
			/* Stateful: one stream shared by all operations */
			ret = rte_compressdev_stream_create(0,
					decompress_xforms[0], stream);
			if (ret < 0) {
				RTE_LOG(ERR, USER1,
					"Decompression stream could not be created, error %d\n",
					ret);
				ret_status = -1;
				goto exit;
			}

			for (i = 0; i < num_bufs; i++)
				ops[i]->stream = *stream;
		}

		/* Record how many xforms the caller must free */
		test_priv_data->num_priv_xforms = num_priv_xforms;
	}

exit:
	return ret_status;
}
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723
1724
/**
 * Check the output of a decompression stage (PMD or Zlib).
 *
 * Logs per-buffer statistics and verifies operation status. For
 * stateful decompression, a recoverable (or intermediate SUCCESS)
 * status makes this function append the produced data to
 * *all_decomp_data; while input remains, the op is re-armed and 2 is
 * returned so the caller runs another enqueue/dequeue step; once the
 * input is fully consumed, the accumulated output and checksums are
 * verified. Compressed source mbufs of completed ops are freed here.
 *
 * @return
 *   - 0 on success
 *   - 1 when the expected out-of-space status was seen (test ends here)
 *   - 2 when another stateful decompression step is needed
 *   - -1 on failure
 */
static int
test_deflate_decomp_finalize(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{

	unsigned int i;
	struct priv_op_data *priv_data;
	/* NOTE(review): 'step' is static, so it persists across calls and
	 * across test cases and is not reset in this function — confirm a
	 * later stateful test cannot hit decompress_steps_max spuriously. */
	static unsigned int step;

	/* from int_data: */
	unsigned int num_bufs = int_data->num_bufs;
	const char * const *test_bufs = int_data->test_bufs;
	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;

	/* from test_priv_data: */
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;
	uint64_t *compress_checksum = test_priv_data->compress_checksum;
	unsigned int *decomp_produced_data_size =
			test_priv_data->decomp_produced_data_size;
	char **all_decomp_data = test_priv_data->all_decomp_data;

	/* from test_data: */
	unsigned int out_of_space = test_data->out_of_space;
	enum rte_comp_op_type operation_type = test_data->decompress_state;

	unsigned int zlib_compress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_COMPRESS);
	unsigned int zlib_decompress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_DECOMPRESS);

	for (i = 0; i < num_bufs; i++) {
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		/* If zlib did the compression, the PMD is decompressing */
		char engine[] = "zlib, (directly, no PMD)";
		if (zlib_compress)
			strlcpy(engine, "pmd", sizeof(engine));
		RTE_LOG(DEBUG, USER1,
			"Buffer %u decompressed by %s from %u to %u bytes\n",
			i, engine,
			ops_processed[i]->consumed, ops_processed[i]->produced);
		ops[i] = NULL;
	}

	/*
	 * Check operation status and free source mbufs (destination mbuf and
	 * decompress operation information is still needed for validation).
	 */
	for (i = 0; i < num_bufs; i++) {
		/* Out-of-space test expects every op to fail with that code */
		if (out_of_space && !zlib_decompress) {
			if (ops_processed[i]->status !=
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {

				RTE_LOG(ERR, USER1,
					"Operation without expected out of "
					"space status error\n");
				return -1;
			} else
				continue;
		}

		/* Stateful decompression may need several steps */
		if (operation_type == RTE_COMP_OP_STATEFUL
			&& (ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE
			    || ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_SUCCESS)) {

			RTE_LOG(DEBUG, USER1,
					".............RECOVERABLE\n");

			/* Accumulate the partial output; rte_pktmbuf_read()
			 * copies into our buffer only if the data is not
			 * already contiguous in the mbuf. */
			const void *ptr = rte_pktmbuf_read(
					ops_processed[i]->m_dst,
					ops_processed[i]->dst.offset,
					ops_processed[i]->produced,
					*all_decomp_data +
						*decomp_produced_data_size);
			if (ptr != *all_decomp_data +
					*decomp_produced_data_size)
				rte_memcpy(*all_decomp_data +
					   *decomp_produced_data_size,
					   ptr, ops_processed[i]->produced);

			*decomp_produced_data_size +=
					ops_processed[i]->produced;
			if (ops_processed[i]->src.length >
					ops_processed[i]->consumed) {
				/* SUCCESS with input left over is an error */
				if (ops_processed[i]->status ==
						RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
					      "Operation finished too early\n");
					return -1;
				}
				step++;
				if (step >= test_data->decompress_steps_max) {
					RTE_LOG(ERR, USER1,
					  "Operation exceeded maximum steps\n");
					return -1;
				}
				/* Re-arm the op past the consumed input and
				 * ask the caller for another step. */
				ops[i] = ops_processed[i];
				ops[i]->status =
					       RTE_COMP_OP_STATUS_NOT_PROCESSED;
				ops[i]->src.offset +=
						ops_processed[i]->consumed;
				ops[i]->src.length -=
						ops_processed[i]->consumed;
				/* repeat the operation */
				return 2;
			} else {
				/* Fully consumed: compare the accumulated
				 * output with the original test buffer. */
				priv_data = (struct priv_op_data *)
						(ops_processed[i] + 1);
				const char *buf1 =
						test_bufs[priv_data->orig_idx];
				const char *buf2 = *all_decomp_data;

				if (compare_buffers(buf1, strlen(buf1) + 1,
					  buf2, *decomp_produced_data_size) < 0)
					return -1;
				/* Test checksums */
				if (compress_xforms[0]->compress.chksum
						!= RTE_COMP_CHECKSUM_NONE) {
					if (ops_processed[i]->output_chksum
						      != compress_checksum[i]) {
						RTE_LOG(ERR, USER1,
			"The checksums differ\n"
			"Compression Checksum: %" PRIu64 "\tDecompression "
			"Checksum: %" PRIu64 "\n", compress_checksum[i],
					       ops_processed[i]->output_chksum);
						return -1;
					}
				}
			}
		} else if (ops_processed[i]->status !=
			   RTE_COMP_OP_STATUS_SUCCESS) {
			RTE_LOG(ERR, USER1,
				"Decomp: Some operations were not successful, status = %u\n",
				ops_processed[i]->status);
			return -1;
		}
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		rte_pktmbuf_free(comp_bufs[priv_data->orig_idx]);
		comp_bufs[priv_data->orig_idx] = NULL;
	}

	if (out_of_space && !zlib_decompress)
		return 1;

	return 0;
}
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
/**
 * Compare the final decompressed output against the original input.
 *
 * For each buffer, the destination mbuf contents are linearized into a
 * temporary contiguous buffer and compared byte-for-byte with the
 * original test buffer (or the external input memzone). When a checksum
 * was requested, the decompression output checksum must also match the
 * one recorded during compression.
 *
 * @return 0 on success, -1 on mismatch or allocation failure.
 */
static int
test_results_validation(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{

	unsigned int i;
	struct priv_op_data *priv_data;
	const char *buf1;
	const char *buf2;
	char *contig_buf = NULL;
	uint32_t data_size;

	/* from int_data: */
	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
	unsigned int num_bufs = int_data->num_bufs;
	const char * const *test_bufs = int_data->test_bufs;

	/* from test_priv_data: */
	uint64_t *compress_checksum = test_priv_data->compress_checksum;
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;

	/*
	 * Compare the original stream with the decompressed stream
	 * (in size and in content).
	 */
	for (i = 0; i < num_bufs; i++) {
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		/* External mbufs carry input in a memzone, not a C string */
		buf1 = test_data->use_external_mbufs ?
				test_data->inbuf_memzone->addr :
				test_bufs[priv_data->orig_idx];
		data_size = test_data->use_external_mbufs ?
				test_data->inbuf_data_size :
				strlen(buf1) + 1;

		contig_buf = rte_malloc(NULL, ops_processed[i]->produced, 0);
		if (contig_buf == NULL) {
			RTE_LOG(ERR, USER1, "Contiguous buffer could not "
					"be allocated\n");
			goto exit;
		}

		/* rte_pktmbuf_read() returns a direct pointer when the mbuf
		 * data is contiguous and copies into contig_buf otherwise */
		buf2 = rte_pktmbuf_read(ops_processed[i]->m_dst, 0,
				ops_processed[i]->produced, contig_buf);
		if (compare_buffers(buf1, data_size,
				buf2, ops_processed[i]->produced) < 0)
			goto exit;

		/* Test checksums */
		if (compress_xforms[0]->compress.chksum !=
				RTE_COMP_CHECKSUM_NONE) {
			if (ops_processed[i]->output_chksum !=
					compress_checksum[i]) {
				RTE_LOG(ERR, USER1, "The checksums differ\n"
			"Compression Checksum: %" PRIu64 "\tDecompression "
			"Checksum: %" PRIu64 "\n", compress_checksum[i],
					ops_processed[i]->output_chksum);
				goto exit;
			}
		}

		rte_free(contig_buf);
		contig_buf = NULL;
	}
	return 0;

exit:
	rte_free(contig_buf);
	return -1;
}
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985static int
1986test_deflate_comp_decomp(const struct interim_data_params *int_data,
1987 const struct test_data_params *test_data)
1988{
1989 unsigned int num_bufs = int_data->num_bufs;
1990 unsigned int out_of_space = test_data->out_of_space;
1991
1992 void *stream = NULL;
1993 char *all_decomp_data = NULL;
1994 unsigned int decomp_produced_data_size = 0;
1995
1996 int ret_status = -1;
1997 int ret;
1998 struct rte_mbuf *uncomp_bufs[num_bufs];
1999 struct rte_mbuf *comp_bufs[num_bufs];
2000 struct rte_comp_op *ops[num_bufs];
2001 struct rte_comp_op *ops_processed[num_bufs];
2002 void *priv_xforms[num_bufs];
2003 unsigned int i;
2004
2005 uint64_t compress_checksum[num_bufs];
2006 uint32_t compressed_data_size[num_bufs];
2007 char *contig_buf = NULL;
2008
2009 struct rte_mbuf_ext_shared_info compbuf_info;
2010 struct rte_mbuf_ext_shared_info decompbuf_info;
2011
2012 const struct rte_compressdev_capabilities *capa;
2013
2014
2015 unsigned int zlib_compress =
2016 (test_data->zlib_dir == ZLIB_ALL ||
2017 test_data->zlib_dir == ZLIB_COMPRESS);
2018 unsigned int zlib_decompress =
2019 (test_data->zlib_dir == ZLIB_ALL ||
2020 test_data->zlib_dir == ZLIB_DECOMPRESS);
2021
2022 struct test_private_arrays test_priv_data;
2023
2024 test_priv_data.uncomp_bufs = uncomp_bufs;
2025 test_priv_data.comp_bufs = comp_bufs;
2026 test_priv_data.ops = ops;
2027 test_priv_data.ops_processed = ops_processed;
2028 test_priv_data.priv_xforms = priv_xforms;
2029 test_priv_data.compress_checksum = compress_checksum;
2030 test_priv_data.compressed_data_size = compressed_data_size;
2031
2032 test_priv_data.stream = &stream;
2033 test_priv_data.all_decomp_data = &all_decomp_data;
2034 test_priv_data.decomp_produced_data_size = &decomp_produced_data_size;
2035
2036 test_priv_data.num_priv_xforms = 0;
2037
2038 capa = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2039 if (capa == NULL) {
2040 RTE_LOG(ERR, USER1,
2041 "Compress device does not support DEFLATE\n");
2042 return -1;
2043 }
2044
2045
2046 ret = test_setup_com_bufs(int_data, test_data, &test_priv_data);
2047 if (ret < 0) {
2048 ret_status = -1;
2049 goto exit;
2050 }
2051
2052 RTE_LOG(DEBUG, USER1, "<<< COMPRESSION >>>\n");
2053
2054
2055
2056
2057 ret = test_setup_output_bufs(
2058 OPERATION_COMPRESSION,
2059 out_of_space == 1 && !zlib_compress,
2060 &test_priv_data,
2061 int_data,
2062 test_data,
2063 &compbuf_info);
2064 if (ret < 0) {
2065 ret_status = -1;
2066 goto exit;
2067 }
2068
2069
2070 ret = test_deflate_comp_run(int_data, test_data, &test_priv_data);
2071 if (ret < 0) {
2072 ret_status = -1;
2073 goto exit;
2074 }
2075
2076 ret = test_deflate_comp_finalize(int_data, test_data, &test_priv_data);
2077 if (ret < 0) {
2078 ret_status = -1;
2079 goto exit;
2080 } else if (ret == 1) {
2081 ret_status = 0;
2082 goto exit;
2083 } else if (ret == 2) {
2084 ret_status = 1;
2085 goto exit;
2086 }
2087
2088
2089
2090 RTE_LOG(DEBUG, USER1, "<<< DECOMPRESSION >>>\n");
2091
2092
2093 ret = test_setup_output_bufs(
2094 OPERATION_DECOMPRESSION,
2095 out_of_space == 1 && !zlib_decompress,
2096 &test_priv_data,
2097 int_data,
2098 test_data,
2099 &decompbuf_info);
2100 if (ret < 0) {
2101 ret_status = -1;
2102 goto exit;
2103 }
2104
2105
2106 ret = test_deflate_decomp_run(int_data, test_data, &test_priv_data);
2107 if (ret < 0) {
2108 ret_status = -1;
2109 goto exit;
2110 }
2111
2112 if (!zlib_decompress) {
2113next_step:
2114 ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
2115 if (ret < 0) {
2116 ret_status = -1;
2117 RTE_LOG(ERR, USER1,
2118 "Decompression: enqueue/dequeue operation failed\n");
2119 }
2120 }
2121
2122 ret = test_deflate_decomp_finalize(int_data, test_data, &test_priv_data);
2123 if (ret < 0) {
2124 ret_status = -1;
2125 goto exit;
2126 } else if (ret == 1) {
2127 ret_status = 0;
2128 goto exit;
2129 } else if (ret == 2) {
2130 goto next_step;
2131 }
2132
2133
2134
2135 ret = test_results_validation(int_data, test_data, &test_priv_data);
2136 if (ret < 0) {
2137 ret_status = -1;
2138 goto exit;
2139 }
2140 ret_status = 0;
2141
2142exit:
2143
2144
2145 if (stream != NULL)
2146 rte_compressdev_stream_free(0, stream);
2147 rte_free(all_decomp_data);
2148
2149
2150 for (i = 0; i < test_priv_data.num_priv_xforms; i++) {
2151 if (priv_xforms[i] != NULL) {
2152 rte_compressdev_private_xform_free(0, priv_xforms[i]);
2153 priv_xforms[i] = NULL;
2154 }
2155 }
2156 for (i = 0; i < num_bufs; i++) {
2157 rte_pktmbuf_free(uncomp_bufs[i]);
2158 rte_pktmbuf_free(comp_bufs[i]);
2159 rte_comp_op_free(ops[i]);
2160 rte_comp_op_free(ops_processed[i]);
2161 }
2162 rte_free(contig_buf);
2163
2164 return ret_status;
2165}
2166
2167static int
2168test_compressdev_deflate_stateless_fixed(void)
2169{
2170 struct comp_testsuite_params *ts_params = &testsuite_params;
2171 uint16_t i;
2172 int ret;
2173 const struct rte_compressdev_capabilities *capab;
2174
2175 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2176 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2177
2178 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2179 return -ENOTSUP;
2180
2181 struct rte_comp_xform *compress_xform =
2182 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2183
2184 if (compress_xform == NULL) {
2185 RTE_LOG(ERR, USER1,
2186 "Compress xform could not be created\n");
2187 ret = TEST_FAILED;
2188 goto exit;
2189 }
2190
2191 memcpy(compress_xform, ts_params->def_comp_xform,
2192 sizeof(struct rte_comp_xform));
2193 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
2194
2195 struct interim_data_params int_data = {
2196 NULL,
2197 1,
2198 NULL,
2199 &compress_xform,
2200 &ts_params->def_decomp_xform,
2201 1
2202 };
2203
2204 struct test_data_params test_data = {
2205 .compress_state = RTE_COMP_OP_STATELESS,
2206 .decompress_state = RTE_COMP_OP_STATELESS,
2207 .buff_type = LB_BOTH,
2208 .zlib_dir = ZLIB_DECOMPRESS,
2209 .out_of_space = 0,
2210 .big_data = 0,
2211 .overflow = OVERFLOW_DISABLED,
2212 .ratio = RATIO_ENABLED
2213 };
2214
2215 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2216 int_data.test_bufs = &compress_test_bufs[i];
2217 int_data.buf_idx = &i;
2218
2219
2220 test_data.zlib_dir = ZLIB_DECOMPRESS;
2221 ret = test_deflate_comp_decomp(&int_data, &test_data);
2222 if (ret < 0)
2223 goto exit;
2224
2225
2226 test_data.zlib_dir = ZLIB_COMPRESS;
2227 ret = test_deflate_comp_decomp(&int_data, &test_data);
2228 if (ret < 0)
2229 goto exit;
2230 }
2231
2232 ret = TEST_SUCCESS;
2233
2234exit:
2235 rte_free(compress_xform);
2236 return ret;
2237}
2238
2239static int
2240test_compressdev_deflate_stateless_dynamic(void)
2241{
2242 struct comp_testsuite_params *ts_params = &testsuite_params;
2243 uint16_t i;
2244 int ret;
2245 struct rte_comp_xform *compress_xform =
2246 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2247
2248 const struct rte_compressdev_capabilities *capab;
2249
2250 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2251 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2252
2253 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2254 return -ENOTSUP;
2255
2256 if (compress_xform == NULL) {
2257 RTE_LOG(ERR, USER1,
2258 "Compress xform could not be created\n");
2259 ret = TEST_FAILED;
2260 goto exit;
2261 }
2262
2263 memcpy(compress_xform, ts_params->def_comp_xform,
2264 sizeof(struct rte_comp_xform));
2265 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_DYNAMIC;
2266
2267 struct interim_data_params int_data = {
2268 NULL,
2269 1,
2270 NULL,
2271 &compress_xform,
2272 &ts_params->def_decomp_xform,
2273 1
2274 };
2275
2276 struct test_data_params test_data = {
2277 .compress_state = RTE_COMP_OP_STATELESS,
2278 .decompress_state = RTE_COMP_OP_STATELESS,
2279 .buff_type = LB_BOTH,
2280 .zlib_dir = ZLIB_DECOMPRESS,
2281 .out_of_space = 0,
2282 .big_data = 0,
2283 .overflow = OVERFLOW_DISABLED,
2284 .ratio = RATIO_ENABLED
2285 };
2286
2287 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2288 int_data.test_bufs = &compress_test_bufs[i];
2289 int_data.buf_idx = &i;
2290
2291
2292 test_data.zlib_dir = ZLIB_DECOMPRESS;
2293 ret = test_deflate_comp_decomp(&int_data, &test_data);
2294 if (ret < 0)
2295 goto exit;
2296
2297
2298 test_data.zlib_dir = ZLIB_COMPRESS;
2299 ret = test_deflate_comp_decomp(&int_data, &test_data);
2300 if (ret < 0)
2301 goto exit;
2302 }
2303
2304 ret = TEST_SUCCESS;
2305
2306exit:
2307 rte_free(compress_xform);
2308 return ret;
2309}
2310
2311static int
2312test_compressdev_deflate_stateless_multi_op(void)
2313{
2314 struct comp_testsuite_params *ts_params = &testsuite_params;
2315 uint16_t num_bufs = RTE_DIM(compress_test_bufs);
2316 uint16_t buf_idx[num_bufs];
2317 uint16_t i;
2318 int ret;
2319
2320 for (i = 0; i < num_bufs; i++)
2321 buf_idx[i] = i;
2322
2323 struct interim_data_params int_data = {
2324 compress_test_bufs,
2325 num_bufs,
2326 buf_idx,
2327 &ts_params->def_comp_xform,
2328 &ts_params->def_decomp_xform,
2329 1
2330 };
2331
2332 struct test_data_params test_data = {
2333 .compress_state = RTE_COMP_OP_STATELESS,
2334 .decompress_state = RTE_COMP_OP_STATELESS,
2335 .buff_type = LB_BOTH,
2336 .zlib_dir = ZLIB_DECOMPRESS,
2337 .out_of_space = 0,
2338 .big_data = 0,
2339 .overflow = OVERFLOW_DISABLED,
2340 .ratio = RATIO_ENABLED
2341 };
2342
2343
2344 test_data.zlib_dir = ZLIB_DECOMPRESS;
2345 ret = test_deflate_comp_decomp(&int_data, &test_data);
2346 if (ret < 0)
2347 return ret;
2348
2349
2350 test_data.zlib_dir = ZLIB_COMPRESS;
2351 ret = test_deflate_comp_decomp(&int_data, &test_data);
2352 if (ret < 0)
2353 return ret;
2354
2355 return TEST_SUCCESS;
2356}
2357
2358static int
2359test_compressdev_deflate_stateless_multi_level(void)
2360{
2361 struct comp_testsuite_params *ts_params = &testsuite_params;
2362 unsigned int level;
2363 uint16_t i;
2364 int ret;
2365 struct rte_comp_xform *compress_xform =
2366 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2367
2368 if (compress_xform == NULL) {
2369 RTE_LOG(ERR, USER1,
2370 "Compress xform could not be created\n");
2371 ret = TEST_FAILED;
2372 goto exit;
2373 }
2374
2375 memcpy(compress_xform, ts_params->def_comp_xform,
2376 sizeof(struct rte_comp_xform));
2377
2378 struct interim_data_params int_data = {
2379 NULL,
2380 1,
2381 NULL,
2382 &compress_xform,
2383 &ts_params->def_decomp_xform,
2384 1
2385 };
2386
2387 struct test_data_params test_data = {
2388 .compress_state = RTE_COMP_OP_STATELESS,
2389 .decompress_state = RTE_COMP_OP_STATELESS,
2390 .buff_type = LB_BOTH,
2391 .zlib_dir = ZLIB_DECOMPRESS,
2392 .out_of_space = 0,
2393 .big_data = 0,
2394 .overflow = OVERFLOW_DISABLED,
2395 .ratio = RATIO_ENABLED
2396 };
2397
2398 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2399 int_data.test_bufs = &compress_test_bufs[i];
2400 int_data.buf_idx = &i;
2401
2402 for (level = RTE_COMP_LEVEL_MIN; level <= RTE_COMP_LEVEL_MAX;
2403 level++) {
2404 compress_xform->compress.level = level;
2405
2406 test_data.zlib_dir = ZLIB_DECOMPRESS;
2407 ret = test_deflate_comp_decomp(&int_data, &test_data);
2408 if (ret < 0)
2409 goto exit;
2410 }
2411 }
2412
2413 ret = TEST_SUCCESS;
2414
2415exit:
2416 rte_free(compress_xform);
2417 return ret;
2418}
2419
2420#define NUM_XFORMS 3
2421static int
2422test_compressdev_deflate_stateless_multi_xform(void)
2423{
2424 struct comp_testsuite_params *ts_params = &testsuite_params;
2425 uint16_t num_bufs = NUM_XFORMS;
2426 struct rte_comp_xform *compress_xforms[NUM_XFORMS] = {NULL};
2427 struct rte_comp_xform *decompress_xforms[NUM_XFORMS] = {NULL};
2428 const char *test_buffers[NUM_XFORMS];
2429 uint16_t i;
2430 unsigned int level = RTE_COMP_LEVEL_MIN;
2431 uint16_t buf_idx[num_bufs];
2432 int ret;
2433
2434
2435 for (i = 0; i < NUM_XFORMS; i++) {
2436 compress_xforms[i] = rte_malloc(NULL,
2437 sizeof(struct rte_comp_xform), 0);
2438 if (compress_xforms[i] == NULL) {
2439 RTE_LOG(ERR, USER1,
2440 "Compress xform could not be created\n");
2441 ret = TEST_FAILED;
2442 goto exit;
2443 }
2444
2445 memcpy(compress_xforms[i], ts_params->def_comp_xform,
2446 sizeof(struct rte_comp_xform));
2447 compress_xforms[i]->compress.level = level;
2448 level++;
2449
2450 decompress_xforms[i] = rte_malloc(NULL,
2451 sizeof(struct rte_comp_xform), 0);
2452 if (decompress_xforms[i] == NULL) {
2453 RTE_LOG(ERR, USER1,
2454 "Decompress xform could not be created\n");
2455 ret = TEST_FAILED;
2456 goto exit;
2457 }
2458
2459 memcpy(decompress_xforms[i], ts_params->def_decomp_xform,
2460 sizeof(struct rte_comp_xform));
2461 }
2462
2463 for (i = 0; i < NUM_XFORMS; i++) {
2464 buf_idx[i] = 0;
2465
2466 test_buffers[i] = compress_test_bufs[0];
2467 }
2468
2469 struct interim_data_params int_data = {
2470 test_buffers,
2471 num_bufs,
2472 buf_idx,
2473 compress_xforms,
2474 decompress_xforms,
2475 NUM_XFORMS
2476 };
2477
2478 struct test_data_params test_data = {
2479 .compress_state = RTE_COMP_OP_STATELESS,
2480 .decompress_state = RTE_COMP_OP_STATELESS,
2481 .buff_type = LB_BOTH,
2482 .zlib_dir = ZLIB_DECOMPRESS,
2483 .out_of_space = 0,
2484 .big_data = 0,
2485 .overflow = OVERFLOW_DISABLED,
2486 .ratio = RATIO_ENABLED
2487 };
2488
2489
2490 ret = test_deflate_comp_decomp(&int_data, &test_data);
2491 if (ret < 0)
2492 goto exit;
2493
2494 ret = TEST_SUCCESS;
2495
2496exit:
2497 for (i = 0; i < NUM_XFORMS; i++) {
2498 rte_free(compress_xforms[i]);
2499 rte_free(decompress_xforms[i]);
2500 }
2501
2502 return ret;
2503}
2504
2505static int
2506test_compressdev_deflate_stateless_sgl(void)
2507{
2508 struct comp_testsuite_params *ts_params = &testsuite_params;
2509 uint16_t i;
2510 int ret;
2511 const struct rte_compressdev_capabilities *capab;
2512
2513 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2514 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2515
2516 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2517 return -ENOTSUP;
2518
2519 struct interim_data_params int_data = {
2520 NULL,
2521 1,
2522 NULL,
2523 &ts_params->def_comp_xform,
2524 &ts_params->def_decomp_xform,
2525 1
2526 };
2527
2528 struct test_data_params test_data = {
2529 .compress_state = RTE_COMP_OP_STATELESS,
2530 .decompress_state = RTE_COMP_OP_STATELESS,
2531 .buff_type = SGL_BOTH,
2532 .zlib_dir = ZLIB_DECOMPRESS,
2533 .out_of_space = 0,
2534 .big_data = 0,
2535 .overflow = OVERFLOW_DISABLED,
2536 .ratio = RATIO_ENABLED
2537 };
2538
2539 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2540 int_data.test_bufs = &compress_test_bufs[i];
2541 int_data.buf_idx = &i;
2542
2543
2544 test_data.zlib_dir = ZLIB_DECOMPRESS;
2545 ret = test_deflate_comp_decomp(&int_data, &test_data);
2546 if (ret < 0)
2547 return ret;
2548
2549
2550 test_data.zlib_dir = ZLIB_COMPRESS;
2551 ret = test_deflate_comp_decomp(&int_data, &test_data);
2552 if (ret < 0)
2553 return ret;
2554
2555 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_LB_OUT) {
2556
2557 test_data.zlib_dir = ZLIB_DECOMPRESS;
2558 test_data.buff_type = SGL_TO_LB;
2559 ret = test_deflate_comp_decomp(&int_data, &test_data);
2560 if (ret < 0)
2561 return ret;
2562
2563
2564 test_data.zlib_dir = ZLIB_COMPRESS;
2565 test_data.buff_type = SGL_TO_LB;
2566 ret = test_deflate_comp_decomp(&int_data, &test_data);
2567 if (ret < 0)
2568 return ret;
2569 }
2570
2571 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_LB_IN_SGL_OUT) {
2572
2573 test_data.zlib_dir = ZLIB_DECOMPRESS;
2574 test_data.buff_type = LB_TO_SGL;
2575 ret = test_deflate_comp_decomp(&int_data, &test_data);
2576 if (ret < 0)
2577 return ret;
2578
2579
2580 test_data.zlib_dir = ZLIB_COMPRESS;
2581 test_data.buff_type = LB_TO_SGL;
2582 ret = test_deflate_comp_decomp(&int_data, &test_data);
2583 if (ret < 0)
2584 return ret;
2585 }
2586 }
2587
2588 return TEST_SUCCESS;
2589}
2590
/*
 * Stateless DEFLATE with checksum generation.
 * Exercises each checksum type the device advertises (CRC32, Adler32,
 * combined CRC32-Adler32) over all test buffers, cross-checking the
 * device-produced checksums against Zlib where a single-algorithm
 * checksum allows it.
 * Returns TEST_SUCCESS, TEST_FAILED, a negative helper error,
 * or -ENOTSUP when the device supports no checksum at all.
 */
static int
test_compressdev_deflate_stateless_checksum(void)
{
	struct comp_testsuite_params *ts_params = &testsuite_params;
	uint16_t i;
	int ret;
	const struct rte_compressdev_capabilities *capab;

	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");

	/* Skip the test entirely if the driver offers no checksum support */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) == 0 &&
			(capab->comp_feature_flags &
			RTE_COMP_FF_ADLER32_CHECKSUM) == 0 &&
			(capab->comp_feature_flags &
			RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) == 0)
		return -ENOTSUP;

	/* Private copies of the default xforms so the checksum type can be
	 * changed per sub-test without disturbing the suite defaults.
	 */
	struct rte_comp_xform *compress_xform =
			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
	if (compress_xform == NULL) {
		RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
		return TEST_FAILED;
	}

	memcpy(compress_xform, ts_params->def_comp_xform,
			sizeof(struct rte_comp_xform));

	struct rte_comp_xform *decompress_xform =
			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
	if (decompress_xform == NULL) {
		RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
		rte_free(compress_xform);
		return TEST_FAILED;
	}

	memcpy(decompress_xform, ts_params->def_decomp_xform,
			sizeof(struct rte_comp_xform));

	struct interim_data_params int_data = {
		NULL,		/* test_bufs: set per iteration below */
		1,		/* num_bufs */
		NULL,		/* buf_idx: set per iteration below */
		&compress_xform,
		&decompress_xform,
		1		/* num_xforms */
	};

	struct test_data_params test_data = {
		.compress_state = RTE_COMP_OP_STATELESS,
		.decompress_state = RTE_COMP_OP_STATELESS,
		.buff_type = LB_BOTH,
		.zlib_dir = ZLIB_DECOMPRESS,
		.out_of_space = 0,
		.big_data = 0,
		.overflow = OVERFLOW_DISABLED,
		.ratio = RATIO_ENABLED
	};

	/* CRC32 checksum, if supported by the driver */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM)) {
		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;

		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {

			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			/* Compress with Zlib, decompress with compressdev:
			 * validates the checksum computed by the driver's
			 * decompression path.
			 */
			test_data.zlib_dir = ZLIB_COMPRESS;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;

			/* Full round trip in compressdev: validates the
			 * checksums from both the compression and the
			 * decompression paths against each other.
			 */
			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	/* Adler32 checksum, if supported by the driver */
	if ((capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM)) {
		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;

		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			/* Compress with Zlib, decompress with compressdev:
			 * validates the driver's decompression checksum.
			 */
			test_data.zlib_dir = ZLIB_COMPRESS;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;

			/* Full compressdev round trip: validates both
			 * directions of the driver's checksum generation.
			 */
			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	/* Combined CRC32-Adler32 checksum, if supported by the driver.
	 * Zlib cannot produce this combined checksum, so only the pure
	 * compressdev round trip (ZLIB_NONE) is exercised here.
	 */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)) {
		compress_xform->compress.chksum =
				RTE_COMP_CHECKSUM_CRC32_ADLER32;
		decompress_xform->decompress.chksum =
				RTE_COMP_CHECKSUM_CRC32_ADLER32;

		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	ret = TEST_SUCCESS;

exit:
	rte_free(compress_xform);
	rte_free(decompress_xform);
	return ret;
}
2733
2734static int
2735test_compressdev_out_of_space_buffer(void)
2736{
2737 struct comp_testsuite_params *ts_params = &testsuite_params;
2738 int ret;
2739 uint16_t i;
2740 const struct rte_compressdev_capabilities *capab;
2741
2742 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
2743
2744 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2745 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2746
2747 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2748 return -ENOTSUP;
2749
2750 struct interim_data_params int_data = {
2751 &compress_test_bufs[0],
2752 1,
2753 &i,
2754 &ts_params->def_comp_xform,
2755 &ts_params->def_decomp_xform,
2756 1
2757 };
2758
2759 struct test_data_params test_data = {
2760 .compress_state = RTE_COMP_OP_STATELESS,
2761 .decompress_state = RTE_COMP_OP_STATELESS,
2762 .buff_type = LB_BOTH,
2763 .zlib_dir = ZLIB_DECOMPRESS,
2764 .out_of_space = 1,
2765 .big_data = 0,
2766 .overflow = OVERFLOW_DISABLED,
2767 .ratio = RATIO_ENABLED
2768 };
2769
2770 test_data.zlib_dir = ZLIB_DECOMPRESS;
2771 ret = test_deflate_comp_decomp(&int_data, &test_data);
2772 if (ret < 0)
2773 goto exit;
2774
2775
2776 test_data.zlib_dir = ZLIB_COMPRESS;
2777 ret = test_deflate_comp_decomp(&int_data, &test_data);
2778 if (ret < 0)
2779 goto exit;
2780
2781 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2782
2783 test_data.zlib_dir = ZLIB_DECOMPRESS;
2784 test_data.buff_type = SGL_BOTH;
2785 ret = test_deflate_comp_decomp(&int_data, &test_data);
2786 if (ret < 0)
2787 goto exit;
2788
2789
2790 test_data.zlib_dir = ZLIB_COMPRESS;
2791 test_data.buff_type = SGL_BOTH;
2792 ret = test_deflate_comp_decomp(&int_data, &test_data);
2793 if (ret < 0)
2794 goto exit;
2795 }
2796
2797 ret = TEST_SUCCESS;
2798
2799exit:
2800 return ret;
2801}
2802
2803static int
2804test_compressdev_deflate_stateless_dynamic_big(void)
2805{
2806 struct comp_testsuite_params *ts_params = &testsuite_params;
2807 uint16_t i = 0;
2808 int ret;
2809 unsigned int j;
2810 const struct rte_compressdev_capabilities *capab;
2811 char *test_buffer = NULL;
2812
2813 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2814 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2815
2816 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2817 return -ENOTSUP;
2818
2819 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2820 return -ENOTSUP;
2821
2822 test_buffer = rte_malloc(NULL, BIG_DATA_TEST_SIZE, 0);
2823 if (test_buffer == NULL) {
2824 RTE_LOG(ERR, USER1,
2825 "Can't allocate buffer for big-data\n");
2826 return TEST_FAILED;
2827 }
2828
2829 struct interim_data_params int_data = {
2830 (const char * const *)&test_buffer,
2831 1,
2832 &i,
2833 &ts_params->def_comp_xform,
2834 &ts_params->def_decomp_xform,
2835 1
2836 };
2837
2838 struct test_data_params test_data = {
2839 .compress_state = RTE_COMP_OP_STATELESS,
2840 .decompress_state = RTE_COMP_OP_STATELESS,
2841 .buff_type = SGL_BOTH,
2842 .zlib_dir = ZLIB_DECOMPRESS,
2843 .out_of_space = 0,
2844 .big_data = 1,
2845 .overflow = OVERFLOW_DISABLED,
2846 .ratio = RATIO_DISABLED
2847 };
2848
2849 ts_params->def_comp_xform->compress.deflate.huffman =
2850 RTE_COMP_HUFFMAN_DYNAMIC;
2851
2852
2853 srand(BIG_DATA_TEST_SIZE);
2854 for (j = 0; j < BIG_DATA_TEST_SIZE - 1; ++j)
2855 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
2856 test_buffer[BIG_DATA_TEST_SIZE - 1] = 0;
2857
2858
2859 test_data.zlib_dir = ZLIB_DECOMPRESS;
2860 ret = test_deflate_comp_decomp(&int_data, &test_data);
2861 if (ret < 0)
2862 goto exit;
2863
2864
2865 test_data.zlib_dir = ZLIB_COMPRESS;
2866 ret = test_deflate_comp_decomp(&int_data, &test_data);
2867 if (ret < 0)
2868 goto exit;
2869
2870 ret = TEST_SUCCESS;
2871
2872exit:
2873 ts_params->def_comp_xform->compress.deflate.huffman =
2874 RTE_COMP_HUFFMAN_DEFAULT;
2875 rte_free(test_buffer);
2876 return ret;
2877}
2878
2879static int
2880test_compressdev_deflate_stateful_decomp(void)
2881{
2882 struct comp_testsuite_params *ts_params = &testsuite_params;
2883 int ret;
2884 uint16_t i;
2885 const struct rte_compressdev_capabilities *capab;
2886
2887 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2888 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2889
2890 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2891 return -ENOTSUP;
2892
2893 struct interim_data_params int_data = {
2894 &compress_test_bufs[0],
2895 1,
2896 &i,
2897 &ts_params->def_comp_xform,
2898 &ts_params->def_decomp_xform,
2899 1
2900 };
2901
2902 struct test_data_params test_data = {
2903 .compress_state = RTE_COMP_OP_STATELESS,
2904 .decompress_state = RTE_COMP_OP_STATEFUL,
2905 .buff_type = LB_BOTH,
2906 .zlib_dir = ZLIB_COMPRESS,
2907 .out_of_space = 0,
2908 .big_data = 0,
2909 .decompress_output_block_size = 2000,
2910 .decompress_steps_max = 4,
2911 .overflow = OVERFLOW_DISABLED,
2912 .ratio = RATIO_ENABLED
2913 };
2914
2915
2916 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2917 ret = TEST_FAILED;
2918 goto exit;
2919 }
2920
2921 if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2922
2923 test_data.buff_type = SGL_BOTH;
2924 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2925 ret = TEST_FAILED;
2926 goto exit;
2927 }
2928 }
2929
2930 ret = TEST_SUCCESS;
2931
2932exit:
2933 return ret;
2934}
2935
2936static int
2937test_compressdev_deflate_stateful_decomp_checksum(void)
2938{
2939 struct comp_testsuite_params *ts_params = &testsuite_params;
2940 int ret;
2941 uint16_t i;
2942 const struct rte_compressdev_capabilities *capab;
2943
2944 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2945 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2946
2947 if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2948 return -ENOTSUP;
2949
2950
2951 if (!(capab->comp_feature_flags &
2952 (RTE_COMP_FF_CRC32_CHECKSUM | RTE_COMP_FF_ADLER32_CHECKSUM |
2953 RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)))
2954 return -ENOTSUP;
2955
2956 struct rte_comp_xform *compress_xform =
2957 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2958 if (compress_xform == NULL) {
2959 RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2960 return TEST_FAILED;
2961 }
2962
2963 memcpy(compress_xform, ts_params->def_comp_xform,
2964 sizeof(struct rte_comp_xform));
2965
2966 struct rte_comp_xform *decompress_xform =
2967 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2968 if (decompress_xform == NULL) {
2969 RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2970 rte_free(compress_xform);
2971 return TEST_FAILED;
2972 }
2973
2974 memcpy(decompress_xform, ts_params->def_decomp_xform,
2975 sizeof(struct rte_comp_xform));
2976
2977 struct interim_data_params int_data = {
2978 &compress_test_bufs[0],
2979 1,
2980 &i,
2981 &compress_xform,
2982 &decompress_xform,
2983 1
2984 };
2985
2986 struct test_data_params test_data = {
2987 .compress_state = RTE_COMP_OP_STATELESS,
2988 .decompress_state = RTE_COMP_OP_STATEFUL,
2989 .buff_type = LB_BOTH,
2990 .zlib_dir = ZLIB_COMPRESS,
2991 .out_of_space = 0,
2992 .big_data = 0,
2993 .decompress_output_block_size = 2000,
2994 .decompress_steps_max = 4,
2995 .overflow = OVERFLOW_DISABLED,
2996 .ratio = RATIO_ENABLED
2997 };
2998
2999
3000 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) {
3001 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
3002 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
3003
3004 test_data.buff_type = LB_BOTH;
3005 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3006 ret = TEST_FAILED;
3007 goto exit;
3008 }
3009 if (capab->comp_feature_flags &
3010 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3011
3012 test_data.buff_type = SGL_BOTH;
3013 if (test_deflate_comp_decomp(&int_data,
3014 &test_data) < 0) {
3015 ret = TEST_FAILED;
3016 goto exit;
3017 }
3018 }
3019 }
3020
3021
3022 if (capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM) {
3023 compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3024 decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3025
3026 test_data.buff_type = LB_BOTH;
3027 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3028 ret = TEST_FAILED;
3029 goto exit;
3030 }
3031 if (capab->comp_feature_flags &
3032 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3033
3034 test_data.buff_type = SGL_BOTH;
3035 if (test_deflate_comp_decomp(&int_data,
3036 &test_data) < 0) {
3037 ret = TEST_FAILED;
3038 goto exit;
3039 }
3040 }
3041 }
3042
3043
3044 if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) {
3045 compress_xform->compress.chksum =
3046 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3047 decompress_xform->decompress.chksum =
3048 RTE_COMP_CHECKSUM_CRC32_ADLER32;
3049
3050 test_data.zlib_dir = ZLIB_NONE;
3051
3052 test_data.buff_type = LB_BOTH;
3053 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3054 ret = TEST_FAILED;
3055 goto exit;
3056 }
3057 if (capab->comp_feature_flags &
3058 RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3059
3060 test_data.buff_type = SGL_BOTH;
3061 if (test_deflate_comp_decomp(&int_data,
3062 &test_data) < 0) {
3063 ret = TEST_FAILED;
3064 goto exit;
3065 }
3066 }
3067 }
3068
3069 ret = TEST_SUCCESS;
3070
3071exit:
3072 rte_free(compress_xform);
3073 rte_free(decompress_xform);
3074 return ret;
3075}
3076
3077static const struct rte_memzone *
3078make_memzone(const char *name, size_t size)
3079{
3080 unsigned int socket_id = rte_socket_id();
3081 char mz_name[RTE_MEMZONE_NAMESIZE];
3082 const struct rte_memzone *memzone;
3083
3084 snprintf(mz_name, RTE_MEMZONE_NAMESIZE, "%s_%u", name, socket_id);
3085 memzone = rte_memzone_lookup(mz_name);
3086 if (memzone != NULL && memzone->len != size) {
3087 rte_memzone_free(memzone);
3088 memzone = NULL;
3089 }
3090 if (memzone == NULL) {
3091 memzone = rte_memzone_reserve_aligned(mz_name, size, socket_id,
3092 RTE_MEMZONE_IOVA_CONTIG, RTE_CACHE_LINE_SIZE);
3093 if (memzone == NULL)
3094 RTE_LOG(ERR, USER1, "Can't allocate memory zone %s",
3095 mz_name);
3096 }
3097 return memzone;
3098}
3099
3100static int
3101test_compressdev_external_mbufs(void)
3102{
3103 struct comp_testsuite_params *ts_params = &testsuite_params;
3104 size_t data_len = 0;
3105 uint16_t i;
3106 int ret = TEST_FAILED;
3107
3108 for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
3109 data_len = RTE_MAX(data_len, strlen(compress_test_bufs[i]) + 1);
3110
3111 struct interim_data_params int_data = {
3112 NULL,
3113 1,
3114 NULL,
3115 &ts_params->def_comp_xform,
3116 &ts_params->def_decomp_xform,
3117 1
3118 };
3119
3120 struct test_data_params test_data = {
3121 .compress_state = RTE_COMP_OP_STATELESS,
3122 .decompress_state = RTE_COMP_OP_STATELESS,
3123 .buff_type = LB_BOTH,
3124 .zlib_dir = ZLIB_DECOMPRESS,
3125 .out_of_space = 0,
3126 .big_data = 0,
3127 .use_external_mbufs = 1,
3128 .inbuf_data_size = data_len,
3129 .inbuf_memzone = make_memzone("inbuf", data_len),
3130 .compbuf_memzone = make_memzone("compbuf", data_len *
3131 COMPRESS_BUF_SIZE_RATIO),
3132 .uncompbuf_memzone = make_memzone("decompbuf", data_len),
3133 .overflow = OVERFLOW_DISABLED
3134 };
3135
3136 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3137
3138 data_len = strlen(compress_test_bufs[i]) + 1;
3139 rte_memcpy(test_data.inbuf_memzone->addr, compress_test_bufs[i],
3140 data_len);
3141 test_data.inbuf_data_size = data_len;
3142 int_data.buf_idx = &i;
3143
3144
3145 test_data.zlib_dir = ZLIB_DECOMPRESS;
3146 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3147 goto exit;
3148
3149
3150 test_data.zlib_dir = ZLIB_COMPRESS;
3151 if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3152 goto exit;
3153 }
3154
3155 ret = TEST_SUCCESS;
3156
3157exit:
3158 rte_memzone_free(test_data.inbuf_memzone);
3159 rte_memzone_free(test_data.compbuf_memzone);
3160 rte_memzone_free(test_data.uncompbuf_memzone);
3161 return ret;
3162}
3163
3164static int
3165test_compressdev_deflate_stateless_fixed_oos_recoverable(void)
3166{
3167 struct comp_testsuite_params *ts_params = &testsuite_params;
3168 uint16_t i;
3169 int ret;
3170 int comp_result;
3171 const struct rte_compressdev_capabilities *capab;
3172
3173 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3174 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3175
3176 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
3177 return -ENOTSUP;
3178
3179 struct rte_comp_xform *compress_xform =
3180 rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
3181
3182 if (compress_xform == NULL) {
3183 RTE_LOG(ERR, USER1,
3184 "Compress xform could not be created\n");
3185 ret = TEST_FAILED;
3186 goto exit;
3187 }
3188
3189 memcpy(compress_xform, ts_params->def_comp_xform,
3190 sizeof(struct rte_comp_xform));
3191 compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
3192
3193 struct interim_data_params int_data = {
3194 NULL,
3195 1,
3196 NULL,
3197 &compress_xform,
3198 &ts_params->def_decomp_xform,
3199 1
3200 };
3201
3202 struct test_data_params test_data = {
3203 .compress_state = RTE_COMP_OP_STATELESS,
3204 .decompress_state = RTE_COMP_OP_STATELESS,
3205 .buff_type = LB_BOTH,
3206 .zlib_dir = ZLIB_DECOMPRESS,
3207 .out_of_space = 0,
3208 .big_data = 0,
3209 .overflow = OVERFLOW_ENABLED,
3210 .ratio = RATIO_ENABLED
3211 };
3212
3213 for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3214 int_data.test_bufs = &compress_test_bufs[i];
3215 int_data.buf_idx = &i;
3216
3217
3218 test_data.zlib_dir = ZLIB_DECOMPRESS;
3219 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3220 if (comp_result < 0) {
3221 ret = TEST_FAILED;
3222 goto exit;
3223 } else if (comp_result > 0) {
3224 ret = -ENOTSUP;
3225 goto exit;
3226 }
3227
3228
3229 test_data.zlib_dir = ZLIB_COMPRESS;
3230 comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3231 if (comp_result < 0) {
3232 ret = TEST_FAILED;
3233 goto exit;
3234 } else if (comp_result > 0) {
3235 ret = -ENOTSUP;
3236 goto exit;
3237 }
3238 }
3239
3240 ret = TEST_SUCCESS;
3241
3242exit:
3243 rte_free(compress_xform);
3244 return ret;
3245}
3246
3247static int
3248test_compressdev_deflate_im_buffers_LB_1op(void)
3249{
3250 struct comp_testsuite_params *ts_params = &testsuite_params;
3251 uint16_t i = 0;
3252 int ret = TEST_SUCCESS;
3253 int j;
3254 const struct rte_compressdev_capabilities *capab;
3255 char *test_buffer = NULL;
3256
3257 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3258 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3259
3260 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3261 return -ENOTSUP;
3262
3263 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3264 return -ENOTSUP;
3265
3266 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3267 if (test_buffer == NULL) {
3268 RTE_LOG(ERR, USER1,
3269 "Can't allocate buffer for 'im buffer' test\n");
3270 return TEST_FAILED;
3271 }
3272
3273 struct interim_data_params int_data = {
3274 (const char * const *)&test_buffer,
3275 1,
3276 &i,
3277 &ts_params->def_comp_xform,
3278 &ts_params->def_decomp_xform,
3279 1
3280 };
3281
3282 struct test_data_params test_data = {
3283 .compress_state = RTE_COMP_OP_STATELESS,
3284 .decompress_state = RTE_COMP_OP_STATELESS,
3285
3286
3287
3288
3289
3290 .buff_type = LB_BOTH,
3291 .zlib_dir = ZLIB_NONE,
3292 .out_of_space = 0,
3293 .big_data = 1,
3294 .overflow = OVERFLOW_DISABLED,
3295 .ratio = RATIO_DISABLED
3296 };
3297
3298 ts_params->def_comp_xform->compress.deflate.huffman =
3299 RTE_COMP_HUFFMAN_DYNAMIC;
3300
3301
3302 srand(IM_BUF_DATA_TEST_SIZE_LB);
3303 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3304 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3305
3306
3307 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3308 ret = TEST_FAILED;
3309 goto end;
3310 }
3311
3312end:
3313 ts_params->def_comp_xform->compress.deflate.huffman =
3314 RTE_COMP_HUFFMAN_DEFAULT;
3315 rte_free(test_buffer);
3316 return ret;
3317}
3318
3319static int
3320test_compressdev_deflate_im_buffers_LB_2ops_first(void)
3321{
3322 struct comp_testsuite_params *ts_params = &testsuite_params;
3323 uint16_t i = 0;
3324 int ret = TEST_SUCCESS;
3325 int j;
3326 const struct rte_compressdev_capabilities *capab;
3327 char *test_buffer = NULL;
3328 const char *test_buffers[2];
3329
3330 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3331 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3332
3333 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3334 return -ENOTSUP;
3335
3336 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3337 return -ENOTSUP;
3338
3339 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3340 if (test_buffer == NULL) {
3341 RTE_LOG(ERR, USER1,
3342 "Can't allocate buffer for 'im buffer' test\n");
3343 return TEST_FAILED;
3344 }
3345
3346 test_buffers[0] = test_buffer;
3347 test_buffers[1] = compress_test_bufs[0];
3348
3349 struct interim_data_params int_data = {
3350 (const char * const *)test_buffers,
3351 2,
3352 &i,
3353 &ts_params->def_comp_xform,
3354 &ts_params->def_decomp_xform,
3355 1
3356 };
3357
3358 struct test_data_params test_data = {
3359 .compress_state = RTE_COMP_OP_STATELESS,
3360 .decompress_state = RTE_COMP_OP_STATELESS,
3361 .buff_type = LB_BOTH,
3362 .zlib_dir = ZLIB_NONE,
3363 .out_of_space = 0,
3364 .big_data = 1,
3365 .overflow = OVERFLOW_DISABLED,
3366 .ratio = RATIO_DISABLED
3367 };
3368
3369 ts_params->def_comp_xform->compress.deflate.huffman =
3370 RTE_COMP_HUFFMAN_DYNAMIC;
3371
3372
3373 srand(IM_BUF_DATA_TEST_SIZE_LB);
3374 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3375 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3376
3377
3378 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3379 ret = TEST_FAILED;
3380 goto end;
3381 }
3382
3383end:
3384 ts_params->def_comp_xform->compress.deflate.huffman =
3385 RTE_COMP_HUFFMAN_DEFAULT;
3386 rte_free(test_buffer);
3387 return ret;
3388}
3389
3390static int
3391test_compressdev_deflate_im_buffers_LB_2ops_second(void)
3392{
3393 struct comp_testsuite_params *ts_params = &testsuite_params;
3394 uint16_t i = 0;
3395 int ret = TEST_SUCCESS;
3396 int j;
3397 const struct rte_compressdev_capabilities *capab;
3398 char *test_buffer = NULL;
3399 const char *test_buffers[2];
3400
3401 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3402 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3403
3404 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3405 return -ENOTSUP;
3406
3407 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3408 return -ENOTSUP;
3409
3410 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3411 if (test_buffer == NULL) {
3412 RTE_LOG(ERR, USER1,
3413 "Can't allocate buffer for 'im buffer' test\n");
3414 return TEST_FAILED;
3415 }
3416
3417 test_buffers[0] = compress_test_bufs[0];
3418 test_buffers[1] = test_buffer;
3419
3420 struct interim_data_params int_data = {
3421 (const char * const *)test_buffers,
3422 2,
3423 &i,
3424 &ts_params->def_comp_xform,
3425 &ts_params->def_decomp_xform,
3426 1
3427 };
3428
3429 struct test_data_params test_data = {
3430 .compress_state = RTE_COMP_OP_STATELESS,
3431 .decompress_state = RTE_COMP_OP_STATELESS,
3432 .buff_type = LB_BOTH,
3433 .zlib_dir = ZLIB_NONE,
3434 .out_of_space = 0,
3435 .big_data = 1,
3436 .overflow = OVERFLOW_DISABLED,
3437 .ratio = RATIO_DISABLED
3438 };
3439
3440 ts_params->def_comp_xform->compress.deflate.huffman =
3441 RTE_COMP_HUFFMAN_DYNAMIC;
3442
3443
3444 srand(IM_BUF_DATA_TEST_SIZE_LB);
3445 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3446 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3447
3448
3449 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3450 ret = TEST_FAILED;
3451 goto end;
3452 }
3453
3454end:
3455 ts_params->def_comp_xform->compress.deflate.huffman =
3456 RTE_COMP_HUFFMAN_DEFAULT;
3457 rte_free(test_buffer);
3458 return ret;
3459}
3460
3461static int
3462test_compressdev_deflate_im_buffers_LB_3ops(void)
3463{
3464 struct comp_testsuite_params *ts_params = &testsuite_params;
3465 uint16_t i = 0;
3466 int ret = TEST_SUCCESS;
3467 int j;
3468 const struct rte_compressdev_capabilities *capab;
3469 char *test_buffer = NULL;
3470 const char *test_buffers[3];
3471
3472 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3473 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3474
3475 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3476 return -ENOTSUP;
3477
3478 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3479 return -ENOTSUP;
3480
3481 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3482 if (test_buffer == NULL) {
3483 RTE_LOG(ERR, USER1,
3484 "Can't allocate buffer for 'im buffer' test\n");
3485 return TEST_FAILED;
3486 }
3487
3488 test_buffers[0] = compress_test_bufs[0];
3489 test_buffers[1] = test_buffer;
3490 test_buffers[2] = compress_test_bufs[1];
3491
3492 struct interim_data_params int_data = {
3493 (const char * const *)test_buffers,
3494 3,
3495 &i,
3496 &ts_params->def_comp_xform,
3497 &ts_params->def_decomp_xform,
3498 1
3499 };
3500
3501 struct test_data_params test_data = {
3502 .compress_state = RTE_COMP_OP_STATELESS,
3503 .decompress_state = RTE_COMP_OP_STATELESS,
3504 .buff_type = LB_BOTH,
3505 .zlib_dir = ZLIB_NONE,
3506 .out_of_space = 0,
3507 .big_data = 1,
3508 .overflow = OVERFLOW_DISABLED,
3509 .ratio = RATIO_DISABLED
3510 };
3511
3512 ts_params->def_comp_xform->compress.deflate.huffman =
3513 RTE_COMP_HUFFMAN_DYNAMIC;
3514
3515
3516 srand(IM_BUF_DATA_TEST_SIZE_LB);
3517 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3518 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3519
3520
3521 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3522 ret = TEST_FAILED;
3523 goto end;
3524 }
3525
3526end:
3527 ts_params->def_comp_xform->compress.deflate.huffman =
3528 RTE_COMP_HUFFMAN_DEFAULT;
3529 rte_free(test_buffer);
3530 return ret;
3531}
3532
3533static int
3534test_compressdev_deflate_im_buffers_LB_4ops(void)
3535{
3536 struct comp_testsuite_params *ts_params = &testsuite_params;
3537 uint16_t i = 0;
3538 int ret = TEST_SUCCESS;
3539 int j;
3540 const struct rte_compressdev_capabilities *capab;
3541 char *test_buffer = NULL;
3542 const char *test_buffers[4];
3543
3544 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3545 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3546
3547 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3548 return -ENOTSUP;
3549
3550 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3551 return -ENOTSUP;
3552
3553 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3554 if (test_buffer == NULL) {
3555 RTE_LOG(ERR, USER1,
3556 "Can't allocate buffer for 'im buffer' test\n");
3557 return TEST_FAILED;
3558 }
3559
3560 test_buffers[0] = compress_test_bufs[0];
3561 test_buffers[1] = test_buffer;
3562 test_buffers[2] = compress_test_bufs[1];
3563 test_buffers[3] = test_buffer;
3564
3565 struct interim_data_params int_data = {
3566 (const char * const *)test_buffers,
3567 4,
3568 &i,
3569 &ts_params->def_comp_xform,
3570 &ts_params->def_decomp_xform,
3571 1
3572 };
3573
3574 struct test_data_params test_data = {
3575 .compress_state = RTE_COMP_OP_STATELESS,
3576 .decompress_state = RTE_COMP_OP_STATELESS,
3577 .buff_type = LB_BOTH,
3578 .zlib_dir = ZLIB_NONE,
3579 .out_of_space = 0,
3580 .big_data = 1,
3581 .overflow = OVERFLOW_DISABLED,
3582 .ratio = RATIO_DISABLED
3583 };
3584
3585 ts_params->def_comp_xform->compress.deflate.huffman =
3586 RTE_COMP_HUFFMAN_DYNAMIC;
3587
3588
3589 srand(IM_BUF_DATA_TEST_SIZE_LB);
3590 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3591 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3592
3593
3594 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3595 ret = TEST_FAILED;
3596 goto end;
3597 }
3598
3599end:
3600 ts_params->def_comp_xform->compress.deflate.huffman =
3601 RTE_COMP_HUFFMAN_DEFAULT;
3602 rte_free(test_buffer);
3603 return ret;
3604}
3605
3606
3607static int
3608test_compressdev_deflate_im_buffers_SGL_1op(void)
3609{
3610 struct comp_testsuite_params *ts_params = &testsuite_params;
3611 uint16_t i = 0;
3612 int ret = TEST_SUCCESS;
3613 int j;
3614 const struct rte_compressdev_capabilities *capab;
3615 char *test_buffer = NULL;
3616
3617 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3618 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3619
3620 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3621 return -ENOTSUP;
3622
3623 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3624 return -ENOTSUP;
3625
3626 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3627 if (test_buffer == NULL) {
3628 RTE_LOG(ERR, USER1,
3629 "Can't allocate buffer for big-data\n");
3630 return TEST_FAILED;
3631 }
3632
3633 struct interim_data_params int_data = {
3634 (const char * const *)&test_buffer,
3635 1,
3636 &i,
3637 &ts_params->def_comp_xform,
3638 &ts_params->def_decomp_xform,
3639 1
3640 };
3641
3642 struct test_data_params test_data = {
3643 .compress_state = RTE_COMP_OP_STATELESS,
3644 .decompress_state = RTE_COMP_OP_STATELESS,
3645 .buff_type = SGL_BOTH,
3646 .zlib_dir = ZLIB_NONE,
3647 .out_of_space = 0,
3648 .big_data = 1,
3649 .overflow = OVERFLOW_DISABLED,
3650 .ratio = RATIO_DISABLED
3651 };
3652
3653 ts_params->def_comp_xform->compress.deflate.huffman =
3654 RTE_COMP_HUFFMAN_DYNAMIC;
3655
3656
3657 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3658 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3659 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3660
3661
3662 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3663 ret = TEST_FAILED;
3664 goto end;
3665 }
3666
3667end:
3668 ts_params->def_comp_xform->compress.deflate.huffman =
3669 RTE_COMP_HUFFMAN_DEFAULT;
3670 rte_free(test_buffer);
3671 return ret;
3672}
3673
3674static int
3675test_compressdev_deflate_im_buffers_SGL_2ops_first(void)
3676{
3677 struct comp_testsuite_params *ts_params = &testsuite_params;
3678 uint16_t i = 0;
3679 int ret = TEST_SUCCESS;
3680 int j;
3681 const struct rte_compressdev_capabilities *capab;
3682 char *test_buffer = NULL;
3683 const char *test_buffers[2];
3684
3685 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3686 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3687
3688 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3689 return -ENOTSUP;
3690
3691 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3692 return -ENOTSUP;
3693
3694 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3695 if (test_buffer == NULL) {
3696 RTE_LOG(ERR, USER1,
3697 "Can't allocate buffer for big-data\n");
3698 return TEST_FAILED;
3699 }
3700
3701 test_buffers[0] = test_buffer;
3702 test_buffers[1] = compress_test_bufs[0];
3703
3704 struct interim_data_params int_data = {
3705 (const char * const *)test_buffers,
3706 2,
3707 &i,
3708 &ts_params->def_comp_xform,
3709 &ts_params->def_decomp_xform,
3710 1
3711 };
3712
3713 struct test_data_params test_data = {
3714 .compress_state = RTE_COMP_OP_STATELESS,
3715 .decompress_state = RTE_COMP_OP_STATELESS,
3716 .buff_type = SGL_BOTH,
3717 .zlib_dir = ZLIB_NONE,
3718 .out_of_space = 0,
3719 .big_data = 1,
3720 .overflow = OVERFLOW_DISABLED,
3721 .ratio = RATIO_DISABLED
3722 };
3723
3724 ts_params->def_comp_xform->compress.deflate.huffman =
3725 RTE_COMP_HUFFMAN_DYNAMIC;
3726
3727
3728 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3729 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3730 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3731
3732
3733 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3734 ret = TEST_FAILED;
3735 goto end;
3736 }
3737
3738end:
3739 ts_params->def_comp_xform->compress.deflate.huffman =
3740 RTE_COMP_HUFFMAN_DEFAULT;
3741 rte_free(test_buffer);
3742 return ret;
3743}
3744
3745static int
3746test_compressdev_deflate_im_buffers_SGL_2ops_second(void)
3747{
3748 struct comp_testsuite_params *ts_params = &testsuite_params;
3749 uint16_t i = 0;
3750 int ret = TEST_SUCCESS;
3751 int j;
3752 const struct rte_compressdev_capabilities *capab;
3753 char *test_buffer = NULL;
3754 const char *test_buffers[2];
3755
3756 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3757 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3758
3759 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3760 return -ENOTSUP;
3761
3762 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3763 return -ENOTSUP;
3764
3765 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3766 if (test_buffer == NULL) {
3767 RTE_LOG(ERR, USER1,
3768 "Can't allocate buffer for big-data\n");
3769 return TEST_FAILED;
3770 }
3771
3772 test_buffers[0] = compress_test_bufs[0];
3773 test_buffers[1] = test_buffer;
3774
3775 struct interim_data_params int_data = {
3776 (const char * const *)test_buffers,
3777 2,
3778 &i,
3779 &ts_params->def_comp_xform,
3780 &ts_params->def_decomp_xform,
3781 1
3782 };
3783
3784 struct test_data_params test_data = {
3785 .compress_state = RTE_COMP_OP_STATELESS,
3786 .decompress_state = RTE_COMP_OP_STATELESS,
3787 .buff_type = SGL_BOTH,
3788 .zlib_dir = ZLIB_NONE,
3789 .out_of_space = 0,
3790 .big_data = 1,
3791 .overflow = OVERFLOW_DISABLED,
3792 .ratio = RATIO_DISABLED
3793 };
3794
3795 ts_params->def_comp_xform->compress.deflate.huffman =
3796 RTE_COMP_HUFFMAN_DYNAMIC;
3797
3798
3799 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3800 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3801 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3802
3803
3804 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3805 ret = TEST_FAILED;
3806 goto end;
3807 }
3808
3809end:
3810 ts_params->def_comp_xform->compress.deflate.huffman =
3811 RTE_COMP_HUFFMAN_DEFAULT;
3812 rte_free(test_buffer);
3813 return ret;
3814}
3815
3816static int
3817test_compressdev_deflate_im_buffers_SGL_3ops(void)
3818{
3819 struct comp_testsuite_params *ts_params = &testsuite_params;
3820 uint16_t i = 0;
3821 int ret = TEST_SUCCESS;
3822 int j;
3823 const struct rte_compressdev_capabilities *capab;
3824 char *test_buffer = NULL;
3825 const char *test_buffers[3];
3826
3827 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3828 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3829
3830 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3831 return -ENOTSUP;
3832
3833 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3834 return -ENOTSUP;
3835
3836 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3837 if (test_buffer == NULL) {
3838 RTE_LOG(ERR, USER1,
3839 "Can't allocate buffer for big-data\n");
3840 return TEST_FAILED;
3841 }
3842
3843 test_buffers[0] = compress_test_bufs[0];
3844 test_buffers[1] = test_buffer;
3845 test_buffers[2] = compress_test_bufs[1];
3846
3847 struct interim_data_params int_data = {
3848 (const char * const *)test_buffers,
3849 3,
3850 &i,
3851 &ts_params->def_comp_xform,
3852 &ts_params->def_decomp_xform,
3853 1
3854 };
3855
3856 struct test_data_params test_data = {
3857 .compress_state = RTE_COMP_OP_STATELESS,
3858 .decompress_state = RTE_COMP_OP_STATELESS,
3859 .buff_type = SGL_BOTH,
3860 .zlib_dir = ZLIB_NONE,
3861 .out_of_space = 0,
3862 .big_data = 1,
3863 .overflow = OVERFLOW_DISABLED,
3864 .ratio = RATIO_DISABLED
3865 };
3866
3867 ts_params->def_comp_xform->compress.deflate.huffman =
3868 RTE_COMP_HUFFMAN_DYNAMIC;
3869
3870
3871 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3872 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3873 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3874
3875
3876 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3877 ret = TEST_FAILED;
3878 goto end;
3879 }
3880
3881end:
3882 ts_params->def_comp_xform->compress.deflate.huffman =
3883 RTE_COMP_HUFFMAN_DEFAULT;
3884 rte_free(test_buffer);
3885 return ret;
3886}
3887
3888
3889static int
3890test_compressdev_deflate_im_buffers_SGL_4ops(void)
3891{
3892 struct comp_testsuite_params *ts_params = &testsuite_params;
3893 uint16_t i = 0;
3894 int ret = TEST_SUCCESS;
3895 int j;
3896 const struct rte_compressdev_capabilities *capab;
3897 char *test_buffer = NULL;
3898 const char *test_buffers[4];
3899
3900 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3901 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3902
3903 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3904 return -ENOTSUP;
3905
3906 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3907 return -ENOTSUP;
3908
3909 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3910 if (test_buffer == NULL) {
3911 RTE_LOG(ERR, USER1,
3912 "Can't allocate buffer for big-data\n");
3913 return TEST_FAILED;
3914 }
3915
3916 test_buffers[0] = compress_test_bufs[0];
3917 test_buffers[1] = test_buffer;
3918 test_buffers[2] = compress_test_bufs[1];
3919 test_buffers[3] = test_buffer;
3920
3921 struct interim_data_params int_data = {
3922 (const char * const *)test_buffers,
3923 4,
3924 &i,
3925 &ts_params->def_comp_xform,
3926 &ts_params->def_decomp_xform,
3927 1
3928 };
3929
3930 struct test_data_params test_data = {
3931 .compress_state = RTE_COMP_OP_STATELESS,
3932 .decompress_state = RTE_COMP_OP_STATELESS,
3933 .buff_type = SGL_BOTH,
3934 .zlib_dir = ZLIB_NONE,
3935 .out_of_space = 0,
3936 .big_data = 1,
3937 .overflow = OVERFLOW_DISABLED,
3938 .ratio = RATIO_DISABLED
3939 };
3940
3941 ts_params->def_comp_xform->compress.deflate.huffman =
3942 RTE_COMP_HUFFMAN_DYNAMIC;
3943
3944
3945 srand(IM_BUF_DATA_TEST_SIZE_SGL);
3946 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3947 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3948
3949
3950 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3951 ret = TEST_FAILED;
3952 goto end;
3953 }
3954
3955end:
3956 ts_params->def_comp_xform->compress.deflate.huffman =
3957 RTE_COMP_HUFFMAN_DEFAULT;
3958 rte_free(test_buffer);
3959 return ret;
3960}
3961
3962static int
3963test_compressdev_deflate_im_buffers_SGL_over_1op(void)
3964{
3965 struct comp_testsuite_params *ts_params = &testsuite_params;
3966 uint16_t i = 0;
3967 int ret = TEST_SUCCESS;
3968 int j;
3969 const struct rte_compressdev_capabilities *capab;
3970 char *test_buffer = NULL;
3971
3972 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
3973
3974 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3975 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3976
3977 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3978 return -ENOTSUP;
3979
3980 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3981 return -ENOTSUP;
3982
3983 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
3984 if (test_buffer == NULL) {
3985 RTE_LOG(ERR, USER1,
3986 "Can't allocate buffer for big-data\n");
3987 return TEST_FAILED;
3988 }
3989
3990 struct interim_data_params int_data = {
3991 (const char * const *)&test_buffer,
3992 1,
3993 &i,
3994 &ts_params->def_comp_xform,
3995 &ts_params->def_decomp_xform,
3996 1
3997 };
3998
3999 struct test_data_params test_data = {
4000 .compress_state = RTE_COMP_OP_STATELESS,
4001 .decompress_state = RTE_COMP_OP_STATELESS,
4002 .buff_type = SGL_BOTH,
4003 .zlib_dir = ZLIB_NONE,
4004 .out_of_space = 0,
4005 .big_data = 1,
4006 .overflow = OVERFLOW_DISABLED,
4007 .ratio = RATIO_DISABLED
4008 };
4009
4010 ts_params->def_comp_xform->compress.deflate.huffman =
4011 RTE_COMP_HUFFMAN_DYNAMIC;
4012
4013
4014 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4015 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4016 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4017
4018
4019 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4020 ret = TEST_SUCCESS;
4021 goto end;
4022 }
4023
4024end:
4025 ts_params->def_comp_xform->compress.deflate.huffman =
4026 RTE_COMP_HUFFMAN_DEFAULT;
4027 rte_free(test_buffer);
4028
4029 return ret;
4030}
4031
4032
4033static int
4034test_compressdev_deflate_im_buffers_SGL_over_2ops_first(void)
4035{
4036 struct comp_testsuite_params *ts_params = &testsuite_params;
4037 uint16_t i = 0;
4038 int ret = TEST_SUCCESS;
4039 int j;
4040 const struct rte_compressdev_capabilities *capab;
4041 char *test_buffer = NULL;
4042 const char *test_buffers[2];
4043
4044 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4045
4046 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4047 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4048
4049 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4050 return -ENOTSUP;
4051
4052 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4053 return -ENOTSUP;
4054
4055 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4056 if (test_buffer == NULL) {
4057 RTE_LOG(ERR, USER1,
4058 "Can't allocate buffer for big-data\n");
4059 return TEST_FAILED;
4060 }
4061
4062 test_buffers[0] = test_buffer;
4063 test_buffers[1] = compress_test_bufs[0];
4064
4065 struct interim_data_params int_data = {
4066 (const char * const *)test_buffers,
4067 2,
4068 &i,
4069 &ts_params->def_comp_xform,
4070 &ts_params->def_decomp_xform,
4071 1
4072 };
4073
4074 struct test_data_params test_data = {
4075 .compress_state = RTE_COMP_OP_STATELESS,
4076 .decompress_state = RTE_COMP_OP_STATELESS,
4077 .buff_type = SGL_BOTH,
4078 .zlib_dir = ZLIB_NONE,
4079 .out_of_space = 0,
4080 .big_data = 1,
4081 .overflow = OVERFLOW_DISABLED,
4082 .ratio = RATIO_DISABLED
4083 };
4084
4085 ts_params->def_comp_xform->compress.deflate.huffman =
4086 RTE_COMP_HUFFMAN_DYNAMIC;
4087
4088
4089 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4090 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4091 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4092
4093
4094 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4095 ret = TEST_SUCCESS;
4096 goto end;
4097 }
4098
4099end:
4100 ts_params->def_comp_xform->compress.deflate.huffman =
4101 RTE_COMP_HUFFMAN_DEFAULT;
4102 rte_free(test_buffer);
4103 return ret;
4104}
4105
4106static int
4107test_compressdev_deflate_im_buffers_SGL_over_2ops_second(void)
4108{
4109 struct comp_testsuite_params *ts_params = &testsuite_params;
4110 uint16_t i = 0;
4111 int ret = TEST_SUCCESS;
4112 int j;
4113 const struct rte_compressdev_capabilities *capab;
4114 char *test_buffer = NULL;
4115 const char *test_buffers[2];
4116
4117 RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4118
4119 capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4120 TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4121
4122 if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4123 return -ENOTSUP;
4124
4125 if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4126 return -ENOTSUP;
4127
4128 test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4129 if (test_buffer == NULL) {
4130 RTE_LOG(ERR, USER1,
4131 "Can't allocate buffer for big-data\n");
4132 return TEST_FAILED;
4133 }
4134
4135 test_buffers[0] = compress_test_bufs[0];
4136 test_buffers[1] = test_buffer;
4137
4138 struct interim_data_params int_data = {
4139 (const char * const *)test_buffers,
4140 2,
4141 &i,
4142 &ts_params->def_comp_xform,
4143 &ts_params->def_decomp_xform,
4144 1
4145 };
4146
4147 struct test_data_params test_data = {
4148 .compress_state = RTE_COMP_OP_STATELESS,
4149 .decompress_state = RTE_COMP_OP_STATELESS,
4150 .buff_type = SGL_BOTH,
4151 .zlib_dir = ZLIB_NONE,
4152 .out_of_space = 0,
4153 .big_data = 1,
4154 .overflow = OVERFLOW_DISABLED,
4155 .ratio = RATIO_DISABLED
4156 };
4157
4158 ts_params->def_comp_xform->compress.deflate.huffman =
4159 RTE_COMP_HUFFMAN_DYNAMIC;
4160
4161
4162 srand(IM_BUF_DATA_TEST_SIZE_OVER);
4163 for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4164 test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4165
4166
4167 if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4168 ret = TEST_SUCCESS;
4169 goto end;
4170 }
4171
4172end:
4173 ts_params->def_comp_xform->compress.deflate.huffman =
4174 RTE_COMP_HUFFMAN_DEFAULT;
4175 rte_free(test_buffer);
4176 return ret;
4177}
4178
/*
 * Registry of all compressdev unit tests. Each case runs between
 * generic_ut_setup/generic_ut_teardown, except the invalid-configuration
 * check which needs no device setup.
 */
static struct unit_test_suite compressdev_testsuite  = {
	.suite_name = "compressdev unit test suite",
	.setup = testsuite_setup,
	.teardown = testsuite_teardown,
	.unit_test_cases = {
		TEST_CASE_ST(NULL, NULL,
			test_compressdev_invalid_configuration),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_fixed),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_dynamic),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_dynamic_big),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_op),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_level),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_xform),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_sgl),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_checksum),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_out_of_space_buffer),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateful_decomp),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateful_decomp_checksum),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_external_mbufs),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_fixed_oos_recoverable),

		/* Internal (intermediate) buffer tests — linear buffers */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_1op),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_2ops_first),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_2ops_second),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_3ops),

		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_4ops),
		/* Internal (intermediate) buffer tests — scatter-gather lists */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_1op),

		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_2ops_first),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_2ops_second),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_3ops),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_4ops),

		/*
		 * Negative tests: SGL inputs larger than the PMD's
		 * intermediate-buffer capacity; graceful errors expected.
		 */




		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_over_1op),






		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_over_2ops_first),

		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_over_2ops_second),

		TEST_CASES_END()
	}
};
4263
/*
 * Test-framework entry point: runs the whole compressdev suite and
 * returns its aggregate result.
 */
static int
test_compressdev(void)
{
	return unit_test_suite_runner(&compressdev_testsuite);
}
4269
4270REGISTER_TEST_COMMAND(compressdev_autotest, test_compressdev);
4271