1
2
3
4
5#include "ice_acl.h"
6#include "ice_flow.h"
7
8
/* TCAM slice (relative to the table) holding ACL entry index 'e'; each
 * slice is ICE_AQC_ACL_TCAM_DEPTH entries deep.
 */
#define ICE_ACL_TBL_TCAM_IDX(e) ((e) / ICE_AQC_ACL_TCAM_DEPTH)

/* Offset of ACL entry index 'e' within its TCAM slice */
#define ICE_ACL_TBL_TCAM_ENTRY_IDX(e) ((e) % ICE_AQC_ACL_TCAM_DEPTH)

/* Sentinel returned when no scenario entry index can be assigned */
#define ICE_ACL_SCEN_ENTRY_INVAL 0xFFFF
15
16
17
18
19
20
21
22static void ice_acl_init_entry(struct ice_acl_scen *scen)
23{
24
25
26
27
28 scen->first_idx[ICE_ACL_PRIO_LOW] = scen->num_entry - 1;
29 scen->first_idx[ICE_ACL_PRIO_NORMAL] = scen->num_entry -
30 scen->num_entry / 4 - 1;
31 scen->first_idx[ICE_ACL_PRIO_HIGH] = 0;
32
33 scen->last_idx[ICE_ACL_PRIO_LOW] = scen->num_entry -
34 scen->num_entry / 4;
35 scen->last_idx[ICE_ACL_PRIO_NORMAL] = scen->num_entry / 4;
36 scen->last_idx[ICE_ACL_PRIO_HIGH] = scen->num_entry / 4 - 1;
37}
38
39
40
41
42
43
44
45
46
47
48
/**
 * ice_acl_scen_assign_entry_idx - Assign a free entry index for a priority
 * @scen: pointer to the scenario struct
 * @prio: priority of the entry being added
 *
 * Scan the scenario's entry bitmap between first_idx[prio] and
 * last_idx[prio] (inclusive) and claim the first free slot found. The scan
 * direction depends on whether first_idx is below or above last_idx for
 * this priority (the ranges are set up by ice_acl_init_entry()).
 *
 * Return: the claimed entry index, or ICE_ACL_SCEN_ENTRY_INVAL if the
 * priority is out of range or the region is full.
 */
static u16
ice_acl_scen_assign_entry_idx(struct ice_acl_scen *scen,
			      enum ice_acl_entry_prio prio)
{
	u16 first_idx, last_idx, i;
	s8 step;

	if (prio >= ICE_ACL_MAX_PRIO)
		return ICE_ACL_SCEN_ENTRY_INVAL;

	first_idx = scen->first_idx[prio];
	last_idx = scen->last_idx[prio];
	step = first_idx <= last_idx ? 1 : -1;

	/* Inclusive walk of the region; the loop stops one step past
	 * last_idx (integer promotion keeps "last_idx + step" well-defined).
	 */
	for (i = first_idx; i != last_idx + step; i += step)
		if (!ice_test_and_set_bit(i, scen->entry_bitmap))
			return i;

	return ICE_ACL_SCEN_ENTRY_INVAL;
}
69
70
71
72
73
74
75
76
77static enum ice_status
78ice_acl_scen_free_entry_idx(struct ice_acl_scen *scen, u16 idx)
79{
80 if (idx >= scen->num_entry)
81 return ICE_ERR_MAX_LIMIT;
82
83 if (!ice_test_and_clear_bit(idx, scen->entry_bitmap))
84 return ICE_ERR_DOES_NOT_EXIST;
85
86 return ICE_SUCCESS;
87}
88
89
90
91
92
93
94
95
96
97
98static u16 ice_acl_tbl_calc_end_idx(u16 start, u16 num_entries, u16 width)
99{
100 u16 end_idx, add_entries = 0;
101
102 end_idx = start + (num_entries - 1);
103
104
105 if (width > 1) {
106 u16 num_stack_level;
107
108
109 num_stack_level = (start % ICE_AQC_ACL_TCAM_DEPTH) +
110 num_entries;
111 num_stack_level = DIVIDE_AND_ROUND_UP(num_stack_level,
112 ICE_AQC_ACL_TCAM_DEPTH);
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131 add_entries = (width - 1) * num_stack_level *
132 ICE_AQC_ACL_TCAM_DEPTH;
133 }
134
135 return end_idx + add_entries;
136}
137
138
139
140
141
142
143
/**
 * ice_acl_init_tbl - Initialize the table's TCAM entries and action pairs
 * @hw: pointer to the hardware structure
 *
 * Program every TCAM entry owned by the table, and every action pair of
 * each valid action memory bank, with zeroed buffers.
 *
 * Return: ICE_SUCCESS on success, ICE_ERR_CFG if no ACL table exists, or
 * the status of the first failing admin-queue call.
 */
static enum ice_status ice_acl_init_tbl(struct ice_hw *hw)
{
	struct ice_aqc_actpair act_buf;
	struct ice_aqc_acl_data buf;
	enum ice_status status = ICE_SUCCESS;
	struct ice_acl_tbl *tbl;
	u8 tcam_idx, i;
	u16 idx;

	tbl = hw->acl_tbl;
	if (!tbl)
		return ICE_ERR_CFG;

	ice_memset(&buf, 0, sizeof(buf), ICE_NONDMA_MEM);
	ice_memset(&act_buf, 0, sizeof(act_buf), ICE_NONDMA_MEM);

	/* Walk every (tcam, entry) pair from (first_tcam, first_entry)
	 * through (last_tcam, last_entry) inclusive, writing the zeroed
	 * key buffer to each entry.
	 */
	tcam_idx = tbl->first_tcam;
	idx = tbl->first_entry;
	while (tcam_idx < tbl->last_tcam ||
	       (tcam_idx == tbl->last_tcam && idx <= tbl->last_entry)) {
		/* The same zeroed buffer is reused for every entry */
		status = ice_aq_program_acl_entry(hw, tcam_idx, idx, &buf,
						  NULL);
		if (status)
			return status;

		/* Wrap to the next TCAM once this one's range is done */
		if (++idx > tbl->last_entry) {
			tcam_idx++;
			idx = tbl->first_entry;
		}
	}

	/* Zero the action pairs of every valid action memory bank */
	for (i = 0; i < ICE_AQC_MAX_ACTION_MEMORIES; i++) {
		u16 act_entry_idx, start, end;

		if (tbl->act_mems[i].act_mem == ICE_ACL_ACT_PAIR_MEM_INVAL)
			continue;

		start = tbl->first_entry;
		end = tbl->last_entry;

		for (act_entry_idx = start; act_entry_idx <= end;
		     act_entry_idx++) {
			/* Write a zeroed action pair at each index */
			status = ice_aq_program_actpair(hw, i, act_entry_idx,
							&act_buf, NULL);
			if (status)
				return status;
		}
	}

	return status;
}
199
200
201
202
203
204
205
206
207
208
209
210
211static void
212ice_acl_assign_act_mems_to_tcam(struct ice_acl_tbl *tbl, u8 cur_tcam,
213 u8 *cur_mem_idx, u8 num_mem)
214{
215 u8 mem_cnt;
216
217 for (mem_cnt = 0;
218 *cur_mem_idx < ICE_AQC_MAX_ACTION_MEMORIES && mem_cnt < num_mem;
219 (*cur_mem_idx)++) {
220 struct ice_acl_act_mem *p_mem = &tbl->act_mems[*cur_mem_idx];
221
222 if (p_mem->act_mem == ICE_ACL_ACT_PAIR_MEM_INVAL)
223 continue;
224
225 p_mem->member_of_tcam = cur_tcam;
226
227 mem_cnt++;
228 }
229}
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
/**
 * ice_acl_divide_act_mems_to_tcams - Distribute action memories over TCAMs
 * @tbl: pointer to the ACL table structure
 *
 * Spread the table's action memory banks across its TCAM slices. The
 * per-entry action pairs are divided evenly among the cascaded slices of
 * each row; when the count does not divide evenly, the first
 * (entry_act_pairs % num_cscd) slices of each row receive one extra bank.
 */
static void ice_acl_divide_act_mems_to_tcams(struct ice_acl_tbl *tbl)
{
	u16 num_cscd, stack_level, stack_idx, min_act_mem;
	u8 tcam_idx = tbl->first_tcam;
	u16 max_idx_to_get_extra;
	u8 mem_idx = 0;

	/* Number of stacked rows needed for the table's depth */
	stack_level = DIVIDE_AND_ROUND_UP(tbl->info.depth,
					  ICE_AQC_ACL_TCAM_DEPTH);

	/* Number of cascaded slices needed for the table's width */
	num_cscd = DIVIDE_AND_ROUND_UP(tbl->info.width,
				       ICE_AQC_ACL_KEY_WIDTH_BYTES);

	/* Even share per slice, plus how many slices get one extra */
	min_act_mem = tbl->info.entry_act_pairs / num_cscd;
	max_idx_to_get_extra = tbl->info.entry_act_pairs % num_cscd;

	for (stack_idx = 0; stack_idx < stack_level; stack_idx++) {
		u16 i;

		for (i = 0; i < num_cscd; i++) {
			u8 total_act_mem = min_act_mem;

			if (i < max_idx_to_get_extra)
				total_act_mem++;

			ice_acl_assign_act_mems_to_tcam(tbl, tcam_idx,
							&mem_idx,
							total_act_mem);

			tcam_idx++;
		}
	}
}
298
299
300
301
302
303
304
305
306
307
/**
 * ice_acl_create_tbl - Allocate and initialize the device's ACL table
 * @hw: pointer to the hardware structure
 * @params: parameters for the table to be created; width/depth and
 *          entry_act_pairs may be adjusted (rounded up) in place
 *
 * Allocate the ACL table via the admin queue, record its geometry, assign
 * action memories to TCAM slices, zero-initialize the table contents, and
 * mark all of its allocation units available.
 *
 * Return: ICE_SUCCESS on success; ICE_ERR_ALREADY_EXISTS if a table exists,
 * ICE_ERR_PARAM, ICE_ERR_MAX_LIMIT, ICE_ERR_NO_MEMORY, or an AQ status on
 * failure.
 */
enum ice_status
ice_acl_create_tbl(struct ice_hw *hw, struct ice_acl_tbl_params *params)
{
	u16 width, depth, first_e, last_e, i;
	struct ice_aqc_acl_generic *resp_buf;
	struct ice_acl_alloc_tbl tbl_alloc;
	struct ice_acl_tbl *tbl;
	enum ice_status status;

	if (hw->acl_tbl)
		return ICE_ERR_ALREADY_EXISTS;

	if (!params)
		return ICE_ERR_PARAM;

	/* Round width up to a whole number of TCAM key widths */
	width = ROUND_UP(params->width, (u16)ICE_AQC_ACL_KEY_WIDTH_BYTES);
	/* Round depth up to a whole number of entry allocation units */
	depth = ICE_ALIGN(params->depth, ICE_ACL_ENTRY_ALLOC_UNIT);

	/* Ensure at least one action pair per cascaded slice, capped at
	 * the hardware maximum.
	 */
	if (params->entry_act_pairs < width / ICE_AQC_ACL_KEY_WIDTH_BYTES) {
		params->entry_act_pairs = width / ICE_AQC_ACL_KEY_WIDTH_BYTES;

		if (params->entry_act_pairs > ICE_AQC_TBL_MAX_ACTION_PAIRS)
			params->entry_act_pairs = ICE_AQC_TBL_MAX_ACTION_PAIRS;
	}

	/* Total slices needed (rows * cascaded columns) must fit */
	if ((DIVIDE_AND_ROUND_UP(depth, ICE_AQC_ACL_TCAM_DEPTH) *
	     (width / ICE_AQC_ACL_KEY_WIDTH_BYTES)) > ICE_AQC_ACL_SLICES)
		return ICE_ERR_MAX_LIMIT;

	ice_memset(&tbl_alloc, 0, sizeof(tbl_alloc), ICE_NONDMA_MEM);
	tbl_alloc.width = width;
	tbl_alloc.depth = depth;
	tbl_alloc.act_pairs_per_entry = params->entry_act_pairs;
	tbl_alloc.concurr = params->concurr;

	/* For concurrent tables, pass the dependent allocation IDs */
	if (params->concurr) {
		tbl_alloc.num_dependent_alloc_ids =
			ICE_AQC_MAX_CONCURRENT_ACL_TBL;

		for (i = 0; i < ICE_AQC_MAX_CONCURRENT_ACL_TBL; i++)
			tbl_alloc.buf.data_buf.alloc_ids[i] =
				CPU_TO_LE16(params->dep_tbls[i]);
	}

	status = ice_aq_alloc_acl_tbl(hw, &tbl_alloc, NULL);
	if (status) {
		if (LE16_TO_CPU(tbl_alloc.buf.resp_buf.alloc_id) <
		    ICE_AQC_ALLOC_ID_LESS_THAN_4K)
			ice_debug(hw, ICE_DBG_ACL, "Alloc ACL table failed. Unavailable resource.\n");
		else
			ice_debug(hw, ICE_DBG_ACL, "AQ allocation of ACL failed with error. status: %d\n",
				  status);
		return status;
	}

	/* NOTE(review): on this failure path the AQ-allocated table is not
	 * deallocated — confirm whether ice_aq_dealloc_acl_tbl() should be
	 * called here to avoid leaking the hardware resource.
	 */
	tbl = (struct ice_acl_tbl *)ice_malloc(hw, sizeof(*tbl));
	if (!tbl) {
		status = ICE_ERR_NO_MEMORY;

		goto out;
	}

	resp_buf = &tbl_alloc.buf.resp_buf;

	/* Record the geometry the firmware actually granted */
	tbl->id = LE16_TO_CPU(resp_buf->alloc_id);
	tbl->first_tcam = resp_buf->ops.table.first_tcam;
	tbl->last_tcam = resp_buf->ops.table.last_tcam;
	tbl->first_entry = LE16_TO_CPU(resp_buf->first_entry);
	tbl->last_entry = LE16_TO_CPU(resp_buf->last_entry);

	tbl->info = *params;
	tbl->info.width = width;
	tbl->info.depth = depth;
	hw->acl_tbl = tbl;

	for (i = 0; i < ICE_AQC_MAX_ACTION_MEMORIES; i++)
		tbl->act_mems[i].act_mem = resp_buf->act_mem[i];

	/* Spread the granted action memories across the TCAM slices */
	ice_acl_divide_act_mems_to_tcams(tbl);

	/* Zero-initialize all TCAM entries and action pairs */
	status = ice_acl_init_tbl(hw);
	if (status) {
		ice_free(hw, tbl);
		hw->acl_tbl = NULL;
		ice_debug(hw, ICE_DBG_ACL, "Initialization of TCAM entries failed. status: %d\n",
			  status);
		goto out;
	}

	/* Index of the first/last allocation unit across all slices */
	first_e = (tbl->first_tcam * ICE_AQC_MAX_TCAM_ALLOC_UNITS) +
		(tbl->first_entry / ICE_ACL_ENTRY_ALLOC_UNIT);
	last_e = (tbl->last_tcam * ICE_AQC_MAX_TCAM_ALLOC_UNITS) +
		(tbl->last_entry / ICE_ACL_ENTRY_ALLOC_UNIT);

	/* Mark all allocation units of this table as available */
	ice_bitmap_set(tbl->avail, first_e, last_e - first_e + 1);

	INIT_LIST_HEAD(&tbl->scens);
out:

	return status;
}
421
422
423
424
425
426
/**
 * ice_acl_alloc_partition - Find a free partition for a scenario
 * @hw: pointer to the hardware structure
 * @req: in/out scenario request; width and num_entry describe the need,
 *       and on success start/num_entry/end are filled in
 *
 * Search the ACL table's availability bitmap for a contiguous run of
 * allocation units wide enough (in cascaded TCAM slices) and deep enough
 * (in entries) for the requested scenario. Single-slice requests search
 * rows top-down; cascaded requests search bottom-up. If no aligned row
 * placement fits, the search retries at successive slice offsets (up to
 * width - 1).
 *
 * Return: ICE_SUCCESS with @req filled in, or ICE_ERR_MAX_LIMIT when no
 * partition of the required size exists.
 */
static enum ice_status
ice_acl_alloc_partition(struct ice_hw *hw, struct ice_acl_scen *req)
{
	u16 start = 0, cnt = 0, off = 0;
	u16 width, r_entries, row;
	bool done = false;
	int dir;

	/* Rounded up number of cascaded slices for the requested width */
	width = DIVIDE_AND_ROUND_UP(req->width, ICE_AQC_ACL_KEY_WIDTH_BYTES);

	/* Cannot be wider than the table itself */
	if (width > hw->acl_tbl->last_tcam - hw->acl_tbl->first_tcam + 1)
		return ICE_ERR_MAX_LIMIT;

	/* Round the entry count up to whole allocation units */
	r_entries = ICE_ALIGN(req->num_entry, ICE_ACL_ENTRY_ALLOC_UNIT);

	/* Choose starting row and scan direction: width-1 scenarios scan
	 * from the first TCAM downward; cascaded scenarios start at the
	 * bottom-most row that can hold the full width and scan upward.
	 */
	if (width == 1) {
		row = hw->acl_tbl->first_tcam;
		dir = 1;
	} else {
		/* Start with the bottom-most row that still fits 'width'
		 * consecutive slices.
		 */
		row = hw->acl_tbl->last_tcam + 1 - width;
		dir = -1;
	}

	do {
		u16 i;

		/* Check each allocation unit of the current row, in scan
		 * order, accumulating contiguous free units into 'cnt'.
		 */
		for (i = 0;
		     i < ICE_AQC_MAX_TCAM_ALLOC_UNITS && cnt < r_entries;
		     i++) {
			bool avail = true;
			u16 w, p;

			/* When scanning upward (dir < 0), visit the row's
			 * allocation units in reverse order.
			 */
			p = dir > 0 ? i : ICE_AQC_MAX_TCAM_ALLOC_UNITS - i - 1;
			/* The unit must be free across ALL cascaded slices */
			for (w = row; w < row + width && avail; w++) {
				u16 b;

				b = (w * ICE_AQC_MAX_TCAM_ALLOC_UNITS) + p;
				avail &= ice_is_bit_set(hw->acl_tbl->avail, b);
			}

			if (!avail) {
				/* Contiguity broken; restart the count */
				cnt = 0;
			} else {
				/* Remember the partition start: the first
				 * free unit when scanning forward, or keep
				 * updating to the lowest unit seen when
				 * scanning backward.
				 */
				if (!cnt || dir < 0)
					start = (row * ICE_AQC_ACL_TCAM_DEPTH) +
						(p * ICE_ACL_ENTRY_ALLOC_UNIT);
				cnt += ICE_ACL_ENTRY_ALLOC_UNIT;
			}
		}

		if (cnt >= r_entries) {
			req->start = start;
			req->num_entry = r_entries;
			req->end = ice_acl_tbl_calc_end_idx(start, r_entries,
							    width);
			break;
		}

		/* Move to the next candidate row in the scan direction */
		row = dir > 0 ? row + width : row - width;
		if (row > hw->acl_tbl->last_tcam ||
		    row < hw->acl_tbl->first_tcam) {
			/* Ran off the table: retry the whole scan shifted
			 * by one more slice offset.
			 */
			off++;

			/* All offsets smaller than the width have been
			 * tried; give up.
			 */
			if (off >= width)
				done = true;
			else
				row = dir > 0 ? off :
					hw->acl_tbl->last_tcam + 1 - off -
					width;
		}
	} while (!done);

	return cnt >= r_entries ? ICE_SUCCESS : ICE_ERR_MAX_LIMIT;
}
553
554
555
556
557
558
559
560
561
562
563
/**
 * ice_acl_fill_tcam_select - Fill byte-select values for one TCAM slice
 * @scen_buf: scenario buffer being built for the AQ alloc-scenario command
 * @scen: pointer to the scenario struct
 * @tcam_idx: absolute index of the TCAM slice being configured
 * @tcam_idx_in_cascade: position of this slice within its cascade (0-based)
 *
 * Program the tcam_select bytes of the given slice so each key byte is
 * taken from the matching position in the scenario's flat key. On the
 * last slice of the cascade, reserved byte positions are redirected to the
 * range-checker, profile-ID, and packet-direction select values instead.
 */
static void
ice_acl_fill_tcam_select(struct ice_aqc_acl_scen *scen_buf,
			 struct ice_acl_scen *scen, u16 tcam_idx,
			 u16 tcam_idx_in_cascade)
{
	u16 cascade_cnt, idx;
	u8 j;

	/* Byte offset of this slice's chunk within the scenario key */
	idx = tcam_idx_in_cascade * ICE_AQC_ACL_KEY_WIDTH_BYTES;
	cascade_cnt = DIVIDE_AND_ROUND_UP(scen->width,
					  ICE_AQC_ACL_KEY_WIDTH_BYTES);

	for (j = 0; j < ICE_AQC_ACL_KEY_WIDTH_BYTES; j++) {
		/* Default: select key byte 'idx' (select base is offset
		 * by one).
		 */
		u8 val = ICE_AQC_ACL_BYTE_SEL_BASE + 1 + idx;

		/* The last slice of the cascade carries the special
		 * range-check / PID / packet-direction select bytes.
		 */
		if (tcam_idx_in_cascade == cascade_cnt - 1) {
			if (j == ICE_ACL_SCEN_RNG_CHK_IDX_IN_TCAM)
				val = ICE_AQC_ACL_BYTE_SEL_BASE_RNG_CHK;
			else if (j == ICE_ACL_SCEN_PID_IDX_IN_TCAM)
				val = ICE_AQC_ACL_BYTE_SEL_BASE_PID;
			else if (j == ICE_ACL_SCEN_PKT_DIR_IDX_IN_TCAM)
				val = ICE_AQC_ACL_BYTE_SEL_BASE_PKT_DIR;
		}

		/* Skip select values beyond the valid range; note that
		 * 'idx' is still advanced below so subsequent bytes keep
		 * their alignment.
		 */
		if (val > ICE_AQC_ACL_BYTE_SEL_BASE_RNG_CHK)
			continue;

		scen_buf->tcam_cfg[tcam_idx].tcam_select[j] = val;

		idx++;
	}
}
614
615
616
617
618
619
620
621
/**
 * ice_acl_set_scen_chnk_msk - Set chunk masks for the scenario's slices
 * @scen_buf: scenario buffer being built for the AQ alloc-scenario command
 * @scen: pointer to the scenario struct
 *
 * For every allocation unit the scenario occupies, set the corresponding
 * chunk-mask bit in each TCAM slice of the cascade, advancing to the next
 * row of slices once all chunk offsets of the current row are used.
 */
static void
ice_acl_set_scen_chnk_msk(struct ice_aqc_acl_scen *scen_buf,
			  struct ice_acl_scen *scen)
{
	u16 tcam_idx, num_cscd, units, cnt;
	u8 chnk_offst;

	/* Slice and chunk offset where the scenario's entries begin */
	tcam_idx = ICE_ACL_TBL_TCAM_IDX(scen->start);
	chnk_offst = (u8)((scen->start % ICE_AQC_ACL_TCAM_DEPTH) /
			  ICE_ACL_ENTRY_ALLOC_UNIT);

	/* Number of allocation units the scenario spans */
	units = scen->num_entry / ICE_ACL_ENTRY_ALLOC_UNIT;

	/* Number of cascaded slices per row */
	num_cscd = scen->width / ICE_AQC_ACL_KEY_WIDTH_BYTES;

	for (cnt = 0; cnt < units; cnt++) {
		u16 i;

		/* The same chunk bit must be set in every slice of the
		 * cascade so the whole row matches as one wide entry.
		 */
		for (i = tcam_idx; i < tcam_idx + num_cscd; i++)
			scen_buf->tcam_cfg[i].chnk_msk |= BIT(chnk_offst);

		/* Wrap to the next row of slices after the last chunk */
		chnk_offst = (chnk_offst + 1) % ICE_AQC_MAX_TCAM_ALLOC_UNITS;
		if (!chnk_offst)
			tcam_idx += num_cscd;
	}
}
656
657
658
659
660
661
662
663
664
665
666
667static void
668ice_acl_assign_act_mem_for_scen(struct ice_acl_tbl *tbl,
669 struct ice_acl_scen *scen,
670 struct ice_aqc_acl_scen *scen_buf,
671 u8 current_tcam_idx, u8 target_tcam_idx)
672{
673 u8 i;
674
675 for (i = 0; i < ICE_AQC_MAX_ACTION_MEMORIES; i++) {
676 struct ice_acl_act_mem *p_mem = &tbl->act_mems[i];
677
678 if (p_mem->act_mem == ICE_ACL_ACT_PAIR_MEM_INVAL ||
679 p_mem->member_of_tcam != current_tcam_idx)
680 continue;
681
682 scen_buf->act_mem_cfg[i] = target_tcam_idx;
683 scen_buf->act_mem_cfg[i] |= ICE_AQC_ACL_SCE_ACT_MEM_EN;
684 ice_set_bit(i, scen->act_mem_bitmap);
685 }
686}
687
688
689
690
691
692
693
/**
 * ice_acl_commit_partition - Update availability bits for a partition
 * @hw: pointer to the hardware structure
 * @scen: pointer to the scenario whose partition is being (de)committed
 * @commit: true to SET the availability bits, false to CLEAR them
 *
 * Walk every allocation unit of the scenario's partition, across all
 * cascaded slices, and set or clear the corresponding bits in the table's
 * availability bitmap.
 *
 * NOTE(review): the polarity looks counterintuitive — committing a newly
 * created scenario passes commit == false (clearing avail bits, i.e.
 * marking the units in use; see ice_acl_create_scen()). Confirm against
 * callers before changing.
 */
static void
ice_acl_commit_partition(struct ice_hw *hw, struct ice_acl_scen *scen,
			 bool commit)
{
	u16 tcam_idx, off, num_cscd, units, cnt;

	/* Slice and allocation-unit offset where the partition begins */
	tcam_idx = ICE_ACL_TBL_TCAM_IDX(scen->start);
	off = (scen->start % ICE_AQC_ACL_TCAM_DEPTH) /
		ICE_ACL_ENTRY_ALLOC_UNIT;

	/* Number of allocation units the partition spans */
	units = scen->num_entry / ICE_ACL_ENTRY_ALLOC_UNIT;

	/* Number of cascaded slices per row */
	num_cscd = scen->width / ICE_AQC_ACL_KEY_WIDTH_BYTES;

	for (cnt = 0; cnt < units; cnt++) {
		u16 w;

		/* Touch the same unit in every slice of the cascade */
		for (w = 0; w < num_cscd; w++) {
			u16 b;

			b = ((tcam_idx + w) * ICE_AQC_MAX_TCAM_ALLOC_UNITS) +
				off;
			if (commit)
				ice_set_bit(b, hw->acl_tbl->avail);
			else
				ice_clear_bit(b, hw->acl_tbl->avail);
		}

		/* Wrap to the next row of slices after the last unit */
		off = (off + 1) % ICE_AQC_MAX_TCAM_ALLOC_UNITS;
		if (!off)
			tcam_idx += num_cscd;
	}
}
733
734
735
736
737
738
739
740
/**
 * ice_acl_create_scen - Create an ACL scenario in the existing table
 * @hw: pointer to the hardware structure
 * @match_width: number of match bytes the scenario requires
 * @num_entries: number of entries the scenario requires
 * @scen_id: on success, the firmware-assigned scenario ID
 *
 * Carve a partition out of the ACL table, build the scenario
 * configuration (byte selects, chunk masks, action-memory bindings,
 * start/compare flags), allocate the scenario via the admin queue, and
 * track it on the table's scenario list.
 *
 * Return: ICE_SUCCESS on success; ICE_ERR_DOES_NOT_EXIST if no table
 * exists, ICE_ERR_NO_MEMORY, or a partition-allocation/AQ status.
 */
enum ice_status
ice_acl_create_scen(struct ice_hw *hw, u16 match_width, u16 num_entries,
		    u16 *scen_id)
{
	u8 cascade_cnt, first_tcam, last_tcam, i, k;
	struct ice_aqc_acl_scen scen_buf;
	struct ice_acl_scen *scen;
	enum ice_status status;

	if (!hw->acl_tbl)
		return ICE_ERR_DOES_NOT_EXIST;

	scen = (struct ice_acl_scen *)ice_malloc(hw, sizeof(*scen));
	if (!scen)
		return ICE_ERR_NO_MEMORY;

	/* Request a partition: width rounded up to whole key widths */
	scen->start = hw->acl_tbl->first_entry;
	scen->width = ICE_AQC_ACL_KEY_WIDTH_BYTES *
		DIVIDE_AND_ROUND_UP(match_width, ICE_AQC_ACL_KEY_WIDTH_BYTES);
	scen->num_entry = num_entries;

	status = ice_acl_alloc_partition(hw, scen);
	if (status)
		goto out;

	ice_memset(&scen_buf, 0, sizeof(scen_buf), ICE_NONDMA_MEM);

	/* Cascade size and the slice range the partition occupies */
	cascade_cnt = DIVIDE_AND_ROUND_UP(scen->width,
					  ICE_AQC_ACL_KEY_WIDTH_BYTES);
	first_tcam = ICE_ACL_TBL_TCAM_IDX(scen->start);
	last_tcam = ICE_ACL_TBL_TCAM_IDX(scen->end);

	/* The last slice of the cascade reserves bytes for the range
	 * checker, profile ID, and packet direction, so the effective
	 * match width loses ICE_ACL_SCEN_MIN_WIDTH bytes and those fields
	 * live at fixed offsets in the final chunk.
	 */
	scen->eff_width = cascade_cnt * ICE_AQC_ACL_KEY_WIDTH_BYTES -
		ICE_ACL_SCEN_MIN_WIDTH;
	scen->rng_chk_idx = (cascade_cnt - 1) * ICE_AQC_ACL_KEY_WIDTH_BYTES +
		ICE_ACL_SCEN_RNG_CHK_IDX_IN_TCAM;
	scen->pid_idx = (cascade_cnt - 1) * ICE_AQC_ACL_KEY_WIDTH_BYTES +
		ICE_ACL_SCEN_PID_IDX_IN_TCAM;
	scen->pkt_dir_idx = (cascade_cnt - 1) * ICE_AQC_ACL_KEY_WIDTH_BYTES +
		ICE_ACL_SCEN_PKT_DIR_IDX_IN_TCAM;

	/* Mark which chunks of each slice belong to this scenario */
	ice_acl_set_scen_chnk_msk(&scen_buf, scen);

	/* Configure each cascade of slices: the first slice of each
	 * cascade gets START_SET/START_CMP, and every slice gets its byte
	 * selects and action-memory bindings.
	 */
	k = first_tcam;

	scen_buf.tcam_cfg[k].start_cmp_set |= ICE_AQC_ACL_ALLOC_SCE_START_SET;
	while (k <= last_tcam) {
		u8 last_tcam_idx_cascade = cascade_cnt + k - 1;

		/* Mark the start of a comparison cascade */
		scen_buf.tcam_cfg[k].start_cmp_set |=
			ICE_AQC_ACL_ALLOC_SCE_START_CMP;

		/* Fill selects and bind action memories for every slice
		 * in this cascade.
		 */
		for (i = k; i < cascade_cnt + k; i++) {
			ice_acl_fill_tcam_select(&scen_buf, scen, i, i - k);
			ice_acl_assign_act_mem_for_scen(hw->acl_tbl, scen,
							&scen_buf,
							i,
							last_tcam_idx_cascade);
		}

		k = i;
	}

	/* Slices outside the partition are marked as compare starts so
	 * they do not chain into this scenario's cascades.
	 */
	i = 0;
	while (i < first_tcam)
		scen_buf.tcam_cfg[i++].start_cmp_set =
			ICE_AQC_ACL_ALLOC_SCE_START_CMP;

	i = last_tcam + 1;
	while (i < ICE_AQC_ACL_SLICES)
		scen_buf.tcam_cfg[i++].start_cmp_set =
			ICE_AQC_ACL_ALLOC_SCE_START_CMP;

	status = ice_aq_alloc_acl_scen(hw, scen_id, &scen_buf, NULL);
	if (status) {
		ice_debug(hw, ICE_DBG_ACL, "AQ allocation of ACL scenario failed. status: %d\n",
			  status);
		goto out;
	}

	/* Record the scenario and mark its partition as in use */
	scen->id = *scen_id;
	ice_acl_commit_partition(hw, scen, false);
	ice_acl_init_entry(scen);
	LIST_ADD(&scen->list_entry, &hw->acl_tbl->scens);

out:
	if (status)
		ice_free(hw, scen);

	return status;
}
843
844
845
846
847
848
/**
 * ice_acl_destroy_scen - Destroy an ACL scenario by ID
 * @hw: pointer to the hardware structure
 * @scen_id: ID of the scenario to destroy
 *
 * Remove all ACL flow profiles that reference the scenario, deallocate the
 * scenario via the admin queue, then unlink and free its tracking struct.
 *
 * Return: ICE_SUCCESS on success, ICE_ERR_DOES_NOT_EXIST if no ACL table
 * exists, or the first failing profile-removal/AQ status.
 */
static enum ice_status ice_acl_destroy_scen(struct ice_hw *hw, u16 scen_id)
{
	struct ice_acl_scen *scen, *tmp_scen;
	struct ice_flow_prof *p, *tmp;
	enum ice_status status;

	if (!hw->acl_tbl)
		return ICE_ERR_DOES_NOT_EXIST;

	/* Remove every flow profile still bound to this scenario */
	LIST_FOR_EACH_ENTRY_SAFE(p, tmp, &hw->fl_profs[ICE_BLK_ACL],
				 ice_flow_prof, l_entry)
		if (p->cfg.scen && p->cfg.scen->id == scen_id) {
			status = ice_flow_rem_prof(hw, ICE_BLK_ACL, p->id);
			if (status) {
				ice_debug(hw, ICE_DBG_ACL, "ice_flow_rem_prof failed. status: %d\n",
					  status);
				return status;
			}
		}

	/* Release the scenario in firmware */
	status = ice_aq_dealloc_acl_scen(hw, scen_id, NULL);
	if (status) {
		ice_debug(hw, ICE_DBG_ACL, "AQ de-allocation of scenario failed. status: %d\n",
			  status);
		return status;
	}

	/* Drop the software tracking entry */
	LIST_FOR_EACH_ENTRY_SAFE(scen, tmp_scen, &hw->acl_tbl->scens,
				 ice_acl_scen, list_entry)
		if (scen->id == scen_id) {
			LIST_DEL(&scen->list_entry);
			ice_free(hw, scen);
		}

	return ICE_SUCCESS;
}
888
889
890
891
892
/**
 * ice_acl_destroy_tbl - Destroy the device's ACL table and all scenarios
 * @hw: pointer to the hardware structure
 *
 * For each remaining scenario: read back its configuration, clear its
 * chunk masks and action-memory bindings in firmware, then destroy it.
 * Finally deallocate the table itself and free the software tracking
 * structure.
 *
 * Return: ICE_SUCCESS on success, ICE_ERR_DOES_NOT_EXIST if no table
 * exists, or the first failing AQ/destroy status.
 */
enum ice_status ice_acl_destroy_tbl(struct ice_hw *hw)
{
	struct ice_acl_scen *pos_scen, *tmp_scen;
	struct ice_aqc_acl_generic resp_buf;
	struct ice_aqc_acl_scen buf;
	enum ice_status status;
	u8 i;

	if (!hw->acl_tbl)
		return ICE_ERR_DOES_NOT_EXIST;

	/* Detach every scenario from the hardware before destroying it */
	LIST_FOR_EACH_ENTRY_SAFE(pos_scen, tmp_scen, &hw->acl_tbl->scens,
				 ice_acl_scen, list_entry) {
		status = ice_aq_query_acl_scen(hw, pos_scen->id, &buf, NULL);
		if (status) {
			ice_debug(hw, ICE_DBG_ACL, "ice_aq_query_acl_scen() failed. status: %d\n",
				  status);
			return status;
		}

		/* Clear chunk masks and reset compare flags on all slices */
		for (i = 0; i < ICE_AQC_ACL_SLICES; i++) {
			buf.tcam_cfg[i].chnk_msk = 0;
			buf.tcam_cfg[i].start_cmp_set =
				ICE_AQC_ACL_ALLOC_SCE_START_CMP;
		}

		/* Detach all action memories from the scenario */
		for (i = 0; i < ICE_AQC_MAX_ACTION_MEMORIES; i++)
			buf.act_mem_cfg[i] = 0;

		status = ice_aq_update_acl_scen(hw, pos_scen->id, &buf, NULL);
		if (status) {
			ice_debug(hw, ICE_DBG_ACL, "ice_aq_update_acl_scen() failed. status: %d\n",
				  status);
			return status;
		}

		status = ice_acl_destroy_scen(hw, pos_scen->id);
		if (status) {
			ice_debug(hw, ICE_DBG_ACL, "deletion of scenario failed. status: %d\n",
				  status);
			return status;
		}
	}

	/* Release the table itself in firmware */
	status = ice_aq_dealloc_acl_tbl(hw, hw->acl_tbl->id, &resp_buf, NULL);
	if (status) {
		ice_debug(hw, ICE_DBG_ACL, "AQ de-allocation of ACL failed. status: %d\n",
			  status);
		return status;
	}

	ice_free(hw, hw->acl_tbl);
	hw->acl_tbl = NULL;

	return ICE_SUCCESS;
}
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
/**
 * ice_acl_add_entry - Add an entry to an ACL scenario
 * @hw: pointer to the hardware structure
 * @scen: pointer to the scenario struct
 * @prio: priority region the entry should be placed in
 * @keys: buffer of key bytes, one key-width chunk per cascaded slice
 * @inverts: buffer of key-invert bytes, laid out like @keys
 * @acts: array of actions to program for the entry
 * @acts_cnt: number of actions in @acts
 * @entry_idx: on success, the scenario-relative index of the new entry
 *
 * Claim a free entry index for the priority, program the key/invert chunk
 * of each cascaded TCAM slice, then program the entry's actions. On any
 * failure the entry is removed and *entry_idx is reset to 0.
 *
 * Return: ICE_SUCCESS, ICE_ERR_DOES_NOT_EXIST if @scen is NULL,
 * ICE_ERR_MAX_LIMIT if the priority region is full, or an AQ status.
 */
enum ice_status
ice_acl_add_entry(struct ice_hw *hw, struct ice_acl_scen *scen,
		  enum ice_acl_entry_prio prio, u8 *keys, u8 *inverts,
		  struct ice_acl_act_entry *acts, u8 acts_cnt, u16 *entry_idx)
{
	u8 i, entry_tcam, num_cscd, offset;
	struct ice_aqc_acl_data buf;
	enum ice_status status = ICE_SUCCESS;
	u16 idx;

	if (!scen)
		return ICE_ERR_DOES_NOT_EXIST;

	*entry_idx = ice_acl_scen_assign_entry_idx(scen, prio);
	if (*entry_idx >= scen->num_entry) {
		*entry_idx = 0;
		return ICE_ERR_MAX_LIMIT;
	}

	/* Number of cascaded slices making up one wide entry */
	num_cscd = DIVIDE_AND_ROUND_UP(scen->width,
				       ICE_AQC_ACL_KEY_WIDTH_BYTES);

	entry_tcam = ICE_ACL_TBL_TCAM_IDX(scen->start);
	idx = ICE_ACL_TBL_TCAM_ENTRY_IDX(scen->start + *entry_idx);

	ice_memset(&buf, 0, sizeof(buf), ICE_NONDMA_MEM);
	for (i = 0; i < num_cscd; i++) {
		/* The key chunks are consumed in reverse order: slice
		 * (entry_tcam + offset) receives chunk 'offset' of
		 * keys/inverts, walking from the last chunk to the first.
		 * Only the key fields of 'buf' change per iteration; the
		 * rest stays zeroed from the memset above.
		 */
		offset = num_cscd - i - 1;
		ice_memcpy(&buf.entry_key.val,
			   &keys[offset * sizeof(buf.entry_key.val)],
			   sizeof(buf.entry_key.val), ICE_NONDMA_TO_NONDMA);
		ice_memcpy(&buf.entry_key_invert.val,
			   &inverts[offset * sizeof(buf.entry_key_invert.val)],
			   sizeof(buf.entry_key_invert.val),
			   ICE_NONDMA_TO_NONDMA);
		status = ice_aq_program_acl_entry(hw, entry_tcam + offset, idx,
						  &buf, NULL);
		if (status) {
			ice_debug(hw, ICE_DBG_ACL, "aq program acl entry failed status: %d\n",
				  status);
			goto out;
		}
	}

	/* Program the entry's actions into the bound action memories */
	status = ice_acl_prog_act(hw, scen, acts, acts_cnt, *entry_idx);

out:
	/* Roll back the partially programmed entry on any failure */
	if (status) {
		ice_acl_rem_entry(hw, scen, *entry_idx);
		*entry_idx = 0;
	}

	return status;
}
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
/**
 * ice_acl_prog_act - Program actions for an existing scenario entry
 * @hw: pointer to the hardware structure
 * @scen: pointer to the scenario struct
 * @acts: array of actions to program
 * @acts_cnt: number of actions in @acts
 * @entry_idx: scenario-relative index of the entry
 *
 * Walk the scenario's bound action memories and program the actions two
 * at a time (each action-pair buffer holds two actions), skipping banks
 * that do not belong to the entry's cascade of TCAM slices.
 *
 * Return: ICE_SUCCESS on success; ICE_ERR_MAX_LIMIT if @entry_idx is out
 * of range or not all actions could be placed; otherwise an AQ status.
 */
enum ice_status
ice_acl_prog_act(struct ice_hw *hw, struct ice_acl_scen *scen,
		 struct ice_acl_act_entry *acts, u8 acts_cnt,
		 u16 entry_idx)
{
	u8 entry_tcam, num_cscd, i, actx_idx = 0;
	struct ice_aqc_actpair act_buf;
	enum ice_status status = ICE_SUCCESS;
	u16 idx;

	if (entry_idx >= scen->num_entry)
		return ICE_ERR_MAX_LIMIT;

	ice_memset(&act_buf, 0, sizeof(act_buf), ICE_NONDMA_MEM);

	/* Number of cascaded slices making up one wide entry */
	num_cscd = DIVIDE_AND_ROUND_UP(scen->width,
				       ICE_AQC_ACL_KEY_WIDTH_BYTES);

	entry_tcam = ICE_ACL_TBL_TCAM_IDX(scen->start);
	idx = ICE_ACL_TBL_TCAM_ENTRY_IDX(scen->start + entry_idx);

	ice_for_each_set_bit(i, scen->act_mem_bitmap,
			     ICE_AQC_MAX_ACTION_MEMORIES) {
		struct ice_acl_act_mem *mem = &hw->acl_tbl->act_mems[i];

		if (actx_idx >= acts_cnt)
			break;
		/* Use only banks that serve this entry's slice cascade */
		if (mem->member_of_tcam >= entry_tcam &&
		    mem->member_of_tcam < entry_tcam + num_cscd) {
			ice_memcpy(&act_buf.act[0], &acts[actx_idx],
				   sizeof(struct ice_acl_act_entry),
				   ICE_NONDMA_TO_NONDMA);

			/* Each action pair carries a second action when
			 * one remains; actx_idx advances twice per bank
			 * (once here, once below).
			 */
			if (++actx_idx < acts_cnt) {
				ice_memcpy(&act_buf.act[1], &acts[actx_idx],
					   sizeof(struct ice_acl_act_entry),
					   ICE_NONDMA_TO_NONDMA);
			}

			status = ice_aq_program_actpair(hw, i, idx, &act_buf,
							NULL);
			if (status) {
				ice_debug(hw, ICE_DBG_ACL, "program actpair failed status: %d\n",
					  status);
				break;
			}
			actx_idx++;
		}
	}

	/* Not enough action-memory capacity for all requested actions */
	if (!status && actx_idx < acts_cnt)
		status = ICE_ERR_MAX_LIMIT;

	return status;
}
1103
1104
1105
1106
1107
1108
1109
/**
 * ice_acl_rem_entry - Remove an entry from an ACL scenario
 * @hw: pointer to the hardware structure
 * @scen: pointer to the scenario struct
 * @entry_idx: scenario-relative index of the entry to remove
 *
 * Zero the entry's key in every cascaded TCAM slice and zero its action
 * pairs in every bound action memory, then release the entry index. AQ
 * failures are logged but do not stop the cleanup; the last failure (if
 * any) is returned.
 *
 * Return: ICE_SUCCESS, ICE_ERR_DOES_NOT_EXIST if @scen is NULL or the
 * entry is not in use, ICE_ERR_MAX_LIMIT if @entry_idx is out of range,
 * or the status of the last failing AQ call.
 */
enum ice_status
ice_acl_rem_entry(struct ice_hw *hw, struct ice_acl_scen *scen, u16 entry_idx)
{
	struct ice_aqc_actpair act_buf;
	struct ice_aqc_acl_data buf;
	u8 entry_tcam, num_cscd, i;
	enum ice_status status = ICE_SUCCESS;
	u16 idx;

	if (!scen)
		return ICE_ERR_DOES_NOT_EXIST;

	if (entry_idx >= scen->num_entry)
		return ICE_ERR_MAX_LIMIT;

	if (!ice_is_bit_set(scen->entry_bitmap, entry_idx))
		return ICE_ERR_DOES_NOT_EXIST;

	/* Number of cascaded slices making up one wide entry */
	num_cscd = DIVIDE_AND_ROUND_UP(scen->width,
				       ICE_AQC_ACL_KEY_WIDTH_BYTES);

	entry_tcam = ICE_ACL_TBL_TCAM_IDX(scen->start);
	idx = ICE_ACL_TBL_TCAM_ENTRY_IDX(scen->start + entry_idx);

	/* Clear the key in every slice of the cascade */
	ice_memset(&buf, 0, sizeof(buf), ICE_NONDMA_MEM);
	for (i = 0; i < num_cscd; i++) {
		status = ice_aq_program_acl_entry(hw, entry_tcam + i, idx, &buf,
						  NULL);
		if (status)
			ice_debug(hw, ICE_DBG_ACL, "AQ program ACL entry failed status: %d\n",
				  status);
	}

	ice_memset(&act_buf, 0, sizeof(act_buf), ICE_NONDMA_MEM);

	/* Clear the action pairs in every bank serving this cascade */
	ice_for_each_set_bit(i, scen->act_mem_bitmap,
			     ICE_AQC_MAX_ACTION_MEMORIES) {
		struct ice_acl_act_mem *mem = &hw->acl_tbl->act_mems[i];

		if (mem->member_of_tcam >= entry_tcam &&
		    mem->member_of_tcam < entry_tcam + num_cscd) {
			/* Write a zeroed action pair at the entry's index */
			status = ice_aq_program_actpair(hw, i, idx, &act_buf,
							NULL);
			if (status)
				ice_debug(hw, ICE_DBG_ACL, "program actpair failed status: %d\n",
					  status);
		}
	}

	/* Hand the entry index back to the scenario */
	ice_acl_scen_free_entry_idx(scen, entry_idx);

	return status;
}
1166