1
2
3
4#include "dr_types.h"
5
6static bool dr_mask_is_smac_set(struct mlx5dr_match_spec *spec)
7{
8 return (spec->smac_47_16 || spec->smac_15_0);
9}
10
11static bool dr_mask_is_dmac_set(struct mlx5dr_match_spec *spec)
12{
13 return (spec->dmac_47_16 || spec->dmac_15_0);
14}
15
16static bool dr_mask_is_src_addr_set(struct mlx5dr_match_spec *spec)
17{
18 return (spec->src_ip_127_96 || spec->src_ip_95_64 ||
19 spec->src_ip_63_32 || spec->src_ip_31_0);
20}
21
22static bool dr_mask_is_dst_addr_set(struct mlx5dr_match_spec *spec)
23{
24 return (spec->dst_ip_127_96 || spec->dst_ip_95_64 ||
25 spec->dst_ip_63_32 || spec->dst_ip_31_0);
26}
27
28static bool dr_mask_is_l3_base_set(struct mlx5dr_match_spec *spec)
29{
30 return (spec->ip_protocol || spec->frag || spec->tcp_flags ||
31 spec->ip_ecn || spec->ip_dscp);
32}
33
34static bool dr_mask_is_tcp_udp_base_set(struct mlx5dr_match_spec *spec)
35{
36 return (spec->tcp_sport || spec->tcp_dport ||
37 spec->udp_sport || spec->udp_dport);
38}
39
40static bool dr_mask_is_ipv4_set(struct mlx5dr_match_spec *spec)
41{
42 return (spec->dst_ip_31_0 || spec->src_ip_31_0);
43}
44
45static bool dr_mask_is_ipv4_5_tuple_set(struct mlx5dr_match_spec *spec)
46{
47 return (dr_mask_is_l3_base_set(spec) ||
48 dr_mask_is_tcp_udp_base_set(spec) ||
49 dr_mask_is_ipv4_set(spec));
50}
51
52static bool dr_mask_is_eth_l2_tnl_set(struct mlx5dr_match_misc *misc)
53{
54 return misc->vxlan_vni;
55}
56
57static bool dr_mask_is_ttl_set(struct mlx5dr_match_spec *spec)
58{
59 return spec->ttl_hoplimit;
60}
61
/* True when any L2-destination-related mask field is set.
 * _spec is a struct mlx5dr_match_spec, _misc a struct mlx5dr_match_misc,
 * and _inner_outer is the literal token "inner" or "outer" (token-pasted
 * onto the second-VLAN field names).
 * Fix: the first use of _spec was unparenthesized (_spec.first_vid),
 * unlike every other argument use; parenthesize it for macro hygiene.
 */
#define DR_MASK_IS_L2_DST(_spec, _misc, _inner_outer) ((_spec).first_vid || \
	(_spec).first_cfi || (_spec).first_prio || (_spec).cvlan_tag || \
	(_spec).svlan_tag || (_spec).dmac_47_16 || (_spec).dmac_15_0 || \
	(_spec).ethertype || (_spec).ip_version || \
	(_misc)._inner_outer##_second_vid || \
	(_misc)._inner_outer##_second_cfi || \
	(_misc)._inner_outer##_second_prio || \
	(_misc)._inner_outer##_second_cvlan_tag || \
	(_misc)._inner_outer##_second_svlan_tag)
71
/* True when any L3/L4 mask field used by the IPv6 eth_l4 STE is set.
 * _inner_outer ("inner"/"outer") is token-pasted onto the flow-label field.
 */
#define DR_MASK_IS_ETH_L4_SET(_spec, _misc, _inner_outer) ( \
	dr_mask_is_l3_base_set(&(_spec)) || \
	dr_mask_is_tcp_udp_base_set(&(_spec)) || \
	dr_mask_is_ttl_set(&(_spec)) || \
	(_misc)._inner_outer##_ipv6_flow_label)
77
/* True when the TCP sequence/ack number masks (eth_l4_misc STE) are set. */
#define DR_MASK_IS_ETH_L4_MISC_SET(_misc3, _inner_outer) ( \
	(_misc3)._inner_outer##_tcp_seq_num || \
	(_misc3)._inner_outer##_tcp_ack_num)
81
/* True when any field of the first MPLS label mask is set. */
#define DR_MASK_IS_FIRST_MPLS_SET(_misc2, _inner_outer) ( \
	(_misc2)._inner_outer##_first_mpls_label || \
	(_misc2)._inner_outer##_first_mpls_exp || \
	(_misc2)._inner_outer##_first_mpls_s_bos || \
	(_misc2)._inner_outer##_first_mpls_ttl)
87
88static bool dr_mask_is_tnl_gre_set(struct mlx5dr_match_misc *misc)
89{
90 return (misc->gre_key_h || misc->gre_key_l ||
91 misc->gre_protocol || misc->gre_c_present ||
92 misc->gre_k_present || misc->gre_s_present);
93}
94
/* True when any field of the outer first-MPLS-over-GRE mask is set. */
#define DR_MASK_IS_OUTER_MPLS_OVER_GRE_SET(_misc) (\
	(_misc)->outer_first_mpls_over_gre_label || \
	(_misc)->outer_first_mpls_over_gre_exp || \
	(_misc)->outer_first_mpls_over_gre_s_bos || \
	(_misc)->outer_first_mpls_over_gre_ttl)
100
/* True when any field of the outer first-MPLS-over-UDP mask is set. */
#define DR_MASK_IS_OUTER_MPLS_OVER_UDP_SET(_misc) (\
	(_misc)->outer_first_mpls_over_udp_label || \
	(_misc)->outer_first_mpls_over_udp_exp || \
	(_misc)->outer_first_mpls_over_udp_s_bos || \
	(_misc)->outer_first_mpls_over_udp_ttl)
106
107static bool
108dr_mask_is_vxlan_gpe_set(struct mlx5dr_match_misc3 *misc3)
109{
110 return (misc3->outer_vxlan_gpe_vni ||
111 misc3->outer_vxlan_gpe_next_protocol ||
112 misc3->outer_vxlan_gpe_flags);
113}
114
115static bool
116dr_matcher_supp_vxlan_gpe(struct mlx5dr_cmd_caps *caps)
117{
118 return (caps->sw_format_ver == MLX5_STEERING_FORMAT_CONNECTX_6DX) ||
119 (caps->flex_protocols & MLX5_FLEX_PARSER_VXLAN_GPE_ENABLED);
120}
121
122static bool
123dr_mask_is_tnl_vxlan_gpe(struct mlx5dr_match_param *mask,
124 struct mlx5dr_domain *dmn)
125{
126 return dr_mask_is_vxlan_gpe_set(&mask->misc3) &&
127 dr_matcher_supp_vxlan_gpe(&dmn->info.caps);
128}
129
130static bool dr_mask_is_tnl_geneve_set(struct mlx5dr_match_misc *misc)
131{
132 return misc->geneve_vni ||
133 misc->geneve_oam ||
134 misc->geneve_protocol_type ||
135 misc->geneve_opt_len;
136}
137
138static bool dr_mask_is_tnl_geneve_tlv_opt(struct mlx5dr_match_misc3 *misc3)
139{
140 return misc3->geneve_tlv_option_0_data;
141}
142
143static bool
144dr_matcher_supp_tnl_geneve(struct mlx5dr_cmd_caps *caps)
145{
146 return (caps->sw_format_ver == MLX5_STEERING_FORMAT_CONNECTX_6DX) ||
147 (caps->flex_protocols & MLX5_FLEX_PARSER_GENEVE_ENABLED);
148}
149
150static bool
151dr_mask_is_tnl_geneve(struct mlx5dr_match_param *mask,
152 struct mlx5dr_domain *dmn)
153{
154 return dr_mask_is_tnl_geneve_set(&mask->misc) &&
155 dr_matcher_supp_tnl_geneve(&dmn->info.caps);
156}
157
158static bool dr_mask_is_tnl_gtpu_set(struct mlx5dr_match_misc3 *misc3)
159{
160 return misc3->gtpu_msg_flags || misc3->gtpu_msg_type || misc3->gtpu_teid;
161}
162
163static bool dr_matcher_supp_tnl_gtpu(struct mlx5dr_cmd_caps *caps)
164{
165 return caps->flex_protocols & MLX5_FLEX_PARSER_GTPU_ENABLED;
166}
167
168static bool dr_mask_is_tnl_gtpu(struct mlx5dr_match_param *mask,
169 struct mlx5dr_domain *dmn)
170{
171 return dr_mask_is_tnl_gtpu_set(&mask->misc3) &&
172 dr_matcher_supp_tnl_gtpu(&dmn->info.caps);
173}
174
/* Non-zero when the GTP-U DW0 flex parser capability bit is set.
 * Returns the raw capability bit (int), used only in boolean context.
 */
static int dr_matcher_supp_tnl_gtpu_dw_0(struct mlx5dr_cmd_caps *caps)
{
	return caps->flex_protocols & MLX5_FLEX_PARSER_GTPU_DW_0_ENABLED;
}
179
180static bool dr_mask_is_tnl_gtpu_dw_0(struct mlx5dr_match_param *mask,
181 struct mlx5dr_domain *dmn)
182{
183 return mask->misc3.gtpu_dw_0 &&
184 dr_matcher_supp_tnl_gtpu_dw_0(&dmn->info.caps);
185}
186
/* Non-zero when the GTP-U TEID flex parser capability bit is set.
 * Returns the raw capability bit (int), used only in boolean context.
 */
static int dr_matcher_supp_tnl_gtpu_teid(struct mlx5dr_cmd_caps *caps)
{
	return caps->flex_protocols & MLX5_FLEX_PARSER_GTPU_TEID_ENABLED;
}
191
192static bool dr_mask_is_tnl_gtpu_teid(struct mlx5dr_match_param *mask,
193 struct mlx5dr_domain *dmn)
194{
195 return mask->misc3.gtpu_teid &&
196 dr_matcher_supp_tnl_gtpu_teid(&dmn->info.caps);
197}
198
/* Non-zero when the GTP-U DW2 flex parser capability bit is set.
 * Returns the raw capability bit (int), used only in boolean context.
 */
static int dr_matcher_supp_tnl_gtpu_dw_2(struct mlx5dr_cmd_caps *caps)
{
	return caps->flex_protocols & MLX5_FLEX_PARSER_GTPU_DW_2_ENABLED;
}
203
204static bool dr_mask_is_tnl_gtpu_dw_2(struct mlx5dr_match_param *mask,
205 struct mlx5dr_domain *dmn)
206{
207 return mask->misc3.gtpu_dw_2 &&
208 dr_matcher_supp_tnl_gtpu_dw_2(&dmn->info.caps);
209}
210
/* Non-zero when the GTP-U first-extension-DW0 flex parser capability bit
 * is set. Returns the raw capability bit (int), used only in boolean context.
 */
static int dr_matcher_supp_tnl_gtpu_first_ext(struct mlx5dr_cmd_caps *caps)
{
	return caps->flex_protocols & MLX5_FLEX_PARSER_GTPU_FIRST_EXT_DW_0_ENABLED;
}
215
216static bool dr_mask_is_tnl_gtpu_first_ext(struct mlx5dr_match_param *mask,
217 struct mlx5dr_domain *dmn)
218{
219 return mask->misc3.gtpu_first_ext_dw_0 &&
220 dr_matcher_supp_tnl_gtpu_first_ext(&dmn->info.caps);
221}
222
/* True when at least one masked GTP-U field has its flex parser id
 * allocated in the flex-parser-0 id range (and the field is supported).
 */
static bool dr_mask_is_tnl_gtpu_flex_parser_0(struct mlx5dr_match_param *mask,
					      struct mlx5dr_domain *dmn)
{
	struct mlx5dr_cmd_caps *caps = &dmn->info.caps;

	return (dr_is_flex_parser_0_id(caps->flex_parser_id_gtpu_dw_0) &&
		dr_mask_is_tnl_gtpu_dw_0(mask, dmn)) ||
	       (dr_is_flex_parser_0_id(caps->flex_parser_id_gtpu_teid) &&
		dr_mask_is_tnl_gtpu_teid(mask, dmn)) ||
	       (dr_is_flex_parser_0_id(caps->flex_parser_id_gtpu_dw_2) &&
		dr_mask_is_tnl_gtpu_dw_2(mask, dmn)) ||
	       (dr_is_flex_parser_0_id(caps->flex_parser_id_gtpu_first_ext_dw_0) &&
		dr_mask_is_tnl_gtpu_first_ext(mask, dmn));
}
237
/* True when at least one masked GTP-U field has its flex parser id
 * allocated in the flex-parser-1 id range (and the field is supported).
 */
static bool dr_mask_is_tnl_gtpu_flex_parser_1(struct mlx5dr_match_param *mask,
					      struct mlx5dr_domain *dmn)
{
	struct mlx5dr_cmd_caps *caps = &dmn->info.caps;

	return (dr_is_flex_parser_1_id(caps->flex_parser_id_gtpu_dw_0) &&
		dr_mask_is_tnl_gtpu_dw_0(mask, dmn)) ||
	       (dr_is_flex_parser_1_id(caps->flex_parser_id_gtpu_teid) &&
		dr_mask_is_tnl_gtpu_teid(mask, dmn)) ||
	       (dr_is_flex_parser_1_id(caps->flex_parser_id_gtpu_dw_2) &&
		dr_mask_is_tnl_gtpu_dw_2(mask, dmn)) ||
	       (dr_is_flex_parser_1_id(caps->flex_parser_id_gtpu_first_ext_dw_0) &&
		dr_mask_is_tnl_gtpu_first_ext(mask, dmn));
}
252
253static bool dr_mask_is_tnl_gtpu_any(struct mlx5dr_match_param *mask,
254 struct mlx5dr_domain *dmn)
255{
256 return dr_mask_is_tnl_gtpu_flex_parser_0(mask, dmn) ||
257 dr_mask_is_tnl_gtpu_flex_parser_1(mask, dmn) ||
258 dr_mask_is_tnl_gtpu(mask, dmn);
259}
260
261static int dr_matcher_supp_icmp_v4(struct mlx5dr_cmd_caps *caps)
262{
263 return (caps->sw_format_ver == MLX5_STEERING_FORMAT_CONNECTX_6DX) ||
264 (caps->flex_protocols & MLX5_FLEX_PARSER_ICMP_V4_ENABLED);
265}
266
267static int dr_matcher_supp_icmp_v6(struct mlx5dr_cmd_caps *caps)
268{
269 return (caps->sw_format_ver == MLX5_STEERING_FORMAT_CONNECTX_6DX) ||
270 (caps->flex_protocols & MLX5_FLEX_PARSER_ICMP_V6_ENABLED);
271}
272
273static bool dr_mask_is_icmpv6_set(struct mlx5dr_match_misc3 *misc3)
274{
275 return (misc3->icmpv6_type || misc3->icmpv6_code ||
276 misc3->icmpv6_header_data);
277}
278
/* True when ICMP (v4 or v6) fields are masked and the device supports
 * matching that ICMP version. ICMPv4 takes precedence when both are set.
 */
static bool dr_mask_is_icmp(struct mlx5dr_match_param *mask,
			    struct mlx5dr_domain *dmn)
{
	if (DR_MASK_IS_ICMPV4_SET(&mask->misc3))
		return dr_matcher_supp_icmp_v4(&dmn->info.caps);
	else if (dr_mask_is_icmpv6_set(&mask->misc3))
		return dr_matcher_supp_icmp_v6(&dmn->info.caps);

	return false;
}
289
290static bool dr_mask_is_wqe_metadata_set(struct mlx5dr_match_misc2 *misc2)
291{
292 return misc2->metadata_reg_a;
293}
294
295static bool dr_mask_is_reg_c_0_3_set(struct mlx5dr_match_misc2 *misc2)
296{
297 return (misc2->metadata_reg_c_0 || misc2->metadata_reg_c_1 ||
298 misc2->metadata_reg_c_2 || misc2->metadata_reg_c_3);
299}
300
301static bool dr_mask_is_reg_c_4_7_set(struct mlx5dr_match_misc2 *misc2)
302{
303 return (misc2->metadata_reg_c_4 || misc2->metadata_reg_c_5 ||
304 misc2->metadata_reg_c_6 || misc2->metadata_reg_c_7);
305}
306
307static bool dr_mask_is_gvmi_or_qpn_set(struct mlx5dr_match_misc *misc)
308{
309 return (misc->source_sqn || misc->source_port);
310}
311
/* Flex-parser-0 coverage check for one programmable sample field:
 * when an explicit parser id is masked, it must lie in the flex-parser-0
 * id range; when no id is given, fall back to whether the sample value
 * itself is masked.
 */
static bool dr_mask_is_flex_parser_id_0_3_set(u32 flex_parser_id,
					      u32 flex_parser_value)
{
	if (flex_parser_id)
		return flex_parser_id <= DR_STE_MAX_FLEX_0_ID;

	/* id == 0: use flex parser 0 whenever a sample value is masked */
	return flex_parser_value;
}
321
/* True when any of the four programmable sample fields selects (or
 * defaults to) flex parser 0.
 */
static bool dr_mask_is_flex_parser_0_3_set(struct mlx5dr_match_misc4 *misc4)
{
	return (dr_mask_is_flex_parser_id_0_3_set(misc4->prog_sample_field_id_0,
						  misc4->prog_sample_field_value_0) ||
		dr_mask_is_flex_parser_id_0_3_set(misc4->prog_sample_field_id_1,
						  misc4->prog_sample_field_value_1) ||
		dr_mask_is_flex_parser_id_0_3_set(misc4->prog_sample_field_id_2,
						  misc4->prog_sample_field_value_2) ||
		dr_mask_is_flex_parser_id_0_3_set(misc4->prog_sample_field_id_3,
						  misc4->prog_sample_field_value_3));
}
333
334static bool dr_mask_is_flex_parser_id_4_7_set(u32 flex_parser_id)
335{
336 return flex_parser_id > DR_STE_MAX_FLEX_0_ID &&
337 flex_parser_id <= DR_STE_MAX_FLEX_1_ID;
338}
339
/* True when any of the four programmable sample field ids selects
 * flex parser 1 (ids 4-7).
 */
static bool dr_mask_is_flex_parser_4_7_set(struct mlx5dr_match_misc4 *misc4)
{
	return (dr_mask_is_flex_parser_id_4_7_set(misc4->prog_sample_field_id_0) ||
		dr_mask_is_flex_parser_id_4_7_set(misc4->prog_sample_field_id_1) ||
		dr_mask_is_flex_parser_id_4_7_set(misc4->prog_sample_field_id_2) ||
		dr_mask_is_flex_parser_id_4_7_set(misc4->prog_sample_field_id_3));
}
347
/* Non-zero when the MPLS-over-GRE flex parser capability bit is set.
 * Returns the raw capability bit (int), used only in boolean context.
 */
static int dr_matcher_supp_tnl_mpls_over_gre(struct mlx5dr_cmd_caps *caps)
{
	return caps->flex_protocols & MLX5_FLEX_PARSER_MPLS_OVER_GRE_ENABLED;
}
352
353static bool dr_mask_is_tnl_mpls_over_gre(struct mlx5dr_match_param *mask,
354 struct mlx5dr_domain *dmn)
355{
356 return DR_MASK_IS_OUTER_MPLS_OVER_GRE_SET(&mask->misc2) &&
357 dr_matcher_supp_tnl_mpls_over_gre(&dmn->info.caps);
358}
359
360static int dr_matcher_supp_tnl_mpls_over_udp(struct mlx5dr_cmd_caps *caps)
361{
362 return caps->flex_protocols & mlx5_FLEX_PARSER_MPLS_OVER_UDP_ENABLED;
363}
364
365static bool dr_mask_is_tnl_mpls_over_udp(struct mlx5dr_match_param *mask,
366 struct mlx5dr_domain *dmn)
367{
368 return DR_MASK_IS_OUTER_MPLS_OVER_UDP_SET(&mask->misc2) &&
369 dr_matcher_supp_tnl_mpls_over_udp(&dmn->info.caps);
370}
/* Select the pre-computed STE builder array for the given outer/inner IP
 * versions of a rule. Returns -EINVAL when no builders were generated
 * for that combination, i.e. the matcher's mask cannot match such packets.
 */
int mlx5dr_matcher_select_builders(struct mlx5dr_matcher *matcher,
				   struct mlx5dr_matcher_rx_tx *nic_matcher,
				   enum mlx5dr_ipv outer_ipv,
				   enum mlx5dr_ipv inner_ipv)
{
	nic_matcher->ste_builder =
		nic_matcher->ste_builder_arr[outer_ipv][inner_ipv];
	nic_matcher->num_of_builders =
		nic_matcher->num_of_builders_arr[outer_ipv][inner_ipv];

	if (!nic_matcher->num_of_builders) {
		mlx5dr_dbg(matcher->tbl->dmn,
			   "Rule not supported on this matcher due to IP related fields\n");
		return -EINVAL;
	}

	return 0;
}
389
/* Populate the STE builder array for one (outer_ipv, inner_ipv)
 * combination. A scratch copy of the enabled mask sections is taken;
 * each mlx5dr_ste_build_* call appends a builder and clears the mask
 * bits it consumes, so any bits left at the end indicate an unsupported
 * mask. Returns 0 on success, -EINVAL when no builder applies,
 * -EOPNOTSUPP when some mask bits were not consumed.
 */
static int dr_matcher_set_ste_builders(struct mlx5dr_matcher *matcher,
				       struct mlx5dr_matcher_rx_tx *nic_matcher,
				       enum mlx5dr_ipv outer_ipv,
				       enum mlx5dr_ipv inner_ipv)
{
	struct mlx5dr_domain_rx_tx *nic_dmn = nic_matcher->nic_tbl->nic_dmn;
	struct mlx5dr_domain *dmn = matcher->tbl->dmn;
	struct mlx5dr_ste_ctx *ste_ctx = dmn->ste_ctx;
	struct mlx5dr_match_param mask = {};
	bool allow_empty_match = false;
	struct mlx5dr_ste_build *sb;
	bool inner, rx;
	int idx = 0;
	int ret, i;

	sb = nic_matcher->ste_builder_arr[outer_ipv][inner_ipv];
	rx = nic_dmn->type == DR_DOMAIN_NIC_TYPE_RX;

	/* Copy only the mask sections enabled by match_criteria into the
	 * scratch mask; builders below clear what they consume.
	 */
	if (matcher->match_criteria & DR_MATCHER_CRITERIA_OUTER)
		mask.outer = matcher->mask.outer;

	if (matcher->match_criteria & DR_MATCHER_CRITERIA_MISC)
		mask.misc = matcher->mask.misc;

	if (matcher->match_criteria & DR_MATCHER_CRITERIA_INNER)
		mask.inner = matcher->mask.inner;

	if (matcher->match_criteria & DR_MATCHER_CRITERIA_MISC2)
		mask.misc2 = matcher->mask.misc2;

	if (matcher->match_criteria & DR_MATCHER_CRITERIA_MISC3)
		mask.misc3 = matcher->mask.misc3;

	if (matcher->match_criteria & DR_MATCHER_CRITERIA_MISC4)
		mask.misc4 = matcher->mask.misc4;

	ret = mlx5dr_ste_build_pre_check(dmn, matcher->match_criteria,
					 &matcher->mask, NULL);
	if (ret)
		return ret;

	/* FDB RX: drop the source_port match from the mask (the RX side is
	 * implicitly bound to the source vport), and allow an otherwise
	 * empty mask to fall through to the catch-all builder below.
	 */
	if (dmn->type == MLX5DR_DOMAIN_TYPE_FDB &&
	    rx && mask.misc.source_port) {
		mask.misc.source_port = 0;
		mask.misc.source_eswitch_owner_vhca_id = 0;
		allow_empty_match = true;
	}

	/* Outer headers (plus misc/misc2/misc3 outer fields) */
	if (matcher->match_criteria & (DR_MATCHER_CRITERIA_OUTER |
				       DR_MATCHER_CRITERIA_MISC |
				       DR_MATCHER_CRITERIA_MISC2 |
				       DR_MATCHER_CRITERIA_MISC3)) {
		inner = false;

		if (dr_mask_is_wqe_metadata_set(&mask.misc2))
			mlx5dr_ste_build_general_purpose(ste_ctx, &sb[idx++],
							 &mask, inner, rx);

		if (dr_mask_is_reg_c_0_3_set(&mask.misc2))
			mlx5dr_ste_build_register_0(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (dr_mask_is_reg_c_4_7_set(&mask.misc2))
			mlx5dr_ste_build_register_1(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (dr_mask_is_gvmi_or_qpn_set(&mask.misc) &&
		    (dmn->type == MLX5DR_DOMAIN_TYPE_FDB ||
		     dmn->type == MLX5DR_DOMAIN_TYPE_NIC_RX)) {
			mlx5dr_ste_build_src_gvmi_qpn(ste_ctx, &sb[idx++],
						      &mask, dmn, inner, rx);
		}

		if (dr_mask_is_smac_set(&mask.outer) &&
		    dr_mask_is_dmac_set(&mask.outer)) {
			mlx5dr_ste_build_eth_l2_src_dst(ste_ctx, &sb[idx++],
							&mask, inner, rx);
		}

		if (dr_mask_is_smac_set(&mask.outer))
			mlx5dr_ste_build_eth_l2_src(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (DR_MASK_IS_L2_DST(mask.outer, mask.misc, outer))
			mlx5dr_ste_build_eth_l2_dst(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (outer_ipv == DR_RULE_IPV6) {
			if (dr_mask_is_dst_addr_set(&mask.outer))
				mlx5dr_ste_build_eth_l3_ipv6_dst(ste_ctx, &sb[idx++],
								 &mask, inner, rx);

			if (dr_mask_is_src_addr_set(&mask.outer))
				mlx5dr_ste_build_eth_l3_ipv6_src(ste_ctx, &sb[idx++],
								 &mask, inner, rx);

			if (DR_MASK_IS_ETH_L4_SET(mask.outer, mask.misc, outer))
				mlx5dr_ste_build_eth_ipv6_l3_l4(ste_ctx, &sb[idx++],
								&mask, inner, rx);
		} else {
			if (dr_mask_is_ipv4_5_tuple_set(&mask.outer))
				mlx5dr_ste_build_eth_l3_ipv4_5_tuple(ste_ctx, &sb[idx++],
								     &mask, inner, rx);

			if (dr_mask_is_ttl_set(&mask.outer))
				mlx5dr_ste_build_eth_l3_ipv4_misc(ste_ctx, &sb[idx++],
								  &mask, inner, rx);
		}

		/* Tunnel headers are mutually exclusive per packet */
		if (dr_mask_is_tnl_vxlan_gpe(&mask, dmn))
			mlx5dr_ste_build_tnl_vxlan_gpe(ste_ctx, &sb[idx++],
						       &mask, inner, rx);
		else if (dr_mask_is_tnl_geneve(&mask, dmn)) {
			mlx5dr_ste_build_tnl_geneve(ste_ctx, &sb[idx++],
						    &mask, inner, rx);
			if (dr_mask_is_tnl_geneve_tlv_opt(&mask.misc3))
				mlx5dr_ste_build_tnl_geneve_tlv_opt(ste_ctx, &sb[idx++],
								    &mask, &dmn->info.caps,
								    inner, rx);
		} else if (dr_mask_is_tnl_gtpu_any(&mask, dmn)) {
			if (dr_mask_is_tnl_gtpu_flex_parser_0(&mask, dmn))
				mlx5dr_ste_build_tnl_gtpu_flex_parser_0(ste_ctx, &sb[idx++],
									&mask, &dmn->info.caps,
									inner, rx);

			if (dr_mask_is_tnl_gtpu_flex_parser_1(&mask, dmn))
				mlx5dr_ste_build_tnl_gtpu_flex_parser_1(ste_ctx, &sb[idx++],
									&mask, &dmn->info.caps,
									inner, rx);

			if (dr_mask_is_tnl_gtpu(&mask, dmn))
				mlx5dr_ste_build_tnl_gtpu(ste_ctx, &sb[idx++],
							  &mask, inner, rx);
		}

		if (DR_MASK_IS_ETH_L4_MISC_SET(mask.misc3, outer))
			mlx5dr_ste_build_eth_l4_misc(ste_ctx, &sb[idx++],
						     &mask, inner, rx);

		if (DR_MASK_IS_FIRST_MPLS_SET(mask.misc2, outer))
			mlx5dr_ste_build_mpls(ste_ctx, &sb[idx++],
					      &mask, inner, rx);

		if (dr_mask_is_tnl_mpls_over_gre(&mask, dmn))
			mlx5dr_ste_build_tnl_mpls_over_gre(ste_ctx, &sb[idx++],
							   &mask, &dmn->info.caps,
							   inner, rx);
		else if (dr_mask_is_tnl_mpls_over_udp(&mask, dmn))
			mlx5dr_ste_build_tnl_mpls_over_udp(ste_ctx, &sb[idx++],
							   &mask, &dmn->info.caps,
							   inner, rx);

		if (dr_mask_is_icmp(&mask, dmn))
			mlx5dr_ste_build_icmp(ste_ctx, &sb[idx++],
					      &mask, &dmn->info.caps,
					      inner, rx);

		if (dr_mask_is_tnl_gre_set(&mask.misc))
			mlx5dr_ste_build_tnl_gre(ste_ctx, &sb[idx++],
						 &mask, inner, rx);
	}

	/* Inner headers (plus misc/misc2/misc3 inner fields) */
	if (matcher->match_criteria & (DR_MATCHER_CRITERIA_INNER |
				       DR_MATCHER_CRITERIA_MISC |
				       DR_MATCHER_CRITERIA_MISC2 |
				       DR_MATCHER_CRITERIA_MISC3)) {
		inner = true;

		if (dr_mask_is_eth_l2_tnl_set(&mask.misc))
			mlx5dr_ste_build_eth_l2_tnl(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (dr_mask_is_smac_set(&mask.inner) &&
		    dr_mask_is_dmac_set(&mask.inner)) {
			mlx5dr_ste_build_eth_l2_src_dst(ste_ctx, &sb[idx++],
							&mask, inner, rx);
		}

		if (dr_mask_is_smac_set(&mask.inner))
			mlx5dr_ste_build_eth_l2_src(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (DR_MASK_IS_L2_DST(mask.inner, mask.misc, inner))
			mlx5dr_ste_build_eth_l2_dst(ste_ctx, &sb[idx++],
						    &mask, inner, rx);

		if (inner_ipv == DR_RULE_IPV6) {
			if (dr_mask_is_dst_addr_set(&mask.inner))
				mlx5dr_ste_build_eth_l3_ipv6_dst(ste_ctx, &sb[idx++],
								 &mask, inner, rx);

			if (dr_mask_is_src_addr_set(&mask.inner))
				mlx5dr_ste_build_eth_l3_ipv6_src(ste_ctx, &sb[idx++],
								 &mask, inner, rx);

			if (DR_MASK_IS_ETH_L4_SET(mask.inner, mask.misc, inner))
				mlx5dr_ste_build_eth_ipv6_l3_l4(ste_ctx, &sb[idx++],
								&mask, inner, rx);
		} else {
			if (dr_mask_is_ipv4_5_tuple_set(&mask.inner))
				mlx5dr_ste_build_eth_l3_ipv4_5_tuple(ste_ctx, &sb[idx++],
								     &mask, inner, rx);

			if (dr_mask_is_ttl_set(&mask.inner))
				mlx5dr_ste_build_eth_l3_ipv4_misc(ste_ctx, &sb[idx++],
								  &mask, inner, rx);
		}

		if (DR_MASK_IS_ETH_L4_MISC_SET(mask.misc3, inner))
			mlx5dr_ste_build_eth_l4_misc(ste_ctx, &sb[idx++],
						     &mask, inner, rx);

		if (DR_MASK_IS_FIRST_MPLS_SET(mask.misc2, inner))
			mlx5dr_ste_build_mpls(ste_ctx, &sb[idx++],
					      &mask, inner, rx);

		if (dr_mask_is_tnl_mpls_over_gre(&mask, dmn))
			mlx5dr_ste_build_tnl_mpls_over_gre(ste_ctx, &sb[idx++],
							   &mask, &dmn->info.caps,
							   inner, rx);
		else if (dr_mask_is_tnl_mpls_over_udp(&mask, dmn))
			mlx5dr_ste_build_tnl_mpls_over_udp(ste_ctx, &sb[idx++],
							   &mask, &dmn->info.caps,
							   inner, rx);
	}

	/* Programmable flex parser sample fields (not inner/outer specific) */
	if (matcher->match_criteria & DR_MATCHER_CRITERIA_MISC4) {
		if (dr_mask_is_flex_parser_0_3_set(&mask.misc4))
			mlx5dr_ste_build_flex_parser_0(ste_ctx, &sb[idx++],
						       &mask, false, rx);

		if (dr_mask_is_flex_parser_4_7_set(&mask.misc4))
			mlx5dr_ste_build_flex_parser_1(ste_ctx, &sb[idx++],
						       &mask, false, rx);
	}

	/* Empty matcher, takes all */
	if ((!idx && allow_empty_match) ||
	    matcher->match_criteria == DR_MATCHER_CRITERIA_EMPTY)
		mlx5dr_ste_build_empty_always_hit(&sb[idx++], rx);

	if (idx == 0) {
		mlx5dr_err(dmn, "Cannot generate any valid rules from mask\n");
		return -EINVAL;
	}

	/* Check that all mask bits were consumed by some builder */
	for (i = 0; i < sizeof(struct mlx5dr_match_param); i++) {
		if (((u8 *)&mask)[i] != 0) {
			mlx5dr_dbg(dmn, "Mask contains unsupported parameters\n");
			return -EOPNOTSUPP;
		}
	}

	nic_matcher->ste_builder = sb;
	nic_matcher->num_of_builders_arr[outer_ipv][inner_ipv] = idx;

	return 0;
}
655
/* Link a matcher's RX or TX half into the table's matcher chain:
 * end-anchor -> next matcher (or table default miss), start-htbl ->
 * end-anchor, and previous htbl -> this matcher's start-htbl.
 * The hardware tables are written back-to-front so traffic never sees
 * a half-connected chain.
 */
static int dr_matcher_connect(struct mlx5dr_domain *dmn,
			      struct mlx5dr_matcher_rx_tx *curr_nic_matcher,
			      struct mlx5dr_matcher_rx_tx *next_nic_matcher,
			      struct mlx5dr_matcher_rx_tx *prev_nic_matcher)
{
	struct mlx5dr_table_rx_tx *nic_tbl = curr_nic_matcher->nic_tbl;
	struct mlx5dr_domain_rx_tx *nic_dmn = nic_tbl->nic_dmn;
	struct mlx5dr_htbl_connect_info info;
	struct mlx5dr_ste_htbl *prev_htbl;
	int ret;

	/* Connect end anchor hash table to next matcher or to the default address */
	if (next_nic_matcher) {
		info.type = CONNECT_HIT;
		info.hit_next_htbl = next_nic_matcher->s_htbl;
	} else {
		info.type = CONNECT_MISS;
		info.miss_icm_addr = nic_tbl->default_icm_addr;
	}
	ret = mlx5dr_ste_htbl_init_and_postsend(dmn, nic_dmn,
						curr_nic_matcher->e_anchor,
						&info, info.type == CONNECT_HIT);
	if (ret)
		return ret;

	/* Connect start hash table to end anchor */
	info.type = CONNECT_MISS;
	info.miss_icm_addr = curr_nic_matcher->e_anchor->chunk->icm_addr;
	ret = mlx5dr_ste_htbl_init_and_postsend(dmn, nic_dmn,
						curr_nic_matcher->s_htbl,
						&info, false);
	if (ret)
		return ret;

	/* Connect previous hash table to matcher start hash table */
	if (prev_nic_matcher)
		prev_htbl = prev_nic_matcher->e_anchor;
	else
		prev_htbl = nic_tbl->s_anchor;

	info.type = CONNECT_HIT;
	info.hit_next_htbl = curr_nic_matcher->s_htbl;
	ret = mlx5dr_ste_htbl_init_and_postsend(dmn, nic_dmn, prev_htbl,
						&info, true);
	if (ret)
		return ret;

	/* Update the software shadow: pointing STE and next-htbl links */
	curr_nic_matcher->s_htbl->pointing_ste = prev_htbl->ste_arr;
	prev_htbl->ste_arr[0].next_htbl = curr_nic_matcher->s_htbl;

	if (next_nic_matcher) {
		next_nic_matcher->s_htbl->pointing_ste = curr_nic_matcher->e_anchor->ste_arr;
		curr_nic_matcher->e_anchor->ste_arr[0].next_htbl = next_nic_matcher->s_htbl;
	}

	return 0;
}
714
/* Insert a matcher into its table's list, ordered by ascending priority,
 * and hardware-connect its RX/TX halves to the neighbouring matchers.
 * The list is only modified after both connects succeed.
 */
static int dr_matcher_add_to_tbl(struct mlx5dr_matcher *matcher)
{
	struct mlx5dr_matcher *next_matcher, *prev_matcher, *tmp_matcher;
	struct mlx5dr_table *tbl = matcher->tbl;
	struct mlx5dr_domain *dmn = tbl->dmn;
	bool first = true;
	int ret;

	/* Find the first existing matcher with priority >= ours; we will be
	 * inserted just before it (equal priorities keep insertion order).
	 */
	next_matcher = NULL;
	list_for_each_entry(tmp_matcher, &tbl->matcher_list, matcher_list) {
		if (tmp_matcher->prio >= matcher->prio) {
			next_matcher = tmp_matcher;
			break;
		}
		first = false;
	}

	prev_matcher = NULL;
	if (next_matcher && !first)
		prev_matcher = list_prev_entry(next_matcher, matcher_list);
	else if (!first)
		prev_matcher = list_last_entry(&tbl->matcher_list,
					       struct mlx5dr_matcher,
					       matcher_list);

	if (dmn->type == MLX5DR_DOMAIN_TYPE_FDB ||
	    dmn->type == MLX5DR_DOMAIN_TYPE_NIC_RX) {
		ret = dr_matcher_connect(dmn, &matcher->rx,
					 next_matcher ? &next_matcher->rx : NULL,
					 prev_matcher ? &prev_matcher->rx : NULL);
		if (ret)
			return ret;
	}

	if (dmn->type == MLX5DR_DOMAIN_TYPE_FDB ||
	    dmn->type == MLX5DR_DOMAIN_TYPE_NIC_TX) {
		ret = dr_matcher_connect(dmn, &matcher->tx,
					 next_matcher ? &next_matcher->tx : NULL,
					 prev_matcher ? &prev_matcher->tx : NULL);
		if (ret)
			return ret;
	}

	/* Splice into the list at the position found above */
	if (prev_matcher)
		list_add(&matcher->matcher_list, &prev_matcher->matcher_list);
	else if (next_matcher)
		list_add_tail(&matcher->matcher_list,
			      &next_matcher->matcher_list);
	else
		list_add(&matcher->matcher_list, &tbl->matcher_list);

	return 0;
}
768
/* Drop the references taken in dr_matcher_init_nic(); the hash tables
 * are freed on the last put.
 */
static void dr_matcher_uninit_nic(struct mlx5dr_matcher_rx_tx *nic_matcher)
{
	mlx5dr_htbl_put(nic_matcher->s_htbl);
	mlx5dr_htbl_put(nic_matcher->e_anchor);
}
774
/* FDB matchers own both an RX and a TX half; tear down both. */
static void dr_matcher_uninit_fdb(struct mlx5dr_matcher *matcher)
{
	dr_matcher_uninit_nic(&matcher->rx);
	dr_matcher_uninit_nic(&matcher->tx);
}
780
/* Tear down the matcher halves that exist for the domain type. */
static void dr_matcher_uninit(struct mlx5dr_matcher *matcher)
{
	struct mlx5dr_domain *dmn = matcher->tbl->dmn;

	switch (dmn->type) {
	case MLX5DR_DOMAIN_TYPE_NIC_RX:
		dr_matcher_uninit_nic(&matcher->rx);
		break;
	case MLX5DR_DOMAIN_TYPE_NIC_TX:
		dr_matcher_uninit_nic(&matcher->tx);
		break;
	case MLX5DR_DOMAIN_TYPE_FDB:
		dr_matcher_uninit_fdb(matcher);
		break;
	default:
		/* Unknown domain type: nothing to release */
		WARN_ON(true);
		break;
	}
}
800
/* Try to build STE builder arrays for all four outer/inner IP version
 * combinations. Individual combinations may legitimately fail (their
 * return values are ignored); it is an error only if none succeeded,
 * detected via ste_builder remaining unset.
 */
static int dr_matcher_set_all_ste_builders(struct mlx5dr_matcher *matcher,
					   struct mlx5dr_matcher_rx_tx *nic_matcher)
{
	struct mlx5dr_domain *dmn = matcher->tbl->dmn;

	dr_matcher_set_ste_builders(matcher, nic_matcher, DR_RULE_IPV4, DR_RULE_IPV4);
	dr_matcher_set_ste_builders(matcher, nic_matcher, DR_RULE_IPV4, DR_RULE_IPV6);
	dr_matcher_set_ste_builders(matcher, nic_matcher, DR_RULE_IPV6, DR_RULE_IPV4);
	dr_matcher_set_ste_builders(matcher, nic_matcher, DR_RULE_IPV6, DR_RULE_IPV6);

	if (!nic_matcher->ste_builder) {
		mlx5dr_err(dmn, "Cannot generate IPv4 or IPv6 rules with given mask\n");
		return -EINVAL;
	}

	return 0;
}
818
/* Initialize one RX/TX half of a matcher: compute its STE builders and
 * allocate its start hash table and end anchor from the domain ICM pool.
 * Returns 0 on success; -ENOMEM frees anything already allocated.
 */
static int dr_matcher_init_nic(struct mlx5dr_matcher *matcher,
			       struct mlx5dr_matcher_rx_tx *nic_matcher)
{
	struct mlx5dr_domain *dmn = matcher->tbl->dmn;
	int ret;

	ret = dr_matcher_set_all_ste_builders(matcher, nic_matcher);
	if (ret)
		return ret;

	/* End anchor: a single don't-care STE */
	nic_matcher->e_anchor = mlx5dr_ste_htbl_alloc(dmn->ste_icm_pool,
						      DR_CHUNK_SIZE_1,
						      MLX5DR_STE_LU_TYPE_DONT_CARE,
						      0);
	if (!nic_matcher->e_anchor)
		return -ENOMEM;

	/* Start table uses the lookup type/mask of the first builder */
	nic_matcher->s_htbl = mlx5dr_ste_htbl_alloc(dmn->ste_icm_pool,
						    DR_CHUNK_SIZE_1,
						    nic_matcher->ste_builder[0].lu_type,
						    nic_matcher->ste_builder[0].byte_mask);
	if (!nic_matcher->s_htbl) {
		ret = -ENOMEM;
		goto free_e_htbl;
	}

	/* Keep both tables alive while the matcher exists; released in
	 * dr_matcher_uninit_nic().
	 */
	mlx5dr_htbl_get(nic_matcher->s_htbl);
	mlx5dr_htbl_get(nic_matcher->e_anchor);

	return 0;

free_e_htbl:
	mlx5dr_ste_htbl_free(nic_matcher->e_anchor);
	return ret;
}
855
/* FDB matchers need both halves; undo RX if TX initialization fails. */
static int dr_matcher_init_fdb(struct mlx5dr_matcher *matcher)
{
	int ret;

	ret = dr_matcher_init_nic(matcher, &matcher->rx);
	if (ret)
		return ret;

	ret = dr_matcher_init_nic(matcher, &matcher->tx);
	if (ret)
		goto uninit_nic_rx;

	return 0;

uninit_nic_rx:
	dr_matcher_uninit_nic(&matcher->rx);
	return ret;
}
874
/* Validate the matcher attributes, copy the caller's match mask, and
 * initialize the halves required by the domain type.
 * Returns 0 on success or a negative errno.
 */
static int dr_matcher_init(struct mlx5dr_matcher *matcher,
			   struct mlx5dr_match_parameters *mask)
{
	struct mlx5dr_table *tbl = matcher->tbl;
	struct mlx5dr_domain *dmn = tbl->dmn;
	int ret;

	if (matcher->match_criteria >= DR_MATCHER_CRITERIA_MAX) {
		mlx5dr_err(dmn, "Invalid match criteria attribute\n");
		return -EINVAL;
	}

	if (mask) {
		if (mask->match_sz > DR_SZ_MATCH_PARAM) {
			mlx5dr_err(dmn, "Invalid match size attribute\n");
			return -EINVAL;
		}
		/* Copy only the sections selected by match_criteria */
		mlx5dr_ste_copy_param(matcher->match_criteria,
				      &matcher->mask, mask);
	}

	switch (dmn->type) {
	case MLX5DR_DOMAIN_TYPE_NIC_RX:
		matcher->rx.nic_tbl = &tbl->rx;
		ret = dr_matcher_init_nic(matcher, &matcher->rx);
		break;
	case MLX5DR_DOMAIN_TYPE_NIC_TX:
		matcher->tx.nic_tbl = &tbl->tx;
		ret = dr_matcher_init_nic(matcher, &matcher->tx);
		break;
	case MLX5DR_DOMAIN_TYPE_FDB:
		matcher->rx.nic_tbl = &tbl->rx;
		matcher->tx.nic_tbl = &tbl->tx;
		ret = dr_matcher_init_fdb(matcher);
		break;
	default:
		WARN_ON(true);
		return -EINVAL;
	}

	return ret;
}
917
/* Create a matcher in @tbl with the given priority, match criteria
 * selector and mask, and link it into the table under the domain lock.
 * Takes a reference on the table for the matcher's lifetime.
 * Returns the new matcher, or NULL on any failure.
 */
struct mlx5dr_matcher *
mlx5dr_matcher_create(struct mlx5dr_table *tbl,
		      u32 priority,
		      u8 match_criteria_enable,
		      struct mlx5dr_match_parameters *mask)
{
	struct mlx5dr_matcher *matcher;
	int ret;

	refcount_inc(&tbl->refcount);

	matcher = kzalloc(sizeof(*matcher), GFP_KERNEL);
	if (!matcher)
		goto dec_ref;

	matcher->tbl = tbl;
	matcher->prio = priority;
	matcher->match_criteria = match_criteria_enable;
	refcount_set(&matcher->refcount, 1);
	INIT_LIST_HEAD(&matcher->matcher_list);

	mlx5dr_domain_lock(tbl->dmn);

	ret = dr_matcher_init(matcher, mask);
	if (ret)
		goto free_matcher;

	ret = dr_matcher_add_to_tbl(matcher);
	if (ret)
		goto matcher_uninit;

	mlx5dr_domain_unlock(tbl->dmn);

	return matcher;

matcher_uninit:
	dr_matcher_uninit(matcher);
free_matcher:
	mlx5dr_domain_unlock(tbl->dmn);
	kfree(matcher);
dec_ref:
	refcount_dec(&tbl->refcount);
	return NULL;
}
962
/* Unlink a matcher half from the chain: point the previous anchor
 * (previous matcher's end anchor, or the table's start anchor) at the
 * next matcher's start table, or at the table default miss address when
 * this was the last matcher. Updates the software shadow accordingly.
 */
static int dr_matcher_disconnect(struct mlx5dr_domain *dmn,
				 struct mlx5dr_table_rx_tx *nic_tbl,
				 struct mlx5dr_matcher_rx_tx *next_nic_matcher,
				 struct mlx5dr_matcher_rx_tx *prev_nic_matcher)
{
	struct mlx5dr_domain_rx_tx *nic_dmn = nic_tbl->nic_dmn;
	struct mlx5dr_htbl_connect_info info;
	struct mlx5dr_ste_htbl *prev_anchor;

	if (prev_nic_matcher)
		prev_anchor = prev_nic_matcher->e_anchor;
	else
		prev_anchor = nic_tbl->s_anchor;

	/* Connect previous anchor hash table to next matcher or to the default address */
	if (next_nic_matcher) {
		info.type = CONNECT_HIT;
		info.hit_next_htbl = next_nic_matcher->s_htbl;
		next_nic_matcher->s_htbl->pointing_ste = prev_anchor->ste_arr;
		prev_anchor->ste_arr[0].next_htbl = next_nic_matcher->s_htbl;
	} else {
		info.type = CONNECT_MISS;
		info.miss_icm_addr = nic_tbl->default_icm_addr;
		prev_anchor->ste_arr[0].next_htbl = NULL;
	}

	return mlx5dr_ste_htbl_init_and_postsend(dmn, nic_dmn, prev_anchor,
						 &info, true);
}
992
/* Remove a matcher from its table: hardware-disconnect the RX/TX halves
 * from their list neighbours, then delete it from the matcher list.
 * The list is only modified after both disconnects succeed.
 */
static int dr_matcher_remove_from_tbl(struct mlx5dr_matcher *matcher)
{
	struct mlx5dr_matcher *prev_matcher, *next_matcher;
	struct mlx5dr_table *tbl = matcher->tbl;
	struct mlx5dr_domain *dmn = tbl->dmn;
	int ret = 0;

	if (list_is_last(&matcher->matcher_list, &tbl->matcher_list))
		next_matcher = NULL;
	else
		next_matcher = list_next_entry(matcher, matcher_list);

	if (matcher->matcher_list.prev == &tbl->matcher_list)
		prev_matcher = NULL;
	else
		prev_matcher = list_prev_entry(matcher, matcher_list);

	if (dmn->type == MLX5DR_DOMAIN_TYPE_FDB ||
	    dmn->type == MLX5DR_DOMAIN_TYPE_NIC_RX) {
		ret = dr_matcher_disconnect(dmn, &tbl->rx,
					    next_matcher ? &next_matcher->rx : NULL,
					    prev_matcher ? &prev_matcher->rx : NULL);
		if (ret)
			return ret;
	}

	if (dmn->type == MLX5DR_DOMAIN_TYPE_FDB ||
	    dmn->type == MLX5DR_DOMAIN_TYPE_NIC_TX) {
		ret = dr_matcher_disconnect(dmn, &tbl->tx,
					    next_matcher ? &next_matcher->tx : NULL,
					    prev_matcher ? &prev_matcher->tx : NULL);
		if (ret)
			return ret;
	}

	list_del(&matcher->matcher_list);

	return 0;
}
1032
/* Destroy a matcher. Fails with -EBUSY while rules still hold references.
 * Under the domain lock: unlink from the table, release the hash tables,
 * and drop the table reference taken at creation.
 */
int mlx5dr_matcher_destroy(struct mlx5dr_matcher *matcher)
{
	struct mlx5dr_table *tbl = matcher->tbl;

	if (refcount_read(&matcher->refcount) > 1)
		return -EBUSY;

	mlx5dr_domain_lock(tbl->dmn);

	dr_matcher_remove_from_tbl(matcher);
	dr_matcher_uninit(matcher);
	refcount_dec(&matcher->tbl->refcount);

	mlx5dr_domain_unlock(tbl->dmn);
	kfree(matcher);

	return 0;
}
1051