1 // SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB
2 /* Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved. */
3 
4 #include <linux/types.h>
5 #include "mlx5_ifc_dr_ste_v1.h"
6 #include "dr_ste.h"
7 
8 #define DR_STE_CALC_DFNR_TYPE(lookup_type, inner) \
9 	((inner) ? DR_STE_V1_LU_TYPE_##lookup_type##_I : \
10 		   DR_STE_V1_LU_TYPE_##lookup_type##_O)
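/* Example: DR_STE_CALC_DFNR_TYPE(ETHL2_SRC_DST, sb->inner) expands to
 * DR_STE_V1_LU_TYPE_ETHL2_SRC_DST_I when matching on the inner header and
 * to DR_STE_V1_LU_TYPE_ETHL2_SRC_DST_O when matching on the outer header.
 */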
11 
12 enum dr_ste_v1_entry_format {
13 	DR_STE_V1_TYPE_BWC_BYTE	= 0x0,
14 	DR_STE_V1_TYPE_BWC_DW	= 0x1,
15 	DR_STE_V1_TYPE_MATCH	= 0x2,
16 };
17 
18 /* Lookup type is built from 2B: [ Definer mode 1B ][ Definer index 1B ] */
19 enum {
20 	DR_STE_V1_LU_TYPE_NOP				= 0x0000,
21 	DR_STE_V1_LU_TYPE_ETHL2_TNL			= 0x0002,
22 	DR_STE_V1_LU_TYPE_IBL3_EXT			= 0x0102,
23 	DR_STE_V1_LU_TYPE_ETHL2_O			= 0x0003,
24 	DR_STE_V1_LU_TYPE_IBL4				= 0x0103,
25 	DR_STE_V1_LU_TYPE_ETHL2_I			= 0x0004,
26 	DR_STE_V1_LU_TYPE_SRC_QP_GVMI			= 0x0104,
27 	DR_STE_V1_LU_TYPE_ETHL2_SRC_O			= 0x0005,
28 	DR_STE_V1_LU_TYPE_ETHL2_HEADERS_O		= 0x0105,
29 	DR_STE_V1_LU_TYPE_ETHL2_SRC_I			= 0x0006,
30 	DR_STE_V1_LU_TYPE_ETHL2_HEADERS_I		= 0x0106,
31 	DR_STE_V1_LU_TYPE_ETHL3_IPV4_5_TUPLE_O		= 0x0007,
32 	DR_STE_V1_LU_TYPE_IPV6_DES_O			= 0x0107,
33 	DR_STE_V1_LU_TYPE_ETHL3_IPV4_5_TUPLE_I		= 0x0008,
34 	DR_STE_V1_LU_TYPE_IPV6_DES_I			= 0x0108,
35 	DR_STE_V1_LU_TYPE_ETHL4_O			= 0x0009,
36 	DR_STE_V1_LU_TYPE_IPV6_SRC_O			= 0x0109,
37 	DR_STE_V1_LU_TYPE_ETHL4_I			= 0x000a,
38 	DR_STE_V1_LU_TYPE_IPV6_SRC_I			= 0x010a,
39 	DR_STE_V1_LU_TYPE_ETHL2_SRC_DST_O		= 0x000b,
40 	DR_STE_V1_LU_TYPE_MPLS_O			= 0x010b,
41 	DR_STE_V1_LU_TYPE_ETHL2_SRC_DST_I		= 0x000c,
42 	DR_STE_V1_LU_TYPE_MPLS_I			= 0x010c,
43 	DR_STE_V1_LU_TYPE_ETHL3_IPV4_MISC_O		= 0x000d,
44 	DR_STE_V1_LU_TYPE_GRE				= 0x010d,
45 	DR_STE_V1_LU_TYPE_FLEX_PARSER_TNL_HEADER	= 0x000e,
46 	DR_STE_V1_LU_TYPE_GENERAL_PURPOSE		= 0x010e,
47 	DR_STE_V1_LU_TYPE_ETHL3_IPV4_MISC_I		= 0x000f,
48 	DR_STE_V1_LU_TYPE_STEERING_REGISTERS_0		= 0x010f,
49 	DR_STE_V1_LU_TYPE_STEERING_REGISTERS_1		= 0x0110,
50 	DR_STE_V1_LU_TYPE_FLEX_PARSER_0			= 0x0111,
51 	DR_STE_V1_LU_TYPE_FLEX_PARSER_1			= 0x0112,
52 	DR_STE_V1_LU_TYPE_ETHL4_MISC_O			= 0x0113,
53 	DR_STE_V1_LU_TYPE_ETHL4_MISC_I			= 0x0114,
54 	DR_STE_V1_LU_TYPE_INVALID			= 0x00ff,
55 	DR_STE_V1_LU_TYPE_DONT_CARE			= MLX5DR_STE_LU_TYPE_DONT_CARE,
56 };
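/* Note on the encoding above: dr_ste_v1_set_lu_type() below writes the high
 * byte as the STE entry format and the low byte as the definer index. For
 * example, DR_STE_V1_LU_TYPE_IBL3_EXT (0x0102) selects definer index 0x02
 * with the DW-granularity BWC format (0x01), while
 * DR_STE_V1_LU_TYPE_ETHL2_TNL (0x0002) uses the same definer index with the
 * byte-granularity BWC format (0x00).
 */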
57 
58 enum dr_ste_v1_header_anchors {
59 	DR_STE_HEADER_ANCHOR_START_OUTER		= 0x00,
60 	DR_STE_HEADER_ANCHOR_1ST_VLAN			= 0x02,
61 	DR_STE_HEADER_ANCHOR_IPV6_IPV4			= 0x07,
62 	DR_STE_HEADER_ANCHOR_INNER_MAC			= 0x13,
63 	DR_STE_HEADER_ANCHOR_INNER_IPV6_IPV4		= 0x19,
64 };
65 
66 enum dr_ste_v1_action_size {
67 	DR_STE_ACTION_SINGLE_SZ = 4,
68 	DR_STE_ACTION_DOUBLE_SZ = 8,
69 	DR_STE_ACTION_TRIPLE_SZ = 12,
70 };
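/* Action sizes are in bytes: a single action takes one dword, a double
 * action two. A freshly added MATCH STE provides DR_STE_ACTION_TRIPLE_SZ
 * bytes of action space (see dr_ste_v1_arr_init_next_match() below).
 */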
71 
72 enum dr_ste_v1_action_insert_ptr_attr {
73 	DR_STE_V1_ACTION_INSERT_PTR_ATTR_NONE = 0,  /* Regular push header (e.g. push vlan) */
74 	DR_STE_V1_ACTION_INSERT_PTR_ATTR_ENCAP = 1, /* Encapsulation / Tunneling */
75 	DR_STE_V1_ACTION_INSERT_PTR_ATTR_ESP = 2,   /* IPsec */
76 };
77 
78 enum dr_ste_v1_action_id {
79 	DR_STE_V1_ACTION_ID_NOP				= 0x00,
80 	DR_STE_V1_ACTION_ID_COPY			= 0x05,
81 	DR_STE_V1_ACTION_ID_SET				= 0x06,
82 	DR_STE_V1_ACTION_ID_ADD				= 0x07,
83 	DR_STE_V1_ACTION_ID_REMOVE_BY_SIZE		= 0x08,
84 	DR_STE_V1_ACTION_ID_REMOVE_HEADER_TO_HEADER	= 0x09,
85 	DR_STE_V1_ACTION_ID_INSERT_INLINE		= 0x0a,
86 	DR_STE_V1_ACTION_ID_INSERT_POINTER		= 0x0b,
87 	DR_STE_V1_ACTION_ID_FLOW_TAG			= 0x0c,
88 	DR_STE_V1_ACTION_ID_QUEUE_ID_SEL		= 0x0d,
89 	DR_STE_V1_ACTION_ID_ACCELERATED_LIST		= 0x0e,
90 	DR_STE_V1_ACTION_ID_MODIFY_LIST			= 0x0f,
91 	DR_STE_V1_ACTION_ID_TRAILER			= 0x13,
92 	DR_STE_V1_ACTION_ID_COUNTER_ID			= 0x14,
93 	DR_STE_V1_ACTION_ID_MAX				= 0x21,
94 	/* use for special cases */
95 	DR_STE_V1_ACTION_ID_SPECIAL_ENCAP_L3		= 0x22,
96 };
97 
98 enum {
99 	DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_0		= 0x00,
100 	DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_1		= 0x01,
101 	DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_2		= 0x02,
102 	DR_STE_V1_ACTION_MDFY_FLD_SRC_L2_OUT_0		= 0x08,
103 	DR_STE_V1_ACTION_MDFY_FLD_SRC_L2_OUT_1		= 0x09,
104 	DR_STE_V1_ACTION_MDFY_FLD_L3_OUT_0		= 0x0e,
105 	DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_0		= 0x18,
106 	DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_1		= 0x19,
107 	DR_STE_V1_ACTION_MDFY_FLD_IPV4_OUT_0		= 0x40,
108 	DR_STE_V1_ACTION_MDFY_FLD_IPV4_OUT_1		= 0x41,
109 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_0	= 0x44,
110 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_1	= 0x45,
111 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_2	= 0x46,
112 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_3	= 0x47,
113 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_0	= 0x4c,
114 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_1	= 0x4d,
115 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_2	= 0x4e,
116 	DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_3	= 0x4f,
117 	DR_STE_V1_ACTION_MDFY_FLD_TCP_MISC_0		= 0x5e,
118 	DR_STE_V1_ACTION_MDFY_FLD_TCP_MISC_1		= 0x5f,
119 	DR_STE_V1_ACTION_MDFY_FLD_CFG_HDR_0_0		= 0x6f,
120 	DR_STE_V1_ACTION_MDFY_FLD_CFG_HDR_0_1		= 0x70,
121 	DR_STE_V1_ACTION_MDFY_FLD_METADATA_2_CQE	= 0x7b,
122 	DR_STE_V1_ACTION_MDFY_FLD_GNRL_PURPOSE		= 0x7c,
123 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_2		= 0x8c,
124 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_3		= 0x8d,
125 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_4		= 0x8e,
126 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_5		= 0x8f,
127 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_6		= 0x90,
128 	DR_STE_V1_ACTION_MDFY_FLD_REGISTER_7		= 0x91,
129 };
130 
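/* Maps SW steering modify-header fields (MLX5_ACTION_IN_FIELD_*) to the
 * STEv1 hardware modify-header field IDs above, along with the bit range
 * ([start, end]) of the field within the hardware dword and, where needed,
 * the L3/L4 type the field applies to.
 */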
131 static const struct mlx5dr_ste_action_modify_field dr_ste_v1_action_modify_field_arr[] = {
132 	[MLX5_ACTION_IN_FIELD_OUT_SMAC_47_16] = {
133 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_SRC_L2_OUT_0, .start = 0, .end = 31,
134 	},
135 	[MLX5_ACTION_IN_FIELD_OUT_SMAC_15_0] = {
136 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_SRC_L2_OUT_1, .start = 16, .end = 31,
137 	},
138 	[MLX5_ACTION_IN_FIELD_OUT_ETHERTYPE] = {
139 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_1, .start = 0, .end = 15,
140 	},
141 	[MLX5_ACTION_IN_FIELD_OUT_DMAC_47_16] = {
142 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_0, .start = 0, .end = 31,
143 	},
144 	[MLX5_ACTION_IN_FIELD_OUT_DMAC_15_0] = {
145 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_1, .start = 16, .end = 31,
146 	},
147 	[MLX5_ACTION_IN_FIELD_OUT_IP_DSCP] = {
148 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L3_OUT_0, .start = 18, .end = 23,
149 	},
150 	[MLX5_ACTION_IN_FIELD_OUT_TCP_FLAGS] = {
151 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_1, .start = 16, .end = 24,
152 		.l4_type = DR_STE_ACTION_MDFY_TYPE_L4_TCP,
153 	},
154 	[MLX5_ACTION_IN_FIELD_OUT_TCP_SPORT] = {
155 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_0, .start = 16, .end = 31,
156 		.l4_type = DR_STE_ACTION_MDFY_TYPE_L4_TCP,
157 	},
158 	[MLX5_ACTION_IN_FIELD_OUT_TCP_DPORT] = {
159 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_0, .start = 0, .end = 15,
160 		.l4_type = DR_STE_ACTION_MDFY_TYPE_L4_TCP,
161 	},
162 	[MLX5_ACTION_IN_FIELD_OUT_IP_TTL] = {
163 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L3_OUT_0, .start = 8, .end = 15,
164 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV4,
165 	},
166 	[MLX5_ACTION_IN_FIELD_OUT_IPV6_HOPLIMIT] = {
167 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L3_OUT_0, .start = 8, .end = 15,
168 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
169 	},
170 	[MLX5_ACTION_IN_FIELD_OUT_UDP_SPORT] = {
171 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_0, .start = 16, .end = 31,
172 		.l4_type = DR_STE_ACTION_MDFY_TYPE_L4_UDP,
173 	},
174 	[MLX5_ACTION_IN_FIELD_OUT_UDP_DPORT] = {
175 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L4_OUT_0, .start = 0, .end = 15,
176 		.l4_type = DR_STE_ACTION_MDFY_TYPE_L4_UDP,
177 	},
178 	[MLX5_ACTION_IN_FIELD_OUT_SIPV6_127_96] = {
179 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_0, .start = 0, .end = 31,
180 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
181 	},
182 	[MLX5_ACTION_IN_FIELD_OUT_SIPV6_95_64] = {
183 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_1, .start = 0, .end = 31,
184 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
185 	},
186 	[MLX5_ACTION_IN_FIELD_OUT_SIPV6_63_32] = {
187 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_2, .start = 0, .end = 31,
188 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
189 	},
190 	[MLX5_ACTION_IN_FIELD_OUT_SIPV6_31_0] = {
191 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_SRC_OUT_3, .start = 0, .end = 31,
192 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
193 	},
194 	[MLX5_ACTION_IN_FIELD_OUT_DIPV6_127_96] = {
195 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_0, .start = 0, .end = 31,
196 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
197 	},
198 	[MLX5_ACTION_IN_FIELD_OUT_DIPV6_95_64] = {
199 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_1, .start = 0, .end = 31,
200 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
201 	},
202 	[MLX5_ACTION_IN_FIELD_OUT_DIPV6_63_32] = {
203 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_2, .start = 0, .end = 31,
204 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
205 	},
206 	[MLX5_ACTION_IN_FIELD_OUT_DIPV6_31_0] = {
207 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV6_DST_OUT_3, .start = 0, .end = 31,
208 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV6,
209 	},
210 	[MLX5_ACTION_IN_FIELD_OUT_SIPV4] = {
211 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV4_OUT_0, .start = 0, .end = 31,
212 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV4,
213 	},
214 	[MLX5_ACTION_IN_FIELD_OUT_DIPV4] = {
215 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_IPV4_OUT_1, .start = 0, .end = 31,
216 		.l3_type = DR_STE_ACTION_MDFY_TYPE_L3_IPV4,
217 	},
218 	[MLX5_ACTION_IN_FIELD_METADATA_REG_A] = {
219 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_GNRL_PURPOSE, .start = 0, .end = 31,
220 	},
221 	[MLX5_ACTION_IN_FIELD_METADATA_REG_B] = {
222 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_METADATA_2_CQE, .start = 0, .end = 31,
223 	},
224 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_0] = {
225 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_6, .start = 0, .end = 31,
226 	},
227 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_1] = {
228 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_7, .start = 0, .end = 31,
229 	},
230 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_2] = {
231 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_4, .start = 0, .end = 31,
232 	},
233 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_3] = {
234 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_5, .start = 0, .end = 31,
235 	},
236 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_4] = {
237 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_2, .start = 0, .end = 31,
238 	},
239 	[MLX5_ACTION_IN_FIELD_METADATA_REG_C_5] = {
240 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_REGISTER_3, .start = 0, .end = 31,
241 	},
242 	[MLX5_ACTION_IN_FIELD_OUT_TCP_SEQ_NUM] = {
243 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_TCP_MISC_0, .start = 0, .end = 31,
244 	},
245 	[MLX5_ACTION_IN_FIELD_OUT_TCP_ACK_NUM] = {
246 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_TCP_MISC_1, .start = 0, .end = 31,
247 	},
248 	[MLX5_ACTION_IN_FIELD_OUT_FIRST_VID] = {
249 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_L2_OUT_2, .start = 0, .end = 15,
250 	},
251 	[MLX5_ACTION_IN_FIELD_OUT_EMD_31_0] = {
252 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_CFG_HDR_0_1, .start = 0, .end = 31,
253 	},
254 	[MLX5_ACTION_IN_FIELD_OUT_EMD_47_32] = {
255 		.hw_field = DR_STE_V1_ACTION_MDFY_FLD_CFG_HDR_0_0, .start = 0, .end = 15,
256 	},
257 };
258 
259 static void dr_ste_v1_set_entry_type(u8 *hw_ste_p, u8 entry_type)
260 {
261 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, entry_format, entry_type);
262 }
263 
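/* The miss address is a 64-byte aligned ICM address: it is stored as an
 * index (miss_addr >> 6) split across the miss_address_39_32 and
 * miss_address_31_6 fields.
 */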
264 static void dr_ste_v1_set_miss_addr(u8 *hw_ste_p, u64 miss_addr)
265 {
266 	u64 index = miss_addr >> 6;
267 
268 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, miss_address_39_32, index >> 26);
269 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, miss_address_31_6, index);
270 }
271 
272 static u64 dr_ste_v1_get_miss_addr(u8 *hw_ste_p)
273 {
274 	u64 index =
275 		((u64)MLX5_GET(ste_match_bwc_v1, hw_ste_p, miss_address_31_6) |
276 		 ((u64)MLX5_GET(ste_match_bwc_v1, hw_ste_p, miss_address_39_32)) << 26);
277 
278 	return index << 6;
279 }
280 
281 static void dr_ste_v1_set_byte_mask(u8 *hw_ste_p, u16 byte_mask)
282 {
283 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, byte_mask, byte_mask);
284 }
285 
286 static u16 dr_ste_v1_get_byte_mask(u8 *hw_ste_p)
287 {
288 	return MLX5_GET(ste_match_bwc_v1, hw_ste_p, byte_mask);
289 }
290 
291 static void dr_ste_v1_set_lu_type(u8 *hw_ste_p, u16 lu_type)
292 {
293 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, entry_format, lu_type >> 8);
294 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, match_definer_ctx_idx, lu_type & 0xFF);
295 }
296 
297 static void dr_ste_v1_set_next_lu_type(u8 *hw_ste_p, u16 lu_type)
298 {
299 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, next_entry_format, lu_type >> 8);
300 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, hash_definer_ctx_idx, lu_type & 0xFF);
301 }
302 
303 static u16 dr_ste_v1_get_next_lu_type(u8 *hw_ste_p)
304 {
305 	u8 mode = MLX5_GET(ste_match_bwc_v1, hw_ste_p, next_entry_format);
306 	u8 index = MLX5_GET(ste_match_bwc_v1, hw_ste_p, hash_definer_ctx_idx);
307 
308 	return (mode << 8 | index);
309 }
310 
311 static void dr_ste_v1_set_hit_gvmi(u8 *hw_ste_p, u16 gvmi)
312 {
313 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, next_table_base_63_48, gvmi);
314 }
315 
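/* The hit (next table) address is 32-byte aligned; the low bits of the
 * resulting index carry the hash table size, and the value is split across
 * the next_table_base_39_32_size and next_table_base_31_5_size fields.
 */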
316 static void dr_ste_v1_set_hit_addr(u8 *hw_ste_p, u64 icm_addr, u32 ht_size)
317 {
318 	u64 index = (icm_addr >> 5) | ht_size;
319 
320 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, next_table_base_39_32_size, index >> 27);
321 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, next_table_base_31_5_size, index);
322 }
323 
324 static void dr_ste_v1_init(u8 *hw_ste_p, u16 lu_type,
325 			   bool is_rx, u16 gvmi)
326 {
327 	dr_ste_v1_set_lu_type(hw_ste_p, lu_type);
328 	dr_ste_v1_set_next_lu_type(hw_ste_p, MLX5DR_STE_LU_TYPE_DONT_CARE);
329 
330 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, gvmi, gvmi);
331 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, next_table_base_63_48, gvmi);
332 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, miss_address_63_48, gvmi);
333 }
334 
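/* Before an STE is posted to the device, its tag and mask areas are swapped
 * in place; a control-only write (ste_size == DR_STE_SIZE_CTRL) is left
 * untouched.
 */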
335 static void dr_ste_v1_prepare_for_postsend(u8 *hw_ste_p,
336 					   u32 ste_size)
337 {
338 	u8 *tag = hw_ste_p + DR_STE_SIZE_CTRL;
339 	u8 *mask = tag + DR_STE_SIZE_TAG;
340 	u8 tmp_tag[DR_STE_SIZE_TAG] = {};
341 
342 	if (ste_size == DR_STE_SIZE_CTRL)
343 		return;
344 
345 	WARN_ON(ste_size != DR_STE_SIZE);
346 
347 	/* Backup tag */
348 	memcpy(tmp_tag, tag, DR_STE_SIZE_TAG);
349 
350 	/* Swap mask and tag; both are the same size */
351 	memcpy(tag, mask, DR_STE_SIZE_MASK);
352 	memcpy(mask, tmp_tag, DR_STE_SIZE_TAG);
353 }
354 
355 static void dr_ste_v1_set_rx_flow_tag(u8 *s_action, u32 flow_tag)
356 {
357 	MLX5_SET(ste_single_action_flow_tag_v1, s_action, action_id,
358 		 DR_STE_V1_ACTION_ID_FLOW_TAG);
359 	MLX5_SET(ste_single_action_flow_tag_v1, s_action, flow_tag, flow_tag);
360 }
361 
362 static void dr_ste_v1_set_counter_id(u8 *hw_ste_p, u32 ctr_id)
363 {
364 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, counter_id, ctr_id);
365 }
366 
367 static void dr_ste_v1_set_reparse(u8 *hw_ste_p)
368 {
369 	MLX5_SET(ste_match_bwc_v1, hw_ste_p, reparse, 1);
370 }
371 
372 static void dr_ste_v1_set_encap(u8 *hw_ste_p, u8 *d_action,
373 				u32 reformat_id, int size)
374 {
375 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, action_id,
376 		 DR_STE_V1_ACTION_ID_INSERT_POINTER);
377 	/* The hardware expects the size here in words (2 bytes) */
378 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, size, size / 2);
379 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, pointer, reformat_id);
380 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, attributes,
381 		 DR_STE_V1_ACTION_INSERT_PTR_ATTR_ENCAP);
382 	dr_ste_v1_set_reparse(hw_ste_p);
383 }
384 
385 static void dr_ste_v1_set_insert_hdr(u8 *hw_ste_p, u8 *d_action,
386 				     u32 reformat_id,
387 				     u8 anchor, u8 offset,
388 				     int size)
389 {
390 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action,
391 		 action_id, DR_STE_V1_ACTION_ID_INSERT_POINTER);
392 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, start_anchor, anchor);
393 
394 	/* The hardware expects the size and offset here in words (2 bytes) */
395 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, size, size / 2);
396 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, start_offset, offset / 2);
397 
398 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, pointer, reformat_id);
399 	MLX5_SET(ste_double_action_insert_with_ptr_v1, d_action, attributes,
400 		 DR_STE_V1_ACTION_INSERT_PTR_ATTR_NONE);
401 
402 	dr_ste_v1_set_reparse(hw_ste_p);
403 }
404 
405 static void dr_ste_v1_set_remove_hdr(u8 *hw_ste_p, u8 *s_action,
406 				     u8 anchor, u8 offset,
407 				     int size)
408 {
409 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action,
410 		 action_id, DR_STE_V1_ACTION_ID_REMOVE_BY_SIZE);
411 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action, start_anchor, anchor);
412 
413 	/* The hardware expects the size and offset here in words (2 bytes) */
414 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action, remove_size, size / 2);
415 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action, start_offset, offset / 2);
416 
417 	dr_ste_v1_set_reparse(hw_ste_p);
418 }
419 
420 static void dr_ste_v1_set_push_vlan(u8 *hw_ste_p, u8 *d_action,
421 				    u32 vlan_hdr)
422 {
423 	MLX5_SET(ste_double_action_insert_with_inline_v1, d_action,
424 		 action_id, DR_STE_V1_ACTION_ID_INSERT_INLINE);
425 	/* The hardware expects the offset to the vlan header in words (2 bytes) */
426 	MLX5_SET(ste_double_action_insert_with_inline_v1, d_action,
427 		 start_offset, HDR_LEN_L2_MACS >> 1);
428 	MLX5_SET(ste_double_action_insert_with_inline_v1, d_action,
429 		 inline_data, vlan_hdr);
430 
431 	dr_ste_v1_set_reparse(hw_ste_p);
432 }
433 
434 static void dr_ste_v1_set_pop_vlan(u8 *hw_ste_p, u8 *s_action, u8 vlans_num)
435 {
436 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action,
437 		 action_id, DR_STE_V1_ACTION_ID_REMOVE_BY_SIZE);
438 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action,
439 		 start_anchor, DR_STE_HEADER_ANCHOR_1ST_VLAN);
440 	/* The hardware expects the size here in words (2 bytes) */
441 	MLX5_SET(ste_single_action_remove_header_size_v1, s_action,
442 		 remove_size, (HDR_LEN_L2_VLAN >> 1) * vlans_num);
443 
444 	dr_ste_v1_set_reparse(hw_ste_p);
445 }
446 
447 static void dr_ste_v1_set_encap_l3(u8 *hw_ste_p,
448 				   u8 *frst_s_action,
449 				   u8 *scnd_d_action,
450 				   u32 reformat_id,
451 				   int size)
452 {
453 	/* Remove L2 headers */
454 	MLX5_SET(ste_single_action_remove_header_v1, frst_s_action, action_id,
455 		 DR_STE_V1_ACTION_ID_REMOVE_HEADER_TO_HEADER);
456 	MLX5_SET(ste_single_action_remove_header_v1, frst_s_action, end_anchor,
457 		 DR_STE_HEADER_ANCHOR_IPV6_IPV4);
458 
459 	/* Encapsulate with given reformat ID */
460 	MLX5_SET(ste_double_action_insert_with_ptr_v1, scnd_d_action, action_id,
461 		 DR_STE_V1_ACTION_ID_INSERT_POINTER);
462 	/* The hardware expects the size here in words (2 bytes) */
463 	MLX5_SET(ste_double_action_insert_with_ptr_v1, scnd_d_action, size, size / 2);
464 	MLX5_SET(ste_double_action_insert_with_ptr_v1, scnd_d_action, pointer, reformat_id);
465 	MLX5_SET(ste_double_action_insert_with_ptr_v1, scnd_d_action, attributes,
466 		 DR_STE_V1_ACTION_INSERT_PTR_ATTR_ENCAP);
467 
468 	dr_ste_v1_set_reparse(hw_ste_p);
469 }
470 
471 static void dr_ste_v1_set_rx_decap(u8 *hw_ste_p, u8 *s_action)
472 {
473 	MLX5_SET(ste_single_action_remove_header_v1, s_action, action_id,
474 		 DR_STE_V1_ACTION_ID_REMOVE_HEADER_TO_HEADER);
475 	MLX5_SET(ste_single_action_remove_header_v1, s_action, decap, 1);
476 	MLX5_SET(ste_single_action_remove_header_v1, s_action, vni_to_cqe, 1);
477 	MLX5_SET(ste_single_action_remove_header_v1, s_action, end_anchor,
478 		 DR_STE_HEADER_ANCHOR_INNER_MAC);
479 
480 	dr_ste_v1_set_reparse(hw_ste_p);
481 }
482 
483 static void dr_ste_v1_set_rewrite_actions(u8 *hw_ste_p,
484 					  u8 *s_action,
485 					  u16 num_of_actions,
486 					  u32 re_write_index)
487 {
488 	MLX5_SET(ste_single_action_modify_list_v1, s_action, action_id,
489 		 DR_STE_V1_ACTION_ID_MODIFY_LIST);
490 	MLX5_SET(ste_single_action_modify_list_v1, s_action, num_of_modify_actions,
491 		 num_of_actions);
492 	MLX5_SET(ste_single_action_modify_list_v1, s_action, modify_actions_ptr,
493 		 re_write_index);
494 
495 	dr_ste_v1_set_reparse(hw_ste_p);
496 }
497 
498 static void dr_ste_v1_arr_init_next_match(u8 **last_ste,
499 					  u32 *added_stes,
500 					  u16 gvmi)
501 {
502 	u8 *action;
503 
504 	(*added_stes)++;
505 	*last_ste += DR_STE_SIZE;
506 	dr_ste_v1_init(*last_ste, MLX5DR_STE_LU_TYPE_DONT_CARE, 0, gvmi);
507 	dr_ste_v1_set_entry_type(*last_ste, DR_STE_V1_TYPE_MATCH);
508 
509 	action = MLX5_ADDR_OF(ste_mask_and_match_v1, *last_ste, action);
510 	memset(action, 0, MLX5_FLD_SZ_BYTES(ste_mask_and_match_v1, action));
511 }
512 
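/* dr_ste_v1_set_actions_tx() and dr_ste_v1_set_actions_rx() below share the
 * same pattern: actions are packed into the current STE until the remaining
 * action budget (action_sz) is too small or an ordering constraint (the
 * allow_* flags) would be violated, at which point
 * dr_ste_v1_arr_init_next_match() appends a new MATCH STE and the budget is
 * reset to DR_STE_ACTION_TRIPLE_SZ.
 */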
513 static void dr_ste_v1_set_actions_tx(struct mlx5dr_domain *dmn,
514 				     u8 *action_type_set,
515 				     u8 *last_ste,
516 				     struct mlx5dr_ste_actions_attr *attr,
517 				     u32 *added_stes)
518 {
519 	u8 *action = MLX5_ADDR_OF(ste_match_bwc_v1, last_ste, action);
520 	u8 action_sz = DR_STE_ACTION_DOUBLE_SZ;
521 	bool allow_modify_hdr = true;
522 	bool allow_encap = true;
523 
524 	if (action_type_set[DR_ACTION_TYP_POP_VLAN]) {
525 		if (action_sz < DR_STE_ACTION_SINGLE_SZ) {
526 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes,
527 						      attr->gvmi);
528 			action = MLX5_ADDR_OF(ste_mask_and_match_v1,
529 					      last_ste, action);
530 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
531 		}
532 		dr_ste_v1_set_pop_vlan(last_ste, action, attr->vlans.count);
533 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
534 		action += DR_STE_ACTION_SINGLE_SZ;
535 		allow_modify_hdr = false;
536 	}
537 
538 	if (action_type_set[DR_ACTION_TYP_CTR])
539 		dr_ste_v1_set_counter_id(last_ste, attr->ctr_id);
540 
541 	if (action_type_set[DR_ACTION_TYP_MODIFY_HDR]) {
542 		if (!allow_modify_hdr || action_sz < DR_STE_ACTION_DOUBLE_SZ) {
543 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes,
544 						      attr->gvmi);
545 			action = MLX5_ADDR_OF(ste_mask_and_match_v1,
546 					      last_ste, action);
547 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
548 		}
549 		dr_ste_v1_set_rewrite_actions(last_ste, action,
550 					      attr->modify_actions,
551 					      attr->modify_index);
552 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
553 		action += DR_STE_ACTION_DOUBLE_SZ;
554 		allow_encap = false;
555 	}
556 
557 	if (action_type_set[DR_ACTION_TYP_PUSH_VLAN]) {
558 		int i;
559 
560 		for (i = 0; i < attr->vlans.count; i++) {
561 			if (action_sz < DR_STE_ACTION_DOUBLE_SZ || !allow_encap) {
562 				dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
563 				action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
564 				action_sz = DR_STE_ACTION_TRIPLE_SZ;
565 				allow_encap = true;
566 			}
567 			dr_ste_v1_set_push_vlan(last_ste, action,
568 						attr->vlans.headers[i]);
569 			action_sz -= DR_STE_ACTION_DOUBLE_SZ;
570 			action += DR_STE_ACTION_DOUBLE_SZ;
571 		}
572 	}
573 
574 	if (action_type_set[DR_ACTION_TYP_L2_TO_TNL_L2]) {
575 		if (!allow_encap || action_sz < DR_STE_ACTION_DOUBLE_SZ) {
576 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
577 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
578 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
579 			allow_encap = true;
580 		}
581 		dr_ste_v1_set_encap(last_ste, action,
582 				    attr->reformat.id,
583 				    attr->reformat.size);
584 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
585 		action += DR_STE_ACTION_DOUBLE_SZ;
586 	} else if (action_type_set[DR_ACTION_TYP_L2_TO_TNL_L3]) {
587 		u8 *d_action;
588 
589 		dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
590 		action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
591 		action_sz = DR_STE_ACTION_TRIPLE_SZ;
592 		d_action = action + DR_STE_ACTION_SINGLE_SZ;
593 
594 		dr_ste_v1_set_encap_l3(last_ste,
595 				       action, d_action,
596 				       attr->reformat.id,
597 				       attr->reformat.size);
598 		action_sz -= DR_STE_ACTION_TRIPLE_SZ;
599 		action += DR_STE_ACTION_TRIPLE_SZ;
600 	} else if (action_type_set[DR_ACTION_TYP_INSERT_HDR]) {
601 		if (!allow_encap || action_sz < DR_STE_ACTION_DOUBLE_SZ) {
602 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
603 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
604 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
605 		}
606 		dr_ste_v1_set_insert_hdr(last_ste, action,
607 					 attr->reformat.id,
608 					 attr->reformat.param_0,
609 					 attr->reformat.param_1,
610 					 attr->reformat.size);
611 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
612 		action += DR_STE_ACTION_DOUBLE_SZ;
613 	} else if (action_type_set[DR_ACTION_TYP_REMOVE_HDR]) {
614 		if (action_sz < DR_STE_ACTION_SINGLE_SZ) {
615 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
616 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
617 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
618 		}
619 		dr_ste_v1_set_remove_hdr(last_ste, action,
620 					 attr->reformat.param_0,
621 					 attr->reformat.param_1,
622 					 attr->reformat.size);
623 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
624 		action += DR_STE_ACTION_SINGLE_SZ;
625 	}
626 
627 	dr_ste_v1_set_hit_gvmi(last_ste, attr->hit_gvmi);
628 	dr_ste_v1_set_hit_addr(last_ste, attr->final_icm_addr, 1);
629 }
630 
631 static void dr_ste_v1_set_actions_rx(struct mlx5dr_domain *dmn,
632 				     u8 *action_type_set,
633 				     u8 *last_ste,
634 				     struct mlx5dr_ste_actions_attr *attr,
635 				     u32 *added_stes)
636 {
637 	u8 *action = MLX5_ADDR_OF(ste_match_bwc_v1, last_ste, action);
638 	u8 action_sz = DR_STE_ACTION_DOUBLE_SZ;
639 	bool allow_modify_hdr = true;
640 	bool allow_ctr = true;
641 
642 	if (action_type_set[DR_ACTION_TYP_TNL_L3_TO_L2]) {
643 		dr_ste_v1_set_rewrite_actions(last_ste, action,
644 					      attr->decap_actions,
645 					      attr->decap_index);
646 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
647 		action += DR_STE_ACTION_DOUBLE_SZ;
648 		allow_modify_hdr = false;
649 		allow_ctr = false;
650 	} else if (action_type_set[DR_ACTION_TYP_TNL_L2_TO_L2]) {
651 		dr_ste_v1_set_rx_decap(last_ste, action);
652 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
653 		action += DR_STE_ACTION_SINGLE_SZ;
654 		allow_modify_hdr = false;
655 		allow_ctr = false;
656 	}
657 
658 	if (action_type_set[DR_ACTION_TYP_TAG]) {
659 		if (action_sz < DR_STE_ACTION_SINGLE_SZ) {
660 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
661 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
662 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
663 			allow_modify_hdr = true;
664 			allow_ctr = true;
665 		}
666 		dr_ste_v1_set_rx_flow_tag(action, attr->flow_tag);
667 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
668 		action += DR_STE_ACTION_SINGLE_SZ;
669 	}
670 
671 	if (action_type_set[DR_ACTION_TYP_POP_VLAN]) {
672 		if (action_sz < DR_STE_ACTION_SINGLE_SZ ||
673 		    !allow_modify_hdr) {
674 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
675 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
676 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
677 			allow_modify_hdr = false;
678 			allow_ctr = false;
679 		}
680 
681 		dr_ste_v1_set_pop_vlan(last_ste, action, attr->vlans.count);
682 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
683 		action += DR_STE_ACTION_SINGLE_SZ;
684 	}
685 
686 	if (action_type_set[DR_ACTION_TYP_MODIFY_HDR]) {
687 		/* Modify header and decapsulation must use different STEs */
688 		if (!allow_modify_hdr || action_sz < DR_STE_ACTION_DOUBLE_SZ) {
689 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
690 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
691 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
692 			allow_modify_hdr = true;
693 			allow_ctr = true;
694 		}
695 		dr_ste_v1_set_rewrite_actions(last_ste, action,
696 					      attr->modify_actions,
697 					      attr->modify_index);
698 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
699 		action += DR_STE_ACTION_DOUBLE_SZ;
700 	}
701 
702 	if (action_type_set[DR_ACTION_TYP_PUSH_VLAN]) {
703 		int i;
704 
705 		for (i = 0; i < attr->vlans.count; i++) {
706 			if (action_sz < DR_STE_ACTION_DOUBLE_SZ ||
707 			    !allow_modify_hdr) {
708 				dr_ste_v1_arr_init_next_match(&last_ste,
709 							      added_stes,
710 							      attr->gvmi);
711 				action = MLX5_ADDR_OF(ste_mask_and_match_v1,
712 						      last_ste, action);
713 				action_sz = DR_STE_ACTION_TRIPLE_SZ;
714 			}
715 			dr_ste_v1_set_push_vlan(last_ste, action,
716 						attr->vlans.headers[i]);
717 			action_sz -= DR_STE_ACTION_DOUBLE_SZ;
718 			action += DR_STE_ACTION_DOUBLE_SZ;
719 		}
720 	}
721 
722 	if (action_type_set[DR_ACTION_TYP_CTR]) {
723 		/* Counter action is set after decap and before insert_hdr
724 		 * to exclude the decapped / encapped header, respectively.
725 		 */
726 		if (!allow_ctr) {
727 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
728 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
729 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
730 			allow_modify_hdr = true;
731 			allow_ctr = false;
732 		}
733 		dr_ste_v1_set_counter_id(last_ste, attr->ctr_id);
734 	}
735 
736 	if (action_type_set[DR_ACTION_TYP_L2_TO_TNL_L2]) {
737 		if (action_sz < DR_STE_ACTION_DOUBLE_SZ) {
738 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
739 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
740 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
741 		}
742 		dr_ste_v1_set_encap(last_ste, action,
743 				    attr->reformat.id,
744 				    attr->reformat.size);
745 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
746 		action += DR_STE_ACTION_DOUBLE_SZ;
747 		allow_modify_hdr = false;
748 	} else if (action_type_set[DR_ACTION_TYP_L2_TO_TNL_L3]) {
749 		u8 *d_action;
750 
751 		if (action_sz < DR_STE_ACTION_TRIPLE_SZ) {
752 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
753 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
754 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
755 		}
756 
757 		d_action = action + DR_STE_ACTION_SINGLE_SZ;
758 
759 		dr_ste_v1_set_encap_l3(last_ste,
760 				       action, d_action,
761 				       attr->reformat.id,
762 				       attr->reformat.size);
763 		action_sz -= DR_STE_ACTION_TRIPLE_SZ;
764 		allow_modify_hdr = false;
765 	} else if (action_type_set[DR_ACTION_TYP_INSERT_HDR]) {
766 		/* Modify header, decap, and encap must use different STEs */
767 		if (!allow_modify_hdr || action_sz < DR_STE_ACTION_DOUBLE_SZ) {
768 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
769 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
770 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
771 		}
772 		dr_ste_v1_set_insert_hdr(last_ste, action,
773 					 attr->reformat.id,
774 					 attr->reformat.param_0,
775 					 attr->reformat.param_1,
776 					 attr->reformat.size);
777 		action_sz -= DR_STE_ACTION_DOUBLE_SZ;
778 		action += DR_STE_ACTION_DOUBLE_SZ;
779 		allow_modify_hdr = false;
780 	} else if (action_type_set[DR_ACTION_TYP_REMOVE_HDR]) {
781 		if (action_sz < DR_STE_ACTION_SINGLE_SZ) {
782 			dr_ste_v1_arr_init_next_match(&last_ste, added_stes, attr->gvmi);
783 			action = MLX5_ADDR_OF(ste_mask_and_match_v1, last_ste, action);
784 			action_sz = DR_STE_ACTION_TRIPLE_SZ;
785 			allow_modify_hdr = true;
786 			allow_ctr = true;
787 		}
788 		dr_ste_v1_set_remove_hdr(last_ste, action,
789 					 attr->reformat.param_0,
790 					 attr->reformat.param_1,
791 					 attr->reformat.size);
792 		action_sz -= DR_STE_ACTION_SINGLE_SZ;
793 		action += DR_STE_ACTION_SINGLE_SZ;
794 	}
795 
796 	dr_ste_v1_set_hit_gvmi(last_ste, attr->hit_gvmi);
797 	dr_ste_v1_set_hit_addr(last_ste, attr->final_icm_addr, 1);
798 }
799 
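/* The modify-header primitives below (set, add, copy) address the target
 * field by hardware dword offset (hw_field), a shift within that dword
 * (shifter, biased by MLX5_MODIFY_HEADER_V1_QW_OFFSET) and a bit length.
 */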
800 static void dr_ste_v1_set_action_set(u8 *d_action,
801 				     u8 hw_field,
802 				     u8 shifter,
803 				     u8 length,
804 				     u32 data)
805 {
806 	shifter += MLX5_MODIFY_HEADER_V1_QW_OFFSET;
807 	MLX5_SET(ste_double_action_set_v1, d_action, action_id, DR_STE_V1_ACTION_ID_SET);
808 	MLX5_SET(ste_double_action_set_v1, d_action, destination_dw_offset, hw_field);
809 	MLX5_SET(ste_double_action_set_v1, d_action, destination_left_shifter, shifter);
810 	MLX5_SET(ste_double_action_set_v1, d_action, destination_length, length);
811 	MLX5_SET(ste_double_action_set_v1, d_action, inline_data, data);
812 }
813 
814 static void dr_ste_v1_set_action_add(u8 *d_action,
815 				     u8 hw_field,
816 				     u8 shifter,
817 				     u8 length,
818 				     u32 data)
819 {
820 	shifter += MLX5_MODIFY_HEADER_V1_QW_OFFSET;
821 	MLX5_SET(ste_double_action_add_v1, d_action, action_id, DR_STE_V1_ACTION_ID_ADD);
822 	MLX5_SET(ste_double_action_add_v1, d_action, destination_dw_offset, hw_field);
823 	MLX5_SET(ste_double_action_add_v1, d_action, destination_left_shifter, shifter);
824 	MLX5_SET(ste_double_action_add_v1, d_action, destination_length, length);
825 	MLX5_SET(ste_double_action_add_v1, d_action, add_value, data);
826 }
827 
828 static void dr_ste_v1_set_action_copy(u8 *d_action,
829 				      u8 dst_hw_field,
830 				      u8 dst_shifter,
831 				      u8 dst_len,
832 				      u8 src_hw_field,
833 				      u8 src_shifter)
834 {
835 	dst_shifter += MLX5_MODIFY_HEADER_V1_QW_OFFSET;
836 	src_shifter += MLX5_MODIFY_HEADER_V1_QW_OFFSET;
837 	MLX5_SET(ste_double_action_copy_v1, d_action, action_id, DR_STE_V1_ACTION_ID_COPY);
838 	MLX5_SET(ste_double_action_copy_v1, d_action, destination_dw_offset, dst_hw_field);
839 	MLX5_SET(ste_double_action_copy_v1, d_action, destination_left_shifter, dst_shifter);
840 	MLX5_SET(ste_double_action_copy_v1, d_action, destination_length, dst_len);
841 	MLX5_SET(ste_double_action_copy_v1, d_action, source_dw_offset, src_hw_field);
842 	MLX5_SET(ste_double_action_copy_v1, d_action, source_right_shifter, src_shifter);
843 }
844 
845 #define DR_STE_DECAP_L3_ACTION_NUM	8
846 #define DR_STE_L2_HDR_MAX_SZ		20
847 
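/* Builds the action list for L3 decap: one remove-header action that strips
 * the outer headers, a series of inline insert actions that rebuild the new
 * L2 header 4 bytes at a time (written in reverse order), and a final
 * remove-by-size action that drops the two alignment padding bytes.
 */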
848 static int dr_ste_v1_set_action_decap_l3_list(void *data,
849 					      u32 data_sz,
850 					      u8 *hw_action,
851 					      u32 hw_action_sz,
852 					      u16 *used_hw_action_num)
853 {
854 	u8 padded_data[DR_STE_L2_HDR_MAX_SZ] = {};
855 	void *data_ptr = padded_data;
856 	u16 used_actions = 0;
857 	u32 inline_data_sz;
858 	u32 i;
859 
860 	if (hw_action_sz / DR_STE_ACTION_DOUBLE_SZ < DR_STE_DECAP_L3_ACTION_NUM)
861 		return -EINVAL;
862 
863 	inline_data_sz =
864 		MLX5_FLD_SZ_BYTES(ste_double_action_insert_with_inline_v1, inline_data);
865 
866 	/* Add alignment padding */
867 	memcpy(padded_data + data_sz % inline_data_sz, data, data_sz);
868 
869 	/* Remove the outer L2/L3 headers */
870 	MLX5_SET(ste_single_action_remove_header_v1, hw_action, action_id,
871 		 DR_STE_V1_ACTION_ID_REMOVE_HEADER_TO_HEADER);
872 	MLX5_SET(ste_single_action_remove_header_v1, hw_action, decap, 1);
873 	MLX5_SET(ste_single_action_remove_header_v1, hw_action, vni_to_cqe, 1);
874 	MLX5_SET(ste_single_action_remove_header_v1, hw_action, end_anchor,
875 		 DR_STE_HEADER_ANCHOR_INNER_IPV6_IPV4);
876 	hw_action += DR_STE_ACTION_DOUBLE_SZ;
877 	used_actions++; /* Remove and NOP are a single double action */
878 
879 	/* Point to the last dword of the header */
880 	data_ptr += (data_sz / inline_data_sz) * inline_data_sz;
881 
882 	/* Add the new header using inline actions, 4 bytes at a time. The header
883 	 * is added in reverse order to the beginning of the packet to avoid
884 	 * incorrect parsing by the HW. Since the header is 14B or 18B, an extra
885 	 * two bytes are padded and later removed.
886 	 */
887 	for (i = 0; i < data_sz / inline_data_sz + 1; i++) {
888 		void *addr_inline;
889 
890 		MLX5_SET(ste_double_action_insert_with_inline_v1, hw_action, action_id,
891 			 DR_STE_V1_ACTION_ID_INSERT_INLINE);
892 		/* The hardware expects the offset here in words (2 bytes) */
893 		MLX5_SET(ste_double_action_insert_with_inline_v1, hw_action, start_offset, 0);
894 
895 		/* Copy bytes one by one to avoid endianness problem */
896 		addr_inline = MLX5_ADDR_OF(ste_double_action_insert_with_inline_v1,
897 					   hw_action, inline_data);
898 		memcpy(addr_inline, data_ptr - i * inline_data_sz, inline_data_sz);
899 		hw_action += DR_STE_ACTION_DOUBLE_SZ;
900 		used_actions++;
901 	}
902 
903 	/* Remove first 2 extra bytes */
904 	MLX5_SET(ste_single_action_remove_header_size_v1, hw_action, action_id,
905 		 DR_STE_V1_ACTION_ID_REMOVE_BY_SIZE);
906 	MLX5_SET(ste_single_action_remove_header_size_v1, hw_action, start_offset, 0);
907 	/* The hardware expects the size here in words (2 bytes) */
908 	MLX5_SET(ste_single_action_remove_header_size_v1, hw_action, remove_size, 1);
909 	used_actions++;
910 
911 	*used_hw_action_num = used_actions;
912 
913 	return 0;
914 }
915 
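/* The dr_ste_v1_build_*() helpers below follow a common scheme: the
 * *_bit_mask()/*_init() functions set up the STE bit mask, lookup type and
 * tag-building callback, while the *_tag() functions (invoked later through
 * sb->ste_build_tag_func) write the actual match values. Mask/spec fields
 * are cleared (set to 0) once they have been consumed.
 */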
916 static void dr_ste_v1_build_eth_l2_src_dst_bit_mask(struct mlx5dr_match_param *value,
917 						    bool inner, u8 *bit_mask)
918 {
919 	struct mlx5dr_match_spec *mask = inner ? &value->inner : &value->outer;
920 
921 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, dmac_47_16, mask, dmac_47_16);
922 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, dmac_15_0, mask, dmac_15_0);
923 
924 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, smac_47_16, mask, smac_47_16);
925 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, smac_15_0, mask, smac_15_0);
926 
927 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, first_vlan_id, mask, first_vid);
928 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, first_cfi, mask, first_cfi);
929 	DR_STE_SET_TAG(eth_l2_src_dst_v1, bit_mask, first_priority, mask, first_prio);
930 	DR_STE_SET_ONES(eth_l2_src_dst_v1, bit_mask, l3_type, mask, ip_version);
931 
932 	if (mask->cvlan_tag) {
933 		MLX5_SET(ste_eth_l2_src_dst_v1, bit_mask, first_vlan_qualifier, -1);
934 		mask->cvlan_tag = 0;
935 	} else if (mask->svlan_tag) {
936 		MLX5_SET(ste_eth_l2_src_dst_v1, bit_mask, first_vlan_qualifier, -1);
937 		mask->svlan_tag = 0;
938 	}
939 }
940 
941 static int dr_ste_v1_build_eth_l2_src_dst_tag(struct mlx5dr_match_param *value,
942 					      struct mlx5dr_ste_build *sb,
943 					      u8 *tag)
944 {
945 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
946 
947 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, dmac_47_16, spec, dmac_47_16);
948 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, dmac_15_0, spec, dmac_15_0);
949 
950 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, smac_47_16, spec, smac_47_16);
951 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, smac_15_0, spec, smac_15_0);
952 
953 	if (spec->ip_version == IP_VERSION_IPV4) {
954 		MLX5_SET(ste_eth_l2_src_dst_v1, tag, l3_type, STE_IPV4);
955 		spec->ip_version = 0;
956 	} else if (spec->ip_version == IP_VERSION_IPV6) {
957 		MLX5_SET(ste_eth_l2_src_dst_v1, tag, l3_type, STE_IPV6);
958 		spec->ip_version = 0;
959 	} else if (spec->ip_version) {
960 		return -EINVAL;
961 	}
962 
963 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, first_vlan_id, spec, first_vid);
964 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, first_cfi, spec, first_cfi);
965 	DR_STE_SET_TAG(eth_l2_src_dst_v1, tag, first_priority, spec, first_prio);
966 
967 	if (spec->cvlan_tag) {
968 		MLX5_SET(ste_eth_l2_src_dst_v1, tag, first_vlan_qualifier, DR_STE_CVLAN);
969 		spec->cvlan_tag = 0;
970 	} else if (spec->svlan_tag) {
971 		MLX5_SET(ste_eth_l2_src_dst_v1, tag, first_vlan_qualifier, DR_STE_SVLAN);
972 		spec->svlan_tag = 0;
973 	}
974 	return 0;
975 }
976 
977 static void dr_ste_v1_build_eth_l2_src_dst_init(struct mlx5dr_ste_build *sb,
978 						struct mlx5dr_match_param *mask)
979 {
980 	dr_ste_v1_build_eth_l2_src_dst_bit_mask(mask, sb->inner, sb->bit_mask);
981 
982 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL2_SRC_DST, sb->inner);
983 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
984 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l2_src_dst_tag;
985 }
986 
987 static int dr_ste_v1_build_eth_l3_ipv6_dst_tag(struct mlx5dr_match_param *value,
988 					       struct mlx5dr_ste_build *sb,
989 					       u8 *tag)
990 {
991 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
992 
993 	DR_STE_SET_TAG(eth_l3_ipv6_dst, tag, dst_ip_127_96, spec, dst_ip_127_96);
994 	DR_STE_SET_TAG(eth_l3_ipv6_dst, tag, dst_ip_95_64, spec, dst_ip_95_64);
995 	DR_STE_SET_TAG(eth_l3_ipv6_dst, tag, dst_ip_63_32, spec, dst_ip_63_32);
996 	DR_STE_SET_TAG(eth_l3_ipv6_dst, tag, dst_ip_31_0, spec, dst_ip_31_0);
997 
998 	return 0;
999 }
1000 
1001 static void dr_ste_v1_build_eth_l3_ipv6_dst_init(struct mlx5dr_ste_build *sb,
1002 						 struct mlx5dr_match_param *mask)
1003 {
1004 	dr_ste_v1_build_eth_l3_ipv6_dst_tag(mask, sb, sb->bit_mask);
1005 
1006 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(IPV6_DES, sb->inner);
1007 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
1008 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l3_ipv6_dst_tag;
1009 }
1010 
1011 static int dr_ste_v1_build_eth_l3_ipv6_src_tag(struct mlx5dr_match_param *value,
1012 					       struct mlx5dr_ste_build *sb,
1013 					       u8 *tag)
1014 {
1015 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
1016 
1017 	DR_STE_SET_TAG(eth_l3_ipv6_src, tag, src_ip_127_96, spec, src_ip_127_96);
1018 	DR_STE_SET_TAG(eth_l3_ipv6_src, tag, src_ip_95_64, spec, src_ip_95_64);
1019 	DR_STE_SET_TAG(eth_l3_ipv6_src, tag, src_ip_63_32, spec, src_ip_63_32);
1020 	DR_STE_SET_TAG(eth_l3_ipv6_src, tag, src_ip_31_0, spec, src_ip_31_0);
1021 
1022 	return 0;
1023 }
1024 
1025 static void dr_ste_v1_build_eth_l3_ipv6_src_init(struct mlx5dr_ste_build *sb,
1026 						 struct mlx5dr_match_param *mask)
1027 {
1028 	dr_ste_v1_build_eth_l3_ipv6_src_tag(mask, sb, sb->bit_mask);
1029 
1030 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(IPV6_SRC, sb->inner);
1031 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
1032 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l3_ipv6_src_tag;
1033 }
1034 
1035 static int dr_ste_v1_build_eth_l3_ipv4_5_tuple_tag(struct mlx5dr_match_param *value,
1036 						   struct mlx5dr_ste_build *sb,
1037 						   u8 *tag)
1038 {
1039 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
1040 
1041 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, destination_address, spec, dst_ip_31_0);
1042 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, source_address, spec, src_ip_31_0);
1043 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, destination_port, spec, tcp_dport);
1044 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, destination_port, spec, udp_dport);
1045 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, source_port, spec, tcp_sport);
1046 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, source_port, spec, udp_sport);
1047 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, protocol, spec, ip_protocol);
1048 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, fragmented, spec, frag);
1049 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, dscp, spec, ip_dscp);
1050 	DR_STE_SET_TAG(eth_l3_ipv4_5_tuple_v1, tag, ecn, spec, ip_ecn);
1051 
1052 	if (spec->tcp_flags) {
1053 		DR_STE_SET_TCP_FLAGS(eth_l3_ipv4_5_tuple_v1, tag, spec);
1054 		spec->tcp_flags = 0;
1055 	}
1056 
1057 	return 0;
1058 }
1059 
1060 static void dr_ste_v1_build_eth_l3_ipv4_5_tuple_init(struct mlx5dr_ste_build *sb,
1061 						     struct mlx5dr_match_param *mask)
1062 {
1063 	dr_ste_v1_build_eth_l3_ipv4_5_tuple_tag(mask, sb, sb->bit_mask);
1064 
1065 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL3_IPV4_5_TUPLE, sb->inner);
1066 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
1067 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l3_ipv4_5_tuple_tag;
1068 }
1069 
1070 static void dr_ste_v1_build_eth_l2_src_or_dst_bit_mask(struct mlx5dr_match_param *value,
1071 						       bool inner, u8 *bit_mask)
1072 {
1073 	struct mlx5dr_match_spec *mask = inner ? &value->inner : &value->outer;
1074 	struct mlx5dr_match_misc *misc_mask = &value->misc;
1075 
1076 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, first_vlan_id, mask, first_vid);
1077 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, first_cfi, mask, first_cfi);
1078 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, first_priority, mask, first_prio);
1079 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, ip_fragmented, mask, frag); // ?
1080 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, l3_ethertype, mask, ethertype); // ?
1081 	DR_STE_SET_ONES(eth_l2_src_v1, bit_mask, l3_type, mask, ip_version);
1082 
1083 	if (mask->svlan_tag || mask->cvlan_tag) {
1084 		MLX5_SET(ste_eth_l2_src_v1, bit_mask, first_vlan_qualifier, -1);
1085 		mask->cvlan_tag = 0;
1086 		mask->svlan_tag = 0;
1087 	}
1088 
1089 	if (inner) {
1090 		if (misc_mask->inner_second_cvlan_tag ||
1091 		    misc_mask->inner_second_svlan_tag) {
1092 			MLX5_SET(ste_eth_l2_src_v1, bit_mask, second_vlan_qualifier, -1);
1093 			misc_mask->inner_second_cvlan_tag = 0;
1094 			misc_mask->inner_second_svlan_tag = 0;
1095 		}
1096 
1097 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1098 			       second_vlan_id, misc_mask, inner_second_vid);
1099 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1100 			       second_cfi, misc_mask, inner_second_cfi);
1101 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1102 			       second_priority, misc_mask, inner_second_prio);
1103 	} else {
1104 		if (misc_mask->outer_second_cvlan_tag ||
1105 		    misc_mask->outer_second_svlan_tag) {
1106 			MLX5_SET(ste_eth_l2_src_v1, bit_mask, second_vlan_qualifier, -1);
1107 			misc_mask->outer_second_cvlan_tag = 0;
1108 			misc_mask->outer_second_svlan_tag = 0;
1109 		}
1110 
1111 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1112 			       second_vlan_id, misc_mask, outer_second_vid);
1113 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1114 			       second_cfi, misc_mask, outer_second_cfi);
1115 		DR_STE_SET_TAG(eth_l2_src_v1, bit_mask,
1116 			       second_priority, misc_mask, outer_second_prio);
1117 	}
1118 }
1119 
1120 static int dr_ste_v1_build_eth_l2_src_or_dst_tag(struct mlx5dr_match_param *value,
1121 						 bool inner, u8 *tag)
1122 {
1123 	struct mlx5dr_match_spec *spec = inner ? &value->inner : &value->outer;
1124 	struct mlx5dr_match_misc *misc_spec = &value->misc;
1125 
1126 	DR_STE_SET_TAG(eth_l2_src_v1, tag, first_vlan_id, spec, first_vid);
1127 	DR_STE_SET_TAG(eth_l2_src_v1, tag, first_cfi, spec, first_cfi);
1128 	DR_STE_SET_TAG(eth_l2_src_v1, tag, first_priority, spec, first_prio);
1129 	DR_STE_SET_TAG(eth_l2_src_v1, tag, ip_fragmented, spec, frag);
1130 	DR_STE_SET_TAG(eth_l2_src_v1, tag, l3_ethertype, spec, ethertype);
1131 
1132 	if (spec->ip_version == IP_VERSION_IPV4) {
1133 		MLX5_SET(ste_eth_l2_src_v1, tag, l3_type, STE_IPV4);
1134 		spec->ip_version = 0;
1135 	} else if (spec->ip_version == IP_VERSION_IPV6) {
1136 		MLX5_SET(ste_eth_l2_src_v1, tag, l3_type, STE_IPV6);
1137 		spec->ip_version = 0;
1138 	} else if (spec->ip_version) {
1139 		return -EINVAL;
1140 	}
1141 
1142 	if (spec->cvlan_tag) {
1143 		MLX5_SET(ste_eth_l2_src_v1, tag, first_vlan_qualifier, DR_STE_CVLAN);
1144 		spec->cvlan_tag = 0;
1145 	} else if (spec->svlan_tag) {
1146 		MLX5_SET(ste_eth_l2_src_v1, tag, first_vlan_qualifier, DR_STE_SVLAN);
1147 		spec->svlan_tag = 0;
1148 	}
1149 
1150 	if (inner) {
1151 		if (misc_spec->inner_second_cvlan_tag) {
1152 			MLX5_SET(ste_eth_l2_src_v1, tag, second_vlan_qualifier, DR_STE_CVLAN);
1153 			misc_spec->inner_second_cvlan_tag = 0;
1154 		} else if (misc_spec->inner_second_svlan_tag) {
1155 			MLX5_SET(ste_eth_l2_src_v1, tag, second_vlan_qualifier, DR_STE_SVLAN);
1156 			misc_spec->inner_second_svlan_tag = 0;
1157 		}
1158 
1159 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_vlan_id, misc_spec, inner_second_vid);
1160 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_cfi, misc_spec, inner_second_cfi);
1161 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_priority, misc_spec, inner_second_prio);
1162 	} else {
1163 		if (misc_spec->outer_second_cvlan_tag) {
1164 			MLX5_SET(ste_eth_l2_src_v1, tag, second_vlan_qualifier, DR_STE_CVLAN);
1165 			misc_spec->outer_second_cvlan_tag = 0;
1166 		} else if (misc_spec->outer_second_svlan_tag) {
1167 			MLX5_SET(ste_eth_l2_src_v1, tag, second_vlan_qualifier, DR_STE_SVLAN);
1168 			misc_spec->outer_second_svlan_tag = 0;
1169 		}
1170 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_vlan_id, misc_spec, outer_second_vid);
1171 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_cfi, misc_spec, outer_second_cfi);
1172 		DR_STE_SET_TAG(eth_l2_src_v1, tag, second_priority, misc_spec, outer_second_prio);
1173 	}
1174 
1175 	return 0;
1176 }
1177 
1178 static void dr_ste_v1_build_eth_l2_src_bit_mask(struct mlx5dr_match_param *value,
1179 						bool inner, u8 *bit_mask)
1180 {
1181 	struct mlx5dr_match_spec *mask = inner ? &value->inner : &value->outer;
1182 
1183 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, smac_47_16, mask, smac_47_16);
1184 	DR_STE_SET_TAG(eth_l2_src_v1, bit_mask, smac_15_0, mask, smac_15_0);
1185 
1186 	dr_ste_v1_build_eth_l2_src_or_dst_bit_mask(value, inner, bit_mask);
1187 }
1188 
1189 static int dr_ste_v1_build_eth_l2_src_tag(struct mlx5dr_match_param *value,
1190 					  struct mlx5dr_ste_build *sb,
1191 					  u8 *tag)
1192 {
1193 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
1194 
1195 	DR_STE_SET_TAG(eth_l2_src_v1, tag, smac_47_16, spec, smac_47_16);
1196 	DR_STE_SET_TAG(eth_l2_src_v1, tag, smac_15_0, spec, smac_15_0);
1197 
1198 	return dr_ste_v1_build_eth_l2_src_or_dst_tag(value, sb->inner, tag);
1199 }
1200 
1201 static void dr_ste_v1_build_eth_l2_src_init(struct mlx5dr_ste_build *sb,
1202 					    struct mlx5dr_match_param *mask)
1203 {
1204 	dr_ste_v1_build_eth_l2_src_bit_mask(mask, sb->inner, sb->bit_mask);
1205 
1206 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL2_SRC, sb->inner);
1207 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
1208 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l2_src_tag;
1209 }
1210 
1211 static void dr_ste_v1_build_eth_l2_dst_bit_mask(struct mlx5dr_match_param *value,
1212 						bool inner, u8 *bit_mask)
1213 {
1214 	struct mlx5dr_match_spec *mask = inner ? &value->inner : &value->outer;
1215 
1216 	DR_STE_SET_TAG(eth_l2_dst_v1, bit_mask, dmac_47_16, mask, dmac_47_16);
1217 	DR_STE_SET_TAG(eth_l2_dst_v1, bit_mask, dmac_15_0, mask, dmac_15_0);
1218 
1219 	dr_ste_v1_build_eth_l2_src_or_dst_bit_mask(value, inner, bit_mask);
1220 }
1221 
1222 static int dr_ste_v1_build_eth_l2_dst_tag(struct mlx5dr_match_param *value,
1223 					  struct mlx5dr_ste_build *sb,
1224 					  u8 *tag)
1225 {
1226 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
1227 
1228 	DR_STE_SET_TAG(eth_l2_dst_v1, tag, dmac_47_16, spec, dmac_47_16);
1229 	DR_STE_SET_TAG(eth_l2_dst_v1, tag, dmac_15_0, spec, dmac_15_0);
1230 
1231 	return dr_ste_v1_build_eth_l2_src_or_dst_tag(value, sb->inner, tag);
1232 }
1233 
1234 static void dr_ste_v1_build_eth_l2_dst_init(struct mlx5dr_ste_build *sb,
1235 					    struct mlx5dr_match_param *mask)
1236 {
1237 	dr_ste_v1_build_eth_l2_dst_bit_mask(mask, sb->inner, sb->bit_mask);
1238 
1239 	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL2, sb->inner);
1240 	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
1241 	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l2_dst_tag;
1242 }
1243 
1244 static void dr_ste_v1_build_eth_l2_tnl_bit_mask(struct mlx5dr_match_param *value,
1245 						bool inner, u8 *bit_mask)
1246 {
1247 	struct mlx5dr_match_spec *mask = inner ? &value->inner : &value->outer;
1248 	struct mlx5dr_match_misc *misc = &value->misc;
1249 
1250 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, dmac_47_16, mask, dmac_47_16);
1251 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, dmac_15_0, mask, dmac_15_0);
1252 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, first_vlan_id, mask, first_vid);
1253 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, first_cfi, mask, first_cfi);
1254 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, first_priority, mask, first_prio);
1255 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, ip_fragmented, mask, frag);
1256 	DR_STE_SET_TAG(eth_l2_tnl_v1, bit_mask, l3_ethertype, mask, ethertype);
1257 	DR_STE_SET_ONES(eth_l2_tnl_v1, bit_mask, l3_type, mask, ip_version);
1258 
1259 	if (misc->vxlan_vni) {
1260 		MLX5_SET(ste_eth_l2_tnl_v1, bit_mask,
1261 			 l2_tunneling_network_id, (misc->vxlan_vni << 8));
1262 		misc->vxlan_vni = 0;
1263 	}
1264 
1265 	if (mask->svlan_tag || mask->cvlan_tag) {
1266 		MLX5_SET(ste_eth_l2_tnl_v1, bit_mask, first_vlan_qualifier, -1);
1267 		mask->cvlan_tag = 0;
1268 		mask->svlan_tag = 0;
1269 	}
1270 }
1271 
1272 static int dr_ste_v1_build_eth_l2_tnl_tag(struct mlx5dr_match_param *value,
1273 					  struct mlx5dr_ste_build *sb,
1274 					  u8 *tag)
1275 {
1276 	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
1277 	struct mlx5dr_match_misc *misc = &value->misc;
1278 
1279 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, dmac_47_16, spec, dmac_47_16);
1280 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, dmac_15_0, spec, dmac_15_0);
1281 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, first_vlan_id, spec, first_vid);
1282 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, first_cfi, spec, first_cfi);
1283 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, ip_fragmented, spec, frag);
1284 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, first_priority, spec, first_prio);
1285 	DR_STE_SET_TAG(eth_l2_tnl_v1, tag, l3_ethertype, spec, ethertype);
1286 
1287 	if (misc->vxlan_vni) {
1288 		MLX5_SET(ste_eth_l2_tnl_v1, tag, l2_tunneling_network_id,
1289 			 (misc->vxlan_vni << 8));
1290 		misc->vxlan_vni = 0;
1291 	}
1292 
1293 	if (spec->cvlan_tag) {
1294 		MLX5_SET(ste_eth_l2_tnl_v1, tag, first_vlan_qualifier, DR_STE_CVLAN);
1295 		spec->cvlan_tag = 0;
1296 	} else if (spec->svlan_tag) {
1297 		MLX5_SET(ste_eth_l2_tnl_v1, tag, first_vlan_qualifier, DR_STE_SVLAN);
1298 		spec->svlan_tag = 0;
1299 	}
1300 
1301 	if (spec->ip_version == IP_VERSION_IPV4) {
1302 		MLX5_SET(ste_eth_l2_tnl_v1, tag, l3_type, STE_IPV4);
1303 		spec->ip_version = 0;
1304 	} else if (spec->ip_version == IP_VERSION_IPV6) {
1305 		MLX5_SET(ste_eth_l2_tnl_v1, tag, l3_type, STE_IPV6);
1306 		spec->ip_version = 0;
1307 	} else if (spec->ip_version) {
1308 		return -EINVAL;
1309 	}
1310 
1311 	return 0;
1312 }
1313 
static void dr_ste_v1_build_eth_l2_tnl_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_eth_l2_tnl_bit_mask(mask, sb->inner, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_ETHL2_TNL;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l2_tnl_tag;
}

static int dr_ste_v1_build_eth_l3_ipv4_misc_tag(struct mlx5dr_match_param *value,
						struct mlx5dr_ste_build *sb,
						u8 *tag)
{
	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;

	DR_STE_SET_TAG(eth_l3_ipv4_misc_v1, tag, time_to_live, spec, ttl_hoplimit);

	return 0;
}

static void dr_ste_v1_build_eth_l3_ipv4_misc_init(struct mlx5dr_ste_build *sb,
						  struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_eth_l3_ipv4_misc_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL3_IPV4_MISC, sb->inner);
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l3_ipv4_misc_tag;
}

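/* The ETHL4 lookup collects the shared L3/L4 fields (ports, IP protocol,
 * fragment bit, DSCP/ECN, hop limit) plus the IPv6 flow label, taken from
 * the inner or outer headers according to sb->inner.
 */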
static int dr_ste_v1_build_eth_ipv6_l3_l4_tag(struct mlx5dr_match_param *value,
					      struct mlx5dr_ste_build *sb,
					      u8 *tag)
{
	struct mlx5dr_match_spec *spec = sb->inner ? &value->inner : &value->outer;
	struct mlx5dr_match_misc *misc = &value->misc;

	DR_STE_SET_TAG(eth_l4_v1, tag, dst_port, spec, tcp_dport);
	DR_STE_SET_TAG(eth_l4_v1, tag, src_port, spec, tcp_sport);
	DR_STE_SET_TAG(eth_l4_v1, tag, dst_port, spec, udp_dport);
	DR_STE_SET_TAG(eth_l4_v1, tag, src_port, spec, udp_sport);
	DR_STE_SET_TAG(eth_l4_v1, tag, protocol, spec, ip_protocol);
	DR_STE_SET_TAG(eth_l4_v1, tag, fragmented, spec, frag);
	DR_STE_SET_TAG(eth_l4_v1, tag, dscp, spec, ip_dscp);
	DR_STE_SET_TAG(eth_l4_v1, tag, ecn, spec, ip_ecn);
	DR_STE_SET_TAG(eth_l4_v1, tag, ipv6_hop_limit, spec, ttl_hoplimit);

	if (sb->inner)
		DR_STE_SET_TAG(eth_l4_v1, tag, flow_label, misc, inner_ipv6_flow_label);
	else
		DR_STE_SET_TAG(eth_l4_v1, tag, flow_label, misc, outer_ipv6_flow_label);

	if (spec->tcp_flags) {
		DR_STE_SET_TCP_FLAGS(eth_l4_v1, tag, spec);
		spec->tcp_flags = 0;
	}

	return 0;
}

static void dr_ste_v1_build_eth_ipv6_l3_l4_init(struct mlx5dr_ste_build *sb,
						struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_eth_ipv6_l3_l4_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_CALC_DFNR_TYPE(ETHL4, sb->inner);
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_eth_ipv6_l3_l4_tag;
}

static int dr_ste_v1_build_mpls_tag(struct mlx5dr_match_param *value,
				    struct mlx5dr_ste_build *sb,
				    u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;

	if (sb->inner)
		DR_STE_SET_MPLS(mpls_v1, misc2, inner, tag);
	else
		DR_STE_SET_MPLS(mpls_v1, misc2, outer, tag);

	return 0;
}

static void dr_ste_v1_build_mpls_init(struct mlx5dr_ste_build *sb,
				      struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_mpls_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_CALC_DFNR_TYPE(MPLS, sb->inner);
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_mpls_tag;
}

static int dr_ste_v1_build_tnl_gre_tag(struct mlx5dr_match_param *value,
				       struct mlx5dr_ste_build *sb,
				       u8 *tag)
{
	struct mlx5dr_match_misc *misc = &value->misc;

	DR_STE_SET_TAG(gre_v1, tag, gre_protocol, misc, gre_protocol);
	DR_STE_SET_TAG(gre_v1, tag, gre_k_present, misc, gre_k_present);
	DR_STE_SET_TAG(gre_v1, tag, gre_key_h, misc, gre_key_h);
	DR_STE_SET_TAG(gre_v1, tag, gre_key_l, misc, gre_key_l);

	DR_STE_SET_TAG(gre_v1, tag, gre_c_present, misc, gre_c_present);
	DR_STE_SET_TAG(gre_v1, tag, gre_s_present, misc, gre_s_present);

	return 0;
}

static void dr_ste_v1_build_tnl_gre_init(struct mlx5dr_ste_build *sb,
					 struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_gre_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_GRE;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_gre_tag;
}

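/* Tunnelled MPLS: the mpls0_* fields are filled from the MPLS-over-GRE
 * mask when that is what the caller set, otherwise from the MPLS-over-UDP
 * mask.
 */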
static int dr_ste_v1_build_tnl_mpls_tag(struct mlx5dr_match_param *value,
					struct mlx5dr_ste_build *sb,
					u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;

	if (DR_STE_IS_OUTER_MPLS_OVER_GRE_SET(misc2)) {
		DR_STE_SET_TAG(mpls_v1, tag, mpls0_label,
			       misc2, outer_first_mpls_over_gre_label);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_exp,
			       misc2, outer_first_mpls_over_gre_exp);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_s_bos,
			       misc2, outer_first_mpls_over_gre_s_bos);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_ttl,
			       misc2, outer_first_mpls_over_gre_ttl);
	} else {
		DR_STE_SET_TAG(mpls_v1, tag, mpls0_label,
			       misc2, outer_first_mpls_over_udp_label);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_exp,
			       misc2, outer_first_mpls_over_udp_exp);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_s_bos,
			       misc2, outer_first_mpls_over_udp_s_bos);

		DR_STE_SET_TAG(mpls_v1, tag, mpls0_ttl,
			       misc2, outer_first_mpls_over_udp_ttl);
	}

	return 0;
}

static void dr_ste_v1_build_tnl_mpls_init(struct mlx5dr_ste_build *sb,
					  struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_mpls_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_MPLS_I;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_mpls_tag;
}

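/* Build the 32-bit MPLS header word from label/exp/s_bos/ttl and write it
 * big-endian into the flex parser dword that the device assigned for
 * MPLS-over-UDP (flex_parser_id_mpls_over_udp).
 */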
static int dr_ste_v1_build_tnl_mpls_over_udp_tag(struct mlx5dr_match_param *value,
						 struct mlx5dr_ste_build *sb,
						 u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;
	u8 *parser_ptr;
	u8 parser_id;
	u32 mpls_hdr;

	mpls_hdr = misc2->outer_first_mpls_over_udp_label << HDR_MPLS_OFFSET_LABEL;
	misc2->outer_first_mpls_over_udp_label = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_udp_exp << HDR_MPLS_OFFSET_EXP;
	misc2->outer_first_mpls_over_udp_exp = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_udp_s_bos << HDR_MPLS_OFFSET_S_BOS;
	misc2->outer_first_mpls_over_udp_s_bos = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_udp_ttl << HDR_MPLS_OFFSET_TTL;
	misc2->outer_first_mpls_over_udp_ttl = 0;

	parser_id = sb->caps->flex_parser_id_mpls_over_udp;
	parser_ptr = dr_ste_calc_flex_parser_offset(tag, parser_id);
	*(__be32 *)parser_ptr = cpu_to_be32(mpls_hdr);

	return 0;
}

static void dr_ste_v1_build_tnl_mpls_over_udp_init(struct mlx5dr_ste_build *sb,
						   struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_mpls_over_udp_tag(mask, sb, sb->bit_mask);

	/* STEs with lookup type FLEX_PARSER_{0/1} include
	 * flex parsers_{0-3}/{4-7} respectively.
	 */
	sb->lu_type = sb->caps->flex_parser_id_mpls_over_udp > DR_STE_MAX_FLEX_0_ID ?
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_1 :
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_0;

	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_mpls_over_udp_tag;
}

static int dr_ste_v1_build_tnl_mpls_over_gre_tag(struct mlx5dr_match_param *value,
						 struct mlx5dr_ste_build *sb,
						 u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;
	u8 *parser_ptr;
	u8 parser_id;
	u32 mpls_hdr;

	mpls_hdr = misc2->outer_first_mpls_over_gre_label << HDR_MPLS_OFFSET_LABEL;
	misc2->outer_first_mpls_over_gre_label = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_gre_exp << HDR_MPLS_OFFSET_EXP;
	misc2->outer_first_mpls_over_gre_exp = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_gre_s_bos << HDR_MPLS_OFFSET_S_BOS;
	misc2->outer_first_mpls_over_gre_s_bos = 0;
	mpls_hdr |= misc2->outer_first_mpls_over_gre_ttl << HDR_MPLS_OFFSET_TTL;
	misc2->outer_first_mpls_over_gre_ttl = 0;

	parser_id = sb->caps->flex_parser_id_mpls_over_gre;
	parser_ptr = dr_ste_calc_flex_parser_offset(tag, parser_id);
	*(__be32 *)parser_ptr = cpu_to_be32(mpls_hdr);

	return 0;
}

static void dr_ste_v1_build_tnl_mpls_over_gre_init(struct mlx5dr_ste_build *sb,
						   struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_mpls_over_gre_tag(mask, sb, sb->bit_mask);

	/* STEs with lookup type FLEX_PARSER_{0/1} include
	 * flex parsers_{0-3}/{4-7} respectively.
	 */
	sb->lu_type = sb->caps->flex_parser_id_mpls_over_gre > DR_STE_MAX_FLEX_0_ID ?
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_1 :
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_0;

	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_mpls_over_gre_tag;
}

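/* ICMPv4 and ICMPv6 share the same STE fields; the source values are
 * picked from misc3 according to which protocol the mask describes.
 */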
static int dr_ste_v1_build_icmp_tag(struct mlx5dr_match_param *value,
				    struct mlx5dr_ste_build *sb,
				    u8 *tag)
{
	struct mlx5dr_match_misc3 *misc3 = &value->misc3;
	bool is_ipv4 = DR_MASK_IS_ICMPV4_SET(misc3);
	u32 *icmp_header_data;
	u8 *icmp_type;
	u8 *icmp_code;

	if (is_ipv4) {
		icmp_header_data	= &misc3->icmpv4_header_data;
		icmp_type		= &misc3->icmpv4_type;
		icmp_code		= &misc3->icmpv4_code;
	} else {
		icmp_header_data	= &misc3->icmpv6_header_data;
		icmp_type		= &misc3->icmpv6_type;
		icmp_code		= &misc3->icmpv6_code;
	}

	MLX5_SET(ste_icmp_v1, tag, icmp_header_data, *icmp_header_data);
	MLX5_SET(ste_icmp_v1, tag, icmp_type, *icmp_type);
	MLX5_SET(ste_icmp_v1, tag, icmp_code, *icmp_code);

	*icmp_header_data = 0;
	*icmp_type = 0;
	*icmp_code = 0;

	return 0;
}

static void dr_ste_v1_build_icmp_init(struct mlx5dr_ste_build *sb,
				      struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_icmp_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_ETHL4_MISC_O;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_icmp_tag;
}

static int dr_ste_v1_build_general_purpose_tag(struct mlx5dr_match_param *value,
					       struct mlx5dr_ste_build *sb,
					       u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;

	DR_STE_SET_TAG(general_purpose, tag, general_purpose_lookup_field,
		       misc2, metadata_reg_a);

	return 0;
}

static void dr_ste_v1_build_general_purpose_init(struct mlx5dr_ste_build *sb,
						 struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_general_purpose_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_GENERAL_PURPOSE;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_general_purpose_tag;
}

static int dr_ste_v1_build_eth_l4_misc_tag(struct mlx5dr_match_param *value,
					   struct mlx5dr_ste_build *sb,
					   u8 *tag)
{
	struct mlx5dr_match_misc3 *misc3 = &value->misc3;

	if (sb->inner) {
		DR_STE_SET_TAG(eth_l4_misc_v1, tag, seq_num, misc3, inner_tcp_seq_num);
		DR_STE_SET_TAG(eth_l4_misc_v1, tag, ack_num, misc3, inner_tcp_ack_num);
	} else {
		DR_STE_SET_TAG(eth_l4_misc_v1, tag, seq_num, misc3, outer_tcp_seq_num);
		DR_STE_SET_TAG(eth_l4_misc_v1, tag, ack_num, misc3, outer_tcp_ack_num);
	}

	return 0;
}

static void dr_ste_v1_build_eth_l4_misc_init(struct mlx5dr_ste_build *sb,
					     struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_eth_l4_misc_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_ETHL4_MISC_O;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_eth_l4_misc_tag;
}

static int
dr_ste_v1_build_flex_parser_tnl_vxlan_gpe_tag(struct mlx5dr_match_param *value,
					      struct mlx5dr_ste_build *sb,
					      u8 *tag)
{
	struct mlx5dr_match_misc3 *misc3 = &value->misc3;

	DR_STE_SET_TAG(flex_parser_tnl_vxlan_gpe, tag,
		       outer_vxlan_gpe_flags, misc3,
		       outer_vxlan_gpe_flags);
	DR_STE_SET_TAG(flex_parser_tnl_vxlan_gpe, tag,
		       outer_vxlan_gpe_next_protocol, misc3,
		       outer_vxlan_gpe_next_protocol);
	DR_STE_SET_TAG(flex_parser_tnl_vxlan_gpe, tag,
		       outer_vxlan_gpe_vni, misc3,
		       outer_vxlan_gpe_vni);

	return 0;
}

static void
dr_ste_v1_build_flex_parser_tnl_vxlan_gpe_init(struct mlx5dr_ste_build *sb,
					       struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_flex_parser_tnl_vxlan_gpe_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_TNL_HEADER;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tnl_vxlan_gpe_tag;
}

static int
dr_ste_v1_build_flex_parser_tnl_geneve_tag(struct mlx5dr_match_param *value,
					   struct mlx5dr_ste_build *sb,
					   u8 *tag)
{
	struct mlx5dr_match_misc *misc = &value->misc;

	DR_STE_SET_TAG(flex_parser_tnl_geneve, tag,
		       geneve_protocol_type, misc, geneve_protocol_type);
	DR_STE_SET_TAG(flex_parser_tnl_geneve, tag,
		       geneve_oam, misc, geneve_oam);
	DR_STE_SET_TAG(flex_parser_tnl_geneve, tag,
		       geneve_opt_len, misc, geneve_opt_len);
	DR_STE_SET_TAG(flex_parser_tnl_geneve, tag,
		       geneve_vni, misc, geneve_vni);

	return 0;
}

static void
dr_ste_v1_build_flex_parser_tnl_geneve_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_flex_parser_tnl_geneve_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_TNL_HEADER;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tnl_geneve_tag;
}

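/* Metadata registers C0-C3 are matched through the register_0 STE,
 * C4-C7 through register_1.
 */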
static int dr_ste_v1_build_register_0_tag(struct mlx5dr_match_param *value,
					  struct mlx5dr_ste_build *sb,
					  u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;

	DR_STE_SET_TAG(register_0, tag, register_0_h, misc2, metadata_reg_c_0);
	DR_STE_SET_TAG(register_0, tag, register_0_l, misc2, metadata_reg_c_1);
	DR_STE_SET_TAG(register_0, tag, register_1_h, misc2, metadata_reg_c_2);
	DR_STE_SET_TAG(register_0, tag, register_1_l, misc2, metadata_reg_c_3);

	return 0;
}

static void dr_ste_v1_build_register_0_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_register_0_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_STEERING_REGISTERS_0;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_register_0_tag;
}

static int dr_ste_v1_build_register_1_tag(struct mlx5dr_match_param *value,
					  struct mlx5dr_ste_build *sb,
					  u8 *tag)
{
	struct mlx5dr_match_misc2 *misc2 = &value->misc2;

	DR_STE_SET_TAG(register_1, tag, register_2_h, misc2, metadata_reg_c_4);
	DR_STE_SET_TAG(register_1, tag, register_2_l, misc2, metadata_reg_c_5);
	DR_STE_SET_TAG(register_1, tag, register_3_h, misc2, metadata_reg_c_6);
	DR_STE_SET_TAG(register_1, tag, register_3_l, misc2, metadata_reg_c_7);

	return 0;
}

static void dr_ste_v1_build_register_1_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_register_1_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_STEERING_REGISTERS_1;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_register_1_tag;
}

static void dr_ste_v1_build_src_gvmi_qpn_bit_mask(struct mlx5dr_match_param *value,
						  u8 *bit_mask)
{
	struct mlx5dr_match_misc *misc_mask = &value->misc;

	DR_STE_SET_ONES(src_gvmi_qp_v1, bit_mask, source_gvmi, misc_mask, source_port);
	DR_STE_SET_ONES(src_gvmi_qp_v1, bit_mask, source_qp, misc_mask, source_sqn);
	misc_mask->source_eswitch_owner_vhca_id = 0;
}

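/* Translate source_port to the port GVMI. When the rule also matches the
 * eswitch owner vhca_id, the capabilities are taken either from the local
 * domain or from its peer domain; any other owner is rejected.
 */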
static int dr_ste_v1_build_src_gvmi_qpn_tag(struct mlx5dr_match_param *value,
					    struct mlx5dr_ste_build *sb,
					    u8 *tag)
{
	struct mlx5dr_match_misc *misc = &value->misc;
	struct mlx5dr_cmd_vport_cap *vport_cap;
	struct mlx5dr_domain *dmn = sb->dmn;
	struct mlx5dr_cmd_caps *caps;
	u8 *bit_mask = sb->bit_mask;

	DR_STE_SET_TAG(src_gvmi_qp_v1, tag, source_qp, misc, source_sqn);

	if (sb->vhca_id_valid) {
		/* Find port GVMI based on the eswitch_owner_vhca_id */
		if (misc->source_eswitch_owner_vhca_id == dmn->info.caps.gvmi)
			caps = &dmn->info.caps;
		else if (dmn->peer_dmn && (misc->source_eswitch_owner_vhca_id ==
					   dmn->peer_dmn->info.caps.gvmi))
			caps = &dmn->peer_dmn->info.caps;
		else
			return -EINVAL;

		misc->source_eswitch_owner_vhca_id = 0;
	} else {
		caps = &dmn->info.caps;
	}

	if (!MLX5_GET(ste_src_gvmi_qp_v1, bit_mask, source_gvmi))
		return 0;

	vport_cap = mlx5dr_get_vport_cap(caps, misc->source_port);
	if (!vport_cap) {
		mlx5dr_err(dmn, "Vport 0x%x is disabled or invalid\n",
			   misc->source_port);
		return -EINVAL;
	}

	if (vport_cap->vport_gvmi)
		MLX5_SET(ste_src_gvmi_qp_v1, tag, source_gvmi, vport_cap->vport_gvmi);

	misc->source_port = 0;
	return 0;
}

static void dr_ste_v1_build_src_gvmi_qpn_init(struct mlx5dr_ste_build *sb,
					      struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_src_gvmi_qpn_bit_mask(mask, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_SRC_QP_GVMI;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_src_gvmi_qpn_tag;
}

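/* Each flex parser occupies one dword in the tag. parser_is_used guards
 * against programming the same parser slot twice when several misc4
 * sample fields carry the same parser id.
 */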
static void dr_ste_v1_set_flex_parser(u32 *misc4_field_id,
				      u32 *misc4_field_value,
				      bool *parser_is_used,
				      u8 *tag)
{
	u32 id = *misc4_field_id;
	u8 *parser_ptr;

	if (parser_is_used[id])
		return;

	parser_is_used[id] = true;
	parser_ptr = dr_ste_calc_flex_parser_offset(tag, id);

	*(__be32 *)parser_ptr = cpu_to_be32(*misc4_field_value);
	*misc4_field_id = 0;
	*misc4_field_value = 0;
}

static int dr_ste_v1_build_flex_parser_tag(struct mlx5dr_match_param *value,
					   struct mlx5dr_ste_build *sb,
					   u8 *tag)
{
	struct mlx5dr_match_misc4 *misc_4_mask = &value->misc4;
	bool parser_is_used[DR_NUM_OF_FLEX_PARSERS] = {};

	dr_ste_v1_set_flex_parser(&misc_4_mask->prog_sample_field_id_0,
				  &misc_4_mask->prog_sample_field_value_0,
				  parser_is_used, tag);

	dr_ste_v1_set_flex_parser(&misc_4_mask->prog_sample_field_id_1,
				  &misc_4_mask->prog_sample_field_value_1,
				  parser_is_used, tag);

	dr_ste_v1_set_flex_parser(&misc_4_mask->prog_sample_field_id_2,
				  &misc_4_mask->prog_sample_field_value_2,
				  parser_is_used, tag);

	dr_ste_v1_set_flex_parser(&misc_4_mask->prog_sample_field_id_3,
				  &misc_4_mask->prog_sample_field_value_3,
				  parser_is_used, tag);

	return 0;
}

static void dr_ste_v1_build_flex_parser_0_init(struct mlx5dr_ste_build *sb,
					       struct mlx5dr_match_param *mask)
{
	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_0;
	dr_ste_v1_build_flex_parser_tag(mask, sb, sb->bit_mask);
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tag;
}

static void dr_ste_v1_build_flex_parser_1_init(struct mlx5dr_ste_build *sb,
					       struct mlx5dr_match_param *mask)
{
	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_1;
	dr_ste_v1_build_flex_parser_tag(mask, sb, sb->bit_mask);
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tag;
}

static int
dr_ste_v1_build_flex_parser_tnl_geneve_tlv_opt_tag(struct mlx5dr_match_param *value,
						   struct mlx5dr_ste_build *sb,
						   u8 *tag)
{
	struct mlx5dr_match_misc3 *misc3 = &value->misc3;
	u8 parser_id = sb->caps->flex_parser_id_geneve_tlv_option_0;
	u8 *parser_ptr = dr_ste_calc_flex_parser_offset(tag, parser_id);

	MLX5_SET(ste_flex_parser_0, parser_ptr, flex_parser_3,
		 misc3->geneve_tlv_option_0_data);
	misc3->geneve_tlv_option_0_data = 0;

	return 0;
}

static void
dr_ste_v1_build_flex_parser_tnl_geneve_tlv_opt_init(struct mlx5dr_ste_build *sb,
						    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_flex_parser_tnl_geneve_tlv_opt_tag(mask, sb, sb->bit_mask);

	/* STEs with lookup type FLEX_PARSER_{0/1} include
	 * flex parsers_{0-3}/{4-7} respectively.
	 */
	sb->lu_type = sb->caps->flex_parser_id_geneve_tlv_option_0 > DR_STE_MAX_FLEX_0_ID ?
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_1 :
		      DR_STE_V1_LU_TYPE_FLEX_PARSER_0;

	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tnl_geneve_tlv_opt_tag;
}

static int dr_ste_v1_build_flex_parser_tnl_gtpu_tag(struct mlx5dr_match_param *value,
						    struct mlx5dr_ste_build *sb,
						    u8 *tag)
{
	struct mlx5dr_match_misc3 *misc3 = &value->misc3;

	DR_STE_SET_TAG(flex_parser_tnl_gtpu, tag, gtpu_msg_flags, misc3, gtpu_msg_flags);
	DR_STE_SET_TAG(flex_parser_tnl_gtpu, tag, gtpu_msg_type, misc3, gtpu_msg_type);
	DR_STE_SET_TAG(flex_parser_tnl_gtpu, tag, gtpu_teid, misc3, gtpu_teid);

	return 0;
}

static void dr_ste_v1_build_flex_parser_tnl_gtpu_init(struct mlx5dr_ste_build *sb,
						      struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_flex_parser_tnl_gtpu_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_TNL_HEADER;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_flex_parser_tnl_gtpu_tag;
}

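/* For GTP-U over flex parsers, each field is programmed only if the
 * parser id the device assigned to it belongs to the parser group
 * (0-3 or 4-7) that this STE covers.
 */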
static int
dr_ste_v1_build_tnl_gtpu_flex_parser_0_tag(struct mlx5dr_match_param *value,
					   struct mlx5dr_ste_build *sb,
					   u8 *tag)
{
	if (dr_is_flex_parser_0_id(sb->caps->flex_parser_id_gtpu_dw_0))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_dw_0, sb->caps, &value->misc3);
	if (dr_is_flex_parser_0_id(sb->caps->flex_parser_id_gtpu_teid))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_teid, sb->caps, &value->misc3);
	if (dr_is_flex_parser_0_id(sb->caps->flex_parser_id_gtpu_dw_2))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_dw_2, sb->caps, &value->misc3);
	if (dr_is_flex_parser_0_id(sb->caps->flex_parser_id_gtpu_first_ext_dw_0))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_first_ext_dw_0, sb->caps, &value->misc3);
	return 0;
}

static void
dr_ste_v1_build_tnl_gtpu_flex_parser_0_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_gtpu_flex_parser_0_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_0;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_gtpu_flex_parser_0_tag;
}

static int
dr_ste_v1_build_tnl_gtpu_flex_parser_1_tag(struct mlx5dr_match_param *value,
					   struct mlx5dr_ste_build *sb,
					   u8 *tag)
{
	if (dr_is_flex_parser_1_id(sb->caps->flex_parser_id_gtpu_dw_0))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_dw_0, sb->caps, &value->misc3);
	if (dr_is_flex_parser_1_id(sb->caps->flex_parser_id_gtpu_teid))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_teid, sb->caps, &value->misc3);
	if (dr_is_flex_parser_1_id(sb->caps->flex_parser_id_gtpu_dw_2))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_dw_2, sb->caps, &value->misc3);
	if (dr_is_flex_parser_1_id(sb->caps->flex_parser_id_gtpu_first_ext_dw_0))
		DR_STE_SET_FLEX_PARSER_FIELD(tag, gtpu_first_ext_dw_0, sb->caps, &value->misc3);
	return 0;
}

static void
dr_ste_v1_build_tnl_gtpu_flex_parser_1_init(struct mlx5dr_ste_build *sb,
					    struct mlx5dr_match_param *mask)
{
	dr_ste_v1_build_tnl_gtpu_flex_parser_1_tag(mask, sb, sb->bit_mask);

	sb->lu_type = DR_STE_V1_LU_TYPE_FLEX_PARSER_1;
	sb->byte_mask = mlx5dr_ste_conv_bit_to_byte_mask(sb->bit_mask);
	sb->ste_build_tag_func = &dr_ste_v1_build_tnl_gtpu_flex_parser_1_tag;
}

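/* STEv1 context: wires the builders, STE getters/setters and action
 * callbacks above into the format-agnostic dr_ste layer.
 */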
struct mlx5dr_ste_ctx ste_ctx_v1 = {
	/* Builders */
	.build_eth_l2_src_dst_init	= &dr_ste_v1_build_eth_l2_src_dst_init,
	.build_eth_l3_ipv6_src_init	= &dr_ste_v1_build_eth_l3_ipv6_src_init,
	.build_eth_l3_ipv6_dst_init	= &dr_ste_v1_build_eth_l3_ipv6_dst_init,
	.build_eth_l3_ipv4_5_tuple_init	= &dr_ste_v1_build_eth_l3_ipv4_5_tuple_init,
	.build_eth_l2_src_init		= &dr_ste_v1_build_eth_l2_src_init,
	.build_eth_l2_dst_init		= &dr_ste_v1_build_eth_l2_dst_init,
	.build_eth_l2_tnl_init		= &dr_ste_v1_build_eth_l2_tnl_init,
	.build_eth_l3_ipv4_misc_init	= &dr_ste_v1_build_eth_l3_ipv4_misc_init,
	.build_eth_ipv6_l3_l4_init	= &dr_ste_v1_build_eth_ipv6_l3_l4_init,
	.build_mpls_init		= &dr_ste_v1_build_mpls_init,
	.build_tnl_gre_init		= &dr_ste_v1_build_tnl_gre_init,
	.build_tnl_mpls_init		= &dr_ste_v1_build_tnl_mpls_init,
	.build_tnl_mpls_over_udp_init	= &dr_ste_v1_build_tnl_mpls_over_udp_init,
	.build_tnl_mpls_over_gre_init	= &dr_ste_v1_build_tnl_mpls_over_gre_init,
	.build_icmp_init		= &dr_ste_v1_build_icmp_init,
	.build_general_purpose_init	= &dr_ste_v1_build_general_purpose_init,
	.build_eth_l4_misc_init		= &dr_ste_v1_build_eth_l4_misc_init,
	.build_tnl_vxlan_gpe_init	= &dr_ste_v1_build_flex_parser_tnl_vxlan_gpe_init,
	.build_tnl_geneve_init		= &dr_ste_v1_build_flex_parser_tnl_geneve_init,
	.build_tnl_geneve_tlv_opt_init	= &dr_ste_v1_build_flex_parser_tnl_geneve_tlv_opt_init,
	.build_register_0_init		= &dr_ste_v1_build_register_0_init,
	.build_register_1_init		= &dr_ste_v1_build_register_1_init,
	.build_src_gvmi_qpn_init	= &dr_ste_v1_build_src_gvmi_qpn_init,
	.build_flex_parser_0_init	= &dr_ste_v1_build_flex_parser_0_init,
	.build_flex_parser_1_init	= &dr_ste_v1_build_flex_parser_1_init,
	.build_tnl_gtpu_init		= &dr_ste_v1_build_flex_parser_tnl_gtpu_init,
	.build_tnl_gtpu_flex_parser_0_init = &dr_ste_v1_build_tnl_gtpu_flex_parser_0_init,
	.build_tnl_gtpu_flex_parser_1_init = &dr_ste_v1_build_tnl_gtpu_flex_parser_1_init,

	/* Getters and Setters */
	.ste_init			= &dr_ste_v1_init,
	.set_next_lu_type		= &dr_ste_v1_set_next_lu_type,
	.get_next_lu_type		= &dr_ste_v1_get_next_lu_type,
	.set_miss_addr			= &dr_ste_v1_set_miss_addr,
	.get_miss_addr			= &dr_ste_v1_get_miss_addr,
	.set_hit_addr			= &dr_ste_v1_set_hit_addr,
	.set_byte_mask			= &dr_ste_v1_set_byte_mask,
	.get_byte_mask			= &dr_ste_v1_get_byte_mask,

	/* Actions */
	.actions_caps			= DR_STE_CTX_ACTION_CAP_TX_POP |
					  DR_STE_CTX_ACTION_CAP_RX_PUSH |
					  DR_STE_CTX_ACTION_CAP_RX_ENCAP,
	.set_actions_rx			= &dr_ste_v1_set_actions_rx,
	.set_actions_tx			= &dr_ste_v1_set_actions_tx,
	.modify_field_arr_sz		= ARRAY_SIZE(dr_ste_v1_action_modify_field_arr),
	.modify_field_arr		= dr_ste_v1_action_modify_field_arr,
	.set_action_set			= &dr_ste_v1_set_action_set,
	.set_action_add			= &dr_ste_v1_set_action_add,
	.set_action_copy		= &dr_ste_v1_set_action_copy,
	.set_action_decap_l3_list	= &dr_ste_v1_set_action_decap_l3_list,

	/* Send */
	.prepare_for_postsend		= &dr_ste_v1_prepare_for_postsend,
};