/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * aQuantia Corporation Network Driver
 * Copyright (C) 2014-2017 aQuantia Corporation. All rights reserved
 */

/* File hw_atl_llh.h: Declarations of bitfield and register access functions for
 * Atlantic registers.
 */

#ifndef HW_ATL_LLH_H
#define HW_ATL_LLH_H

#include <linux/types.h>

struct aq_hw_s;

/* global */

/* set global microprocessor semaphore */
void hw_atl_reg_glb_cpu_sem_set(struct aq_hw_s *aq_hw, u32 glb_cpu_sem,
				u32 semaphore);

/* get global microprocessor semaphore */
u32 hw_atl_reg_glb_cpu_sem_get(struct aq_hw_s *aq_hw, u32 semaphore);

/* set global register reset disable */
void hw_atl_glb_glb_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 glb_reg_res_dis);

/* set soft reset */
void hw_atl_glb_soft_res_set(struct aq_hw_s *aq_hw, u32 soft_res);

/* get soft reset */
u32 hw_atl_glb_soft_res_get(struct aq_hw_s *aq_hw);

/* stats */

u32 hw_atl_rpb_rx_dma_drop_pkt_cnt_get(struct aq_hw_s *aq_hw);

/* get rx dma good octet counter */
u64 hw_atl_stats_rx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);

/* get rx dma good packet counter */
u64 hw_atl_stats_rx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);

/* get tx dma good octet counter */
u64 hw_atl_stats_tx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);

/* get tx dma good packet counter */
u64 hw_atl_stats_tx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);

/* get msm rx errors counter register */
u32 hw_atl_reg_mac_msm_rx_errs_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx unicast frames counter register */
u32 hw_atl_reg_mac_msm_rx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx multicast frames counter register */
u32 hw_atl_reg_mac_msm_rx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx broadcast frames counter register */
u32 hw_atl_reg_mac_msm_rx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx broadcast octets counter register 1 */
u32 hw_atl_reg_mac_msm_rx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm rx unicast octets counter register 0 */
u32 hw_atl_reg_mac_msm_rx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);

/* get msm tx errors counter register */
u32 hw_atl_reg_mac_msm_tx_errs_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx unicast frames counter register */
u32 hw_atl_reg_mac_msm_tx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx multicast frames counter register */
u32 hw_atl_reg_mac_msm_tx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx broadcast frames counter register */
u32 hw_atl_reg_mac_msm_tx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx multicast octets counter register 1 */
u32 hw_atl_reg_mac_msm_tx_mcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm tx broadcast octets counter register 1 */
u32 hw_atl_reg_mac_msm_tx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm tx unicast octets counter register 0 */
u32 hw_atl_reg_mac_msm_tx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);

/* get global mif identification */
u32 hw_atl_reg_glb_mif_id_get(struct aq_hw_s *aq_hw);
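/* Illustrative sketch only (not part of the original API): snapshot the DMA
 * "good" packet/octet counters declared above into a caller-provided
 * structure. The example structure and function names are hypothetical.
 */
struct hw_atl_llh_example_dma_stats {
	u64 rx_octets;
	u64 rx_pkts;
	u64 tx_octets;
	u64 tx_pkts;
};

static inline void
hw_atl_llh_example_dma_stats_get(struct aq_hw_s *aq_hw,
				 struct hw_atl_llh_example_dma_stats *stats)
{
	/* Each getter reads the corresponding hardware counter register. */
	stats->rx_octets = hw_atl_stats_rx_dma_good_octet_counter_get(aq_hw);
	stats->rx_pkts = hw_atl_stats_rx_dma_good_pkt_counter_get(aq_hw);
	stats->tx_octets = hw_atl_stats_tx_dma_good_octet_counter_get(aq_hw);
	stats->tx_pkts = hw_atl_stats_tx_dma_good_pkt_counter_get(aq_hw);
}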
/* interrupt */

/* set interrupt auto mask lsw */
void hw_atl_itr_irq_auto_masklsw_set(struct aq_hw_s *aq_hw,
				     u32 irq_auto_masklsw);

/* set interrupt mapping enable rx */
void hw_atl_itr_irq_map_en_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_rx,
				  u32 rx);

/* set interrupt mapping enable tx */
void hw_atl_itr_irq_map_en_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_tx,
				  u32 tx);

/* set interrupt mapping rx */
void hw_atl_itr_irq_map_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_rx, u32 rx);

/* set interrupt mapping tx */
void hw_atl_itr_irq_map_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_tx, u32 tx);

/* set interrupt mask clear lsw */
void hw_atl_itr_irq_msk_clearlsw_set(struct aq_hw_s *aq_hw,
				     u32 irq_msk_clearlsw);

/* set interrupt mask set lsw */
void hw_atl_itr_irq_msk_setlsw_set(struct aq_hw_s *aq_hw, u32 irq_msk_setlsw);

/* set interrupt register reset disable */
void hw_atl_itr_irq_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 irq_reg_res_dis);

/* set interrupt status clear lsw */
void hw_atl_itr_irq_status_clearlsw_set(struct aq_hw_s *aq_hw,
					u32 irq_status_clearlsw);

/* get interrupt status lsw */
u32 hw_atl_itr_irq_statuslsw_get(struct aq_hw_s *aq_hw);

/* get reset interrupt */
u32 hw_atl_itr_res_irq_get(struct aq_hw_s *aq_hw);

/* set reset interrupt */
void hw_atl_itr_res_irq_set(struct aq_hw_s *aq_hw, u32 res_irq);

/* set RSC interrupt */
void hw_atl_itr_rsc_en_set(struct aq_hw_s *aq_hw, u32 enable);

/* set RSC delay */
void hw_atl_itr_rsc_delay_set(struct aq_hw_s *aq_hw, u32 delay);

/* rdm */

/* set cpu id */
void hw_atl_rdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);

/* set rx dca enable */
void hw_atl_rdm_rx_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_dca_en);

/* set rx dca mode */
void hw_atl_rdm_rx_dca_mode_set(struct aq_hw_s *aq_hw, u32 rx_dca_mode);

/* set rx descriptor data buffer size */
void hw_atl_rdm_rx_desc_data_buff_size_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_data_buff_size,
					   u32 descriptor);

/* set rx descriptor dca enable */
void hw_atl_rdm_rx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_dca_en,
				   u32 dca);

/* set rx descriptor enable */
void hw_atl_rdm_rx_desc_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_en,
			       u32 descriptor);

/* set rx descriptor header splitting */
void hw_atl_rdm_rx_desc_head_splitting_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_head_splitting,
					   u32 descriptor);

/* get rx descriptor head pointer */
u32 hw_atl_rdm_rx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set rx descriptor length */
void hw_atl_rdm_rx_desc_len_set(struct aq_hw_s *aq_hw, u32 rx_desc_len,
				u32 descriptor);

/* set rx descriptor write-back interrupt enable */
void hw_atl_rdm_rx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
					 u32 rx_desc_wr_wb_irq_en);

/* set rx header dca enable */
void hw_atl_rdm_rx_head_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_head_dca_en,
				   u32 dca);

/* set rx payload dca enable */
void hw_atl_rdm_rx_pld_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_pld_dca_en,
				  u32 dca);

/* set rx descriptor header buffer size */
void hw_atl_rdm_rx_desc_head_buff_size_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_head_buff_size,
					   u32 descriptor);

/* set rx descriptor reset */
void hw_atl_rdm_rx_desc_res_set(struct aq_hw_s *aq_hw, u32 rx_desc_res,
				u32 descriptor);

/* Set RDM Interrupt Moderation Enable */
void hw_atl_rdm_rdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
				      u32 rdm_intr_moder_en);
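/* Illustrative sketch only (not part of the original API): map RX queue
 * "queue" to MSI-X vector "vector" and unmask that vector in the LSW mask
 * register. Treating the mask as one bit per vector is an assumption.
 */
static inline void
hw_atl_llh_example_rx_irq_map(struct aq_hw_s *aq_hw, u32 queue, u32 vector)
{
	hw_atl_itr_irq_map_rx_set(aq_hw, vector, queue);
	hw_atl_itr_irq_map_en_rx_set(aq_hw, 1U, queue);
	hw_atl_itr_irq_msk_setlsw_set(aq_hw, 1U << vector);
}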
/* reg */

/* set general interrupt mapping register */
void hw_atl_reg_gen_irq_map_set(struct aq_hw_s *aq_hw, u32 gen_intr_map,
				u32 regidx);

/* get general interrupt status register */
u32 hw_atl_reg_gen_irq_status_get(struct aq_hw_s *aq_hw);

/* set interrupt global control register */
void hw_atl_reg_irq_glb_ctl_set(struct aq_hw_s *aq_hw, u32 intr_glb_ctl);

/* set interrupt throttle register */
void hw_atl_reg_irq_thr_set(struct aq_hw_s *aq_hw, u32 intr_thr, u32 throttle);

/* set rx dma descriptor base address lsw */
void hw_atl_reg_rx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
					       u32 rx_dma_desc_base_addrlsw,
					       u32 descriptor);

/* set rx dma descriptor base address msw */
void hw_atl_reg_rx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
					       u32 rx_dma_desc_base_addrmsw,
					       u32 descriptor);

/* get rx dma descriptor status register */
u32 hw_atl_reg_rx_dma_desc_status_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set rx dma descriptor tail pointer register */
void hw_atl_reg_rx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
					 u32 rx_dma_desc_tail_ptr,
					 u32 descriptor);

/* set rx filter multicast filter mask register */
void hw_atl_reg_rx_flr_mcst_flr_msk_set(struct aq_hw_s *aq_hw,
					u32 rx_flr_mcst_flr_msk);

/* set rx filter multicast filter register */
void hw_atl_reg_rx_flr_mcst_flr_set(struct aq_hw_s *aq_hw, u32 rx_flr_mcst_flr,
				    u32 filter);

/* set rx filter rss control register 1 */
void hw_atl_reg_rx_flr_rss_control1set(struct aq_hw_s *aq_hw,
				       u32 rx_flr_rss_control1);

/* Set RX Filter Control Register 2 */
void hw_atl_reg_rx_flr_control2_set(struct aq_hw_s *aq_hw, u32 rx_flr_control2);

/* Set RX Interrupt Moderation Control Register */
void hw_atl_reg_rx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
				       u32 rx_intr_moderation_ctl,
				       u32 queue);

/* set tx dma debug control */
void hw_atl_reg_tx_dma_debug_ctl_set(struct aq_hw_s *aq_hw,
				     u32 tx_dma_debug_ctl);

/* set tx dma descriptor base address lsw */
void hw_atl_reg_tx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
					       u32 tx_dma_desc_base_addrlsw,
					       u32 descriptor);

/* set tx dma descriptor base address msw */
void hw_atl_reg_tx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
					       u32 tx_dma_desc_base_addrmsw,
					       u32 descriptor);

/* set tx dma descriptor tail pointer register */
void hw_atl_reg_tx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
					 u32 tx_dma_desc_tail_ptr,
					 u32 descriptor);

/* Set TX Interrupt Moderation Control Register */
void hw_atl_reg_tx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
				       u32 tx_intr_moderation_ctl,
				       u32 queue);

/* set global microprocessor scratch pad */
void hw_atl_reg_glb_cpu_scratch_scp_set(struct aq_hw_s *aq_hw,
					u32 glb_cpu_scratch_scp,
					u32 scratch_scp);
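/* Illustrative sketch only (not part of the original API): point RX ring
 * "descriptor" at a descriptor ring of "len" entries located at the 64-bit
 * DMA address "ring_pa", then enable it. The call order and the unit of
 * "len" are assumptions; see the hardware-specific init code for the real
 * sequence.
 */
static inline void
hw_atl_llh_example_rx_ring_init(struct aq_hw_s *aq_hw, u32 descriptor,
				u64 ring_pa, u32 len)
{
	hw_atl_reg_rx_dma_desc_base_addresslswset(aq_hw, (u32)ring_pa,
						  descriptor);
	hw_atl_reg_rx_dma_desc_base_addressmswset(aq_hw, (u32)(ring_pa >> 32),
						  descriptor);
	hw_atl_rdm_rx_desc_len_set(aq_hw, len, descriptor);
	hw_atl_rdm_rx_desc_en_set(aq_hw, 1U, descriptor);
}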
/* rpb */

/* set dma system loopback */
void hw_atl_rpb_dma_sys_lbk_set(struct aq_hw_s *aq_hw, u32 dma_sys_lbk);

/* set rx traffic class mode */
void hw_atl_rpb_rpf_rx_traf_class_mode_set(struct aq_hw_s *aq_hw,
					   u32 rx_traf_class_mode);

/* set rx buffer enable */
void hw_atl_rpb_rx_buff_en_set(struct aq_hw_s *aq_hw, u32 rx_buff_en);

/* set rx buffer high threshold (per tc) */
void hw_atl_rpb_rx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 rx_buff_hi_threshold_per_tc,
						u32 buffer);

/* set rx buffer low threshold (per tc) */
void hw_atl_rpb_rx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 rx_buff_lo_threshold_per_tc,
						u32 buffer);

/* set rx flow control mode */
void hw_atl_rpb_rx_flow_ctl_mode_set(struct aq_hw_s *aq_hw,
				     u32 rx_flow_ctl_mode);

/* set rx packet buffer size (per tc) */
void hw_atl_rpb_rx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
					    u32 rx_pkt_buff_size_per_tc,
					    u32 buffer);

/* toggle rdm rx dma descriptor cache init */
void hw_atl_rdm_rx_dma_desc_cache_init_tgl(struct aq_hw_s *aq_hw);

/* get rdm rx dma descriptor cache init done */
u32 hw_atl_rdm_rx_dma_desc_cache_init_done_get(struct aq_hw_s *aq_hw);

/* set rx xoff enable (per tc) */
void hw_atl_rpb_rx_xoff_en_per_tc_set(struct aq_hw_s *aq_hw,
				      u32 rx_xoff_en_per_tc,
				      u32 buffer);

/* rpf */

/* set l2 broadcast count threshold */
void hw_atl_rpfl2broadcast_count_threshold_set(struct aq_hw_s *aq_hw,
					       u32 l2broadcast_count_threshold);

/* set l2 broadcast enable */
void hw_atl_rpfl2broadcast_en_set(struct aq_hw_s *aq_hw, u32 l2broadcast_en);

/* set l2 broadcast filter action */
void hw_atl_rpfl2broadcast_flr_act_set(struct aq_hw_s *aq_hw,
				       u32 l2broadcast_flr_act);

/* set l2 multicast filter enable */
void hw_atl_rpfl2multicast_flr_en_set(struct aq_hw_s *aq_hw,
				      u32 l2multicast_flr_en,
				      u32 filter);

/* set l2 promiscuous mode enable */
void hw_atl_rpfl2promiscuous_mode_en_set(struct aq_hw_s *aq_hw,
					 u32 l2promiscuous_mode_en);

/* set l2 unicast filter action */
void hw_atl_rpfl2unicast_flr_act_set(struct aq_hw_s *aq_hw,
				     u32 l2unicast_flr_act,
				     u32 filter);

/* set l2 unicast filter enable */
void hw_atl_rpfl2_uc_flr_en_set(struct aq_hw_s *aq_hw, u32 l2unicast_flr_en,
				u32 filter);

/* set l2 unicast destination address lsw */
void hw_atl_rpfl2unicast_dest_addresslsw_set(struct aq_hw_s *aq_hw,
					     u32 l2unicast_dest_addresslsw,
					     u32 filter);

/* set l2 unicast destination address msw */
void hw_atl_rpfl2unicast_dest_addressmsw_set(struct aq_hw_s *aq_hw,
					     u32 l2unicast_dest_addressmsw,
					     u32 filter);

/* Set L2 Accept all Multicast packets */
void hw_atl_rpfl2_accept_all_mc_packets_set(struct aq_hw_s *aq_hw,
					    u32 l2_accept_all_mc_packets);

/* set user-priority tc mapping */
void hw_atl_rpf_rpb_user_priority_tc_map_set(struct aq_hw_s *aq_hw,
					     u32 user_priority_tc_map, u32 tc);

/* set rss key address */
void hw_atl_rpf_rss_key_addr_set(struct aq_hw_s *aq_hw, u32 rss_key_addr);

/* set rss key write data */
void hw_atl_rpf_rss_key_wr_data_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_data);

/* get rss key write enable */
u32 hw_atl_rpf_rss_key_wr_en_get(struct aq_hw_s *aq_hw);

/* set rss key write enable */
void hw_atl_rpf_rss_key_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_en);

/* set rss redirection table address */
void hw_atl_rpf_rss_redir_tbl_addr_set(struct aq_hw_s *aq_hw,
				       u32 rss_redir_tbl_addr);

/* set rss redirection table write data */
void hw_atl_rpf_rss_redir_tbl_wr_data_set(struct aq_hw_s *aq_hw,
					  u32 rss_redir_tbl_wr_data);

/* get rss redirection write enable */
u32 hw_atl_rpf_rss_redir_wr_en_get(struct aq_hw_s *aq_hw);

/* set rss redirection write enable */
void hw_atl_rpf_rss_redir_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_redir_wr_en);
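/* Illustrative sketch only (not part of the original API): write one word of
 * the RSS hash key through the address/data/write-enable register trio and
 * wait for the write-enable bit to clear. The bit polarity and the bounded
 * spin are assumptions.
 */
static inline void
hw_atl_llh_example_rss_key_word_write(struct aq_hw_s *aq_hw, u32 addr,
				      u32 data)
{
	unsigned int retries = 1000U;

	hw_atl_rpf_rss_key_wr_data_set(aq_hw, data);
	hw_atl_rpf_rss_key_addr_set(aq_hw, addr);
	hw_atl_rpf_rss_key_wr_en_set(aq_hw, 1U);

	/* Wait (bounded) for the hardware to latch the write. */
	while (hw_atl_rpf_rss_key_wr_en_get(aq_hw) && --retries)
		;
}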
/* set tpo to rpf system loopback */
void hw_atl_rpf_tpo_to_rpf_sys_lbk_set(struct aq_hw_s *aq_hw,
				       u32 tpo_to_rpf_sys_lbk);

/* set vlan inner ethertype */
void hw_atl_rpf_vlan_inner_etht_set(struct aq_hw_s *aq_hw, u32 vlan_inner_etht);

/* set vlan outer ethertype */
void hw_atl_rpf_vlan_outer_etht_set(struct aq_hw_s *aq_hw, u32 vlan_outer_etht);

/* set vlan promiscuous mode enable */
void hw_atl_rpf_vlan_prom_mode_en_set(struct aq_hw_s *aq_hw,
				      u32 vlan_prom_mode_en);

/* Set VLAN untagged action */
void hw_atl_rpf_vlan_untagged_act_set(struct aq_hw_s *aq_hw,
				      u32 vlan_untagged_act);

/* Set VLAN accept untagged packets */
void hw_atl_rpf_vlan_accept_untagged_packets_set(struct aq_hw_s *aq_hw,
						 u32 vlan_acc_untagged_packets);

/* Set VLAN filter enable */
void hw_atl_rpf_vlan_flr_en_set(struct aq_hw_s *aq_hw, u32 vlan_flr_en,
				u32 filter);

/* Set VLAN Filter Action */
void hw_atl_rpf_vlan_flr_act_set(struct aq_hw_s *aq_hw, u32 vlan_filter_act,
				 u32 filter);

/* Set VLAN ID Filter */
void hw_atl_rpf_vlan_id_flr_set(struct aq_hw_s *aq_hw, u32 vlan_id_flr,
				u32 filter);

/* Set VLAN RX queue assignment enable */
void hw_atl_rpf_vlan_rxq_en_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq_en,
				    u32 filter);

/* Set VLAN RX queue */
void hw_atl_rpf_vlan_rxq_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq,
				 u32 filter);
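/* Illustrative sketch only (not part of the original API): program VLAN
 * filter entry "filter" to match "vlan_id" and steer matches to RX queue
 * "queue". The action value 1U is assumed to mean "accept to host"; consult
 * the register definitions for the actual encoding.
 */
static inline void
hw_atl_llh_example_vlan_flr_config(struct aq_hw_s *aq_hw, u32 filter,
				   u32 vlan_id, u32 queue)
{
	hw_atl_rpf_vlan_id_flr_set(aq_hw, vlan_id, filter);
	hw_atl_rpf_vlan_flr_act_set(aq_hw, 1U, filter);
	hw_atl_rpf_vlan_rxq_flr_set(aq_hw, queue, filter);
	hw_atl_rpf_vlan_rxq_en_flr_set(aq_hw, 1U, filter);
	hw_atl_rpf_vlan_flr_en_set(aq_hw, 1U, filter);
}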
/* set ethertype filter enable */
void hw_atl_rpf_etht_flr_en_set(struct aq_hw_s *aq_hw, u32 etht_flr_en,
				u32 filter);

/* set ethertype user-priority enable */
void hw_atl_rpf_etht_user_priority_en_set(struct aq_hw_s *aq_hw,
					  u32 etht_user_priority_en,
					  u32 filter);

/* set ethertype rx queue enable */
void hw_atl_rpf_etht_rx_queue_en_set(struct aq_hw_s *aq_hw,
				     u32 etht_rx_queue_en,
				     u32 filter);

/* set ethertype rx queue */
void hw_atl_rpf_etht_rx_queue_set(struct aq_hw_s *aq_hw, u32 etht_rx_queue,
				  u32 filter);

/* set ethertype user-priority */
void hw_atl_rpf_etht_user_priority_set(struct aq_hw_s *aq_hw,
				       u32 etht_user_priority,
				       u32 filter);

/* set ethertype management queue */
void hw_atl_rpf_etht_mgt_queue_set(struct aq_hw_s *aq_hw, u32 etht_mgt_queue,
				   u32 filter);

/* set ethertype filter action */
void hw_atl_rpf_etht_flr_act_set(struct aq_hw_s *aq_hw, u32 etht_flr_act,
				 u32 filter);

/* set ethertype filter */
void hw_atl_rpf_etht_flr_set(struct aq_hw_s *aq_hw, u32 etht_flr, u32 filter);

/* set L4 source port */
void hw_atl_rpf_l4_spd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);

/* set L4 destination port */
void hw_atl_rpf_l4_dpd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);

/* rpo */

/* set ipv4 header checksum offload enable */
void hw_atl_rpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
					      u32 ipv4header_crc_offload_en);

/* set rx descriptor vlan stripping */
void hw_atl_rpo_rx_desc_vlan_stripping_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_vlan_stripping,
					   u32 descriptor);

void hw_atl_rpo_outer_vlan_tag_mode_set(void *context,
					u32 outervlantagmode);

u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context);

/* set tcp/udp checksum offload enable */
void hw_atl_rpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
					   u32 tcp_udp_crc_offload_en);

/* Set LRO Patch Optimization Enable. */
void hw_atl_rpo_lro_patch_optimization_en_set(struct aq_hw_s *aq_hw,
					      u32 lro_patch_optimization_en);

/* Set Large Receive Offload Enable */
void hw_atl_rpo_lro_en_set(struct aq_hw_s *aq_hw, u32 lro_en);

/* Set LRO Q Sessions Limit */
void hw_atl_rpo_lro_qsessions_lim_set(struct aq_hw_s *aq_hw,
				      u32 lro_qsessions_lim);

/* Set LRO Total Descriptor Limit */
void hw_atl_rpo_lro_total_desc_lim_set(struct aq_hw_s *aq_hw,
				       u32 lro_total_desc_lim);

/* Set LRO Min Payload of First Packet */
void hw_atl_rpo_lro_min_pay_of_first_pkt_set(struct aq_hw_s *aq_hw,
					     u32 lro_min_pld_of_first_pkt);

/* Set LRO Packet Limit */
void hw_atl_rpo_lro_pkt_lim_set(struct aq_hw_s *aq_hw, u32 lro_packet_lim);

/* Set LRO Max Number of Descriptors */
void hw_atl_rpo_lro_max_num_of_descriptors_set(struct aq_hw_s *aq_hw,
					       u32 lro_max_desc_num, u32 lro);

/* Set LRO Time Base Divider */
void hw_atl_rpo_lro_time_base_divider_set(struct aq_hw_s *aq_hw,
					  u32 lro_time_base_divider);

/* Set LRO Inactive Interval */
void hw_atl_rpo_lro_inactive_interval_set(struct aq_hw_s *aq_hw,
					  u32 lro_inactive_interval);

/* Set LRO Max Coalescing Interval */
void hw_atl_rpo_lro_max_coalescing_interval_set(struct aq_hw_s *aq_hw,
						u32 lro_max_coal_interval);

/* rx */

/* set rx register reset disable */
void hw_atl_rx_rx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 rx_reg_res_dis);

/* tdm */

/* set cpu id */
void hw_atl_tdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);

/* set large send offload enable */
void hw_atl_tdm_large_send_offload_en_set(struct aq_hw_s *aq_hw,
					  u32 large_send_offload_en);

/* set tx descriptor enable */
void hw_atl_tdm_tx_desc_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_en,
			       u32 descriptor);

/* set tx dca enable */
void hw_atl_tdm_tx_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_dca_en);

/* set tx dca mode */
void hw_atl_tdm_tx_dca_mode_set(struct aq_hw_s *aq_hw, u32 tx_dca_mode);

/* set tx descriptor dca enable */
void hw_atl_tdm_tx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_dca_en,
				   u32 dca);

/* get tx descriptor head pointer */
u32 hw_atl_tdm_tx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set tx descriptor length */
void hw_atl_tdm_tx_desc_len_set(struct aq_hw_s *aq_hw, u32 tx_desc_len,
				u32 descriptor);

/* set tx descriptor write-back interrupt enable */
void hw_atl_tdm_tx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
					 u32 tx_desc_wr_wb_irq_en);

/* set tx descriptor write-back threshold */
void hw_atl_tdm_tx_desc_wr_wb_threshold_set(struct aq_hw_s *aq_hw,
					    u32 tx_desc_wr_wb_threshold,
					    u32 descriptor);

/* Set TDM Interrupt Moderation Enable */
void hw_atl_tdm_tdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
				      u32 tdm_irq_moderation_en);

/* thm */

/* set lso tcp flag of first packet */
void hw_atl_thm_lso_tcp_flag_of_first_pkt_set(struct aq_hw_s *aq_hw,
					      u32 lso_tcp_flag_of_first_pkt);

/* set lso tcp flag of last packet */
void hw_atl_thm_lso_tcp_flag_of_last_pkt_set(struct aq_hw_s *aq_hw,
					     u32 lso_tcp_flag_of_last_pkt);

/* set lso tcp flag of middle packet */
void hw_atl_thm_lso_tcp_flag_of_middle_pkt_set(struct aq_hw_s *aq_hw,
					       u32 lso_tcp_flag_of_middle_pkt);
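/* Illustrative sketch only (not part of the original API): enable TX ring
 * "descriptor" with "len" descriptors and a descriptor write-back threshold.
 * The units of "len" and of the threshold, as well as the call order, are
 * assumptions.
 */
static inline void
hw_atl_llh_example_tx_ring_enable(struct aq_hw_s *aq_hw, u32 descriptor,
				  u32 len, u32 wb_threshold)
{
	hw_atl_tdm_tx_desc_len_set(aq_hw, len, descriptor);
	hw_atl_tdm_tx_desc_wr_wb_threshold_set(aq_hw, wb_threshold, descriptor);
	hw_atl_tdm_tx_desc_wr_wb_irq_en_set(aq_hw, 1U);
	hw_atl_tdm_tx_desc_en_set(aq_hw, 1U, descriptor);
}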
/* tpb */

/* set TX Traffic Class Mode */
void hw_atl_rpb_tps_tx_tc_mode_set(struct aq_hw_s *aq_hw,
				   u32 tx_traf_class_mode);

/* set tx buffer enable */
void hw_atl_tpb_tx_buff_en_set(struct aq_hw_s *aq_hw, u32 tx_buff_en);

/* set tx buffer high threshold (per tc) */
void hw_atl_tpb_tx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 tx_buff_hi_threshold_per_tc,
						u32 buffer);

/* set tx buffer low threshold (per tc) */
void hw_atl_tpb_tx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 tx_buff_lo_threshold_per_tc,
						u32 buffer);

/* set tx dma system loopback enable */
void hw_atl_tpb_tx_dma_sys_lbk_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_dma_sys_lbk_en);

/* set tx packet buffer size (per tc) */
void hw_atl_tpb_tx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
					    u32 tx_pkt_buff_size_per_tc,
					    u32 buffer);

/* set tx path pad insert enable */
void hw_atl_tpb_tx_path_scp_ins_en_set(struct aq_hw_s *aq_hw,
				       u32 tx_path_scp_ins_en);

/* tpo */

/* set ipv4 header checksum offload enable */
void hw_atl_tpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
					      u32 ipv4header_crc_offload_en);

/* set tcp/udp checksum offload enable */
void hw_atl_tpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
					   u32 tcp_udp_crc_offload_en);

/* set tx pkt system loopback enable */
void hw_atl_tpo_tx_pkt_sys_lbk_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_pkt_sys_lbk_en);

/* tps */

/* set tx packet scheduler data arbitration mode */
void hw_atl_tps_tx_pkt_shed_data_arb_mode_set(struct aq_hw_s *aq_hw,
					      u32 tx_pkt_shed_data_arb_mode);

/* set tx packet scheduler descriptor rate current time reset */
void hw_atl_tps_tx_pkt_shed_desc_rate_curr_time_res_set(struct aq_hw_s *aq_hw,
							u32 curr_time_res);

/* set tx packet scheduler descriptor rate limit */
void hw_atl_tps_tx_pkt_shed_desc_rate_lim_set(struct aq_hw_s *aq_hw,
					      u32 tx_pkt_shed_desc_rate_lim);

/* set tx packet scheduler descriptor tc arbitration mode */
void hw_atl_tps_tx_pkt_shed_desc_tc_arb_mode_set(struct aq_hw_s *aq_hw,
						 u32 arb_mode);

/* set tx packet scheduler descriptor tc max credit */
void hw_atl_tps_tx_pkt_shed_desc_tc_max_credit_set(struct aq_hw_s *aq_hw,
						   u32 max_credit,
						   u32 tc);

/* set tx packet scheduler descriptor tc weight */
void hw_atl_tps_tx_pkt_shed_desc_tc_weight_set(struct aq_hw_s *aq_hw,
					       u32 tx_pkt_shed_desc_tc_weight,
					       u32 tc);

/* set tx packet scheduler descriptor vm arbitration mode */
void hw_atl_tps_tx_pkt_shed_desc_vm_arb_mode_set(struct aq_hw_s *aq_hw,
						 u32 arb_mode);

/* set tx packet scheduler tc data max credit */
void hw_atl_tps_tx_pkt_shed_tc_data_max_credit_set(struct aq_hw_s *aq_hw,
						   u32 max_credit,
						   u32 tc);

/* set tx packet scheduler tc data weight */
void hw_atl_tps_tx_pkt_shed_tc_data_weight_set(struct aq_hw_s *aq_hw,
					       u32 tx_pkt_shed_tc_data_weight,
					       u32 tc);

/* tx */

/* set tx register reset disable */
void hw_atl_tx_tx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 tx_reg_res_dis);

/* msm */

/* get register access status */
u32 hw_atl_msm_reg_access_status_get(struct aq_hw_s *aq_hw);

/* set register address for indirect address */
void hw_atl_msm_reg_addr_for_indirect_addr_set(struct aq_hw_s *aq_hw,
					       u32 reg_addr_for_indirect_addr);

/* set register read strobe */
void hw_atl_msm_reg_rd_strobe_set(struct aq_hw_s *aq_hw, u32 reg_rd_strobe);

/* get register read data */
u32 hw_atl_msm_reg_rd_data_get(struct aq_hw_s *aq_hw);

/* set register write data */
void hw_atl_msm_reg_wr_data_set(struct aq_hw_s *aq_hw, u32 reg_wr_data);

/* set register write strobe */
void hw_atl_msm_reg_wr_strobe_set(struct aq_hw_s *aq_hw, u32 reg_wr_strobe);
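/* Illustrative sketch only (not part of the original API): read an MSM
 * register through the indirect access window. The assumed sequence is:
 * program the address, pulse the read strobe, wait for the access-status
 * bit to clear, then read back the data.
 */
static inline u32 hw_atl_llh_example_msm_read(struct aq_hw_s *aq_hw, u32 addr)
{
	unsigned int retries = 1000U;

	hw_atl_msm_reg_addr_for_indirect_addr_set(aq_hw, addr);
	hw_atl_msm_reg_rd_strobe_set(aq_hw, 1U);

	/* Wait (bounded) for the indirect access to complete. */
	while (hw_atl_msm_reg_access_status_get(aq_hw) && --retries)
		;

	return hw_atl_msm_reg_rd_data_get(aq_hw);
}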
/* pci */

/* set pci register reset disable */
void hw_atl_pci_pci_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 pci_reg_res_dis);

/* set uP Force Interrupt */
void hw_atl_mcp_up_force_intr_set(struct aq_hw_s *aq_hw, u32 up_force_intr);

/* clear ipv4 filter destination address */
void hw_atl_rpfl3l4_ipv4_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv4 filter source address */
void hw_atl_rpfl3l4_ipv4_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear command for filter l3-l4 */
void hw_atl_rpfl3l4_cmd_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv6 filter destination address */
void hw_atl_rpfl3l4_ipv6_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv6 filter source address */
void hw_atl_rpfl3l4_ipv6_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* set ipv4 filter destination address */
void hw_atl_rpfl3l4_ipv4_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
				       u32 ipv4_dest);

/* set ipv4 filter source address */
void hw_atl_rpfl3l4_ipv4_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
				      u32 ipv4_src);

/* set command for filter l3-l4 */
void hw_atl_rpfl3l4_cmd_set(struct aq_hw_s *aq_hw, u8 location, u32 cmd);

/* set ipv6 filter source address */
void hw_atl_rpfl3l4_ipv6_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
				      u32 *ipv6_src);

/* set ipv6 filter destination address */
void hw_atl_rpfl3l4_ipv6_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
				       u32 *ipv6_dest);

/* get global microprocessor ram semaphore */
u32 hw_atl_sem_ram_get(struct aq_hw_s *self);

/* get global microprocessor scratch pad register */
u32 hw_atl_scrpad_get(struct aq_hw_s *aq_hw, u32 scratch_scp);

/* get global microprocessor scratch pad 12 register */
u32 hw_atl_scrpad12_get(struct aq_hw_s *self);

/* get global microprocessor scratch pad 25 register */
u32 hw_atl_scrpad25_get(struct aq_hw_s *self);

#endif /* HW_ATL_LLH_H */