1 /* SPDX-License-Identifier: GPL-2.0-only */
3 * aQuantia Corporation Network Driver
4 * Copyright (C) 2014-2017 aQuantia Corporation. All rights reserved
7 /* File hw_atl_llh.h: Declarations of bitfield and register access functions for
14 #include <linux/types.h>
20 /* set global microprocessor semaphore */
21 void hw_atl_reg_glb_cpu_sem_set(struct aq_hw_s *aq_hw, u32 glb_cpu_sem,
24 /* get global microprocessor semaphore */
25 u32 hw_atl_reg_glb_cpu_sem_get(struct aq_hw_s *aq_hw, u32 semaphore);
27 /* set global register reset disable */
28 void hw_atl_glb_glb_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 glb_reg_res_dis);
31 void hw_atl_glb_soft_res_set(struct aq_hw_s *aq_hw, u32 soft_res);
34 u32 hw_atl_glb_soft_res_get(struct aq_hw_s *aq_hw);
38 u32 hw_atl_rpb_rx_dma_drop_pkt_cnt_get(struct aq_hw_s *aq_hw);
40 /* get rx dma good octet counter */
41 u64 hw_atl_stats_rx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);
43 /* get rx dma good packet counter */
44 u64 hw_atl_stats_rx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);
46 /* get tx dma good octet counter */
47 u64 hw_atl_stats_tx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);
49 /* get tx dma good packet counter */
50 u64 hw_atl_stats_tx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);
52 /* get msm rx errors counter register */
53 u32 hw_atl_reg_mac_msm_rx_errs_cnt_get(struct aq_hw_s *aq_hw);
55 /* get msm rx unicast frames counter register */
56 u32 hw_atl_reg_mac_msm_rx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);
58 /* get msm rx multicast frames counter register */
59 u32 hw_atl_reg_mac_msm_rx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);
61 /* get msm rx broadcast frames counter register */
62 u32 hw_atl_reg_mac_msm_rx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);
64 /* get msm rx broadcast octets counter register 1 */
65 u32 hw_atl_reg_mac_msm_rx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);
67 /* get msm rx unicast octets counter register 0 */
68 u32 hw_atl_reg_mac_msm_rx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);
70 /* get msm tx errors counter register */
71 u32 hw_atl_reg_mac_msm_tx_errs_cnt_get(struct aq_hw_s *aq_hw);
73 /* get msm tx unicast frames counter register */
74 u32 hw_atl_reg_mac_msm_tx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);
76 /* get msm tx multicast frames counter register */
77 u32 hw_atl_reg_mac_msm_tx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);
79 /* get msm tx broadcast frames counter register */
80 u32 hw_atl_reg_mac_msm_tx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);
82 /* get msm tx multicast octets counter register 1 */
83 u32 hw_atl_reg_mac_msm_tx_mcst_octets_counter1get(struct aq_hw_s *aq_hw);
85 /* get msm tx broadcast octets counter register 1 */
86 u32 hw_atl_reg_mac_msm_tx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);
88 /* get msm tx unicast octets counter register 0 */
89 u32 hw_atl_reg_mac_msm_tx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);
91 /* get global mif identification */
92 u32 hw_atl_reg_glb_mif_id_get(struct aq_hw_s *aq_hw);
96 /* set interrupt auto mask lsw */
97 void hw_atl_itr_irq_auto_masklsw_set(struct aq_hw_s *aq_hw,
98 u32 irq_auto_masklsw);
100 /* set interrupt mapping enable rx */
101 void hw_atl_itr_irq_map_en_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_rx,
104 /* set interrupt mapping enable tx */
105 void hw_atl_itr_irq_map_en_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_tx,
108 /* set interrupt mapping rx */
109 void hw_atl_itr_irq_map_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_rx, u32 rx);
111 /* set interrupt mapping tx */
112 void hw_atl_itr_irq_map_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_tx, u32 tx);
114 /* set interrupt mask clear lsw */
115 void hw_atl_itr_irq_msk_clearlsw_set(struct aq_hw_s *aq_hw,
116 u32 irq_msk_clearlsw);
118 /* set interrupt mask set lsw */
119 void hw_atl_itr_irq_msk_setlsw_set(struct aq_hw_s *aq_hw, u32 irq_msk_setlsw);
121 /* set interrupt register reset disable */
122 void hw_atl_itr_irq_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 irq_reg_res_dis);
124 /* set interrupt status clear lsw */
125 void hw_atl_itr_irq_status_clearlsw_set(struct aq_hw_s *aq_hw,
126 u32 irq_status_clearlsw);
128 /* get interrupt status lsw */
129 u32 hw_atl_itr_irq_statuslsw_get(struct aq_hw_s *aq_hw);
131 /* get reset interrupt */
132 u32 hw_atl_itr_res_irq_get(struct aq_hw_s *aq_hw);
134 /* set reset interrupt */
135 void hw_atl_itr_res_irq_set(struct aq_hw_s *aq_hw, u32 res_irq);
137 /* set RSC interrupt */
138 void hw_atl_itr_rsc_en_set(struct aq_hw_s *aq_hw, u32 enable);
141 void hw_atl_itr_rsc_delay_set(struct aq_hw_s *aq_hw, u32 delay);
146 void hw_atl_rdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);
148 /* set rx dca enable */
149 void hw_atl_rdm_rx_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_dca_en);
151 /* set rx dca mode */
152 void hw_atl_rdm_rx_dca_mode_set(struct aq_hw_s *aq_hw, u32 rx_dca_mode);
154 /* set rx descriptor data buffer size */
155 void hw_atl_rdm_rx_desc_data_buff_size_set(struct aq_hw_s *aq_hw,
156 u32 rx_desc_data_buff_size,
159 /* set rx descriptor dca enable */
160 void hw_atl_rdm_rx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_dca_en,
163 /* set rx descriptor enable */
164 void hw_atl_rdm_rx_desc_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_en,
167 /* set rx descriptor header splitting */
168 void hw_atl_rdm_rx_desc_head_splitting_set(struct aq_hw_s *aq_hw,
169 u32 rx_desc_head_splitting,
172 /* get rx descriptor head pointer */
173 u32 hw_atl_rdm_rx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);
175 /* set rx descriptor length */
176 void hw_atl_rdm_rx_desc_len_set(struct aq_hw_s *aq_hw, u32 rx_desc_len,
179 /* set rx descriptor write-back interrupt enable */
180 void hw_atl_rdm_rx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
181 u32 rx_desc_wr_wb_irq_en);
183 /* set rx header dca enable */
184 void hw_atl_rdm_rx_head_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_head_dca_en,
187 /* set rx payload dca enable */
188 void hw_atl_rdm_rx_pld_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_pld_dca_en,
191 /* set rx descriptor header buffer size */
192 void hw_atl_rdm_rx_desc_head_buff_size_set(struct aq_hw_s *aq_hw,
193 u32 rx_desc_head_buff_size,
196 /* set rx descriptor reset */
197 void hw_atl_rdm_rx_desc_res_set(struct aq_hw_s *aq_hw, u32 rx_desc_res,
200 /* Set RDM Interrupt Moderation Enable */
201 void hw_atl_rdm_rdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
202 u32 rdm_intr_moder_en);
206 /* set general interrupt mapping register */
207 void hw_atl_reg_gen_irq_map_set(struct aq_hw_s *aq_hw, u32 gen_intr_map,
210 /* get general interrupt status register */
211 u32 hw_atl_reg_gen_irq_status_get(struct aq_hw_s *aq_hw);
213 /* set interrupt global control register */
214 void hw_atl_reg_irq_glb_ctl_set(struct aq_hw_s *aq_hw, u32 intr_glb_ctl);
216 /* set interrupt throttle register */
217 void hw_atl_reg_irq_thr_set(struct aq_hw_s *aq_hw, u32 intr_thr, u32 throttle);
219 /* set rx dma descriptor base address lsw */
220 void hw_atl_reg_rx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
221 u32 rx_dma_desc_base_addrlsw,
224 /* set rx dma descriptor base address msw */
225 void hw_atl_reg_rx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
226 u32 rx_dma_desc_base_addrmsw,
229 /* get rx dma descriptor status register */
230 u32 hw_atl_reg_rx_dma_desc_status_get(struct aq_hw_s *aq_hw, u32 descriptor);
232 /* set rx dma descriptor tail pointer register */
233 void hw_atl_reg_rx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
234 u32 rx_dma_desc_tail_ptr,
237 /* set rx filter multicast filter mask register */
238 void hw_atl_reg_rx_flr_mcst_flr_msk_set(struct aq_hw_s *aq_hw,
239 u32 rx_flr_mcst_flr_msk);
241 /* set rx filter multicast filter register */
242 void hw_atl_reg_rx_flr_mcst_flr_set(struct aq_hw_s *aq_hw, u32 rx_flr_mcst_flr,
245 /* set rx filter rss control register 1 */
246 void hw_atl_reg_rx_flr_rss_control1set(struct aq_hw_s *aq_hw,
247 u32 rx_flr_rss_control1);
249 /* Set RX Filter Control Register 2 */
250 void hw_atl_reg_rx_flr_control2_set(struct aq_hw_s *aq_hw, u32 rx_flr_control2);
252 /* Set RX Interrupt Moderation Control Register */
253 void hw_atl_reg_rx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
254 u32 rx_intr_moderation_ctl,
257 /* set tx dma debug control */
258 void hw_atl_reg_tx_dma_debug_ctl_set(struct aq_hw_s *aq_hw,
259 u32 tx_dma_debug_ctl);
261 /* set tx dma descriptor base address lsw */
262 void hw_atl_reg_tx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
263 u32 tx_dma_desc_base_addrlsw,
266 /* set tx dma descriptor base address msw */
267 void hw_atl_reg_tx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
268 u32 tx_dma_desc_base_addrmsw,
271 /* set tx dma descriptor tail pointer register */
272 void hw_atl_reg_tx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
273 u32 tx_dma_desc_tail_ptr,
276 /* Set TX Interrupt Moderation Control Register */
277 void hw_atl_reg_tx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
278 u32 tx_intr_moderation_ctl,
281 /* set global microprocessor scratch pad */
282 void hw_atl_reg_glb_cpu_scratch_scp_set(struct aq_hw_s *aq_hw,
283 u32 glb_cpu_scratch_scp,
288 /* set dma system loopback */
289 void hw_atl_rpb_dma_sys_lbk_set(struct aq_hw_s *aq_hw, u32 dma_sys_lbk);
291 /* set rx traffic class mode */
292 void hw_atl_rpb_rpf_rx_traf_class_mode_set(struct aq_hw_s *aq_hw,
293 u32 rx_traf_class_mode);
295 /* set rx buffer enable */
296 void hw_atl_rpb_rx_buff_en_set(struct aq_hw_s *aq_hw, u32 rx_buff_en);
298 /* set rx buffer high threshold (per tc) */
299 void hw_atl_rpb_rx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
300 u32 rx_buff_hi_threshold_per_tc,
303 /* set rx buffer low threshold (per tc) */
304 void hw_atl_rpb_rx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
305 u32 rx_buff_lo_threshold_per_tc,
308 /* set rx flow control mode */
309 void hw_atl_rpb_rx_flow_ctl_mode_set(struct aq_hw_s *aq_hw, u32 rx_flow_ctl_mode);
311 /* set rx packet buffer size (per tc) */
312 void hw_atl_rpb_rx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
313 u32 rx_pkt_buff_size_per_tc,
316 /* set rdm rx dma descriptor cache init */
317 void hw_atl_rdm_rx_dma_desc_cache_init_set(struct aq_hw_s *aq_hw, u32 init);
319 /* set rx xoff enable (per tc) */
320 void hw_atl_rpb_rx_xoff_en_per_tc_set(struct aq_hw_s *aq_hw, u32 rx_xoff_en_per_tc,
325 /* set l2 broadcast count threshold */
326 void hw_atl_rpfl2broadcast_count_threshold_set(struct aq_hw_s *aq_hw,
327 u32 l2broadcast_count_threshold);
329 /* set l2 broadcast enable */
330 void hw_atl_rpfl2broadcast_en_set(struct aq_hw_s *aq_hw, u32 l2broadcast_en);
332 /* set l2 broadcast filter action */
333 void hw_atl_rpfl2broadcast_flr_act_set(struct aq_hw_s *aq_hw,
334 u32 l2broadcast_flr_act);
336 /* set l2 multicast filter enable */
337 void hw_atl_rpfl2multicast_flr_en_set(struct aq_hw_s *aq_hw,
338 u32 l2multicast_flr_en,
341 /* set l2 promiscuous mode enable */
342 void hw_atl_rpfl2promiscuous_mode_en_set(struct aq_hw_s *aq_hw,
343 u32 l2promiscuous_mode_en);
345 /* set l2 unicast filter action */
346 void hw_atl_rpfl2unicast_flr_act_set(struct aq_hw_s *aq_hw,
347 u32 l2unicast_flr_act,
350 /* set l2 unicast filter enable */
351 void hw_atl_rpfl2_uc_flr_en_set(struct aq_hw_s *aq_hw, u32 l2unicast_flr_en,
354 /* set l2 unicast destination address lsw */
355 void hw_atl_rpfl2unicast_dest_addresslsw_set(struct aq_hw_s *aq_hw,
356 u32 l2unicast_dest_addresslsw,
359 /* set l2 unicast destination address msw */
360 void hw_atl_rpfl2unicast_dest_addressmsw_set(struct aq_hw_s *aq_hw,
361 u32 l2unicast_dest_addressmsw,
364 /* Set L2 Accept all Multicast packets */
365 void hw_atl_rpfl2_accept_all_mc_packets_set(struct aq_hw_s *aq_hw,
366 u32 l2_accept_all_mc_packets);
368 /* set user-priority tc mapping */
369 void hw_atl_rpf_rpb_user_priority_tc_map_set(struct aq_hw_s *aq_hw,
370 u32 user_priority_tc_map, u32 tc);
372 /* set rss key address */
373 void hw_atl_rpf_rss_key_addr_set(struct aq_hw_s *aq_hw, u32 rss_key_addr);
375 /* set rss key write data */
376 void hw_atl_rpf_rss_key_wr_data_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_data);
378 /* get rss key write enable */
379 u32 hw_atl_rpf_rss_key_wr_en_get(struct aq_hw_s *aq_hw);
381 /* set rss key write enable */
382 void hw_atl_rpf_rss_key_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_en);
384 /* set rss redirection table address */
385 void hw_atl_rpf_rss_redir_tbl_addr_set(struct aq_hw_s *aq_hw,
386 u32 rss_redir_tbl_addr);
388 /* set rss redirection table write data */
389 void hw_atl_rpf_rss_redir_tbl_wr_data_set(struct aq_hw_s *aq_hw,
390 u32 rss_redir_tbl_wr_data);
392 /* get rss redirection write enable */
393 u32 hw_atl_rpf_rss_redir_wr_en_get(struct aq_hw_s *aq_hw);
395 /* set rss redirection write enable */
396 void hw_atl_rpf_rss_redir_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_redir_wr_en);
398 /* set tpo to rpf system loopback */
399 void hw_atl_rpf_tpo_to_rpf_sys_lbk_set(struct aq_hw_s *aq_hw,
400 u32 tpo_to_rpf_sys_lbk);
402 /* set vlan inner ethertype */
403 void hw_atl_rpf_vlan_inner_etht_set(struct aq_hw_s *aq_hw, u32 vlan_inner_etht);
405 /* set vlan outer ethertype */
406 void hw_atl_rpf_vlan_outer_etht_set(struct aq_hw_s *aq_hw, u32 vlan_outer_etht);
408 /* set vlan promiscuous mode enable */
409 void hw_atl_rpf_vlan_prom_mode_en_set(struct aq_hw_s *aq_hw,
410 u32 vlan_prom_mode_en);
412 /* Set VLAN untagged action */
413 void hw_atl_rpf_vlan_untagged_act_set(struct aq_hw_s *aq_hw,
414 u32 vlan_untagged_act);
416 /* Set VLAN accept untagged packets */
417 void hw_atl_rpf_vlan_accept_untagged_packets_set(struct aq_hw_s *aq_hw,
418 u32 vlan_acc_untagged_packets);
420 /* Set VLAN filter enable */
421 void hw_atl_rpf_vlan_flr_en_set(struct aq_hw_s *aq_hw, u32 vlan_flr_en,
424 /* Set VLAN Filter Action */
425 void hw_atl_rpf_vlan_flr_act_set(struct aq_hw_s *aq_hw, u32 vlan_filter_act,
428 /* Set VLAN ID Filter */
429 void hw_atl_rpf_vlan_id_flr_set(struct aq_hw_s *aq_hw, u32 vlan_id_flr,
432 /* Set VLAN RX queue assignment enable */
433 void hw_atl_rpf_vlan_rxq_en_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq_en,
436 /* Set VLAN RX queue */
437 void hw_atl_rpf_vlan_rxq_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq,
440 /* set ethertype filter enable */
441 void hw_atl_rpf_etht_flr_en_set(struct aq_hw_s *aq_hw, u32 etht_flr_en,
444 /* set ethertype user-priority enable */
445 void hw_atl_rpf_etht_user_priority_en_set(struct aq_hw_s *aq_hw,
446 u32 etht_user_priority_en,
449 /* set ethertype rx queue enable */
450 void hw_atl_rpf_etht_rx_queue_en_set(struct aq_hw_s *aq_hw,
451 u32 etht_rx_queue_en,
454 /* set ethertype rx queue */
455 void hw_atl_rpf_etht_rx_queue_set(struct aq_hw_s *aq_hw, u32 etht_rx_queue,
458 /* set ethertype user-priority */
459 void hw_atl_rpf_etht_user_priority_set(struct aq_hw_s *aq_hw,
460 u32 etht_user_priority,
463 /* set ethertype management queue */
464 void hw_atl_rpf_etht_mgt_queue_set(struct aq_hw_s *aq_hw, u32 etht_mgt_queue,
467 /* set ethertype filter action */
468 void hw_atl_rpf_etht_flr_act_set(struct aq_hw_s *aq_hw, u32 etht_flr_act,
471 /* set ethertype filter */
472 void hw_atl_rpf_etht_flr_set(struct aq_hw_s *aq_hw, u32 etht_flr, u32 filter);
474 /* set L4 source port */
475 void hw_atl_rpf_l4_spd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);
477 /* set L4 destination port */
478 void hw_atl_rpf_l4_dpd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);
482 /* set ipv4 header checksum offload enable */
483 void hw_atl_rpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
484 u32 ipv4header_crc_offload_en);
486 /* set rx descriptor vlan stripping */
487 void hw_atl_rpo_rx_desc_vlan_stripping_set(struct aq_hw_s *aq_hw,
488 u32 rx_desc_vlan_stripping,
491 void hw_atl_rpo_outer_vlan_tag_mode_set(void *context,
492 u32 outervlantagmode);
494 u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context);
496 /* set tcp/udp checksum offload enable */
497 void hw_atl_rpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
498 u32 tcp_udp_crc_offload_en);
500 /* Set LRO Patch Optimization Enable. */
501 void hw_atl_rpo_lro_patch_optimization_en_set(struct aq_hw_s *aq_hw,
502 u32 lro_patch_optimization_en);
504 /* Set Large Receive Offload Enable */
505 void hw_atl_rpo_lro_en_set(struct aq_hw_s *aq_hw, u32 lro_en);
507 /* Set LRO Q Sessions Limit */
508 void hw_atl_rpo_lro_qsessions_lim_set(struct aq_hw_s *aq_hw,
509 u32 lro_qsessions_lim);
511 /* Set LRO Total Descriptor Limit */
512 void hw_atl_rpo_lro_total_desc_lim_set(struct aq_hw_s *aq_hw,
513 u32 lro_total_desc_lim);
515 /* Set LRO Min Payload of First Packet */
516 void hw_atl_rpo_lro_min_pay_of_first_pkt_set(struct aq_hw_s *aq_hw,
517 u32 lro_min_pld_of_first_pkt);
519 /* Set LRO Packet Limit */
520 void hw_atl_rpo_lro_pkt_lim_set(struct aq_hw_s *aq_hw, u32 lro_packet_lim);
522 /* Set LRO Max Number of Descriptors */
523 void hw_atl_rpo_lro_max_num_of_descriptors_set(struct aq_hw_s *aq_hw,
524 u32 lro_max_desc_num, u32 lro);
526 /* Set LRO Time Base Divider */
527 void hw_atl_rpo_lro_time_base_divider_set(struct aq_hw_s *aq_hw,
528 u32 lro_time_base_divider);
530 /* Set LRO Inactive Interval */
531 void hw_atl_rpo_lro_inactive_interval_set(struct aq_hw_s *aq_hw,
532 u32 lro_inactive_interval);
534 /* Set LRO Max Coalescing Interval */
535 void hw_atl_rpo_lro_max_coalescing_interval_set(struct aq_hw_s *aq_hw,
536 u32 lro_max_coal_interval);
540 /* set rx register reset disable */
541 void hw_atl_rx_rx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 rx_reg_res_dis);
546 void hw_atl_tdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);
548 /* set large send offload enable */
549 void hw_atl_tdm_large_send_offload_en_set(struct aq_hw_s *aq_hw,
550 u32 large_send_offload_en);
552 /* set tx descriptor enable */
553 void hw_atl_tdm_tx_desc_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_en,
556 /* set tx dca enable */
557 void hw_atl_tdm_tx_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_dca_en);
559 /* set tx dca mode */
560 void hw_atl_tdm_tx_dca_mode_set(struct aq_hw_s *aq_hw, u32 tx_dca_mode);
562 /* set tx descriptor dca enable */
563 void hw_atl_tdm_tx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_dca_en,
566 /* get tx descriptor head pointer */
567 u32 hw_atl_tdm_tx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);
569 /* set tx descriptor length */
570 void hw_atl_tdm_tx_desc_len_set(struct aq_hw_s *aq_hw, u32 tx_desc_len,
573 /* set tx descriptor write-back interrupt enable */
574 void hw_atl_tdm_tx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
575 u32 tx_desc_wr_wb_irq_en);
577 /* set tx descriptor write-back threshold */
578 void hw_atl_tdm_tx_desc_wr_wb_threshold_set(struct aq_hw_s *aq_hw,
579 u32 tx_desc_wr_wb_threshold,
582 /* Set TDM Interrupt Moderation Enable */
583 void hw_atl_tdm_tdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
584 u32 tdm_irq_moderation_en);
587 /* set lso tcp flag of first packet */
588 void hw_atl_thm_lso_tcp_flag_of_first_pkt_set(struct aq_hw_s *aq_hw,
589 u32 lso_tcp_flag_of_first_pkt);
591 /* set lso tcp flag of last packet */
592 void hw_atl_thm_lso_tcp_flag_of_last_pkt_set(struct aq_hw_s *aq_hw,
593 u32 lso_tcp_flag_of_last_pkt);
595 /* set lso tcp flag of middle packet */
596 void hw_atl_thm_lso_tcp_flag_of_middle_pkt_set(struct aq_hw_s *aq_hw,
597 u32 lso_tcp_flag_of_middle_pkt);
601 /* set TX Traffic Class Mode */
602 void hw_atl_rpb_tps_tx_tc_mode_set(struct aq_hw_s *aq_hw,
603 u32 tx_traf_class_mode);
605 /* set tx buffer enable */
606 void hw_atl_tpb_tx_buff_en_set(struct aq_hw_s *aq_hw, u32 tx_buff_en);
608 /* set tx buffer high threshold (per tc) */
609 void hw_atl_tpb_tx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
610 u32 tx_buff_hi_threshold_per_tc,
613 /* set tx buffer low threshold (per tc) */
614 void hw_atl_tpb_tx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
615 u32 tx_buff_lo_threshold_per_tc,
618 /* set tx dma system loopback enable */
619 void hw_atl_tpb_tx_dma_sys_lbk_en_set(struct aq_hw_s *aq_hw, u32 tx_dma_sys_lbk_en);
621 /* set tx packet buffer size (per tc) */
622 void hw_atl_tpb_tx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
623 u32 tx_pkt_buff_size_per_tc, u32 buffer);
625 /* set tx path pad insert enable */
626 void hw_atl_tpb_tx_path_scp_ins_en_set(struct aq_hw_s *aq_hw, u32 tx_path_scp_ins_en);
630 /* set ipv4 header checksum offload enable */
631 void hw_atl_tpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
632 u32 ipv4header_crc_offload_en);
634 /* set tcp/udp checksum offload enable */
635 void hw_atl_tpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
636 u32 tcp_udp_crc_offload_en);
638 /* set tx pkt system loopback enable */
639 void hw_atl_tpo_tx_pkt_sys_lbk_en_set(struct aq_hw_s *aq_hw,
640 u32 tx_pkt_sys_lbk_en);
644 /* set tx packet scheduler data arbitration mode */
645 void hw_atl_tps_tx_pkt_shed_data_arb_mode_set(struct aq_hw_s *aq_hw,
646 u32 tx_pkt_shed_data_arb_mode);
648 /* set tx packet scheduler descriptor rate current time reset */
649 void hw_atl_tps_tx_pkt_shed_desc_rate_curr_time_res_set(struct aq_hw_s *aq_hw,
652 /* set tx packet scheduler descriptor rate limit */
653 void hw_atl_tps_tx_pkt_shed_desc_rate_lim_set(struct aq_hw_s *aq_hw,
654 u32 tx_pkt_shed_desc_rate_lim);
656 /* set tx packet scheduler descriptor tc arbitration mode */
657 void hw_atl_tps_tx_pkt_shed_desc_tc_arb_mode_set(struct aq_hw_s *aq_hw,
660 /* set tx packet scheduler descriptor tc max credit */
661 void hw_atl_tps_tx_pkt_shed_desc_tc_max_credit_set(struct aq_hw_s *aq_hw,
665 /* set tx packet scheduler descriptor tc weight */
666 void hw_atl_tps_tx_pkt_shed_desc_tc_weight_set(struct aq_hw_s *aq_hw,
667 u32 tx_pkt_shed_desc_tc_weight,
670 /* set tx packet scheduler descriptor vm arbitration mode */
671 void hw_atl_tps_tx_pkt_shed_desc_vm_arb_mode_set(struct aq_hw_s *aq_hw,
674 /* set tx packet scheduler tc data max credit */
675 void hw_atl_tps_tx_pkt_shed_tc_data_max_credit_set(struct aq_hw_s *aq_hw,
679 /* set tx packet scheduler tc data weight */
680 void hw_atl_tps_tx_pkt_shed_tc_data_weight_set(struct aq_hw_s *aq_hw,
681 u32 tx_pkt_shed_tc_data_weight,
686 /* set tx register reset disable */
687 void hw_atl_tx_tx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 tx_reg_res_dis);
691 /* get register access status */
692 u32 hw_atl_msm_reg_access_status_get(struct aq_hw_s *aq_hw);
694 /* set register address for indirect address */
695 void hw_atl_msm_reg_addr_for_indirect_addr_set(struct aq_hw_s *aq_hw,
696 u32 reg_addr_for_indirect_addr);
698 /* set register read strobe */
699 void hw_atl_msm_reg_rd_strobe_set(struct aq_hw_s *aq_hw, u32 reg_rd_strobe);
701 /* get register read data */
702 u32 hw_atl_msm_reg_rd_data_get(struct aq_hw_s *aq_hw);
704 /* set register write data */
705 void hw_atl_msm_reg_wr_data_set(struct aq_hw_s *aq_hw, u32 reg_wr_data);
707 /* set register write strobe */
708 void hw_atl_msm_reg_wr_strobe_set(struct aq_hw_s *aq_hw, u32 reg_wr_strobe);
712 /* set pci register reset disable */
713 void hw_atl_pci_pci_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 pci_reg_res_dis);
715 /* set uP Force Interrupt */
716 void hw_atl_mcp_up_force_intr_set(struct aq_hw_s *aq_hw, u32 up_force_intr);
718 /* clear ipv4 filter destination address */
719 void hw_atl_rpfl3l4_ipv4_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);
721 /* clear ipv4 filter source address */
722 void hw_atl_rpfl3l4_ipv4_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);
724 /* clear command for filter l3-l4 */
725 void hw_atl_rpfl3l4_cmd_clear(struct aq_hw_s *aq_hw, u8 location);
727 /* clear ipv6 filter destination address */
728 void hw_atl_rpfl3l4_ipv6_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);
730 /* clear ipv6 filter source address */
731 void hw_atl_rpfl3l4_ipv6_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);
733 /* set ipv4 filter destination address */
734 void hw_atl_rpfl3l4_ipv4_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
737 /* set ipv4 filter source address */
738 void hw_atl_rpfl3l4_ipv4_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
741 /* set command for filter l3-l4 */
742 void hw_atl_rpfl3l4_cmd_set(struct aq_hw_s *aq_hw, u8 location, u32 cmd);
744 /* set ipv6 filter source address */
745 void hw_atl_rpfl3l4_ipv6_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
748 /* set ipv6 filter destination address */
749 void hw_atl_rpfl3l4_ipv6_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
752 /* get global microprocessor ram semaphore */
753 u32 hw_atl_sem_ram_get(struct aq_hw_s *self);
755 /* get global microprocessor scratch pad register */
756 u32 hw_atl_scrpad_get(struct aq_hw_s *aq_hw, u32 scratch_scp);
758 /* get global microprocessor scratch pad 12 register */
759 u32 hw_atl_scrpad12_get(struct aq_hw_s *self);
761 /* get global microprocessor scratch pad 25 register */
762 u32 hw_atl_scrpad25_get(struct aq_hw_s *self);
764 #endif /* HW_ATL_LLH_H */