1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2019-2022 Realtek Corporation
3 */
4
5 #include "coex.h"
6 #include "debug.h"
7 #include "fw.h"
8 #include "mac.h"
9 #include "phy.h"
10 #include "reg.h"
11 #include "rtw8852c.h"
12 #include "rtw8852c_rfk.h"
13 #include "rtw8852c_table.h"
14 #include "util.h"
15
16 static const struct rtw89_hfc_ch_cfg rtw8852c_hfc_chcfg_pcie[] = {
17 {13, 1614, grp_0}, /* ACH 0 */
18 {13, 1614, grp_0}, /* ACH 1 */
19 {13, 1614, grp_0}, /* ACH 2 */
20 {13, 1614, grp_0}, /* ACH 3 */
21 {13, 1614, grp_1}, /* ACH 4 */
22 {13, 1614, grp_1}, /* ACH 5 */
23 {13, 1614, grp_1}, /* ACH 6 */
24 {13, 1614, grp_1}, /* ACH 7 */
25 {13, 1614, grp_0}, /* B0MGQ */
26 {13, 1614, grp_0}, /* B0HIQ */
27 {13, 1614, grp_1}, /* B1MGQ */
28 {13, 1614, grp_1}, /* B1HIQ */
29 {40, 0, 0} /* FWCMDQ */
30 };
31
32 static const struct rtw89_hfc_pub_cfg rtw8852c_hfc_pubcfg_pcie = {
33 1614, /* Group 0 */
34 1614, /* Group 1 */
35 3228, /* Public Max */
36 0 /* WP threshold */
37 };
38
39 static const struct rtw89_hfc_param_ini rtw8852c_hfc_param_ini_pcie[] = {
40 [RTW89_QTA_SCC] = {rtw8852c_hfc_chcfg_pcie, &rtw8852c_hfc_pubcfg_pcie,
41 &rtw89_mac_size.hfc_preccfg_pcie, RTW89_HCIFC_POH},
42 [RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_preccfg_pcie,
43 RTW89_HCIFC_POH},
44 [RTW89_QTA_INVALID] = {NULL},
45 };
46
47 static const struct rtw89_dle_mem rtw8852c_dle_mem_pcie[] = {
48 [RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size19,
49 &rtw89_mac_size.ple_size19, &rtw89_mac_size.wde_qt18,
50 &rtw89_mac_size.wde_qt18, &rtw89_mac_size.ple_qt46,
51 &rtw89_mac_size.ple_qt47},
52 [RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size18,
53 &rtw89_mac_size.ple_size18, &rtw89_mac_size.wde_qt17,
54 &rtw89_mac_size.wde_qt17, &rtw89_mac_size.ple_qt44,
55 &rtw89_mac_size.ple_qt45},
56 [RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
57 NULL},
58 };
59
60 static const u32 rtw8852c_h2c_regs[RTW89_H2CREG_MAX] = {
61 R_AX_H2CREG_DATA0_V1, R_AX_H2CREG_DATA1_V1, R_AX_H2CREG_DATA2_V1,
62 R_AX_H2CREG_DATA3_V1
63 };
64
65 static const u32 rtw8852c_c2h_regs[RTW89_C2HREG_MAX] = {
66 R_AX_C2HREG_DATA0_V1, R_AX_C2HREG_DATA1_V1, R_AX_C2HREG_DATA2_V1,
67 R_AX_C2HREG_DATA3_V1
68 };
69
70 static const struct rtw89_page_regs rtw8852c_page_regs = {
71 .hci_fc_ctrl = R_AX_HCI_FC_CTRL_V1,
72 .ch_page_ctrl = R_AX_CH_PAGE_CTRL_V1,
73 .ach_page_ctrl = R_AX_ACH0_PAGE_CTRL_V1,
74 .ach_page_info = R_AX_ACH0_PAGE_INFO_V1,
75 .pub_page_info3 = R_AX_PUB_PAGE_INFO3_V1,
76 .pub_page_ctrl1 = R_AX_PUB_PAGE_CTRL1_V1,
77 .pub_page_ctrl2 = R_AX_PUB_PAGE_CTRL2_V1,
78 .pub_page_info1 = R_AX_PUB_PAGE_INFO1_V1,
79 .pub_page_info2 = R_AX_PUB_PAGE_INFO2_V1,
80 .wp_page_ctrl1 = R_AX_WP_PAGE_CTRL1_V1,
81 .wp_page_ctrl2 = R_AX_WP_PAGE_CTRL2_V1,
82 .wp_page_info1 = R_AX_WP_PAGE_INFO1_V1,
83 };
84
85 static const struct rtw89_reg_def rtw8852c_dcfo_comp = {
86 R_DCFO_COMP_S0_V1, B_DCFO_COMP_S0_V1_MSK
87 };
88
89 static const struct rtw89_imr_info rtw8852c_imr_info = {
90 .wdrls_imr_set = B_AX_WDRLS_IMR_SET_V1,
91 .wsec_imr_reg = R_AX_SEC_ERROR_FLAG_IMR,
92 .wsec_imr_set = B_AX_TX_HANG_IMR | B_AX_RX_HANG_IMR,
93 .mpdu_tx_imr_set = B_AX_MPDU_TX_IMR_SET_V1,
94 .mpdu_rx_imr_set = B_AX_MPDU_RX_IMR_SET_V1,
95 .sta_sch_imr_set = B_AX_STA_SCHEDULER_IMR_SET,
96 .txpktctl_imr_b0_reg = R_AX_TXPKTCTL_B0_ERRFLAG_IMR,
97 .txpktctl_imr_b0_clr = B_AX_TXPKTCTL_IMR_B0_CLR_V1,
98 .txpktctl_imr_b0_set = B_AX_TXPKTCTL_IMR_B0_SET_V1,
99 .txpktctl_imr_b1_reg = R_AX_TXPKTCTL_B1_ERRFLAG_IMR,
100 .txpktctl_imr_b1_clr = B_AX_TXPKTCTL_IMR_B1_CLR_V1,
101 .txpktctl_imr_b1_set = B_AX_TXPKTCTL_IMR_B1_SET_V1,
102 .wde_imr_clr = B_AX_WDE_IMR_CLR_V1,
103 .wde_imr_set = B_AX_WDE_IMR_SET_V1,
104 .ple_imr_clr = B_AX_PLE_IMR_CLR_V1,
105 .ple_imr_set = B_AX_PLE_IMR_SET_V1,
106 .host_disp_imr_clr = B_AX_HOST_DISP_IMR_CLR_V1,
107 .host_disp_imr_set = B_AX_HOST_DISP_IMR_SET_V1,
108 .cpu_disp_imr_clr = B_AX_CPU_DISP_IMR_CLR_V1,
109 .cpu_disp_imr_set = B_AX_CPU_DISP_IMR_SET_V1,
110 .other_disp_imr_clr = B_AX_OTHER_DISP_IMR_CLR_V1,
111 .other_disp_imr_set = B_AX_OTHER_DISP_IMR_SET_V1,
112 .bbrpt_com_err_imr_reg = R_AX_BBRPT_COM_ERR_IMR,
113 .bbrpt_chinfo_err_imr_reg = R_AX_BBRPT_CHINFO_ERR_IMR,
114 .bbrpt_err_imr_set = R_AX_BBRPT_CHINFO_IMR_SET_V1,
115 .bbrpt_dfs_err_imr_reg = R_AX_BBRPT_DFS_ERR_IMR,
116 .ptcl_imr_clr = B_AX_PTCL_IMR_CLR_V1,
117 .ptcl_imr_set = B_AX_PTCL_IMR_SET_V1,
118 .cdma_imr_0_reg = R_AX_RX_ERR_FLAG_IMR,
119 .cdma_imr_0_clr = B_AX_RX_ERR_IMR_CLR_V1,
120 .cdma_imr_0_set = B_AX_RX_ERR_IMR_SET_V1,
121 .cdma_imr_1_reg = R_AX_TX_ERR_FLAG_IMR,
122 .cdma_imr_1_clr = B_AX_TX_ERR_IMR_CLR_V1,
123 .cdma_imr_1_set = B_AX_TX_ERR_IMR_SET_V1,
124 .phy_intf_imr_reg = R_AX_PHYINFO_ERR_IMR_V1,
125 .phy_intf_imr_clr = B_AX_PHYINFO_IMR_CLR_V1,
126 .phy_intf_imr_set = B_AX_PHYINFO_IMR_SET_V1,
127 .rmac_imr_reg = R_AX_RX_ERR_IMR,
128 .rmac_imr_clr = B_AX_RMAC_IMR_CLR_V1,
129 .rmac_imr_set = B_AX_RMAC_IMR_SET_V1,
130 .tmac_imr_reg = R_AX_TRXPTCL_ERROR_INDICA_MASK,
131 .tmac_imr_clr = B_AX_TMAC_IMR_CLR_V1,
132 .tmac_imr_set = B_AX_TMAC_IMR_SET_V1,
133 };
134
135 static const struct rtw89_rrsr_cfgs rtw8852c_rrsr_cfgs = {
136 .ref_rate = {R_AX_TRXPTCL_RRSR_CTL_0, B_AX_WMAC_RESP_REF_RATE_SEL, 0},
137 .rsc = {R_AX_PTCL_RRSR1, B_AX_RSC_MASK, 2},
138 };
139
140 static const struct rtw89_dig_regs rtw8852c_dig_regs = {
141 .seg0_pd_reg = R_SEG0R_PD,
142 .pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
143 .pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK,
144 .p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
145 .p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
146 .p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
147 .p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
148 .p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
149 .p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
150 .p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V1,
151 B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
152 .p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V1,
153 B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
154 .p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V1,
155 B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
156 .p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V1,
157 B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
158 };
159
160 static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg);
161 static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
162 enum rtw89_mac_idx mac_idx);
163
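/* Power-on sequence: release the suspend/low-power controls, poll for
 * system power ready, bring the on-chip MAC up via B_AX_APFN_ONMAC,
 * program the XTAL SI interface, then enable the DMAC and CMAC blocks.
 */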
164 static int rtw8852c_pwr_on_func(struct rtw89_dev *rtwdev)
165 {
166 u32 val32;
167 u32 ret;
168
169 val32 = rtw89_read32_mask(rtwdev, R_AX_SYS_STATUS1, B_AX_PAD_HCI_SEL_V2_MASK);
170 if (val32 == MAC_AX_HCI_SEL_PCIE_USB)
171 rtw89_write32_set(rtwdev, R_AX_LDO_AON_CTRL0, B_AX_PD_REGU_L);
172
173 rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_AFSM_WLSUS_EN |
174 B_AX_AFSM_PCIE_SUS_EN);
175 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_DIS_WLBT_PDNSUSEN_SOPC);
176 rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_DIS_WLBT_LPSEN_LOPC);
177 rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APDM_HPDN);
178 rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);
179
180 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_AX_RDY_SYSPWR,
181 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
182 if (ret)
183 return ret;
184
185 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
186 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFN_ONMAC);
187
188 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFN_ONMAC),
189 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
190 if (ret)
191 return ret;
192
193 rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
194 rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
195 rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
196 rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
197
198 rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
199 rtw89_write32_clr(rtwdev, R_AX_SYS_SDIO_CTRL, B_AX_PCIE_CALIB_EN_V1);
200
201 rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_CMAC1_FEN);
202 rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_R_SYM_ISO_CMAC12PP);
203 rtw89_write32_clr(rtwdev, R_AX_AFE_CTRL1, B_AX_R_SYM_WLCMAC1_P4_PC_EN |
204 B_AX_R_SYM_WLCMAC1_P3_PC_EN |
205 B_AX_R_SYM_WLCMAC1_P2_PC_EN |
206 B_AX_R_SYM_WLCMAC1_P1_PC_EN |
207 B_AX_R_SYM_WLCMAC1_PC_EN);
208 rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
209
210 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
211 XTAL_SI_GND_SHDN_WL, XTAL_SI_GND_SHDN_WL);
212 if (ret)
213 return ret;
214
215 rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
216
217 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
218 XTAL_SI_SHDN_WL, XTAL_SI_SHDN_WL);
219 if (ret)
220 return ret;
221 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_WEI,
222 XTAL_SI_OFF_WEI);
223 if (ret)
224 return ret;
225 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_EI,
226 XTAL_SI_OFF_EI);
227 if (ret)
228 return ret;
229 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_RFC2RF);
230 if (ret)
231 return ret;
232 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_WEI,
233 XTAL_SI_PON_WEI);
234 if (ret)
235 return ret;
236 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_EI,
237 XTAL_SI_PON_EI);
238 if (ret)
239 return ret;
240 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SRAM2RFC);
241 if (ret)
242 return ret;
243 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_2, 0, XTAL_SI_LDO_LPS);
244 if (ret)
245 return ret;
246 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_4, 0, XTAL_SI_LPS_CAP);
247 if (ret)
248 return ret;
249
250 rtw89_write32_set(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
251 rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_ISO_EB2CORE);
252 rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B15);
253
254 fsleep(1000);
255
256 rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B14);
257 rtw89_write32_clr(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
258 rtw89_write32_set(rtwdev, R_AX_GPIO0_15_EECS_EESK_LED1_PULL_LOW_EN,
259 B_AX_EECS_PULL_LOW_EN | B_AX_EESK_PULL_LOW_EN |
260 B_AX_LED1_PULL_LOW_EN);
261
262 rtw89_write32_set(rtwdev, R_AX_DMAC_FUNC_EN,
263 B_AX_MAC_FUNC_EN | B_AX_DMAC_FUNC_EN | B_AX_MPDU_PROC_EN |
264 B_AX_WD_RLS_EN | B_AX_DLE_WDE_EN | B_AX_TXPKT_CTRL_EN |
265 B_AX_STA_SCH_EN | B_AX_DLE_PLE_EN | B_AX_PKT_BUF_EN |
266 B_AX_DMAC_TBL_EN | B_AX_PKT_IN_EN | B_AX_DLE_CPUIO_EN |
267 B_AX_DISPATCHER_EN | B_AX_BBRPT_EN | B_AX_MAC_SEC_EN |
268 B_AX_MAC_UN_EN | B_AX_H_AXIDMA_EN);
269
270 rtw89_write32_set(rtwdev, R_AX_CMAC_FUNC_EN,
271 B_AX_CMAC_EN | B_AX_CMAC_TXEN | B_AX_CMAC_RXEN |
272 B_AX_FORCE_CMACREG_GCKEN | B_AX_PHYINTF_EN |
273 B_AX_CMAC_DMA_EN | B_AX_PTCLTOP_EN | B_AX_SCHEDULER_EN |
274 B_AX_TMAC_EN | B_AX_RMAC_EN);
275
276 return 0;
277 }
278
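/* Power-off sequence: undo the XTAL SI power-on settings, power down the
 * BB/RFC domains, request MAC off via B_AX_APFM_OFFMAC and poll for
 * completion, then allow XTAL off and enter the low-power state.
 */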
279 static int rtw8852c_pwr_off_func(struct rtw89_dev *rtwdev)
280 {
281 u32 val32;
282 u32 ret;
283
284 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_RFC2RF,
285 XTAL_SI_RFC2RF);
286 if (ret)
287 return ret;
288 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_EI);
289 if (ret)
290 return ret;
291 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_WEI);
292 if (ret)
293 return ret;
294 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0, XTAL_SI_RF00);
295 if (ret)
296 return ret;
297 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0, XTAL_SI_RF10);
298 if (ret)
299 return ret;
300 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_SRAM2RFC,
301 XTAL_SI_SRAM2RFC);
302 if (ret)
303 return ret;
304 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_EI);
305 if (ret)
306 return ret;
307 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_WEI);
308 if (ret)
309 return ret;
310
311 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
312 rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN, B_AX_FEN_BB_GLB_RSTN | B_AX_FEN_BBRSTB);
313 rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND,
314 B_AX_R_SYM_FEN_WLBBGLB_1 | B_AX_R_SYM_FEN_WLBBFUN_1);
315 rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
316
317 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SHDN_WL);
318 if (ret)
319 return ret;
320
321 rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
322
323 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_GND_SHDN_WL);
324 if (ret)
325 return ret;
326
327 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_OFFMAC);
328
329 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFM_OFFMAC),
330 1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
331 if (ret)
332 return ret;
333
334 rtw89_write32(rtwdev, R_AX_WLLPS_CTRL, 0x0001A0B0);
335 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_XTAL_OFF_A_DIE);
336 rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);
337
338 return 0;
339 }
340
341 static void rtw8852c_e_efuse_parsing(struct rtw89_efuse *efuse,
342 struct rtw8852c_efuse *map)
343 {
344 ether_addr_copy(efuse->addr, map->e.mac_addr);
345 efuse->rfe_type = map->rfe_type;
346 efuse->xtal_cap = map->xtal_k;
347 }
348
349 static void rtw8852c_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
350 struct rtw8852c_efuse *map)
351 {
352 struct rtw89_tssi_info *tssi = &rtwdev->tssi;
353 struct rtw8852c_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
354 u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
355 u8 i, j;
356
357 tssi->thermal[RF_PATH_A] = map->path_a_therm;
358 tssi->thermal[RF_PATH_B] = map->path_b_therm;
359
360 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
361 memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
362 sizeof(ofst[i]->cck_tssi));
363
364 for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
365 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
366 "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
367 i, j, tssi->tssi_cck[i][j]);
368
369 memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
370 sizeof(ofst[i]->bw40_tssi));
371 memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
372 ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
373 memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
374 sizeof(tssi->tssi_6g_mcs[i]));
375
376 for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
377 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
378 "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
379 i, j, tssi->tssi_mcs[i][j]);
380 }
381 }
382
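/* Each efuse gain byte packs two signed 4-bit offsets; the caller below
 * stores the high nibble for path A and the low nibble for path B. A
 * value of 0xff means the field was never programmed.
 */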
383 static bool _decode_efuse_gain(u8 data, s8 *high, s8 *low)
384 {
385 if (high)
386 *high = sign_extend32(FIELD_GET(GENMASK(7, 4), data), 3);
387 if (low)
388 *low = sign_extend32(FIELD_GET(GENMASK(3, 0), data), 3);
389
390 return data != 0xff;
391 }
392
393 static void rtw8852c_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
394 struct rtw8852c_efuse *map)
395 {
396 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
397 bool valid = false;
398
399 valid |= _decode_efuse_gain(map->rx_gain_2g_cck,
400 &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK],
401 &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK]);
402 valid |= _decode_efuse_gain(map->rx_gain_2g_ofdm,
403 &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM],
404 &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM]);
405 valid |= _decode_efuse_gain(map->rx_gain_5g_low,
406 &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW],
407 &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW]);
408 valid |= _decode_efuse_gain(map->rx_gain_5g_mid,
409 &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID],
410 &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID]);
411 valid |= _decode_efuse_gain(map->rx_gain_5g_high,
412 &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH],
413 &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH]);
414
415 gain->offset_valid = valid;
416 }
417
418 static int rtw8852c_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map)
419 {
420 struct rtw89_efuse *efuse = &rtwdev->efuse;
421 struct rtw8852c_efuse *map;
422
423 map = (struct rtw8852c_efuse *)log_map;
424
425 efuse->country_code[0] = map->country_code[0];
426 efuse->country_code[1] = map->country_code[1];
427 rtw8852c_efuse_parsing_tssi(rtwdev, map);
428 rtw8852c_efuse_parsing_gain_offset(rtwdev, map);
429
430 switch (rtwdev->hci.type) {
431 case RTW89_HCI_TYPE_PCIE:
432 rtw8852c_e_efuse_parsing(efuse, map);
433 break;
434 default:
435 return -ENOTSUPP;
436 }
437
438 rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
439
440 return 0;
441 }
442
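/* TSSI trim is stored one byte per channel group at fixed phycap offsets,
 * with addresses decreasing as the group index grows. If every byte reads
 * 0xff the block was not programmed and all trim entries are cleared.
 */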
443 static void rtw8852c_phycap_parsing_tssi(struct rtw89_dev *rtwdev, u8 *phycap_map)
444 {
445 struct rtw89_tssi_info *tssi = &rtwdev->tssi;
446 static const u32 tssi_trim_addr[RF_PATH_NUM_8852C] = {0x5D6, 0x5AB};
447 static const u32 tssi_trim_addr_6g[RF_PATH_NUM_8852C] = {0x5CE, 0x5A3};
448 u32 addr = rtwdev->chip->phycap_addr;
449 bool pg = false;
450 u32 ofst;
451 u8 i, j;
452
453 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
454 for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++) {
455 /* addrs are in decreasing order */
456 ofst = tssi_trim_addr[i] - addr - j;
457 tssi->tssi_trim[i][j] = phycap_map[ofst];
458
459 if (phycap_map[ofst] != 0xff)
460 pg = true;
461 }
462
463 for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM_6G; j++) {
464 /* addrs are in decreasing order */
465 ofst = tssi_trim_addr_6g[i] - addr - j;
466 tssi->tssi_trim_6g[i][j] = phycap_map[ofst];
467
468 if (phycap_map[ofst] != 0xff)
469 pg = true;
470 }
471 }
472
473 if (!pg) {
474 memset(tssi->tssi_trim, 0, sizeof(tssi->tssi_trim));
475 memset(tssi->tssi_trim_6g, 0, sizeof(tssi->tssi_trim_6g));
476 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
477 "[TSSI][TRIM] no PG, set all trim info to 0\n");
478 }
479
480 for (i = 0; i < RF_PATH_NUM_8852C; i++)
481 for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++)
482 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
483 "[TSSI] path=%d idx=%d trim=0x%x addr=0x%x\n",
484 i, j, tssi->tssi_trim[i][j],
485 tssi_trim_addr[i] - j);
486 }
487
488 static void rtw8852c_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
489 u8 *phycap_map)
490 {
491 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
492 static const u32 thm_trim_addr[RF_PATH_NUM_8852C] = {0x5DF, 0x5DC};
493 u32 addr = rtwdev->chip->phycap_addr;
494 u8 i;
495
496 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
497 info->thermal_trim[i] = phycap_map[thm_trim_addr[i] - addr];
498
499 rtw89_debug(rtwdev, RTW89_DBG_RFK,
500 "[THERMAL][TRIM] path=%d thermal_trim=0x%x\n",
501 i, info->thermal_trim[i]);
502
503 if (info->thermal_trim[i] != 0xff)
504 info->pg_thermal_trim = true;
505 }
506 }
507
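/* Apply the programmed thermal trim to RF register RR_TM2; __thm_setting()
 * repacks the raw 5-bit efuse value into the RR_TM2_OFF field layout.
 */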
508 static void rtw8852c_thermal_trim(struct rtw89_dev *rtwdev)
509 {
510 #define __thm_setting(raw) \
511 ({ \
512 u8 __v = (raw); \
513 ((__v & 0x1) << 3) | ((__v & 0x1f) >> 1); \
514 })
515 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
516 u8 i, val;
517
518 if (!info->pg_thermal_trim) {
519 rtw89_debug(rtwdev, RTW89_DBG_RFK,
520 "[THERMAL][TRIM] no PG, do nothing\n");
521
522 return;
523 }
524
525 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
526 val = __thm_setting(info->thermal_trim[i]);
527 rtw89_write_rf(rtwdev, i, RR_TM2, RR_TM2_OFF, val);
528
529 rtw89_debug(rtwdev, RTW89_DBG_RFK,
530 "[THERMAL][TRIM] path=%d thermal_setting=0x%x\n",
531 i, val);
532 }
533 #undef __thm_setting
534 }
535
536 static void rtw8852c_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
537 u8 *phycap_map)
538 {
539 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
540 static const u32 pabias_trim_addr[RF_PATH_NUM_8852C] = {0x5DE, 0x5DB};
541 u32 addr = rtwdev->chip->phycap_addr;
542 u8 i;
543
544 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
545 info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
546
547 rtw89_debug(rtwdev, RTW89_DBG_RFK,
548 "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
549 i, info->pa_bias_trim[i]);
550
551 if (info->pa_bias_trim[i] != 0xff)
552 info->pg_pa_bias_trim = true;
553 }
554 }
555
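/* The PA bias trim byte holds the 2G code in its low nibble and the 5G
 * code in its high nibble; both are written to RR_BIASA per RF path.
 */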
556 static void rtw8852c_pa_bias_trim(struct rtw89_dev *rtwdev)
557 {
558 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
559 u8 pabias_2g, pabias_5g;
560 u8 i;
561
562 if (!info->pg_pa_bias_trim) {
563 rtw89_debug(rtwdev, RTW89_DBG_RFK,
564 "[PA_BIAS][TRIM] no PG, do nothing\n");
565
566 return;
567 }
568
569 for (i = 0; i < RF_PATH_NUM_8852C; i++) {
570 pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
571 pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
572
573 rtw89_debug(rtwdev, RTW89_DBG_RFK,
574 "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
575 i, pabias_2g, pabias_5g);
576
577 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG, pabias_2g);
578 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA, pabias_5g);
579 }
580 }
581
582 static int rtw8852c_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
583 {
584 rtw8852c_phycap_parsing_tssi(rtwdev, phycap_map);
585 rtw8852c_phycap_parsing_thermal_trim(rtwdev, phycap_map);
586 rtw8852c_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
587
588 return 0;
589 }
590
591 static void rtw8852c_power_trim(struct rtw89_dev *rtwdev)
592 {
593 rtw8852c_thermal_trim(rtwdev);
594 rtw8852c_pa_bias_trim(rtwdev);
595 }
596
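/* Program the MAC-side channel settings: RF bandwidth mode, TX sub-carrier
 * (primary channel position within the full bandwidth), and the rate-check
 * bits (band mode for 2 GHz, CCK check plus OFDM6 RTS limit for 5/6 GHz).
 */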
597 static void rtw8852c_set_channel_mac(struct rtw89_dev *rtwdev,
598 const struct rtw89_chan *chan,
599 u8 mac_idx)
600 {
601 u32 rf_mod = rtw89_mac_reg_by_idx(R_AX_WMAC_RFMOD, mac_idx);
602 u32 sub_carr = rtw89_mac_reg_by_idx(R_AX_TX_SUB_CARRIER_VALUE,
603 mac_idx);
604 u32 chk_rate = rtw89_mac_reg_by_idx(R_AX_TXRATE_CHK, mac_idx);
605 u8 txsc20 = 0, txsc40 = 0, txsc80 = 0;
606 u8 rf_mod_val = 0, chk_rate_mask = 0;
607 u32 txsc;
608
609 switch (chan->band_width) {
610 case RTW89_CHANNEL_WIDTH_160:
611 txsc80 = rtw89_phy_get_txsc(rtwdev, chan,
612 RTW89_CHANNEL_WIDTH_80);
613 fallthrough;
614 case RTW89_CHANNEL_WIDTH_80:
615 txsc40 = rtw89_phy_get_txsc(rtwdev, chan,
616 RTW89_CHANNEL_WIDTH_40);
617 fallthrough;
618 case RTW89_CHANNEL_WIDTH_40:
619 txsc20 = rtw89_phy_get_txsc(rtwdev, chan,
620 RTW89_CHANNEL_WIDTH_20);
621 break;
622 default:
623 break;
624 }
625
626 switch (chan->band_width) {
627 case RTW89_CHANNEL_WIDTH_160:
628 rf_mod_val = AX_WMAC_RFMOD_160M;
629 txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
630 FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40) |
631 FIELD_PREP(B_AX_TXSC_80M_MASK, txsc80);
632 break;
633 case RTW89_CHANNEL_WIDTH_80:
634 rf_mod_val = AX_WMAC_RFMOD_80M;
635 txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
636 FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40);
637 break;
638 case RTW89_CHANNEL_WIDTH_40:
639 rf_mod_val = AX_WMAC_RFMOD_40M;
640 txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20);
641 break;
642 case RTW89_CHANNEL_WIDTH_20:
643 default:
644 rf_mod_val = AX_WMAC_RFMOD_20M;
645 txsc = 0;
646 break;
647 }
648 rtw89_write8_mask(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK, rf_mod_val);
649 rtw89_write32(rtwdev, sub_carr, txsc);
650
651 switch (chan->band_type) {
652 case RTW89_BAND_2G:
653 chk_rate_mask = B_AX_BAND_MODE;
654 break;
655 case RTW89_BAND_5G:
656 case RTW89_BAND_6G:
657 chk_rate_mask = B_AX_CHECK_CCK_EN | B_AX_RTS_LIMIT_IN_OFDM6;
658 break;
659 default:
660 rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
661 return;
662 }
663 rtw89_write8_clr(rtwdev, chk_rate, B_AX_BAND_MODE | B_AX_CHECK_CCK_EN |
664 B_AX_RTS_LIMIT_IN_OFDM6);
665 rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
666 }
667
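/* Barker/CCK FC0 inverse thresholds, one entry per 2.4 GHz channel, used by
 * rtw8852c_ctrl_sco_cck() for sample clock offset compensation.
 */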
668 static const u32 rtw8852c_sco_barker_threshold[14] = {
669 0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
670 0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
671 };
672
673 static const u32 rtw8852c_sco_cck_threshold[14] = {
674 0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
675 0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
676 };
677
678 static int rtw8852c_ctrl_sco_cck(struct rtw89_dev *rtwdev, u8 central_ch,
679 u8 primary_ch, enum rtw89_bandwidth bw)
680 {
681 u8 ch_element;
682
683 if (bw == RTW89_CHANNEL_WIDTH_20) {
684 ch_element = central_ch - 1;
685 } else if (bw == RTW89_CHANNEL_WIDTH_40) {
686 if (primary_ch == 1)
687 ch_element = central_ch - 1 + 2;
688 else
689 ch_element = central_ch - 1 - 2;
690 } else {
691 rtw89_warn(rtwdev, "Invalid BW:%d for CCK\n", bw);
692 return -EINVAL;
693 }
694 rtw89_phy_write32_mask(rtwdev, R_BK_FC0_INV_V1, B_BK_FC0_INV_MSK_V1,
695 rtw8852c_sco_barker_threshold[ch_element]);
696 rtw89_phy_write32_mask(rtwdev, R_CCK_FC0_INV_V1, B_CCK_FC0_INV_MSK_V1,
697 rtw8852c_sco_cck_threshold[ch_element]);
698
699 return 0;
700 }
701
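/* The BB gain tables below give, per LNA/TIA gain index, the register and
 * byte mask holding that gain value for each RF path, with separate
 * registers for 2 GHz (gain_g) and 5/6 GHz (gain_a).
 */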
702 struct rtw8852c_bb_gain {
703 u32 gain_g[BB_PATH_NUM_8852C];
704 u32 gain_a[BB_PATH_NUM_8852C];
705 u32 gain_mask;
706 };
707
708 static const struct rtw8852c_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
709 { .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
710 .gain_mask = 0x00ff0000 },
711 { .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
712 .gain_mask = 0xff000000 },
713 { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
714 .gain_mask = 0x000000ff },
715 { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
716 .gain_mask = 0x0000ff00 },
717 { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
718 .gain_mask = 0x00ff0000 },
719 { .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
720 .gain_mask = 0xff000000 },
721 { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
722 .gain_mask = 0x000000ff },
723 };
724
725 static const struct rtw8852c_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
726 { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
727 .gain_mask = 0x00ff0000 },
728 { .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
729 .gain_mask = 0xff000000 },
730 };
731
732 struct rtw8852c_bb_gain_bypass {
733 u32 gain_g[BB_PATH_NUM_8852C];
734 u32 gain_a[BB_PATH_NUM_8852C];
735 u32 gain_mask_g;
736 u32 gain_mask_a;
737 };
738
739 static
740 const struct rtw8852c_bb_gain_bypass bb_gain_bypass_lna[LNA_GAIN_NUM] = {
741 { .gain_g = {0x4BB8, 0x4C7C}, .gain_a = {0x4BB4, 0x4C78},
742 .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
743 { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
744 .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
745 { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
746 .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
747 { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
748 .gain_mask_g = 0xff0000, .gain_mask_a = 0xff000000},
749 { .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB8, 0x4C7C},
750 .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
751 { .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
752 .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
753 { .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
754 .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
755 };
756
757 struct rtw8852c_bb_gain_op1db {
758 struct {
759 u32 lna[BB_PATH_NUM_8852C];
760 u32 tia_lna[BB_PATH_NUM_8852C];
761 u32 mask;
762 } reg[LNA_GAIN_NUM];
763 u32 reg_tia0_lna6[BB_PATH_NUM_8852C];
764 u32 mask_tia0_lna6;
765 };
766
767 static const struct rtw8852c_bb_gain_op1db bb_gain_op1db_a = {
768 .reg = {
769 { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
770 .mask = 0xff},
771 { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
772 .mask = 0xff00},
773 { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
774 .mask = 0xff0000},
775 { .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
776 .mask = 0xff000000},
777 { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
778 .mask = 0xff},
779 { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
780 .mask = 0xff00},
781 { .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
782 .mask = 0xff0000},
783 },
784 .reg_tia0_lna6 = {0x4674, 0x4758},
785 .mask_tia0_lna6 = 0xff000000,
786 };
787
788 static enum rtw89_phy_bb_gain_band
789 rtw8852c_mapping_gain_band(enum rtw89_subband subband)
790 {
791 switch (subband) {
792 default:
793 case RTW89_CH_2G:
794 return RTW89_BB_GAIN_BAND_2G;
795 case RTW89_CH_5G_BAND_1:
796 return RTW89_BB_GAIN_BAND_5G_L;
797 case RTW89_CH_5G_BAND_3:
798 return RTW89_BB_GAIN_BAND_5G_M;
799 case RTW89_CH_5G_BAND_4:
800 return RTW89_BB_GAIN_BAND_5G_H;
801 case RTW89_CH_6G_BAND_IDX0:
802 case RTW89_CH_6G_BAND_IDX1:
803 return RTW89_BB_GAIN_BAND_6G_L;
804 case RTW89_CH_6G_BAND_IDX2:
805 case RTW89_CH_6G_BAND_IDX3:
806 return RTW89_BB_GAIN_BAND_6G_M;
807 case RTW89_CH_6G_BAND_IDX4:
808 case RTW89_CH_6G_BAND_IDX5:
809 return RTW89_BB_GAIN_BAND_6G_H;
810 case RTW89_CH_6G_BAND_IDX6:
811 case RTW89_CH_6G_BAND_IDX7:
812 return RTW89_BB_GAIN_BAND_6G_UH;
813 }
814 }
815
816 static void rtw8852c_set_gain_error(struct rtw89_dev *rtwdev,
817 enum rtw89_subband subband,
818 enum rtw89_rf_path path)
819 {
820 const struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain;
821 u8 gain_band = rtw8852c_mapping_gain_band(subband);
822 s32 val;
823 u32 reg;
824 u32 mask;
825 int i;
826
827 for (i = 0; i < LNA_GAIN_NUM; i++) {
828 if (subband == RTW89_CH_2G)
829 reg = bb_gain_lna[i].gain_g[path];
830 else
831 reg = bb_gain_lna[i].gain_a[path];
832
833 mask = bb_gain_lna[i].gain_mask;
834 val = gain->lna_gain[gain_band][path][i];
835 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
836
837 if (subband == RTW89_CH_2G) {
838 reg = bb_gain_bypass_lna[i].gain_g[path];
839 mask = bb_gain_bypass_lna[i].gain_mask_g;
840 } else {
841 reg = bb_gain_bypass_lna[i].gain_a[path];
842 mask = bb_gain_bypass_lna[i].gain_mask_a;
843 }
844
845 val = gain->lna_gain_bypass[gain_band][path][i];
846 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
847
848 if (subband != RTW89_CH_2G) {
849 reg = bb_gain_op1db_a.reg[i].lna[path];
850 mask = bb_gain_op1db_a.reg[i].mask;
851 val = gain->lna_op1db[gain_band][path][i];
852 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
853
854 reg = bb_gain_op1db_a.reg[i].tia_lna[path];
855 mask = bb_gain_op1db_a.reg[i].mask;
856 val = gain->tia_lna_op1db[gain_band][path][i];
857 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
858 }
859 }
860
861 if (subband != RTW89_CH_2G) {
862 reg = bb_gain_op1db_a.reg_tia0_lna6[path];
863 mask = bb_gain_op1db_a.mask_tia0_lna6;
864 val = gain->tia_lna_op1db[gain_band][path][7];
865 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
866 }
867
868 for (i = 0; i < TIA_GAIN_NUM; i++) {
869 if (subband == RTW89_CH_2G)
870 reg = bb_gain_tia[i].gain_g[path];
871 else
872 reg = bb_gain_tia[i].gain_a[path];
873
874 mask = bb_gain_tia[i].gain_mask;
875 val = gain->tia_gain[gain_band][path][i];
876 rtw89_phy_write32_mask(rtwdev, reg, mask, val);
877 }
878 }
879
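/* Compact channel index encoding: the upper nibble selects a base channel
 * from the table below and the lower nibble holds (channel - base) / 2,
 * except for 2 GHz where it holds the channel number itself.
 */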
880 static
881 const u8 rtw8852c_ch_base_table[16] = {1, 0xff,
882 36, 100, 132, 149, 0xff,
883 1, 33, 65, 97, 129, 161, 193, 225, 0xff};
884 #define RTW8852C_CH_BASE_IDX_2G 0
885 #define RTW8852C_CH_BASE_IDX_5G_FIRST 2
886 #define RTW8852C_CH_BASE_IDX_5G_LAST 5
887 #define RTW8852C_CH_BASE_IDX_6G_FIRST 7
888 #define RTW8852C_CH_BASE_IDX_6G_LAST 14
889
890 #define RTW8852C_CH_BASE_IDX_MASK GENMASK(7, 4)
891 #define RTW8852C_CH_OFFSET_MASK GENMASK(3, 0)
892
893 static u8 rtw8852c_encode_chan_idx(struct rtw89_dev *rtwdev, u8 central_ch, u8 band)
894 {
895 u8 chan_idx;
896 u8 last, first;
897 u8 idx;
898
899 switch (band) {
900 case RTW89_BAND_2G:
901 chan_idx = FIELD_PREP(RTW8852C_CH_BASE_IDX_MASK, RTW8852C_CH_BASE_IDX_2G) |
902 FIELD_PREP(RTW8852C_CH_OFFSET_MASK, central_ch);
903 return chan_idx;
904 case RTW89_BAND_5G:
905 first = RTW8852C_CH_BASE_IDX_5G_FIRST;
906 last = RTW8852C_CH_BASE_IDX_5G_LAST;
907 break;
908 case RTW89_BAND_6G:
909 first = RTW8852C_CH_BASE_IDX_6G_FIRST;
910 last = RTW8852C_CH_BASE_IDX_6G_LAST;
911 break;
912 default:
913 rtw89_warn(rtwdev, "Unsupported band %d\n", band);
914 return 0;
915 }
916
917 for (idx = last; idx >= first; idx--)
918 if (central_ch >= rtw8852c_ch_base_table[idx])
919 break;
920
921 if (idx < first) {
922 rtw89_warn(rtwdev, "Unknown band %d channel %d\n", band, central_ch);
923 return 0;
924 }
925
926 chan_idx = FIELD_PREP(RTW8852C_CH_BASE_IDX_MASK, idx) |
927 FIELD_PREP(RTW8852C_CH_OFFSET_MASK,
928 (central_ch - rtw8852c_ch_base_table[idx]) >> 1);
929 return chan_idx;
930 }
931
932 static void rtw8852c_decode_chan_idx(struct rtw89_dev *rtwdev, u8 chan_idx,
933 u8 *ch, enum nl80211_band *band)
934 {
935 u8 idx, offset;
936
937 idx = FIELD_GET(RTW8852C_CH_BASE_IDX_MASK, chan_idx);
938 offset = FIELD_GET(RTW8852C_CH_OFFSET_MASK, chan_idx);
939
940 if (idx == RTW8852C_CH_BASE_IDX_2G) {
941 *band = NL80211_BAND_2GHZ;
942 *ch = offset;
943 return;
944 }
945
946 *band = idx <= RTW8852C_CH_BASE_IDX_5G_LAST ? NL80211_BAND_5GHZ : NL80211_BAND_6GHZ;
947 *ch = rtw8852c_ch_base_table[idx] + (offset << 1);
948 }
949
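/* Apply the efuse RX gain offsets for the current band: a CCK RPL offset
 * for 2 GHz plus an OFDM RSSI/RPL offset per RF path, combined with the
 * per-PHY offset base (offset in q0, base in q4 fixed point).
 */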
950 static void rtw8852c_set_gain_offset(struct rtw89_dev *rtwdev,
951 const struct rtw89_chan *chan,
952 enum rtw89_phy_idx phy_idx,
953 enum rtw89_rf_path path)
954 {
955 static const u32 rssi_ofst_addr[2] = {R_PATH0_G_TIA0_LNA6_OP1DB_V1,
956 R_PATH1_G_TIA0_LNA6_OP1DB_V1};
957 static const u32 rpl_mask[2] = {B_RPL_PATHA_MASK, B_RPL_PATHB_MASK};
958 static const u32 rpl_tb_mask[2] = {B_RSSI_M_PATHA_MASK, B_RSSI_M_PATHB_MASK};
959 struct rtw89_phy_efuse_gain *efuse_gain = &rtwdev->efuse_gain;
960 enum rtw89_gain_offset gain_band;
961 s32 offset_q0, offset_base_q4;
962 s32 tmp = 0;
963
964 if (!efuse_gain->offset_valid)
965 return;
966
967 if (rtwdev->dbcc_en && path == RF_PATH_B)
968 phy_idx = RTW89_PHY_1;
969
970 if (chan->band_type == RTW89_BAND_2G) {
971 offset_q0 = efuse_gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK];
972 offset_base_q4 = efuse_gain->offset_base[phy_idx];
973
974 tmp = clamp_t(s32, (-offset_q0 << 3) + (offset_base_q4 >> 1),
975 S8_MIN >> 1, S8_MAX >> 1);
976 rtw89_phy_write32_mask(rtwdev, R_RPL_OFST, B_RPL_OFST_MASK, tmp & 0x7f);
977 }
978
979 switch (chan->subband_type) {
980 default:
981 case RTW89_CH_2G:
982 gain_band = RTW89_GAIN_OFFSET_2G_OFDM;
983 break;
984 case RTW89_CH_5G_BAND_1:
985 gain_band = RTW89_GAIN_OFFSET_5G_LOW;
986 break;
987 case RTW89_CH_5G_BAND_3:
988 gain_band = RTW89_GAIN_OFFSET_5G_MID;
989 break;
990 case RTW89_CH_5G_BAND_4:
991 gain_band = RTW89_GAIN_OFFSET_5G_HIGH;
992 break;
993 }
994
995 offset_q0 = -efuse_gain->offset[path][gain_band];
996 offset_base_q4 = efuse_gain->offset_base[phy_idx];
997
998 tmp = (offset_q0 << 2) + (offset_base_q4 >> 2);
999 tmp = clamp_t(s32, -tmp, S8_MIN, S8_MAX);
1000 rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], B_PATH0_R_G_OFST_MASK, tmp & 0xff);
1001
1002 tmp = clamp_t(s32, offset_q0 << 4, S8_MIN, S8_MAX);
1003 rtw89_phy_write32_idx(rtwdev, R_RPL_PATHAB, rpl_mask[path], tmp & 0xff, phy_idx);
1004 rtw89_phy_write32_idx(rtwdev, R_RSSI_M_PATHAB, rpl_tb_mask[path], tmp & 0xff, phy_idx);
1005 }
1006
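/* Per-PHY channel setup: select the 2G/5G band path, load gain error and
 * offset tables, write the center frequency and its 2^18/fc inverse for SCO
 * compensation, and load the CCK filter coefficients (a dedicated set for
 * channel 14).
 */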
1007 static void rtw8852c_ctrl_ch(struct rtw89_dev *rtwdev,
1008 const struct rtw89_chan *chan,
1009 enum rtw89_phy_idx phy_idx)
1010 {
1011 u8 sco;
1012 u16 central_freq = chan->freq;
1013 u8 central_ch = chan->channel;
1014 u8 band = chan->band_type;
1015 u8 subband = chan->subband_type;
1016 bool is_2g = band == RTW89_BAND_2G;
1017 u8 chan_idx;
1018
1019 if (!central_freq) {
1020 rtw89_warn(rtwdev, "Invalid central_freq\n");
1021 return;
1022 }
1023
1024 if (phy_idx == RTW89_PHY_0) {
1025 /* Path A */
1026 rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_A);
1027 rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_A);
1028
1029 if (is_2g)
1030 rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
1031 B_PATH0_BAND_SEL_MSK_V1, 1,
1032 phy_idx);
1033 else
1034 rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
1035 B_PATH0_BAND_SEL_MSK_V1, 0,
1036 phy_idx);
1037 /* Path B */
1038 if (!rtwdev->dbcc_en) {
1039 rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
1040 rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
1041
1042 if (is_2g)
1043 rtw89_phy_write32_idx(rtwdev,
1044 R_PATH1_BAND_SEL_V1,
1045 B_PATH1_BAND_SEL_MSK_V1,
1046 1, phy_idx);
1047 else
1048 rtw89_phy_write32_idx(rtwdev,
1049 R_PATH1_BAND_SEL_V1,
1050 B_PATH1_BAND_SEL_MSK_V1,
1051 0, phy_idx);
1052 rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
1053 } else {
1054 if (is_2g)
1055 rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
1056 else
1057 rtw89_phy_write32_set(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
1058 }
1059 /* SCO compensate FC setting */
1060 rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
1061 central_freq, phy_idx);
1062 /* sco = round((2^18) / fc0) */
1063 sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
1064 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
1065 phy_idx);
1066 } else {
1067 /* Path B */
1068 rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
1069 rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
1070
1071 if (is_2g)
1072 rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
1073 B_PATH1_BAND_SEL_MSK_V1,
1074 1, phy_idx);
1075 else
1076 rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
1077 B_PATH1_BAND_SEL_MSK_V1,
1078 0, phy_idx);
1079 /* SCO compensate FC setting */
1080 rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
1081 central_freq, phy_idx);
1082 /* sco = round((2^18) / fc0) */
1083 sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
1084 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
1085 phy_idx);
1086 }
1087 /* CCK parameters */
1088 if (band == RTW89_BAND_2G) {
1089 if (central_ch == 14) {
1090 rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
1091 B_PCOEFF01_MSK_V1, 0x3b13ff);
1092 rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
1093 B_PCOEFF23_MSK_V1, 0x1c42de);
1094 rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
1095 B_PCOEFF45_MSK_V1, 0xfdb0ad);
1096 rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
1097 B_PCOEFF67_MSK_V1, 0xf60f6e);
1098 rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
1099 B_PCOEFF89_MSK_V1, 0xfd8f92);
1100 rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
1101 B_PCOEFFAB_MSK_V1, 0x2d011);
1102 rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
1103 B_PCOEFFCD_MSK_V1, 0x1c02c);
1104 rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
1105 B_PCOEFFEF_MSK_V1, 0xfff00a);
1106 } else {
1107 rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
1108 B_PCOEFF01_MSK_V1, 0x3d23ff);
1109 rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
1110 B_PCOEFF23_MSK_V1, 0x29b354);
1111 rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
1112 B_PCOEFF45_MSK_V1, 0xfc1c8);
1113 rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
1114 B_PCOEFF67_MSK_V1, 0xfdb053);
1115 rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
1116 B_PCOEFF89_MSK_V1, 0xf86f9a);
1117 rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
1118 B_PCOEFFAB_MSK_V1, 0xfaef92);
1119 rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
1120 B_PCOEFFCD_MSK_V1, 0xfe5fcc);
1121 rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
1122 B_PCOEFFEF_MSK_V1, 0xffdff5);
1123 }
1124 }
1125
1126 chan_idx = rtw8852c_encode_chan_idx(rtwdev, chan->primary_channel, band);
1127 rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
1128 }
1129
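/* Per-path ADC and wideband-ADC sample-rate selection for the requested
 * bandwidth; 5 MHz, 10 MHz, and 20 MHz-and-above each use distinct settings.
 */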
1130 static void rtw8852c_bw_setting(struct rtw89_dev *rtwdev, u8 bw, u8 path)
1131 {
1132 static const u32 adc_sel[2] = {0xC0EC, 0xC1EC};
1133 static const u32 wbadc_sel[2] = {0xC0E4, 0xC1E4};
1134
1135 switch (bw) {
1136 case RTW89_CHANNEL_WIDTH_5:
1137 rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x1);
1138 rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x0);
1139 break;
1140 case RTW89_CHANNEL_WIDTH_10:
1141 rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x2);
1142 rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x1);
1143 break;
1144 case RTW89_CHANNEL_WIDTH_20:
1145 case RTW89_CHANNEL_WIDTH_40:
1146 case RTW89_CHANNEL_WIDTH_80:
1147 case RTW89_CHANNEL_WIDTH_160:
1148 rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x0);
1149 rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x2);
1150 break;
1151 default:
1152 rtw89_warn(rtwdev, "Fail to set ADC\n");
1153 }
1154 }
1155
1156 static void rtw8852c_edcca_per20_bitmap_sifs(struct rtw89_dev *rtwdev, u8 bw,
1157 enum rtw89_phy_idx phy_idx)
1158 {
1159 if (bw == RTW89_CHANNEL_WIDTH_20) {
1160 rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0xff, phy_idx);
1161 rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
1162 } else {
1163 rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0, phy_idx);
1164 rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
1165 }
1166 }
1167
1168 static void
1169 rtw8852c_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_ch, u8 bw,
1170 enum rtw89_phy_idx phy_idx)
1171 {
1172 u8 mod_sbw = 0;
1173
1174 switch (bw) {
1175 case RTW89_CHANNEL_WIDTH_5:
1176 case RTW89_CHANNEL_WIDTH_10:
1177 case RTW89_CHANNEL_WIDTH_20:
1178 if (bw == RTW89_CHANNEL_WIDTH_5)
1179 mod_sbw = 0x1;
1180 else if (bw == RTW89_CHANNEL_WIDTH_10)
1181 mod_sbw = 0x2;
1182 else if (bw == RTW89_CHANNEL_WIDTH_20)
1183 mod_sbw = 0x0;
1184 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x0,
1185 phy_idx);
1186 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW,
1187 mod_sbw, phy_idx);
1188 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH, 0x0,
1189 phy_idx);
1190 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
1191 B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
1192 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
1193 B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
1194 rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
1195 B_PATH0_BW_SEL_MSK_V1, 0xf);
1196 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
1197 B_PATH1_BW_SEL_MSK_V1, 0xf);
1198 break;
1199 case RTW89_CHANNEL_WIDTH_40:
1200 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x1,
1201 phy_idx);
1202 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
1203 phy_idx);
1204 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
1205 pri_ch,
1206 phy_idx);
1207 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
1208 B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
1209 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
1210 B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
1211 rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
1212 B_PATH0_BW_SEL_MSK_V1, 0xf);
1213 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
1214 B_PATH1_BW_SEL_MSK_V1, 0xf);
1215 break;
1216 case RTW89_CHANNEL_WIDTH_80:
1217 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x2,
1218 phy_idx);
1219 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
1220 phy_idx);
1221 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
1222 pri_ch,
1223 phy_idx);
1224 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
1225 B_PATH0_SAMPL_DLY_T_MSK_V1, 0x2);
1226 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
1227 B_PATH1_SAMPL_DLY_T_MSK_V1, 0x2);
1228 rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
1229 B_PATH0_BW_SEL_MSK_V1, 0xd);
1230 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
1231 B_PATH1_BW_SEL_MSK_V1, 0xd);
1232 break;
1233 case RTW89_CHANNEL_WIDTH_160:
1234 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x3,
1235 phy_idx);
1236 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
1237 phy_idx);
1238 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
1239 pri_ch,
1240 phy_idx);
1241 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
1242 B_PATH0_SAMPL_DLY_T_MSK_V1, 0x1);
1243 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
1244 B_PATH1_SAMPL_DLY_T_MSK_V1, 0x1);
1245 rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
1246 B_PATH0_BW_SEL_MSK_V1, 0xb);
1247 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
1248 B_PATH1_BW_SEL_MSK_V1, 0xb);
1249 break;
1250 default:
1251 rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri ch:%d)\n", bw,
1252 pri_ch);
1253 }
1254
1255 if (bw == RTW89_CHANNEL_WIDTH_40) {
1256 rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
1257 B_RX_BW40_2XFFT_EN_MSK_V1, 0x1, phy_idx);
1258 rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 1, phy_idx);
1259 } else {
1260 rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
1261 B_RX_BW40_2XFFT_EN_MSK_V1, 0x0, phy_idx);
1262 rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 0, phy_idx);
1263 }
1264
1265 if (phy_idx == RTW89_PHY_0) {
1266 rtw8852c_bw_setting(rtwdev, bw, RF_PATH_A);
1267 if (!rtwdev->dbcc_en)
1268 rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
1269 } else {
1270 rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
1271 }
1272
1273 rtw8852c_edcca_per20_bitmap_sifs(rtwdev, bw, phy_idx);
1274 }
1275
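/* Return the known spur frequency (in MHz) affecting the given channel, or
 * 0 if the channel has no spur to suppress.
 */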
1276 static u32 rtw8852c_spur_freq(struct rtw89_dev *rtwdev,
1277 const struct rtw89_chan *chan)
1278 {
1279 u8 center_chan = chan->channel;
1280 u8 bw = chan->band_width;
1281
1282 switch (chan->band_type) {
1283 case RTW89_BAND_2G:
1284 if (bw == RTW89_CHANNEL_WIDTH_20) {
1285 if (center_chan >= 5 && center_chan <= 8)
1286 return 2440;
1287 if (center_chan == 13)
1288 return 2480;
1289 } else if (bw == RTW89_CHANNEL_WIDTH_40) {
1290 if (center_chan >= 3 && center_chan <= 10)
1291 return 2440;
1292 }
1293 break;
1294 case RTW89_BAND_5G:
1295 if (center_chan == 151 || center_chan == 153 ||
1296 center_chan == 155 || center_chan == 163)
1297 return 5760;
1298 break;
1299 case RTW89_BAND_6G:
1300 if (center_chan == 195 || center_chan == 197 ||
1301 center_chan == 199 || center_chan == 207)
1302 return 6920;
1303 break;
1304 default:
1305 break;
1306 }
1307
1308 return 0;
1309 }
1310
1311 #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
1312 #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
1313 #define MAX_TONE_NUM 2048
1314
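/* Point the CSI notch at the spur: the spur offset from the channel center
 * is converted to a tone index in 78.125 kHz steps and wrapped to
 * MAX_TONE_NUM.
 */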
1315 static void rtw8852c_set_csi_tone_idx(struct rtw89_dev *rtwdev,
1316 const struct rtw89_chan *chan,
1317 enum rtw89_phy_idx phy_idx)
1318 {
1319 u32 spur_freq;
1320 s32 freq_diff, csi_idx, csi_tone_idx;
1321
1322 spur_freq = rtw8852c_spur_freq(rtwdev, chan);
1323 if (spur_freq == 0) {
1324 rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 0, phy_idx);
1325 return;
1326 }
1327
1328 freq_diff = (spur_freq - chan->freq) * 1000000;
1329 csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
1330 s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
1331
1332 rtw89_phy_write32_idx(rtwdev, R_SEG0CSI, B_SEG0CSI_IDX, csi_tone_idx, phy_idx);
1333 rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 1, phy_idx);
1334 }
1335
1336 static const struct rtw89_nbi_reg_def rtw8852c_nbi_reg_def[] = {
1337 [RF_PATH_A] = {
1338 .notch1_idx = {0x4C14, 0xFF},
1339 .notch1_frac_idx = {0x4C14, 0xC00},
1340 .notch1_en = {0x4C14, 0x1000},
1341 .notch2_idx = {0x4C20, 0xFF},
1342 .notch2_frac_idx = {0x4C20, 0xC00},
1343 .notch2_en = {0x4C20, 0x1000},
1344 },
1345 [RF_PATH_B] = {
1346 .notch1_idx = {0x4CD8, 0xFF},
1347 .notch1_frac_idx = {0x4CD8, 0xC00},
1348 .notch1_en = {0x4CD8, 0x1000},
1349 .notch2_idx = {0x4CE4, 0xFF},
1350 .notch2_frac_idx = {0x4CE4, 0xC00},
1351 .notch2_en = {0x4CE4, 0x1000},
1352 },
1353 };
1354
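/* Program the narrow-band interference (NBI) notch for the spur: the
 * integer part of the offset selects the tone index (312.5 kHz steps) and
 * the remainder the fractional index (78.125 kHz steps). On 160 MHz the
 * spur may fall in the half covered by the second notch filter.
 */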
1355 static void rtw8852c_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
1356 const struct rtw89_chan *chan,
1357 enum rtw89_rf_path path)
1358 {
1359 const struct rtw89_nbi_reg_def *nbi = &rtw8852c_nbi_reg_def[path];
1360 u32 spur_freq, fc;
1361 s32 freq_diff;
1362 s32 nbi_idx, nbi_tone_idx;
1363 s32 nbi_frac_idx, nbi_frac_tone_idx;
1364 bool notch2_chk = false;
1365
1366 spur_freq = rtw8852c_spur_freq(rtwdev, chan);
1367 if (spur_freq == 0) {
1368 rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
1369 rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
1370 return;
1371 }
1372
1373 fc = chan->freq;
1374 if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
1375 fc = (spur_freq > fc) ? fc + 40 : fc - 40;
1376 if ((fc > spur_freq &&
1377 chan->channel < chan->primary_channel) ||
1378 (fc < spur_freq &&
1379 chan->channel > chan->primary_channel))
1380 notch2_chk = true;
1381 }
1382
1383 freq_diff = (spur_freq - fc) * 1000000;
1384 nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5, &nbi_frac_idx);
1385
1386 if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
1387 s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
1388 } else {
1389 u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
1390 128 : 256;
1391
1392 s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
1393 }
1394 nbi_frac_tone_idx = s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);
1395
1396 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
1397 rtw89_phy_write32_mask(rtwdev, nbi->notch2_idx.addr,
1398 nbi->notch2_idx.mask, nbi_tone_idx);
1399 rtw89_phy_write32_mask(rtwdev, nbi->notch2_frac_idx.addr,
1400 nbi->notch2_frac_idx.mask, nbi_frac_tone_idx);
1401 rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
1402 rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 1);
1403 rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
1404 } else {
1405 rtw89_phy_write32_mask(rtwdev, nbi->notch1_idx.addr,
1406 nbi->notch1_idx.mask, nbi_tone_idx);
1407 rtw89_phy_write32_mask(rtwdev, nbi->notch1_frac_idx.addr,
1408 nbi->notch1_frac_idx.mask, nbi_frac_tone_idx);
1409 rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
1410 rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 1);
1411 rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
1412 }
1413 }
1414
1415 static void rtw8852c_spur_notch(struct rtw89_dev *rtwdev, u32 val,
1416 enum rtw89_phy_idx phy_idx)
1417 {
1418 u32 notch;
1419 u32 notch2;
1420
1421 if (phy_idx == RTW89_PHY_0) {
1422 notch = R_PATH0_NOTCH;
1423 notch2 = R_PATH0_NOTCH2;
1424 } else {
1425 notch = R_PATH1_NOTCH;
1426 notch2 = R_PATH1_NOTCH2;
1427 }
1428
1429 rtw89_phy_write32_mask(rtwdev, notch,
1430 B_PATH0_NOTCH_VAL | B_PATH0_NOTCH_EN, val);
1431 rtw89_phy_write32_set(rtwdev, notch, B_PATH0_NOTCH_EN);
1432 rtw89_phy_write32_mask(rtwdev, notch2,
1433 B_PATH0_NOTCH2_VAL | B_PATH0_NOTCH2_EN, val);
1434 rtw89_phy_write32_set(rtwdev, notch2, B_PATH0_NOTCH2_EN);
1435 }
1436
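/* Spur elimination entry point: set the CSI tone, then either use fixed
 * notch values for the affected 160 MHz primary-channel cases or fall back
 * to per-path NBI tone programming.
 */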
1437 static void rtw8852c_spur_elimination(struct rtw89_dev *rtwdev,
1438 const struct rtw89_chan *chan,
1439 u8 pri_ch_idx,
1440 enum rtw89_phy_idx phy_idx)
1441 {
1442 rtw8852c_set_csi_tone_idx(rtwdev, chan, phy_idx);
1443
1444 if (phy_idx == RTW89_PHY_0) {
1445 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
1446 (pri_ch_idx == RTW89_SC_20_LOWER ||
1447 pri_ch_idx == RTW89_SC_20_UP3X)) {
1448 rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_0);
1449 if (!rtwdev->dbcc_en)
1450 rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
1451 } else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
1452 (pri_ch_idx == RTW89_SC_20_UPPER ||
1453 pri_ch_idx == RTW89_SC_20_LOW3X)) {
1454 rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_0);
1455 if (!rtwdev->dbcc_en)
1456 rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
1457 } else {
1458 rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A);
1459 if (!rtwdev->dbcc_en)
1460 rtw8852c_set_nbi_tone_idx(rtwdev, chan,
1461 RF_PATH_B);
1462 }
1463 } else {
1464 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
1465 (pri_ch_idx == RTW89_SC_20_LOWER ||
1466 pri_ch_idx == RTW89_SC_20_UP3X)) {
1467 rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
1468 } else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
1469 (pri_ch_idx == RTW89_SC_20_UPPER ||
1470 pri_ch_idx == RTW89_SC_20_LOW3X)) {
1471 rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
1472 } else {
1473 rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B);
1474 }
1475 }
1476
1477 if (pri_ch_idx == RTW89_SC_20_UP3X || pri_ch_idx == RTW89_SC_20_LOW3X)
1478 rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 0, phy_idx);
1479 else
1480 rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 1, phy_idx);
1481 }
1482
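/* 5 MHz spectrum mask: always enabled for 40 MHz (low side when the primary
 * channel is 2), and enabled for 80 MHz only when the primary channel is 3
 * or 4 (low side for 4).
 */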
1483 static void rtw8852c_5m_mask(struct rtw89_dev *rtwdev,
1484 const struct rtw89_chan *chan,
1485 enum rtw89_phy_idx phy_idx)
1486 {
1487 u8 pri_ch = chan->primary_channel;
1488 bool mask_5m_low;
1489 bool mask_5m_en;
1490
1491 switch (chan->band_width) {
1492 case RTW89_CHANNEL_WIDTH_40:
1493 mask_5m_en = true;
1494 mask_5m_low = pri_ch == 2;
1495 break;
1496 case RTW89_CHANNEL_WIDTH_80:
1497 mask_5m_en = ((pri_ch == 3) || (pri_ch == 4));
1498 mask_5m_low = pri_ch == 4;
1499 break;
1500 default:
1501 mask_5m_en = false;
1502 mask_5m_low = false;
1503 break;
1504 }
1505
1506 if (!mask_5m_en) {
1507 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x0);
1508 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x0);
1509 rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT,
1510 B_ASSIGN_SBD_OPT_EN, 0x0, phy_idx);
1511 } else {
1512 if (mask_5m_low) {
1513 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
1514 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
1515 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x0);
1516 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x1);
1517 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
1518 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
1519 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x0);
1520 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x1);
1521 } else {
1522 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
1523 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
1524 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x1);
1525 rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x0);
1526 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
1527 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
1528 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x1);
1529 rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x0);
1530 }
1531 rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT, B_ASSIGN_SBD_OPT_EN, 0x1, phy_idx);
1532 }
1533 }
1534
1535 static void rtw8852c_bb_reset_all(struct rtw89_dev *rtwdev,
1536 enum rtw89_phy_idx phy_idx)
1537 {
1538 /*HW SI reset*/
1539 rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
1540 0x7);
1541 rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
1542 0x7);
1543
1544 udelay(1);
1545
1546 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
1547 phy_idx);
1548 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
1549 phy_idx);
1550 /*HW SI reset*/
1551 rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
1552 0x0);
1553 rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
1554 0x0);
1555
1556 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
1557 phy_idx);
1558 }
1559
1560 static void rtw8852c_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
1561 enum rtw89_phy_idx phy_idx, bool en)
1562 {
1563 if (en) {
1564 rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
1565 B_S0_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
1566 rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
1567 B_S1_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
1568 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
1569 phy_idx);
1570 if (band == RTW89_BAND_2G)
1571 rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x0);
1572 rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0);
1573 } else {
1574 rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x1);
1575 rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1);
1576 rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
1577 B_S0_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
1578 rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
1579 B_S1_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
1580 fsleep(1);
1581 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
1582 phy_idx);
1583 }
1584 }
1585
1586 static void rtw8852c_bb_reset(struct rtw89_dev *rtwdev,
1587 enum rtw89_phy_idx phy_idx)
1588 {
1589 rtw8852c_bb_reset_all(rtwdev, phy_idx);
1590 }
1591
1592 static
1593 void rtw8852c_bb_gpio_trsw(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
1594 u8 tx_path_en, u8 trsw_tx,
1595 u8 trsw_rx, u8 trsw, u8 trsw_b)
1596 {
1597 static const u32 path_cr_bases[] = {0x5868, 0x7868};
1598 u32 mask_ofst = 16;
1599 u32 cr;
1600 u32 val;
1601
1602 if (path >= ARRAY_SIZE(path_cr_bases))
1603 return;
1604
1605 cr = path_cr_bases[path];
1606
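	/* Each (tx_path_en, trsw_tx, trsw_rx) combination owns a two-bit
	 * {TRSW_A, TRSW_B} field in the per-path control register, packed
	 * upward from bit 16.
	 */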
1607 mask_ofst += (tx_path_en * 4 + trsw_tx * 2 + trsw_rx) * 2;
1608 val = FIELD_PREP(B_P0_TRSW_A, trsw) | FIELD_PREP(B_P0_TRSW_B, trsw_b);
1609
1610 rtw89_phy_write32_mask(rtwdev, cr, (B_P0_TRSW_A | B_P0_TRSW_B) << mask_ofst, val);
1611 }
1612
1613 enum rtw8852c_rfe_src {
1614 PAPE_RFM,
1615 TRSW_RFM,
1616 LNAON_RFM,
1617 };
1618
1619 static
1620 void rtw8852c_bb_gpio_rfm(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
1621 enum rtw8852c_rfe_src src, u8 dis_tx_gnt_wl,
1622 u8 active_tx_opt, u8 act_bt_en, u8 rfm_output_val)
1623 {
1624 static const u32 path_cr_bases[] = {0x5894, 0x7894};
1625 static const u32 masks[] = {0, 8, 16};
1626 u32 mask, mask_ofst;
1627 u32 cr;
1628 u32 val;
1629
1630 if (src >= ARRAY_SIZE(masks) || path >= ARRAY_SIZE(path_cr_bases))
1631 return;
1632
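	/* Each RFE source (PAPE/TRSW/LNAON) owns an 8-bit field in the
	 * per-path RFM register; pack the control flags and output value
	 * into that byte.
	 */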
1633 mask_ofst = masks[src];
1634 cr = path_cr_bases[path];
1635
1636 val = FIELD_PREP(B_P0_RFM_DIS_WL, dis_tx_gnt_wl) |
1637 FIELD_PREP(B_P0_RFM_TX_OPT, active_tx_opt) |
1638 FIELD_PREP(B_P0_RFM_BT_EN, act_bt_en) |
1639 FIELD_PREP(B_P0_RFM_OUT, rfm_output_val);
1640 mask = 0xff << mask_ofst;
1641
1642 rtw89_phy_write32_mask(rtwdev, cr, mask, val);
1643 }
1644
1645 static void rtw8852c_bb_gpio_init(struct rtw89_dev *rtwdev)
1646 {
1647 static const u32 cr_bases[] = {0x5800, 0x7800};
1648 u32 addr;
1649 u8 i;
1650
1651 for (i = 0; i < ARRAY_SIZE(cr_bases); i++) {
1652 addr = cr_bases[i];
1653 rtw89_phy_write32_set(rtwdev, (addr | 0x68), B_P0_TRSW_A);
1654 rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_X);
1655 rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_SO_A2);
1656 rtw89_phy_write32(rtwdev, (addr | 0x80), 0x77777777);
1657 rtw89_phy_write32(rtwdev, (addr | 0x84), 0x77777777);
1658 }
1659
1660 rtw89_phy_write32(rtwdev, R_RFE_E_A2, 0xffffffff);
1661 rtw89_phy_write32(rtwdev, R_RFE_O_SEL_A2, 0);
1662 rtw89_phy_write32(rtwdev, R_RFE_SEL0_A2, 0);
1663 rtw89_phy_write32(rtwdev, R_RFE_SEL32_A2, 0);
1664
1665 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 0, 0, 1);
1666 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 1, 1, 0);
1667 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 0, 1, 0);
1668 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 1, 1, 0);
1669 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 0, 0, 1);
1670 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 1, 1, 0);
1671 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 0, 1, 0);
1672 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 1, 1, 0);
1673
1674 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 0, 0, 1);
1675 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 1, 1, 0);
1676 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 0, 1, 0);
1677 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 1, 1, 0);
1678 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 0, 0, 1);
1679 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 1, 1, 0);
1680 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 0, 1, 0);
1681 rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 1, 1, 0);
1682
1683 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, PAPE_RFM, 0, 0, 0, 0x0);
1684 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, TRSW_RFM, 0, 0, 0, 0x4);
1685 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, LNAON_RFM, 0, 0, 0, 0x8);
1686
1687 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, PAPE_RFM, 0, 0, 0, 0x0);
1688 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, TRSW_RFM, 0, 0, 0, 0x4);
1689 rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, LNAON_RFM, 0, 0, 0, 0x8);
1690 }
1691
1692 static void rtw8852c_bb_macid_ctrl_init(struct rtw89_dev *rtwdev,
1693 enum rtw89_phy_idx phy_idx)
1694 {
1695 u32 addr;
1696
1697 for (addr = R_AX_PWR_MACID_LMT_TABLE0;
1698 addr <= R_AX_PWR_MACID_LMT_TABLE127; addr += 4)
1699 rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
1700 }
1701
1702 static void rtw8852c_bb_sethw(struct rtw89_dev *rtwdev)
1703 {
1704 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1705
1706 rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT,
1707 B_DBCC_80P80_SEL_EVM_RPT_EN);
1708 rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT2,
1709 B_DBCC_80P80_SEL_EVM_RPT2_EN);
1710
1711 rtw8852c_bb_macid_ctrl_init(rtwdev, RTW89_PHY_0);
1712 rtw8852c_bb_gpio_init(rtwdev);
1713
1714 /* read these registers after loading BB parameters */
1715 gain->offset_base[RTW89_PHY_0] =
1716 rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP, B_RPL_BIAS_COMP_MASK);
1717 gain->offset_base[RTW89_PHY_1] =
1718 rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP1, B_RPL_BIAS_COMP1_MASK);
1719 }
1720
1721 static void rtw8852c_set_channel_bb(struct rtw89_dev *rtwdev,
1722 const struct rtw89_chan *chan,
1723 enum rtw89_phy_idx phy_idx)
1724 {
1725 struct rtw89_hal *hal = &rtwdev->hal;
1726 bool cck_en = chan->band_type == RTW89_BAND_2G;
1727 u8 pri_ch_idx = chan->pri_ch_idx;
1728 u32 mask, reg;
1729 u32 ru_alloc_msk[2] = {B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY0,
1730 B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY1};
1731 u8 ntx_path;
1732
1733 if (chan->band_type == RTW89_BAND_2G)
1734 rtw8852c_ctrl_sco_cck(rtwdev, chan->channel,
1735 chan->primary_channel,
1736 chan->band_width);
1737
1738 rtw8852c_ctrl_ch(rtwdev, chan, phy_idx);
1739 rtw8852c_ctrl_bw(rtwdev, pri_ch_idx, chan->band_width, phy_idx);
1740 if (cck_en) {
1741 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1);
1742 rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0);
1743 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
1744 B_PD_ARBITER_OFF, 0x0, phy_idx);
1745 } else {
1746 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0);
1747 rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 1);
1748 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
1749 B_PD_ARBITER_OFF, 0x1, phy_idx);
1750 }
1751
1752 rtw8852c_spur_elimination(rtwdev, chan, pri_ch_idx, phy_idx);
1753 rtw8852c_ctrl_btg(rtwdev, chan->band_type == RTW89_BAND_2G);
1754 rtw8852c_5m_mask(rtwdev, chan, phy_idx);
1755
1756 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
1757 rtwdev->hal.cv != CHIP_CAV) {
1758 rtw89_phy_write32_idx(rtwdev, R_P80_AT_HIGH_FREQ,
1759 B_P80_AT_HIGH_FREQ, 0x0, phy_idx);
1760 reg = rtw89_mac_reg_by_idx(R_P80_AT_HIGH_FREQ_BB_WRP,
1761 phy_idx);
1762 if (chan->primary_channel > chan->channel) {
1763 rtw89_phy_write32_mask(rtwdev,
1764 R_P80_AT_HIGH_FREQ_RU_ALLOC,
1765 ru_alloc_msk[phy_idx], 1);
1766 rtw89_write32_mask(rtwdev, reg,
1767 B_P80_AT_HIGH_FREQ_BB_WRP, 1);
1768 } else {
1769 rtw89_phy_write32_mask(rtwdev,
1770 R_P80_AT_HIGH_FREQ_RU_ALLOC,
1771 ru_alloc_msk[phy_idx], 0);
1772 rtw89_write32_mask(rtwdev, reg,
1773 B_P80_AT_HIGH_FREQ_BB_WRP, 0);
1774 }
1775 }
1776
1777 if (chan->band_type == RTW89_BAND_6G &&
1778 chan->band_width == RTW89_CHANNEL_WIDTH_160)
1779 rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
1780 B_CDD_EVM_CHK_EN, 0, phy_idx);
1781 else
1782 rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
1783 B_CDD_EVM_CHK_EN, 1, phy_idx);
1784
1785 if (!rtwdev->dbcc_en) {
1786 mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
1787 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
1788 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
1789 mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
1790 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
1791 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
1792 } else {
1793 if (phy_idx == RTW89_PHY_0) {
1794 mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
1795 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
1796 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
1797 } else {
1798 mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
1799 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
1800 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
1801 }
1802 }
1803
1804 if (chan->band_type == RTW89_BAND_6G)
1805 rtw89_phy_write32_set(rtwdev, R_MUIC, B_MUIC_EN);
1806 else
1807 rtw89_phy_write32_clr(rtwdev, R_MUIC, B_MUIC_EN);
1808
1809 if (hal->antenna_tx)
1810 ntx_path = hal->antenna_tx;
1811 else
1812 ntx_path = chan->band_type == RTW89_BAND_6G ? RF_B : RF_AB;
1813
1814 rtw8852c_ctrl_tx_path_tmac(rtwdev, ntx_path, (enum rtw89_mac_idx)phy_idx);
1815
1816 rtw8852c_bb_reset_all(rtwdev, phy_idx);
1817 }
1818
1819 static void rtw8852c_set_channel(struct rtw89_dev *rtwdev,
1820 const struct rtw89_chan *chan,
1821 enum rtw89_mac_idx mac_idx,
1822 enum rtw89_phy_idx phy_idx)
1823 {
1824 rtw8852c_set_channel_mac(rtwdev, chan, mac_idx);
1825 rtw8852c_set_channel_bb(rtwdev, chan, phy_idx);
1826 rtw8852c_set_channel_rf(rtwdev, chan, phy_idx);
1827 }
1828
1829 static void rtw8852c_dfs_en(struct rtw89_dev *rtwdev, bool en)
1830 {
1831 if (en)
1832 rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 1);
1833 else
1834 rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 0);
1835 }
1836
1837 static void rtw8852c_adc_en(struct rtw89_dev *rtwdev, bool en)
1838 {
1839 if (en)
1840 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
1841 0x0);
1842 else
1843 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
1844 0xf);
1845 }
1846
1847 static void rtw8852c_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
1848 struct rtw89_channel_help_params *p,
1849 const struct rtw89_chan *chan,
1850 enum rtw89_mac_idx mac_idx,
1851 enum rtw89_phy_idx phy_idx)
1852 {
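	/* On entry, quiesce the data path before the channel switch (stop
	 * scheduler TX and PPDU status, disable DFS/TSSI/ADC, hold BB in
	 * reset); on exit, bring everything back up in roughly reverse order.
	 */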
1853 if (enter) {
1854 rtw89_chip_stop_sch_tx(rtwdev, mac_idx, &p->tx_en,
1855 RTW89_SCH_TX_SEL_ALL);
1856 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
1857 rtw8852c_dfs_en(rtwdev, false);
1858 rtw8852c_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
1859 rtw8852c_adc_en(rtwdev, false);
1860 fsleep(40);
1861 rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, false);
1862 } else {
1863 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
1864 rtw8852c_adc_en(rtwdev, true);
1865 rtw8852c_dfs_en(rtwdev, true);
1866 rtw8852c_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
1867 rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, true);
1868 rtw89_chip_resume_sch_tx(rtwdev, mac_idx, p->tx_en);
1869 }
1870 }
1871
1872 static void rtw8852c_rfk_init(struct rtw89_dev *rtwdev)
1873 {
1874 struct rtw89_mcc_info *mcc_info = &rtwdev->mcc;
1875
1876 rtwdev->is_tssi_mode[RF_PATH_A] = false;
1877 rtwdev->is_tssi_mode[RF_PATH_B] = false;
1878 memset(mcc_info, 0, sizeof(*mcc_info));
1879 rtw8852c_lck_init(rtwdev);
1880
1881 rtw8852c_rck(rtwdev);
1882 rtw8852c_dack(rtwdev);
1883 rtw8852c_rx_dck(rtwdev, RTW89_PHY_0, false);
1884 }
1885
1886 static void rtw8852c_rfk_channel(struct rtw89_dev *rtwdev)
1887 {
1888 enum rtw89_phy_idx phy_idx = RTW89_PHY_0;
1889
1890 rtw8852c_mcc_get_ch_info(rtwdev, phy_idx);
1891 rtw8852c_rx_dck(rtwdev, phy_idx, false);
1892 rtw8852c_iqk(rtwdev, phy_idx);
1893 rtw8852c_tssi(rtwdev, phy_idx);
1894 rtw8852c_dpk(rtwdev, phy_idx);
1895 rtw89_fw_h2c_rf_ntfy_mcc(rtwdev);
1896 }
1897
1898 static void rtw8852c_rfk_band_changed(struct rtw89_dev *rtwdev,
1899 enum rtw89_phy_idx phy_idx)
1900 {
1901 rtw8852c_tssi_scan(rtwdev, phy_idx);
1902 }
1903
1904 static void rtw8852c_rfk_scan(struct rtw89_dev *rtwdev, bool start)
1905 {
1906 rtw8852c_wifi_scan_notify(rtwdev, start, RTW89_PHY_0);
1907 }
1908
1909 static void rtw8852c_rfk_track(struct rtw89_dev *rtwdev)
1910 {
1911 rtw8852c_dpk_track(rtwdev);
1912 rtw8852c_lck_track(rtwdev);
1913 rtw8852c_rx_dck_track(rtwdev);
1914 }
1915
1916 static u32 rtw8852c_bb_cal_txpwr_ref(struct rtw89_dev *rtwdev,
1917 enum rtw89_phy_idx phy_idx, s16 ref)
1918 {
1919 s8 ofst_int = 0;
1920 u8 base_cw_0db = 0x27;
1921 u16 tssi_16dbm_cw = 0x12c;
1922 s16 pwr_s10_3 = 0;
1923 s16 rf_pwr_cw = 0;
1924 u16 bb_pwr_cw = 0;
1925 u32 pwr_cw = 0;
1926 u32 tssi_ofst_cw = 0;
1927
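	/* Work in the s(10,3) fixed-point power domain: combine the
	 * reference, integer offset and 0 dB base codeword, then split the
	 * result into the 3-bit BB and 6-bit RF codeword fields (the RF
	 * part is clamped to 15..63).
	 */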
1928 pwr_s10_3 = (ref << 1) + (s16)(ofst_int) + (s16)(base_cw_0db << 3);
1929 bb_pwr_cw = FIELD_GET(GENMASK(2, 0), pwr_s10_3);
1930 rf_pwr_cw = FIELD_GET(GENMASK(8, 3), pwr_s10_3);
1931 rf_pwr_cw = clamp_t(s16, rf_pwr_cw, 15, 63);
1932 pwr_cw = (rf_pwr_cw << 3) | bb_pwr_cw;
1933
1934 tssi_ofst_cw = (u32)((s16)tssi_16dbm_cw + (ref << 1) - (16 << 3));
1935 rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
1936 "[TXPWR] tssi_ofst_cw=%d rf_cw=0x%x bb_cw=0x%x\n",
1937 tssi_ofst_cw, rf_pwr_cw, bb_pwr_cw);
1938
1939 return (tssi_ofst_cw << 18) | (pwr_cw << 9) | (ref & GENMASK(8, 0));
1940 }
1941
1942 static
1943 void rtw8852c_set_txpwr_ul_tb_offset(struct rtw89_dev *rtwdev,
1944 s8 pw_ofst, enum rtw89_mac_idx mac_idx)
1945 {
1946 s8 pw_ofst_2tx;
1947 s8 val_1t;
1948 s8 val_2t;
1949 u32 reg;
1950 u8 i;
1951
1952 if (pw_ofst < -32 || pw_ofst > 31) {
1953 rtw89_warn(rtwdev, "[ULTB] Err pwr_offset=%d\n", pw_ofst);
1954 return;
1955 }
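	/* Scale into the register's fixed-point format (<<2); the 2TX
	 * offset is backed off by 3 relative to 1TX and floored at -32.
	 * The same value is replicated into all four byte lanes below.
	 */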
1956 val_1t = pw_ofst << 2;
1957 pw_ofst_2tx = max(pw_ofst - 3, -32);
1958 val_2t = pw_ofst_2tx << 2;
1959
1960 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_1tx=0x%x\n", val_1t);
1961 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_2tx=0x%x\n", val_2t);
1962
1963 for (i = 0; i < 4; i++) {
1964 /* 1TX */
1965 reg = rtw89_mac_reg_by_idx(R_AX_PWR_UL_TB_1T, mac_idx);
1966 rtw89_write32_mask(rtwdev, reg,
1967 B_AX_PWR_UL_TB_1T_V1_MASK << (8 * i),
1968 val_1t);
1969 /* 2TX */
1970 reg = rtw89_mac_reg_by_idx(R_AX_PWR_UL_TB_2T, mac_idx);
1971 rtw89_write32_mask(rtwdev, reg,
1972 B_AX_PWR_UL_TB_2T_V1_MASK << (8 * i),
1973 val_2t);
1974 }
1975 }
1976
1977 static void rtw8852c_set_txpwr_ref(struct rtw89_dev *rtwdev,
1978 enum rtw89_phy_idx phy_idx)
1979 {
1980 static const u32 addr[RF_PATH_NUM_8852C] = {0x5800, 0x7800};
1981 const u32 mask = 0x7FFFFFF;
1982 const u8 ofst_ofdm = 0x4;
1983 const u8 ofst_cck = 0x8;
1984 s16 ref_ofdm = 0;
1985 s16 ref_cck = 0;
1986 u32 val;
1987 u8 i;
1988
1989 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
1990
1991 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_CTRL,
1992 GENMASK(27, 10), 0x0);
1993
1994 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb ofdm txpwr ref\n");
1995 val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_ofdm);
1996
1997 for (i = 0; i < RF_PATH_NUM_8852C; i++)
1998 rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_ofdm, mask, val,
1999 phy_idx);
2000
2001 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb cck txpwr ref\n");
2002 val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_cck);
2003
2004 for (i = 0; i < RF_PATH_NUM_8852C; i++)
2005 rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_cck, mask, val,
2006 phy_idx);
2007 }
2008
2009 static void rtw8852c_set_txpwr_byrate(struct rtw89_dev *rtwdev,
2010 const struct rtw89_chan *chan,
2011 enum rtw89_phy_idx phy_idx)
2012 {
2013 u8 band = chan->band_type;
2014 u8 ch = chan->channel;
2015 static const u8 rs[] = {
2016 RTW89_RS_CCK,
2017 RTW89_RS_OFDM,
2018 RTW89_RS_MCS,
2019 RTW89_RS_HEDCM,
2020 };
2021 s8 tmp;
2022 u8 i, j;
2023 u32 val, shf, addr = R_AX_PWR_BY_RATE;
2024 struct rtw89_rate_desc cur;
2025
2026 rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
2027 "[TXPWR] set txpwr byrate with ch=%d\n", ch);
2028
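	/* Walk every NSS/rate-section/index combination and pack four 8-bit
	 * per-rate power entries into each 32-bit by-rate register before
	 * writing it out.
	 */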
2029 for (cur.nss = 0; cur.nss <= RTW89_NSS_2; cur.nss++) {
2030 for (i = 0; i < ARRAY_SIZE(rs); i++) {
2031 if (cur.nss >= rtw89_rs_nss_max[rs[i]])
2032 continue;
2033
2034 val = 0;
2035 cur.rs = rs[i];
2036
2037 for (j = 0; j < rtw89_rs_idx_max[rs[i]]; j++) {
2038 cur.idx = j;
2039 shf = (j % 4) * 8;
2040 tmp = rtw89_phy_read_txpwr_byrate(rtwdev, band,
2041 &cur);
2042 val |= (tmp << shf);
2043
2044 if ((j + 1) % 4)
2045 continue;
2046
2047 rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
2048 val = 0;
2049 addr += 4;
2050 }
2051 }
2052 }
2053 }
2054
2055 static void rtw8852c_set_txpwr_offset(struct rtw89_dev *rtwdev,
2056 const struct rtw89_chan *chan,
2057 enum rtw89_phy_idx phy_idx)
2058 {
2059 u8 band = chan->band_type;
2060 struct rtw89_rate_desc desc = {
2061 .nss = RTW89_NSS_1,
2062 .rs = RTW89_RS_OFFSET,
2063 };
2064 u32 val = 0;
2065 s8 v;
2066
2067 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr offset\n");
2068
2069 for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_MAX; desc.idx++) {
2070 v = rtw89_phy_read_txpwr_byrate(rtwdev, band, &desc);
2071 val |= ((v & 0xf) << (4 * desc.idx));
2072 }
2073
2074 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_OFST_CTRL,
2075 GENMASK(19, 0), val);
2076 }
2077
2078 static void rtw8852c_bb_set_tx_shape_dfir(struct rtw89_dev *rtwdev,
2079 u8 tx_shape_idx,
2080 enum rtw89_phy_idx phy_idx)
2081 {
2082 #define __DFIR_CFG_MASK 0xffffff
2083 #define __DFIR_CFG_NR 8
2084 #define __DECL_DFIR_VAR(_prefix, _name, _val...) \
2085 static const u32 _prefix ## _ ## _name[] = {_val}; \
2086 static_assert(ARRAY_SIZE(_prefix ## _ ## _name) == __DFIR_CFG_NR)
2087 #define __DECL_DFIR_PARAM(_name, _val...) __DECL_DFIR_VAR(param, _name, _val)
2088 #define __DECL_DFIR_ADDR(_name, _val...) __DECL_DFIR_VAR(addr, _name, _val)
2089
2090 __DECL_DFIR_PARAM(flat,
2091 0x003D23FF, 0x0029B354, 0x000FC1C8, 0x00FDB053,
2092 0x00F86F9A, 0x00FAEF92, 0x00FE5FCC, 0x00FFDFF5);
2093 __DECL_DFIR_PARAM(sharp,
2094 0x003D83FF, 0x002C636A, 0x0013F204, 0x00008090,
2095 0x00F87FB0, 0x00F99F83, 0x00FDBFBA, 0x00003FF5);
2096 __DECL_DFIR_PARAM(sharp_14,
2097 0x003B13FF, 0x001C42DE, 0x00FDB0AD, 0x00F60F6E,
2098 0x00FD8F92, 0x0002D011, 0x0001C02C, 0x00FFF00A);
2099 __DECL_DFIR_ADDR(filter,
2100 0x45BC, 0x45CC, 0x45D0, 0x45D4, 0x45D8, 0x45C0,
2101 0x45C4, 0x45C8);
2102 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2103 u8 ch = chan->channel;
2104 const u32 *param;
2105 int i;
2106
2107 if (ch > 14) {
2108 rtw89_warn(rtwdev,
2109 "set tx shape dfir by unknown ch: %d on 2G\n", ch);
2110 return;
2111 }
2112
2113 if (ch == 14)
2114 param = param_sharp_14;
2115 else
2116 param = tx_shape_idx == 0 ? param_flat : param_sharp;
2117
2118 for (i = 0; i < __DFIR_CFG_NR; i++) {
2119 rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
2120 "set tx shape dfir: 0x%x: 0x%x\n", addr_filter[i],
2121 param[i]);
2122 rtw89_phy_write32_idx(rtwdev, addr_filter[i], __DFIR_CFG_MASK,
2123 param[i], phy_idx);
2124 }
2125
2126 #undef __DECL_DFIR_ADDR
2127 #undef __DECL_DFIR_PARAM
2128 #undef __DECL_DFIR_VAR
2129 #undef __DFIR_CFG_NR
2130 #undef __DFIR_CFG_MASK
2131 }
2132
2133 static void rtw8852c_set_tx_shape(struct rtw89_dev *rtwdev,
2134 const struct rtw89_chan *chan,
2135 enum rtw89_phy_idx phy_idx)
2136 {
2137 u8 band = chan->band_type;
2138 u8 regd = rtw89_regd_get(rtwdev, band);
2139 u8 tx_shape_cck = rtw89_8852c_tx_shape[band][RTW89_RS_CCK][regd];
2140 u8 tx_shape_ofdm = rtw89_8852c_tx_shape[band][RTW89_RS_OFDM][regd];
2141
2142 if (band == RTW89_BAND_2G)
2143 rtw8852c_bb_set_tx_shape_dfir(rtwdev, tx_shape_cck, phy_idx);
2144
2145 rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
2146 (enum rtw89_mac_idx)phy_idx,
2147 tx_shape_ofdm);
2148 }
2149
2150 static void rtw8852c_set_txpwr_limit(struct rtw89_dev *rtwdev,
2151 const struct rtw89_chan *chan,
2152 enum rtw89_phy_idx phy_idx)
2153 {
2154 #define __MAC_TXPWR_LMT_PAGE_SIZE 40
2155 u8 ch = chan->channel;
2156 u8 bw = chan->band_width;
2157 struct rtw89_txpwr_limit lmt[NTX_NUM_8852C];
2158 u32 addr, val;
2159 const s8 *ptr;
2160 u8 i, j;
2161
2162 rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
2163 "[TXPWR] set txpwr limit with ch=%d bw=%d\n", ch, bw);
2164
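	/* Each TX path owns a 40-byte page of limit entries; copy the
	 * filled limit struct into the MAC registers four signed bytes at
	 * a time.
	 */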
2165 for (i = 0; i < NTX_NUM_8852C; i++) {
2166 rtw89_phy_fill_txpwr_limit(rtwdev, chan, &lmt[i], i);
2167
2168 for (j = 0; j < __MAC_TXPWR_LMT_PAGE_SIZE; j += 4) {
2169 addr = R_AX_PWR_LMT + j + __MAC_TXPWR_LMT_PAGE_SIZE * i;
2170 ptr = (s8 *)&lmt[i] + j;
2171
2172 val = FIELD_PREP(GENMASK(7, 0), ptr[0]) |
2173 FIELD_PREP(GENMASK(15, 8), ptr[1]) |
2174 FIELD_PREP(GENMASK(23, 16), ptr[2]) |
2175 FIELD_PREP(GENMASK(31, 24), ptr[3]);
2176
2177 rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
2178 }
2179 }
2180 #undef __MAC_TXPWR_LMT_PAGE_SIZE
2181 }
2182
2183 static void rtw8852c_set_txpwr_limit_ru(struct rtw89_dev *rtwdev,
2184 const struct rtw89_chan *chan,
2185 enum rtw89_phy_idx phy_idx)
2186 {
2187 #define __MAC_TXPWR_LMT_RU_PAGE_SIZE 24
2188 u8 ch = chan->channel;
2189 u8 bw = chan->band_width;
2190 struct rtw89_txpwr_limit_ru lmt_ru[NTX_NUM_8852C];
2191 u32 addr, val;
2192 const s8 *ptr;
2193 u8 i, j;
2194
2195 rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
2196 "[TXPWR] set txpwr limit ru with ch=%d bw=%d\n", ch, bw);
2197
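	/* Same scheme as the regular limit table, but with a 24-byte
	 * RU-limit page per TX path.
	 */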
2198 for (i = 0; i < NTX_NUM_8852C; i++) {
2199 rtw89_phy_fill_txpwr_limit_ru(rtwdev, chan, &lmt_ru[i], i);
2200
2201 for (j = 0; j < __MAC_TXPWR_LMT_RU_PAGE_SIZE; j += 4) {
2202 addr = R_AX_PWR_RU_LMT + j +
2203 __MAC_TXPWR_LMT_RU_PAGE_SIZE * i;
2204 ptr = (s8 *)&lmt_ru[i] + j;
2205
2206 val = FIELD_PREP(GENMASK(7, 0), ptr[0]) |
2207 FIELD_PREP(GENMASK(15, 8), ptr[1]) |
2208 FIELD_PREP(GENMASK(23, 16), ptr[2]) |
2209 FIELD_PREP(GENMASK(31, 24), ptr[3]);
2210
2211 rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
2212 }
2213 }
2214
2215 #undef __MAC_TXPWR_LMT_RU_PAGE_SIZE
2216 }
2217
2218 static void rtw8852c_set_txpwr(struct rtw89_dev *rtwdev,
2219 const struct rtw89_chan *chan,
2220 enum rtw89_phy_idx phy_idx)
2221 {
2222 rtw8852c_set_txpwr_byrate(rtwdev, chan, phy_idx);
2223 rtw8852c_set_txpwr_offset(rtwdev, chan, phy_idx);
2224 rtw8852c_set_tx_shape(rtwdev, chan, phy_idx);
2225 rtw8852c_set_txpwr_limit(rtwdev, chan, phy_idx);
2226 rtw8852c_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
2227 }
2228
2229 static void rtw8852c_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
2230 enum rtw89_phy_idx phy_idx)
2231 {
2232 rtw8852c_set_txpwr_ref(rtwdev, phy_idx);
2233 }
2234
2235 static void
2236 rtw8852c_init_tssi_ctrl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
2237 {
2238 static const struct rtw89_reg2_def ctrl_ini[] = {
2239 {0xD938, 0x00010100},
2240 {0xD93C, 0x0500D500},
2241 {0xD940, 0x00000500},
2242 {0xD944, 0x00000005},
2243 {0xD94C, 0x00220000},
2244 {0xD950, 0x00030000},
2245 };
2246 u32 addr;
2247 int i;
2248
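	/* Zero the whole TSSI control register block first, then load the
	 * chip init values and a flat band-edge configuration.
	 */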
2249 for (addr = R_AX_TSSI_CTRL_HEAD; addr <= R_AX_TSSI_CTRL_TAIL; addr += 4)
2250 rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
2251
2252 for (i = 0; i < ARRAY_SIZE(ctrl_ini); i++)
2253 rtw89_mac_txpwr_write32(rtwdev, phy_idx, ctrl_ini[i].addr,
2254 ctrl_ini[i].data);
2255
2256 rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
2257 (enum rtw89_mac_idx)phy_idx,
2258 RTW89_TSSI_BANDEDGE_FLAT);
2259 }
2260
2261 static int
2262 rtw8852c_init_txpwr_unit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
2263 {
2264 int ret;
2265
2266 ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL2, 0x07763333);
2267 if (ret)
2268 return ret;
2269
2270 ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_COEXT_CTRL, 0x01ebf000);
2271 if (ret)
2272 return ret;
2273
2274 ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL0, 0x0002f8ff);
2275 if (ret)
2276 return ret;
2277
2278 rtw8852c_set_txpwr_ul_tb_offset(rtwdev, 0, phy_idx == RTW89_PHY_1 ?
2279 RTW89_MAC_1 :
2280 RTW89_MAC_0);
2281 rtw8852c_init_tssi_ctrl(rtwdev, phy_idx);
2282
2283 return 0;
2284 }
2285
2286 static void rtw8852c_bb_cfg_rx_path(struct rtw89_dev *rtwdev, u8 rx_path)
2287 {
2288 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2289 u8 band = chan->band_type;
2290 u32 rst_mask0 = B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI;
2291 u32 rst_mask1 = B_P1_TXPW_RSTB_MANON | B_P1_TXPW_RSTB_TSSI;
2292
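	/* In DBCC mode, split the two RX chains between PHY0 (segment value
	 * 1, path A) and PHY1 (segment value 2, path B) and restrict the
	 * per-PHY HT/VHT/HE MCS and NSS limits; otherwise follow the
	 * requested rx_path.
	 */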
2293 if (rtwdev->dbcc_en) {
2294 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 1);
2295 rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 2,
2296 RTW89_PHY_1);
2297
2298 rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0,
2299 1);
2300 rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1,
2301 1);
2302 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0, 2,
2303 RTW89_PHY_1);
2304 rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1, 2,
2305 RTW89_PHY_1);
2306
2307 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
2308 B_RXHT_MCS_LIMIT, 0);
2309 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
2310 B_RXVHT_MCS_LIMIT, 0);
2311 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
2312 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
2313 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
2314
2315 rtw89_phy_write32_idx(rtwdev, R_RXHT_MCS_LIMIT,
2316 B_RXHT_MCS_LIMIT, 0, RTW89_PHY_1);
2317 rtw89_phy_write32_idx(rtwdev, R_RXVHT_MCS_LIMIT,
2318 B_RXVHT_MCS_LIMIT, 0, RTW89_PHY_1);
2319 rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_USER_MAX, 1,
2320 RTW89_PHY_1);
2321 rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0,
2322 RTW89_PHY_1);
2323 rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0,
2324 RTW89_PHY_1);
2325 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 1);
2326 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 3);
2327 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 1);
2328 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 3);
2329 } else {
2330 if (rx_path == RF_PATH_A) {
2331 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
2332 B_ANT_RX_SEG0, 1);
2333 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2334 B_ANT_RX_1RCCA_SEG0, 1);
2335 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2336 B_ANT_RX_1RCCA_SEG1, 1);
2337 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
2338 B_RXHT_MCS_LIMIT, 0);
2339 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
2340 B_RXVHT_MCS_LIMIT, 0);
2341 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
2342 0);
2343 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
2344 0);
2345 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
2346 rst_mask0, 1);
2347 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
2348 rst_mask0, 3);
2349 } else if (rx_path == RF_PATH_B) {
2350 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
2351 B_ANT_RX_SEG0, 2);
2352 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2353 B_ANT_RX_1RCCA_SEG0, 2);
2354 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2355 B_ANT_RX_1RCCA_SEG1, 2);
2356 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
2357 B_RXHT_MCS_LIMIT, 0);
2358 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
2359 B_RXVHT_MCS_LIMIT, 0);
2360 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
2361 0);
2362 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
2363 0);
2364 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
2365 rst_mask1, 1);
2366 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
2367 rst_mask1, 3);
2368 } else {
2369 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
2370 B_ANT_RX_SEG0, 3);
2371 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2372 B_ANT_RX_1RCCA_SEG0, 3);
2373 rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
2374 B_ANT_RX_1RCCA_SEG1, 3);
2375 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
2376 B_RXHT_MCS_LIMIT, 1);
2377 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
2378 B_RXVHT_MCS_LIMIT, 1);
2379 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
2380 1);
2381 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
2382 1);
2383 rtw8852c_ctrl_btg(rtwdev, band == RTW89_BAND_2G);
2384 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
2385 rst_mask0, 1);
2386 rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
2387 rst_mask0, 3);
2388 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
2389 rst_mask1, 1);
2390 rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
2391 rst_mask1, 3);
2392 }
2393 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
2394 }
2395 }
2396
2397 static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
2398 enum rtw89_mac_idx mac_idx)
2399 {
2400 struct rtw89_reg2_def path_com[] = {
2401 {R_AX_PATH_COM0, AX_PATH_COM0_DFVAL},
2402 {R_AX_PATH_COM1, AX_PATH_COM1_DFVAL},
2403 {R_AX_PATH_COM2, AX_PATH_COM2_DFVAL},
2404 {R_AX_PATH_COM3, AX_PATH_COM3_DFVAL},
2405 {R_AX_PATH_COM4, AX_PATH_COM4_DFVAL},
2406 {R_AX_PATH_COM5, AX_PATH_COM5_DFVAL},
2407 {R_AX_PATH_COM6, AX_PATH_COM6_DFVAL},
2408 {R_AX_PATH_COM7, AX_PATH_COM7_DFVAL},
2409 {R_AX_PATH_COM8, AX_PATH_COM8_DFVAL},
2410 {R_AX_PATH_COM9, AX_PATH_COM9_DFVAL},
2411 {R_AX_PATH_COM10, AX_PATH_COM10_DFVAL},
2412 {R_AX_PATH_COM11, AX_PATH_COM11_DFVAL},
2413 };
2414 u32 addr;
2415 u32 reg;
2416 u8 cr_size = ARRAY_SIZE(path_com);
2417 u8 i = 0;
2418
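	/* Clear the per-MACID antenna table, then patch the default
	 * path_com values for the selected TX path (A/B/AB) before
	 * programming the whole table into the MAC.
	 */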
2419 rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_0);
2420 rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_1);
2421
2422 for (addr = R_AX_MACID_ANT_TABLE;
2423 addr <= R_AX_MACID_ANT_TABLE_LAST; addr += 4) {
2424 reg = rtw89_mac_reg_by_idx(addr, mac_idx);
2425 rtw89_write32(rtwdev, reg, 0);
2426 }
2427
2428 if (tx_path == RF_A) {
2429 path_com[0].data = AX_PATH_COM0_PATHA;
2430 path_com[1].data = AX_PATH_COM1_PATHA;
2431 path_com[2].data = AX_PATH_COM2_PATHA;
2432 path_com[7].data = AX_PATH_COM7_PATHA;
2433 path_com[8].data = AX_PATH_COM8_PATHA;
2434 } else if (tx_path == RF_B) {
2435 path_com[0].data = AX_PATH_COM0_PATHB;
2436 path_com[1].data = AX_PATH_COM1_PATHB;
2437 path_com[2].data = AX_PATH_COM2_PATHB;
2438 path_com[7].data = AX_PATH_COM7_PATHB;
2439 path_com[8].data = AX_PATH_COM8_PATHB;
2440 } else if (tx_path == RF_AB) {
2441 path_com[0].data = AX_PATH_COM0_PATHAB;
2442 path_com[1].data = AX_PATH_COM1_PATHAB;
2443 path_com[2].data = AX_PATH_COM2_PATHAB;
2444 path_com[7].data = AX_PATH_COM7_PATHAB;
2445 path_com[8].data = AX_PATH_COM8_PATHAB;
2446 } else {
2447 rtw89_warn(rtwdev, "[Invalid Tx Path]Tx Path: %d\n", tx_path);
2448 return;
2449 }
2450
2451 for (i = 0; i < cr_size; i++) {
2452 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "0x%x = 0x%x\n",
2453 path_com[i].addr, path_com[i].data);
2454 reg = rtw89_mac_reg_by_idx(path_com[i].addr, mac_idx);
2455 rtw89_write32(rtwdev, reg, path_com[i].data);
2456 }
2457 }
2458
2459 static void rtw8852c_bb_ctrl_btc_preagc(struct rtw89_dev *rtwdev, bool bt_en)
2460 {
2461 if (bt_en) {
2462 rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
2463 B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x3);
2464 rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
2465 B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x3);
2466 rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
2467 B_PATH0_RXBB_MSK_V1, 0xf);
2468 rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
2469 B_PATH1_RXBB_MSK_V1, 0xf);
2470 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
2471 B_PATH0_G_LNA6_OP1DB_V1, 0x80);
2472 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
2473 B_PATH1_G_LNA6_OP1DB_V1, 0x80);
2474 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
2475 B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x80);
2476 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
2477 B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x80);
2478 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
2479 B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x80);
2480 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
2481 B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x80);
2482 rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
2483 B_PATH0_BT_BACKOFF_V1, 0x780D1E);
2484 rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
2485 B_PATH1_BT_BACKOFF_V1, 0x780D1E);
2486 rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
2487 B_P0_BACKOFF_IBADC_V1, 0x34);
2488 rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
2489 B_P1_BACKOFF_IBADC_V1, 0x34);
2490 } else {
2491 rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
2492 B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x0);
2493 rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
2494 B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x0);
2495 rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
2496 B_PATH0_RXBB_MSK_V1, 0x60);
2497 rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
2498 B_PATH1_RXBB_MSK_V1, 0x60);
2499 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
2500 B_PATH0_G_LNA6_OP1DB_V1, 0x1a);
2501 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
2502 B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
2503 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
2504 B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x2a);
2505 rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
2506 B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x2a);
2507 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
2508 B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
2509 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
2510 B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x2a);
2511 rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
2512 B_PATH0_BT_BACKOFF_V1, 0x79E99E);
2513 rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
2514 B_PATH1_BT_BACKOFF_V1, 0x79E99E);
2515 rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
2516 B_P0_BACKOFF_IBADC_V1, 0x26);
2517 rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
2518 B_P1_BACKOFF_IBADC_V1, 0x26);
2519 }
2520 }
2521
2522 static void rtw8852c_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
2523 {
2524 struct rtw89_hal *hal = &rtwdev->hal;
2525
2526 rtw8852c_bb_cfg_rx_path(rtwdev, RF_PATH_AB);
2527
2528 if (hal->rx_nss == 1) {
2529 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 0);
2530 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 0);
2531 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
2532 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
2533 } else {
2534 rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 1);
2535 rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 1);
2536 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 1);
2537 rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 1);
2538 }
2539 }
2540
2541 static u8 rtw8852c_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
2542 {
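	/* Re-trigger the RF thermal meter (1 -> 0 -> 1), give it time to
	 * settle, then read back the raw thermal code.
	 */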
2543 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2544 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
2545 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2546
2547 fsleep(200);
2548
2549 return rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL);
2550 }
2551
2552 static void rtw8852c_btc_set_rfe(struct rtw89_dev *rtwdev)
2553 {
2554 struct rtw89_btc *btc = &rtwdev->btc;
2555 struct rtw89_btc_module *module = &btc->mdinfo;
2556
2557 module->rfe_type = rtwdev->efuse.rfe_type;
2558 module->cv = rtwdev->hal.cv;
2559 module->bt_solo = 0;
2560 module->switch_type = BTC_SWITCH_INTERNAL;
2561
2562 if (module->rfe_type > 0)
2563 module->ant.num = (module->rfe_type % 2 ? 2 : 3);
2564 else
2565 module->ant.num = 2;
2566
2567 module->ant.diversity = 0;
2568 module->ant.isolation = 10;
2569
2570 if (module->ant.num == 3) {
2571 module->ant.type = BTC_ANT_DEDICATED;
2572 module->bt_pos = BTC_BT_ALONE;
2573 } else {
2574 module->ant.type = BTC_ANT_SHARED;
2575 module->bt_pos = BTC_BT_BTG;
2576 }
2577 }
2578
2579 static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg)
2580 {
2581 if (btg) {
2582 rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
2583 B_PATH0_BT_SHARE_V1, 0x1);
2584 rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
2585 B_PATH0_BTG_PATH_V1, 0x0);
2586 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
2587 B_PATH1_G_LNA6_OP1DB_V1, 0x20);
2588 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
2589 B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x30);
2590 rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
2591 B_PATH1_BT_SHARE_V1, 0x1);
2592 rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
2593 B_PATH1_BTG_PATH_V1, 0x1);
2594 rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0);
2595 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x1);
2596 rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x2);
2597 rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
2598 B_BT_DYN_DC_EST_EN_MSK, 0x1);
2599 rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
2600 0x1);
2601 } else {
2602 rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
2603 B_PATH0_BT_SHARE_V1, 0x0);
2604 rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
2605 B_PATH0_BTG_PATH_V1, 0x0);
2606 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
2607 B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
2608 rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
2609 B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
2610 rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
2611 B_PATH1_BT_SHARE_V1, 0x0);
2612 rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
2613 B_PATH1_BTG_PATH_V1, 0x0);
2614 rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xf);
2615 rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P2, 0x4);
2616 rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x0);
2617 rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x0);
2618 rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
2619 B_BT_DYN_DC_EST_EN_MSK, 0x0);
2620 rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
2621 0x0);
2622 }
2623 }
2624
2625 static
2626 void rtw8852c_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
2627 {
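	/* Program one TRX mask LUT entry: open the LUT for writing, select
	 * the group, write the mask value, then close the LUT again.
	 */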
2628 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x20000);
2629 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
2630 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
2631 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x0);
2632 }
2633
2634 static void rtw8852c_btc_init_cfg(struct rtw89_dev *rtwdev)
2635 {
2636 struct rtw89_btc *btc = &rtwdev->btc;
2637 struct rtw89_btc_module *module = &btc->mdinfo;
2638 const struct rtw89_chip_info *chip = rtwdev->chip;
2639 const struct rtw89_mac_ax_coex coex_params = {
2640 .pta_mode = RTW89_MAC_AX_COEX_RTK_MODE,
2641 .direction = RTW89_MAC_AX_COEX_INNER,
2642 };
2643
2644 /* PTA init */
2645 rtw89_mac_coex_init_v1(rtwdev, &coex_params);
2646
2647 /* set WL Tx response = Hi-Pri */
2648 chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_TX_RESP, true);
2649 chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_BEACON, true);
2650
2651 /* set rf gnt debug off */
2652 rtw89_write_rf(rtwdev, RF_PATH_A, RR_WLSEL, RFREG_MASK, 0x0);
2653 rtw89_write_rf(rtwdev, RF_PATH_B, RR_WLSEL, RFREG_MASK, 0x0);
2654
2655 /* set WL Tx thru in TRX mask table if GNT_WL = 0 && BT_S1 = ss group */
2656 if (module->ant.type == BTC_ANT_SHARED) {
2657 rtw8852c_set_trx_mask(rtwdev,
2658 RF_PATH_A, BTC_BT_SS_GROUP, 0x5ff);
2659 rtw8852c_set_trx_mask(rtwdev,
2660 RF_PATH_B, BTC_BT_SS_GROUP, 0x5ff);
2661 /* set path-A(S0) Tx/Rx no-mask if GNT_WL=0 && BT_S1=tx group */
2662 rtw8852c_set_trx_mask(rtwdev,
2663 RF_PATH_A, BTC_BT_TX_GROUP, 0x5ff);
2664 } else { /* set WL Tx stb if GNT_WL = 0 && BT_S1 = ss group for 3-ant */
2665 rtw8852c_set_trx_mask(rtwdev,
2666 RF_PATH_A, BTC_BT_SS_GROUP, 0x5df);
2667 rtw8852c_set_trx_mask(rtwdev,
2668 RF_PATH_B, BTC_BT_SS_GROUP, 0x5df);
2669 }
2670
2671 /* set PTA break table */
2672 rtw89_write32(rtwdev, R_AX_BT_BREAK_TABLE, BTC_BREAK_PARAM);
2673
2674 /* enable BT counter 0xda10[1:0] = 2b'11 */
2675 rtw89_write32_set(rtwdev,
2676 R_AX_BT_CNT_CFG, B_AX_BT_CNT_EN |
2677 B_AX_BT_CNT_RST_V1);
2678 btc->cx.wl.status.map.init_ok = true;
2679 }
2680
2681 static
2682 void rtw8852c_btc_set_wl_pri(struct rtw89_dev *rtwdev, u8 map, bool state)
2683 {
2684 u32 bitmap = 0;
2685 u32 reg = 0;
2686
2687 switch (map) {
2688 case BTC_PRI_MASK_TX_RESP:
2689 reg = R_BTC_COEX_WL_REQ;
2690 bitmap = B_BTC_RSP_ACK_HI;
2691 break;
2692 case BTC_PRI_MASK_BEACON:
2693 reg = R_BTC_COEX_WL_REQ;
2694 bitmap = B_BTC_TX_BCN_HI;
2695 break;
2696 default:
2697 return;
2698 }
2699
2700 if (state)
2701 rtw89_write32_set(rtwdev, reg, bitmap);
2702 else
2703 rtw89_write32_clr(rtwdev, reg, bitmap);
2704 }
2705
2706 union rtw8852c_btc_wl_txpwr_ctrl {
2707 u32 txpwr_val;
2708 struct {
2709 union {
2710 u16 ctrl_all_time;
2711 struct {
2712 s16 data:9;
2713 u16 rsvd:6;
2714 u16 flag:1;
2715 } all_time;
2716 };
2717 union {
2718 u16 ctrl_gnt_bt;
2719 struct {
2720 s16 data:9;
2721 u16 rsvd:7;
2722 } gnt_bt;
2723 };
2724 };
2725 } __packed;
2726
2727 static void
2728 rtw8852c_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
2729 {
2730 union rtw8852c_btc_wl_txpwr_ctrl arg = { .txpwr_val = txpwr_val };
2731 s32 val;
2732
2733 #define __write_ctrl(_reg, _msk, _val, _en, _cond) \
2734 do { \
2735 u32 _wrt = FIELD_PREP(_msk, _val); \
2736 BUILD_BUG_ON((_msk & _en) != 0); \
2737 if (_cond) \
2738 _wrt |= _en; \
2739 else \
2740 _wrt &= ~_en; \
2741 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, _reg, \
2742 _msk | _en, _wrt); \
2743 } while (0)
2744
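	/* A half-word of 0xffff disables the corresponding force-TX-power
	 * control; any other value is applied as a signed 9-bit power.
	 */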
2745 switch (arg.ctrl_all_time) {
2746 case 0xffff:
2747 val = 0;
2748 break;
2749 default:
2750 val = arg.all_time.data;
2751 break;
2752 }
2753
2754 __write_ctrl(R_AX_PWR_RATE_CTRL, B_AX_FORCE_PWR_BY_RATE_VALUE_MASK,
2755 val, B_AX_FORCE_PWR_BY_RATE_EN,
2756 arg.ctrl_all_time != 0xffff);
2757
2758 switch (arg.ctrl_gnt_bt) {
2759 case 0xffff:
2760 val = 0;
2761 break;
2762 default:
2763 val = arg.gnt_bt.data;
2764 break;
2765 }
2766
2767 __write_ctrl(R_AX_PWR_COEXT_CTRL, B_AX_TXAGC_BT_MASK, val,
2768 B_AX_TXAGC_BT_EN, arg.ctrl_gnt_bt != 0xffff);
2769
2770 #undef __write_ctrl
2771 }
2772
2773 static
2774 s8 rtw8852c_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
2775 {
2776 return clamp_t(s8, val, -100, 0) + 100;
2777 }
2778
2779 static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_ul[] = {
2780 {255, 0, 0, 7}, /* 0 -> original */
2781 {255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
2782 {255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2783 {255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2784 {255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2785 {255, 0, 0, 7}, /* the below ids are for non-shared-antenna free-run */
2786 {6, 1, 0, 7},
2787 {13, 1, 0, 7},
2788 {13, 1, 0, 7}
2789 };
2790
2791 static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_dl[] = {
2792 {255, 0, 0, 7}, /* 0 -> original */
2793 {255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
2794 {255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2795 {255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2796 {255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2797 {255, 0, 0, 7}, /* the below ids are for non-shared-antenna free-run */
2798 {255, 1, 0, 7},
2799 {255, 1, 0, 7},
2800 {255, 1, 0, 7}
2801 };
2802
2803 static const u8 rtw89_btc_8852c_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
2804 static const u8 rtw89_btc_8852c_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {40, 36, 31, 28};
2805
2806 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8852c_mon_reg[] = {
2807 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda00),
2808 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda04),
2809 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda24),
2810 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda30),
2811 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda34),
2812 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda38),
2813 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda44),
2814 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda48),
2815 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda4c),
2816 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd200),
2817 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd220),
2818 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
2819 };
2820
2821 static
2822 void rtw8852c_btc_bt_aci_imp(struct rtw89_dev *rtwdev)
2823 {
2824 struct rtw89_btc *btc = &rtwdev->btc;
2825 struct rtw89_btc_dm *dm = &btc->dm;
2826 struct rtw89_btc_bt_info *bt = &btc->cx.bt;
2827 struct rtw89_btc_bt_link_info *b = &bt->link_info;
2828
2829 /* fix LNA2 = level-5 for BT ACI issue at BTG */
2830 if (btc->dm.wl_btg_rx && b->profile_cnt.now != 0)
2831 dm->trx_para_level = 1;
2832 }
2833
2834 static
2835 void rtw8852c_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
2836 {
2837 /* Feature moved to firmware */
2838 }
2839
2840 static
2841 void rtw8852c_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
2842 {
2843 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2844 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2845 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x620);
2846
2847 /* set WL standby = Rx for GNT_BT_Tx = 1->0 settle issue */
2848 if (state)
2849 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
2850 RFREG_MASK, 0x179c);
2851 else
2852 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
2853 RFREG_MASK, 0x208);
2854
2855 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
2856 }
2857
2858 static void rtw8852c_set_wl_lna2(struct rtw89_dev *rtwdev, u8 level)
2859 {
2860 /* level=0 Default: TIA 1/0= (LNA2,TIAN6) = (7,1)/(5,1) = 21dB/12dB
2861 * level=1 Fix LNA2=5: TIA 1/0= (LNA2,TIAN6) = (5,0)/(5,1) = 18dB/12dB
2862 * To improve BT ACI in co-rx
2863 */
2864
2865 switch (level) {
2866 case 0: /* default */
2867 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
2868 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
2869 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
2870 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2871 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
2872 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
2873 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
2874 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
2875 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
2876 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
2877 break;
2878 case 1: /* Fix LNA2=5 */
2879 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
2880 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
2881 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
2882 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2883 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
2884 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
2885 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
2886 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
2887 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
2888 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
2889 break;
2890 }
2891 }
2892
2893 static void rtw8852c_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
2894 {
2895 switch (level) {
2896 case 0: /* original */
2897 rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
2898 rtw8852c_set_wl_lna2(rtwdev, 0);
2899 break;
2900 case 1: /* for FDD free-run */
2901 rtw8852c_bb_ctrl_btc_preagc(rtwdev, true);
2902 rtw8852c_set_wl_lna2(rtwdev, 0);
2903 break;
2904 case 2: /* for BTG Co-Rx */
2905 rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
2906 rtw8852c_set_wl_lna2(rtwdev, 1);
2907 break;
2908 }
2909 }
2910
2911 static void rtw8852c_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
2912 struct rtw89_rx_phy_ppdu *phy_ppdu,
2913 struct ieee80211_rx_status *status)
2914 {
2915 u8 chan_idx = phy_ppdu->chan_idx;
2916 enum nl80211_band band;
2917 u8 ch;
2918
2919 if (chan_idx == 0)
2920 return;
2921
2922 rtw8852c_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
2923 status->freq = ieee80211_channel_to_frequency(ch, band);
2924 status->band = band;
2925 }
2926
2927 static void rtw8852c_query_ppdu(struct rtw89_dev *rtwdev,
2928 struct rtw89_rx_phy_ppdu *phy_ppdu,
2929 struct ieee80211_rx_status *status)
2930 {
2931 u8 path;
2932 u8 *rx_power = phy_ppdu->rssi;
2933
2934 status->signal = RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B]));
2935 for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
2936 status->chains |= BIT(path);
2937 status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
2938 }
2939 if (phy_ppdu->valid)
2940 rtw8852c_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
2941 }
2942
2943 static int rtw8852c_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
2944 {
2945 int ret;
2946
2947 rtw89_write8_set(rtwdev, R_AX_SYS_FUNC_EN,
2948 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);
2949
2950 rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
2951 rtw89_write32_clr(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
2952 rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
2953
2954 rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S0_LDO_VSEL_F_MASK, 0x1);
2955 rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S1_LDO_VSEL_F_MASK, 0x1);
2956
2957 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL0, 0x7, FULL_BIT_MASK);
2958 if (ret)
2959 return ret;
2960
2961 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x6c, FULL_BIT_MASK);
2962 if (ret)
2963 return ret;
2964
2965 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xc7, FULL_BIT_MASK);
2966 if (ret)
2967 return ret;
2968
2969 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xc7, FULL_BIT_MASK);
2970 if (ret)
2971 return ret;
2972
2973 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL3, 0xd, FULL_BIT_MASK);
2974 if (ret)
2975 return ret;
2976
2977 return 0;
2978 }
2979
2980 static int rtw8852c_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
2981 {
2982 rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN,
2983 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);
2984
2985 return 0;
2986 }
2987
2988 static const struct rtw89_chip_ops rtw8852c_chip_ops = {
2989 .enable_bb_rf = rtw8852c_mac_enable_bb_rf,
2990 .disable_bb_rf = rtw8852c_mac_disable_bb_rf,
2991 .bb_reset = rtw8852c_bb_reset,
2992 .bb_sethw = rtw8852c_bb_sethw,
2993 .read_rf = rtw89_phy_read_rf_v1,
2994 .write_rf = rtw89_phy_write_rf_v1,
2995 .set_channel = rtw8852c_set_channel,
2996 .set_channel_help = rtw8852c_set_channel_help,
2997 .read_efuse = rtw8852c_read_efuse,
2998 .read_phycap = rtw8852c_read_phycap,
2999 .fem_setup = NULL,
3000 .rfk_init = rtw8852c_rfk_init,
3001 .rfk_channel = rtw8852c_rfk_channel,
3002 .rfk_band_changed = rtw8852c_rfk_band_changed,
3003 .rfk_scan = rtw8852c_rfk_scan,
3004 .rfk_track = rtw8852c_rfk_track,
3005 .power_trim = rtw8852c_power_trim,
3006 .set_txpwr = rtw8852c_set_txpwr,
3007 .set_txpwr_ctrl = rtw8852c_set_txpwr_ctrl,
3008 .init_txpwr_unit = rtw8852c_init_txpwr_unit,
3009 .get_thermal = rtw8852c_get_thermal,
3010 .ctrl_btg = rtw8852c_ctrl_btg,
3011 .query_ppdu = rtw8852c_query_ppdu,
3012 .bb_ctrl_btc_preagc = rtw8852c_bb_ctrl_btc_preagc,
3013 .cfg_txrx_path = rtw8852c_bb_cfg_txrx_path,
3014 .set_txpwr_ul_tb_offset = rtw8852c_set_txpwr_ul_tb_offset,
3015 .pwr_on_func = rtw8852c_pwr_on_func,
3016 .pwr_off_func = rtw8852c_pwr_off_func,
3017 .fill_txdesc = rtw89_core_fill_txdesc_v1,
3018 .fill_txdesc_fwcmd = rtw89_core_fill_txdesc_fwcmd_v1,
3019 .cfg_ctrl_path = rtw89_mac_cfg_ctrl_path_v1,
3020 .mac_cfg_gnt = rtw89_mac_cfg_gnt_v1,
3021 .stop_sch_tx = rtw89_mac_stop_sch_tx_v1,
3022 .resume_sch_tx = rtw89_mac_resume_sch_tx_v1,
3023 .h2c_dctl_sec_cam = rtw89_fw_h2c_dctl_sec_cam_v1,
3024
3025 .btc_set_rfe = rtw8852c_btc_set_rfe,
3026 .btc_init_cfg = rtw8852c_btc_init_cfg,
3027 .btc_set_wl_pri = rtw8852c_btc_set_wl_pri,
3028 .btc_set_wl_txpwr_ctrl = rtw8852c_btc_set_wl_txpwr_ctrl,
3029 .btc_get_bt_rssi = rtw8852c_btc_get_bt_rssi,
3030 .btc_bt_aci_imp = rtw8852c_btc_bt_aci_imp,
3031 .btc_update_bt_cnt = rtw8852c_btc_update_bt_cnt,
3032 .btc_wl_s1_standby = rtw8852c_btc_wl_s1_standby,
3033 .btc_set_wl_rx_gain = rtw8852c_btc_set_wl_rx_gain,
3034 .btc_set_policy = rtw89_btc_set_policy_v1,
3035 };
3036
3037 const struct rtw89_chip_info rtw8852c_chip_info = {
3038 .chip_id = RTL8852C,
3039 .ops = &rtw8852c_chip_ops,
3040 .fw_name = "rtw89/rtw8852c_fw.bin",
3041 .fifo_size = 458752,
3042 .dle_scc_rsvd_size = 0,
3043 .max_amsdu_limit = 8000,
3044 .dis_2g_40m_ul_ofdma = false,
3045 .rsvd_ple_ofst = 0x6f800,
3046 .hfc_param_ini = rtw8852c_hfc_param_ini_pcie,
3047 .dle_mem = rtw8852c_dle_mem_pcie,
3048 .rf_base_addr = {0xe000, 0xf000},
3049 .pwr_on_seq = NULL,
3050 .pwr_off_seq = NULL,
3051 .bb_table = &rtw89_8852c_phy_bb_table,
3052 .bb_gain_table = &rtw89_8852c_phy_bb_gain_table,
3053 .rf_table = {&rtw89_8852c_phy_radiob_table,
3054 &rtw89_8852c_phy_radioa_table,},
3055 .nctl_table = &rtw89_8852c_phy_nctl_table,
3056 .byr_table = &rtw89_8852c_byr_table,
3057 .txpwr_lmt_2g = &rtw89_8852c_txpwr_lmt_2g,
3058 .txpwr_lmt_5g = &rtw89_8852c_txpwr_lmt_5g,
3059 .txpwr_lmt_6g = &rtw89_8852c_txpwr_lmt_6g,
3060 .txpwr_lmt_ru_2g = &rtw89_8852c_txpwr_lmt_ru_2g,
3061 .txpwr_lmt_ru_5g = &rtw89_8852c_txpwr_lmt_ru_5g,
3062 .txpwr_lmt_ru_6g = &rtw89_8852c_txpwr_lmt_ru_6g,
3063 .txpwr_factor_rf = 2,
3064 .txpwr_factor_mac = 1,
3065 .dig_table = NULL,
3066 .dig_regs = &rtw8852c_dig_regs,
3067 .tssi_dbw_table = &rtw89_8852c_tssi_dbw_table,
3068 .support_chanctx_num = 1,
3069 .support_bands = BIT(NL80211_BAND_2GHZ) |
3070 BIT(NL80211_BAND_5GHZ) |
3071 BIT(NL80211_BAND_6GHZ),
3072 .support_bw160 = true,
3073 .hw_sec_hdr = true,
3074 .rf_path_num = 2,
3075 .tx_nss = 2,
3076 .rx_nss = 2,
3077 .acam_num = 128,
3078 .bcam_num = 20,
3079 .scam_num = 128,
3080 .bacam_num = 8,
3081 .bacam_dynamic_num = 8,
3082 .bacam_v1 = true,
3083 .sec_ctrl_efuse_size = 4,
3084 .physical_efuse_size = 1216,
3085 .logical_efuse_size = 2048,
3086 .limit_efuse_size = 1280,
3087 .dav_phy_efuse_size = 96,
3088 .dav_log_efuse_size = 16,
3089 .phycap_addr = 0x590,
3090 .phycap_size = 0x60,
3091 .para_ver = 0x1,
3092 .wlcx_desired = 0x06000000,
3093 .btcx_desired = 0x7,
3094 .scbd = 0x1,
3095 .mailbox = 0x1,
3096 .btc_fwinfo_buf = 1280,
3097
3098 .fcxbtcrpt_ver = 4,
3099 .fcxtdma_ver = 3,
3100 .fcxslots_ver = 1,
3101 .fcxcysta_ver = 3,
3102 .fcxstep_ver = 3,
3103 .fcxnullsta_ver = 2,
3104 .fcxmreg_ver = 1,
3105 .fcxgpiodbg_ver = 1,
3106 .fcxbtver_ver = 1,
3107 .fcxbtscan_ver = 1,
3108 .fcxbtafh_ver = 1,
3109 .fcxbtdevinfo_ver = 1,
3110
3111 .afh_guard_ch = 6,
3112 .wl_rssi_thres = rtw89_btc_8852c_wl_rssi_thres,
3113 .bt_rssi_thres = rtw89_btc_8852c_bt_rssi_thres,
3114 .rssi_tol = 2,
3115 .mon_reg_num = ARRAY_SIZE(rtw89_btc_8852c_mon_reg),
3116 .mon_reg = rtw89_btc_8852c_mon_reg,
3117 .rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8852c_rf_ul),
3118 .rf_para_ulink = rtw89_btc_8852c_rf_ul,
3119 .rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8852c_rf_dl),
3120 .rf_para_dlink = rtw89_btc_8852c_rf_dl,
3121 .ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) |
3122 BIT(RTW89_PS_MODE_CLK_GATED) |
3123 BIT(RTW89_PS_MODE_PWR_GATED),
3124 .low_power_hci_modes = BIT(RTW89_PS_MODE_CLK_GATED) |
3125 BIT(RTW89_PS_MODE_PWR_GATED),
3126 .h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD_V1,
3127 .hci_func_en_addr = R_AX_HCI_FUNC_EN_V1,
3128 .h2c_desc_size = sizeof(struct rtw89_rxdesc_short),
3129 .txwd_body_size = sizeof(struct rtw89_txwd_body_v1),
3130 .h2c_ctrl_reg = R_AX_H2CREG_CTRL_V1,
3131 .h2c_regs = rtw8852c_h2c_regs,
3132 .c2h_ctrl_reg = R_AX_C2HREG_CTRL_V1,
3133 .c2h_regs = rtw8852c_c2h_regs,
3134 .page_regs = &rtw8852c_page_regs,
3135 .dcfo_comp = &rtw8852c_dcfo_comp,
3136 .dcfo_comp_sft = 5,
3137 .imr_info = &rtw8852c_imr_info,
3138 .rrsr_cfgs = &rtw8852c_rrsr_cfgs,
3139 .dma_ch_mask = 0,
3140 };
3141 EXPORT_SYMBOL(rtw8852c_chip_info);
3142
3143 MODULE_FIRMWARE("rtw89/rtw8852c_fw.bin");
3144 MODULE_AUTHOR("Realtek Corporation");
3145 MODULE_DESCRIPTION("Realtek 802.11ax wireless 8852C driver");
3146 MODULE_LICENSE("Dual BSD/GPL");
3147