// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2023 Realtek Corporation
 */

#include "debug.h"
#include "mac.h"
#include "phy.h"
#include "reg.h"

static const struct rtw89_ccx_regs rtw89_ccx_regs_be = {
	.setting_addr = R_CCX,
	.edcca_opt_mask = B_CCX_EDCCA_OPT_MSK_V1,
	.measurement_trig_mask = B_MEASUREMENT_TRIG_MSK,
	.trig_opt_mask = B_CCX_TRIG_OPT_MSK,
	.en_mask = B_CCX_EN_MSK,
	.ifs_cnt_addr = R_IFS_COUNTER,
	.ifs_clm_period_mask = B_IFS_CLM_PERIOD_MSK,
	.ifs_clm_cnt_unit_mask = B_IFS_CLM_COUNTER_UNIT_MSK,
	.ifs_clm_cnt_clear_mask = B_IFS_COUNTER_CLR_MSK,
	.ifs_collect_en_mask = B_IFS_COLLECT_EN,
	.ifs_t1_addr = R_IFS_T1,
	.ifs_t1_th_h_mask = B_IFS_T1_TH_HIGH_MSK,
	.ifs_t1_en_mask = B_IFS_T1_EN_MSK,
	.ifs_t1_th_l_mask = B_IFS_T1_TH_LOW_MSK,
	.ifs_t2_addr = R_IFS_T2,
	.ifs_t2_th_h_mask = B_IFS_T2_TH_HIGH_MSK,
	.ifs_t2_en_mask = B_IFS_T2_EN_MSK,
	.ifs_t2_th_l_mask = B_IFS_T2_TH_LOW_MSK,
	.ifs_t3_addr = R_IFS_T3,
	.ifs_t3_th_h_mask = B_IFS_T3_TH_HIGH_MSK,
	.ifs_t3_en_mask = B_IFS_T3_EN_MSK,
	.ifs_t3_th_l_mask = B_IFS_T3_TH_LOW_MSK,
	.ifs_t4_addr = R_IFS_T4,
	.ifs_t4_th_h_mask = B_IFS_T4_TH_HIGH_MSK,
	.ifs_t4_en_mask = B_IFS_T4_EN_MSK,
	.ifs_t4_th_l_mask = B_IFS_T4_TH_LOW_MSK,
	.ifs_clm_tx_cnt_addr = R_IFS_CLM_TX_CNT_V1,
	.ifs_clm_edcca_excl_cca_fa_mask = B_IFS_CLM_EDCCA_EXCLUDE_CCA_FA_MSK,
	.ifs_clm_tx_cnt_msk = B_IFS_CLM_TX_CNT_MSK,
	.ifs_clm_cca_addr = R_IFS_CLM_CCA_V1,
	.ifs_clm_ofdmcca_excl_fa_mask = B_IFS_CLM_OFDMCCA_EXCLUDE_FA_MSK,
	.ifs_clm_cckcca_excl_fa_mask = B_IFS_CLM_CCKCCA_EXCLUDE_FA_MSK,
	.ifs_clm_fa_addr = R_IFS_CLM_FA_V1,
	.ifs_clm_ofdm_fa_mask = B_IFS_CLM_OFDM_FA_MSK,
	.ifs_clm_cck_fa_mask = B_IFS_CLM_CCK_FA_MSK,
	.ifs_his_addr = R_IFS_HIS_V1,
	.ifs_t4_his_mask = B_IFS_T4_HIS_MSK,
	.ifs_t3_his_mask = B_IFS_T3_HIS_MSK,
	.ifs_t2_his_mask = B_IFS_T2_HIS_MSK,
	.ifs_t1_his_mask = B_IFS_T1_HIS_MSK,
	.ifs_avg_l_addr = R_IFS_AVG_L_V1,
	.ifs_t2_avg_mask = B_IFS_T2_AVG_MSK,
	.ifs_t1_avg_mask = B_IFS_T1_AVG_MSK,
	.ifs_avg_h_addr = R_IFS_AVG_H_V1,
	.ifs_t4_avg_mask = B_IFS_T4_AVG_MSK,
	.ifs_t3_avg_mask = B_IFS_T3_AVG_MSK,
	.ifs_cca_l_addr = R_IFS_CCA_L_V1,
	.ifs_t2_cca_mask = B_IFS_T2_CCA_MSK,
	.ifs_t1_cca_mask = B_IFS_T1_CCA_MSK,
	.ifs_cca_h_addr = R_IFS_CCA_H_V1,
	.ifs_t4_cca_mask = B_IFS_T4_CCA_MSK,
	.ifs_t3_cca_mask = B_IFS_T3_CCA_MSK,
	.ifs_total_addr = R_IFSCNT_V1,
	.ifs_cnt_done_mask = B_IFSCNT_DONE_MSK,
	.ifs_total_mask = B_IFSCNT_TOTAL_CNT_MSK,
};

static const struct rtw89_physts_regs rtw89_physts_regs_be = {
	.setting_addr = R_PLCP_HISTOGRAM,
	.dis_trigger_fail_mask = B_STS_DIS_TRIG_BY_FAIL,
	.dis_trigger_brk_mask = B_STS_DIS_TRIG_BY_BRK,
};

static const struct rtw89_cfo_regs rtw89_cfo_regs_be = {
	.comp = R_DCFO_WEIGHT_V1,
	.weighting_mask = B_DCFO_WEIGHT_MSK_V1,
	.comp_seg0 = R_DCFO_OPT_V1,
	.valid_0_mask = B_DCFO_OPT_EN_V1,
};

struct rtw89_byr_spec_ent_be {
	struct rtw89_rate_desc init;
	u8 num_of_idx;
	bool no_over_bw40;
	bool no_multi_nss;
};

static const struct rtw89_byr_spec_ent_be rtw89_byr_spec_be[] = {
	{
		.init = { .rs = RTW89_RS_CCK },
		.num_of_idx = RTW89_RATE_CCK_NUM,
		.no_over_bw40 = true,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_OFDM },
		.num_of_idx = RTW89_RATE_OFDM_NUM,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
};

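/*
 * Walk rtw89_byr_spec_be and program the by-rate tx-power table: for each
 * applicable entry (skipping ones limited to <= 40 MHz or single NSS), read
 * one s8 power value per rate index and pack four of them into each 32-bit
 * tx-power register write, advancing *addr as the table is filled.
 */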
static
void __phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev, u8 band, u8 bw,
			       u8 nss, u32 *addr, enum rtw89_phy_idx phy_idx)
{
	const struct rtw89_byr_spec_ent_be *ent;
	struct rtw89_rate_desc desc;
	int pos = 0;
	int i, j;
	u32 val;
	s8 v[4];

	for (i = 0; i < ARRAY_SIZE(rtw89_byr_spec_be); i++) {
		ent = &rtw89_byr_spec_be[i];

		if (bw > RTW89_CHANNEL_WIDTH_40 && ent->no_over_bw40)
			continue;
		if (nss > RTW89_NSS_1 && ent->no_multi_nss)
			continue;

		desc = ent->init;
		desc.nss = nss;
		for (j = 0; j < ent->num_of_idx; j++, desc.idx++) {
			v[pos] = rtw89_phy_read_txpwr_byrate(rtwdev, band, bw,
							     &desc);
			pos = (pos + 1) % 4;
			if (pos)
				continue;

			val = u32_encode_bits(v[0], GENMASK(7, 0)) |
			      u32_encode_bits(v[1], GENMASK(15, 8)) |
			      u32_encode_bits(v[2], GENMASK(23, 16)) |
			      u32_encode_bits(v[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, *addr, val);
			*addr += 4;
		}
	}
}

static void rtw89_phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	u32 addr = R_BE_PWR_BY_RATE;
	u8 band = chan->band_type;
	u8 bw, nss;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr byrate on band %d\n", band);

	for (bw = 0; bw <= RTW89_CHANNEL_WIDTH_320; bw++)
		for (nss = 0; nss <= RTW89_NSS_2; nss++)
			__phy_set_txpwr_byrate_be(rtwdev, band, bw, nss,
						  &addr, phy_idx);
}

static void rtw89_phy_set_txpwr_offset_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	struct rtw89_rate_desc desc = {
		.nss = RTW89_NSS_1,
		.rs = RTW89_RS_OFFSET,
	};
	u8 band = chan->band_type;
	s8 v[RTW89_RATE_OFFSET_NUM_BE] = {};
	u32 val;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr offset on band %d\n", band);

	for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_NUM_BE; desc.idx++)
		v[desc.idx] = rtw89_phy_read_txpwr_byrate(rtwdev, band, 0, &desc);

	val = u32_encode_bits(v[RTW89_RATE_OFFSET_CCK], GENMASK(3, 0)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_OFDM], GENMASK(7, 4)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HT], GENMASK(11, 8)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_VHT], GENMASK(15, 12)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HE], GENMASK(19, 16)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_EHT], GENMASK(23, 20)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_HE], GENMASK(27, 24)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_EHT], GENMASK(31, 28));

	rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_BE_PWR_RATE_OFST_CTRL, val);
}

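/*
 * Limit helpers: each call fills one [non-BF, BF] pair of tx-power limits;
 * the _min variant takes the element-wise minimum over two channel
 * positions, which the callers below use for the mcs_40m_0p5/2p5/...
 * entries that span two adjacent 40 MHz positions.
 */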
static void
fill_limit_nonbf_bf(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
		    u8 band, u8 bw, u8 ntx, u8 rs, u8 ch)
{
	int bf;

	for (bf = 0; bf < RTW89_BF_NUM; bf++)
		(*ptr)[bf] = rtw89_phy_read_txpwr_limit(rtwdev, band, bw, ntx,
							rs, bf, ch);
}

static void
fill_limit_nonbf_bf_min(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
			u8 band, u8 bw, u8 ntx, u8 rs, u8 ch1, u8 ch2)
{
	s8 v1[RTW89_BF_NUM];
	s8 v2[RTW89_BF_NUM];
	int bf;

	fill_limit_nonbf_bf(rtwdev, &v1, band, bw, ntx, rs, ch1);
	fill_limit_nonbf_bf(rtwdev, &v2, band, bw, ntx, rs, ch2);

	for (bf = 0; bf < RTW89_BF_NUM; bf++)
		(*ptr)[bf] = min(v1[bf], v2[bf]);
}

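/*
 * The phy_fill_limit_<bw>m_be() helpers fill one tx-power limit page for
 * the whole channel: offsets such as ch +/- 2, 6, 10, ... address the
 * 20 MHz sub-channels, ch +/- 4, 12, ... the 40 MHz sub-channels, and so
 * on up to the full bandwidth, while the OFDM limit is looked up on the
 * primary channel.
 */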
static void phy_fill_limit_20m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch);
}

static void phy_fill_limit_40m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch);
}

static void phy_fill_limit_80m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 4, ch + 4);
}

static void phy_fill_limit_160m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
}

static void phy_fill_limit_320m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 30);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[8], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[9], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[10], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[11], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[12], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[13], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[14], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[15], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 30);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 28);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[4], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[5], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[6], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[7], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 28);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 24);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[2], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[3], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 24);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch - 16);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[1], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch + 16);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_320m, band,
			    RTW89_CHANNEL_WIDTH_320, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 28, ch - 20);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_4p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_6p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 20, ch + 28);
}

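/*
 * Build one tx-power limit page for the current channel by dispatching on
 * bandwidth; widths without a handler leave the zero-initialized page as is.
 */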
static void rtw89_phy_fill_limit_be(struct rtw89_dev *rtwdev,
				    const struct rtw89_chan *chan,
				    struct rtw89_txpwr_limit_be *lmt,
				    u8 ntx)
{
	u8 band = chan->band_type;
	u8 pri_ch = chan->primary_channel;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt, 0, sizeof(*lmt));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_20m_be(rtwdev, lmt, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_40m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_80m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_160m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_320m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	}
}

static void rtw89_phy_set_txpwr_limit_be(struct rtw89_dev *rtwdev,
					 const struct rtw89_chan *chan,
					 enum rtw89_phy_idx phy_idx)
{
	struct rtw89_txpwr_limit_be lmt;
	const s8 *ptr;
	u32 addr, val;
	u8 i, j;

	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_be) !=
		     RTW89_TXPWR_LMT_PAGE_SIZE_BE);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit on band %d bw %d\n",
		    chan->band_type, chan->band_width);

	addr = R_BE_PWR_LMT;
	for (i = 0; i <= RTW89_NSS_2; i++) {
		rtw89_phy_fill_limit_be(rtwdev, chan, &lmt, i);

		ptr = (s8 *)&lmt;
		for (j = 0; j < RTW89_TXPWR_LMT_PAGE_SIZE_BE;
		     j += 4, addr += 4, ptr += 4) {
			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
			      u32_encode_bits(ptr[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
}

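/*
 * RU (OFDMA resource unit) limits: fill_limit_ru_each() reads the
 * RU26/RU52/RU106/RU52+26/RU106+26 limits for one 20 MHz segment, and the
 * per-bandwidth helpers below call it once per segment of the channel.
 */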
static void fill_limit_ru_each(struct rtw89_dev *rtwdev, u8 index,
			       struct rtw89_txpwr_limit_ru_be *lmt_ru,
			       u8 band, u8 ntx, u8 ch)
{
	lmt_ru->ru26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU26, ntx, ch);
	lmt_ru->ru52[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52, ntx, ch);
	lmt_ru->ru106[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106, ntx, ch);
	lmt_ru->ru52_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52_26, ntx, ch);
	lmt_ru->ru106_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106_26, ntx, ch);
}

static void phy_fill_limit_ru_20m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch);
}

static void phy_fill_limit_ru_40m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch + 2);
}

static void phy_fill_limit_ru_80m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch + 6);
}

static void phy_fill_limit_ru_160m_be(struct rtw89_dev *rtwdev,
				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
				      u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 14);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 10);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch + 6);
	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch + 10);
	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch + 14);
}

static void phy_fill_limit_ru_320m_be(struct rtw89_dev *rtwdev,
				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
				      u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 30);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 26);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 22);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 18);
	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch - 14);
	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch - 10);
	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 8, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 9, lmt_ru, band, ntx, ch + 6);
	fill_limit_ru_each(rtwdev, 10, lmt_ru, band, ntx, ch + 10);
	fill_limit_ru_each(rtwdev, 11, lmt_ru, band, ntx, ch + 14);
	fill_limit_ru_each(rtwdev, 12, lmt_ru, band, ntx, ch + 18);
	fill_limit_ru_each(rtwdev, 13, lmt_ru, band, ntx, ch + 22);
	fill_limit_ru_each(rtwdev, 14, lmt_ru, band, ntx, ch + 26);
	fill_limit_ru_each(rtwdev, 15, lmt_ru, band, ntx, ch + 30);
}

static void rtw89_phy_fill_limit_ru_be(struct rtw89_dev *rtwdev,
				       const struct rtw89_chan *chan,
				       struct rtw89_txpwr_limit_ru_be *lmt_ru,
				       u8 ntx)
{
	u8 band = chan->band_type;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt_ru, 0, sizeof(*lmt_ru));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_ru_20m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_ru_40m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_ru_80m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_ru_160m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_ru_320m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	}
}

static void rtw89_phy_set_txpwr_limit_ru_be(struct rtw89_dev *rtwdev,
					    const struct rtw89_chan *chan,
					    enum rtw89_phy_idx phy_idx)
{
	struct rtw89_txpwr_limit_ru_be lmt_ru;
	const s8 *ptr;
	u32 addr, val;
	u8 i, j;

	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_ru_be) !=
		     RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit ru on band %d bw %d\n",
		    chan->band_type, chan->band_width);

	addr = R_BE_PWR_RU_LMT;
	for (i = 0; i <= RTW89_NSS_2; i++) {
		rtw89_phy_fill_limit_ru_be(rtwdev, chan, &lmt_ru, i);

		ptr = (s8 *)&lmt_ru;
		for (j = 0; j < RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE;
		     j += 4, addr += 4, ptr += 4) {
			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
			      u32_encode_bits(ptr[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
}

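/*
 * PHY ops for the BE generation (802.11be capable chips): CR base 0x20000
 * plus the register sets and tx-power setters defined above.
 */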
const struct rtw89_phy_gen_def rtw89_phy_gen_be = {
	.cr_base = 0x20000,
	.ccx = &rtw89_ccx_regs_be,
	.physts = &rtw89_physts_regs_be,
	.cfo = &rtw89_cfo_regs_be,

	.set_txpwr_byrate = rtw89_phy_set_txpwr_byrate_be,
	.set_txpwr_offset = rtw89_phy_set_txpwr_offset_be,
	.set_txpwr_limit = rtw89_phy_set_txpwr_limit_be,
	.set_txpwr_limit_ru = rtw89_phy_set_txpwr_limit_ru_be,
};
EXPORT_SYMBOL(rtw89_phy_gen_be);