#include "mt76x02.h"

#define RADAR_SPEC(m, len, el, eh, wl, wh,			\
		   w_tolerance, tl, th, t_tolerance,		\
		   bl, bh, event_exp, power_jmp)		\
{								\
	.mode = m,						\
	.avg_len = len,						\
	.e_low = el,						\
	.e_high = eh,						\
	.w_low = wl,						\
	.w_high = wh,						\
	.w_margin = w_tolerance,				\
	.t_low = tl,						\
	.t_high = th,						\
	.t_margin = t_tolerance,				\
	.b_low = bl,						\
	.b_high = bh,						\
	.event_expiration = event_exp,				\
	.pwr_jmp = power_jmp					\
}

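/*
 * Per-region radar detection parameters. Each table holds three groups of
 * MT_DFS_NUM_ENGINES entries (20 MHz, 40 MHz and 80 MHz channels); the group
 * is selected by the bandwidth-dependent offset computed in
 * mt76x02_dfs_set_bbp_params(), and each entry programs one hw pulse engine.
 */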
static const struct mt76x02_radar_specs etsi_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc)
};

static const struct mt76x02_radar_specs fcc_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0xfe808, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w56_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 }
};

static void
mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable)
{
	u32 data;

	data = (1 << 1) | enable;
	mt76_wr(dev, MT_BBP(DFS, 36), data);
}

static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_sequence *seq)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	list_add(&seq->head, &dfs_pd->seq_pool);

	dfs_pd->seq_stats.seq_pool_len++;
	dfs_pd->seq_stats.seq_len--;
}

static struct mt76x02_dfs_sequence *
mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->seq_pool)) {
		seq = devm_kzalloc(dev->mt76.dev, sizeof(*seq), GFP_ATOMIC);
	} else {
		seq = list_first_entry(&dfs_pd->seq_pool,
				       struct mt76x02_dfs_sequence,
				       head);
		list_del(&seq->head);
		dfs_pd->seq_stats.seq_pool_len--;
	}
	if (seq)
		dfs_pd->seq_stats.seq_len++;

	return seq;
}

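/*
 * Return how many times @frac (the PRI of an existing sequence) fits into
 * @val (the interval between two events) when the remainder stays within
 * @margin; return 0 when @val is not an integer multiple of @frac.
 */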
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int remainder, factor;

	if (!frac)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	factor = val / frac;
	remainder = val % frac;

	if (remainder > margin) {
		if ((frac - remainder) <= margin)
			factor++;
		else
			factor = 0;
	}
	return factor;
}

static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	int i;

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

	/* reset sw detector */
	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		dfs_pd->event_rb[i].h_rb = 0;
		dfs_pd->event_rb[i].t_rb = 0;
	}

	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		list_del_init(&seq->head);
		mt76x02_dfs_seq_pool_put(dev, seq);
	}
}

static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev)
{
	bool ret = false;
	u32 current_ts, delta_ts;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER);
	delta_ts = current_ts - dfs_pd->chirp_pulse_ts;
	dfs_pd->chirp_pulse_ts = current_ts;

	/* 12 sec: report a chirp radar after more than 8 pulses */
	if (delta_ts <= (12 * (1 << 20))) {
		if (++dfs_pd->chirp_pulse_cnt > 8)
			ret = true;
	} else {
		dfs_pd->chirp_pulse_cnt = 1;
	}

	return ret;
}

static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_hw_pulse *pulse)
{
	u32 data;

	/* select channel */
	data = (MT_DFS_CH_EN << 16) | pulse->engine;
	mt76_wr(dev, MT_BBP(DFS, 0), data);

	/* reported period */
	pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));

	/* reported width */
	pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
	pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));

	/* reported burst number */
	pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
}

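/*
 * Validate a pulse reported by a hw detection engine against per-region
 * period and width windows. Engine 3 handles chirp radar on the FCC and JP
 * domains and is validated via mt76x02_dfs_check_chirp(); the JP W53 band
 * (5250-5350 MHz) uses its own period window.
 */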
static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_hw_pulse *pulse)
{
	bool ret = false;

	if (!pulse->period || !pulse->w1)
		return false;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 10200) &&
			       pulse->period <= 61600);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       pulse->period <= 61600);
		else
			ret = (pulse->period >= 3500 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_ETSI:
		if (pulse->engine >= 3)
			break;

		ret = (pulse->period >= 4900 &&
		       (pulse->period <= 10200 ||
			pulse->period >= 12400) &&
		       pulse->period <= 100100);
		break;
	case NL80211_DFS_JP:
		if (dev->mphy.chandef.chan->center_freq >= 5250 &&
		    dev->mphy.chandef.chan->center_freq <= 5350) {
			/* JPW53 */
			if (pulse->w1 <= 130)
				ret = (pulse->period >= 28360 &&
				       (pulse->period <= 28700 ||
					pulse->period >= 76900) &&
				       pulse->period <= 76940);
			break;
		}

		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 10100 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else
			ret = (pulse->period >= 3900 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_UNSET:
	default:
		return false;
	}

	return ret;
}

static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	u32 data;

	/* DFS_R37 is read as a small FIFO: the first read returns the
	 * engine id (MT_DFS_CHECK_EVENT() tells whether an event is
	 * actually queued), the following reads return the event
	 * timestamp and the pulse width.
	 */
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	if (!MT_DFS_CHECK_EVENT(data))
		return false;

	event->engine = MT_DFS_EVENT_ENGINE(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->ts = MT_DFS_EVENT_TIMESTAMP(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->width = MT_DFS_EVENT_WIDTH(data);

	return true;
}

static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	if (event->engine == 2) {
		struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
		struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1];
		u16 last_event_idx;
		u32 delta_ts;

		/* discard engine 2 events that arrive within
		 * MT_DFS_EVENT_TIME_MARGIN of a previous event wider
		 * than 200
		 */
		last_event_idx = mt76_decr(event_buff->t_rb,
					   MT_DFS_EVENT_BUFLEN);
		delta_ts = event->ts - event_buff->data[last_event_idx].ts;
		if (delta_ts < MT_DFS_EVENT_TIME_MARGIN &&
		    event_buff->data[last_event_idx].width >= 200)
			return false;
	}
	return true;
}

static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;

	/* add radar event to ring buffer */
	event_buff = event->engine == 2 ? &dfs_pd->event_rb[1]
					: &dfs_pd->event_rb[0];
	event_buff->data[event_buff->t_rb] = *event;
	event_buff->data[event_buff->t_rb].fetch_ts = jiffies;

	event_buff->t_rb = mt76_incr(event_buff->t_rb, MT_DFS_EVENT_BUFLEN);
	if (event_buff->t_rb == event_buff->h_rb)
		event_buff->h_rb = mt76_incr(event_buff->h_rb,
					     MT_DFS_EVENT_BUFLEN);
}

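/*
 * Software pattern detector: try to start new pulse sequences from @event.
 * Walk the event ring buffer backwards, pair the new event with older events
 * of comparable width, use their distance as a candidate PRI and count how
 * many earlier events fall on an integer multiple of it. Candidates longer
 * than @cur_len (the best match found by
 * mt76x02_dfs_add_event_to_sequence()) are added to the sequence list.
 */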
static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_event *event,
				       u16 cur_len)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	u32 width_delta, width_sum;
	struct mt76x02_dfs_sequence seq, *seq_p;
	struct mt76x02_dfs_event_rb *event_rb;
	struct mt76x02_dfs_event *cur_event;
	int i, j, end, pri, factor, cur_pri;

	event_rb = event->engine == 2 ? &dfs_pd->event_rb[1]
				      : &dfs_pd->event_rb[0];

	i = mt76_decr(event_rb->t_rb, MT_DFS_EVENT_BUFLEN);
	end = mt76_decr(event_rb->h_rb, MT_DFS_EVENT_BUFLEN);

	while (i != end) {
		cur_event = &event_rb->data[i];
		width_sum = event->width + cur_event->width;

		/* tolerated width difference depends on the DFS region */
		sw_params = &dfs_pd->sw_dpd_params;
		switch (dev->mt76.region) {
		case NL80211_DFS_FCC:
		case NL80211_DFS_JP:
			if (width_sum < 600)
				width_delta = 8;
			else
				width_delta = width_sum >> 3;
			break;
		case NL80211_DFS_ETSI:
			if (event->engine == 2)
				width_delta = width_sum >> 6;
			else if (width_sum < 620)
				width_delta = 24;
			else
				width_delta = 8;
			break;
		case NL80211_DFS_UNSET:
		default:
			return -EINVAL;
		}

		pri = event->ts - cur_event->ts;
		if (abs(event->width - cur_event->width) > width_delta ||
		    pri < sw_params->min_pri)
			goto next;

		if (pri > sw_params->max_pri)
			break;

		seq.pri = event->ts - cur_event->ts;
		seq.first_ts = cur_event->ts;
		seq.last_ts = event->ts;
		seq.engine = event->engine;
		seq.count = 2;

		/* check how many older events fit the candidate PRI */
		j = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
		while (j != end) {
			cur_event = &event_rb->data[j];
			cur_pri = event->ts - cur_event->ts;
			factor = mt76x02_dfs_get_multiple(cur_pri, seq.pri,
							  sw_params->pri_margin);
			if (factor > 0) {
				seq.first_ts = cur_event->ts;
				seq.count++;
			}

			j = mt76_decr(j, MT_DFS_EVENT_BUFLEN);
		}
		if (seq.count <= cur_len)
			goto next;

		seq_p = mt76x02_dfs_seq_pool_get(dev);
		if (!seq_p)
			return -ENOMEM;

		*seq_p = seq;
		INIT_LIST_HEAD(&seq_p->head);
		list_add(&seq_p->head, &dfs_pd->sequences);
next:
		i = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
	}
	return 0;
}

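/*
 * Try to extend the tracked sequences with @event: sequences older than
 * MT_DFS_SEQUENCE_WINDOW are recycled to the pool, the others are matched by
 * engine and PRI. Return the length of the longest sequence the event fits.
 */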
static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev,
					     struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	u16 max_seq_len = 0;
	int factor, pri;

	sw_params = &dfs_pd->sw_dpd_params;
	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) {
			list_del_init(&seq->head);
			mt76x02_dfs_seq_pool_put(dev, seq);
			continue;
		}

		if (event->engine != seq->engine)
			continue;

		pri = event->ts - seq->last_ts;
		factor = mt76x02_dfs_get_multiple(pri, seq->pri,
						  sw_params->pri_margin);
		if (factor > 0) {
			seq->last_ts = event->ts;
			seq->count++;
			max_seq_len = max_t(u16, max_seq_len, seq->count);
		}
	}
	return max_seq_len;
}

static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->sequences))
		return false;

	list_for_each_entry(seq, &dfs_pd->sequences, head) {
		if (seq->count > MT_DFS_SEQUENCE_TH) {
			dfs_pd->stats[seq->engine].sw_pattern++;
			return true;
		}
	}
	return false;
}

static void mt76x02_dfs_add_events(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event event;
	int i, seq_len;

	/* disable debug mode */
	mt76x02_dfs_set_capture_mode_ctrl(dev, false);
	for (i = 0; i < MT_DFS_EVENT_LOOP; i++) {
		if (!mt76x02_dfs_fetch_event(dev, &event))
			break;

		if (dfs_pd->last_event_ts > event.ts)
			mt76x02_dfs_detector_reset(dev);
		dfs_pd->last_event_ts = event.ts;

		if (!mt76x02_dfs_check_event(dev, &event))
			continue;

		seq_len = mt76x02_dfs_add_event_to_sequence(dev, &event);
		mt76x02_dfs_create_sequence(dev, &event, seq_len);

		mt76x02_dfs_queue_event(dev, &event);
	}
	mt76x02_dfs_set_capture_mode_ctrl(dev, true);
}

static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;
	struct mt76x02_dfs_event *event;
	int i;

	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		event_buff = &dfs_pd->event_rb[i];

		while (event_buff->h_rb != event_buff->t_rb) {
			event = &event_buff->data[event_buff->h_rb];

			/* the ring buffer is sorted by fetch time, so stop
			 * at the first event still inside the window
			 */
			if (time_is_after_jiffies(event->fetch_ts +
						  MT_DFS_EVENT_WINDOW))
				break;
			event_buff->h_rb = mt76_incr(event_buff->h_rb,
						     MT_DFS_EVENT_BUFLEN);
		}
	}
}

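/*
 * DFS tasklet, scheduled from the GP timer interrupt: run the sw pattern
 * detector at most once every MT_DFS_SW_TIMEOUT, then poll the hw engines.
 * On either sw or hw detection, report the radar event to mac80211 and
 * reset the detector state.
 */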
static void mt76x02_dfs_tasklet(struct tasklet_struct *t)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = from_tasklet(dfs_pd, t,
								    dfs_tasklet);
	struct mt76x02_dev *dev = container_of(dfs_pd, typeof(*dev), dfs_pd);
	u32 engine_mask;
	int i;

	if (test_bit(MT76_SCANNING, &dev->mphy.state))
		goto out;

	if (time_is_before_jiffies(dfs_pd->last_sw_check +
				   MT_DFS_SW_TIMEOUT)) {
		bool radar_detected;

		dfs_pd->last_sw_check = jiffies;

		mt76x02_dfs_add_events(dev);
		radar_detected = mt76x02_dfs_check_detection(dev);
		if (radar_detected) {
			/* sw detector found a radar pattern */
			ieee80211_radar_detected(dev->mt76.hw);
			mt76x02_dfs_detector_reset(dev);

			return;
		}
		mt76x02_dfs_check_event_window(dev);
	}

	engine_mask = mt76_rr(dev, MT_BBP(DFS, 1));
	if (!(engine_mask & 0xf))
		goto out;

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		struct mt76x02_dfs_hw_pulse pulse;

		if (!(engine_mask & (1 << i)))
			continue;

		pulse.engine = i;
		mt76x02_dfs_get_hw_pulse(dev, &pulse);

		if (!mt76x02_dfs_check_hw_pulse(dev, &pulse)) {
			dfs_pd->stats[i].hw_pulse_discarded++;
			continue;
		}

		/* hw detector found a radar pattern */
		dfs_pd->stats[i].hw_pattern++;
		ieee80211_radar_detected(dev->mt76.hw);
		mt76x02_dfs_detector_reset(dev);

		return;
	}

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

out:
	mt76x02_irq_enable(dev, MT_INT_GPTIMER);
}

static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_ETSI:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2;
		break;
	case NL80211_DFS_JP:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_UNSET:
	default:
		break;
	}
}

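/*
 * Program the baseband radar engines for the current channel width and DFS
 * region: pick the matching group from the radar spec tables, write the
 * per-engine thresholds and finally (re)enable detection.
 */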
static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
{
	const struct mt76x02_radar_specs *radar_specs;
	u8 i, shift;
	u32 data;

	switch (dev->mphy.chandef.width) {
	case NL80211_CHAN_WIDTH_40:
		shift = MT_DFS_NUM_ENGINES;
		break;
	case NL80211_CHAN_WIDTH_80:
		shift = 2 * MT_DFS_NUM_ENGINES;
		break;
	default:
		shift = 0;
		break;
	}

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		radar_specs = &fcc_radar_specs[shift];
		break;
	case NL80211_DFS_ETSI:
		radar_specs = &etsi_radar_specs[shift];
		break;
	case NL80211_DFS_JP:
		if (dev->mphy.chandef.chan->center_freq >= 5250 &&
		    dev->mphy.chandef.chan->center_freq <= 5350)
			radar_specs = &jp_w53_radar_specs[shift];
		else
			radar_specs = &jp_w56_radar_specs[shift];
		break;
	case NL80211_DFS_UNSET:
	default:
		return;
	}

	data = (MT_DFS_VGA_MASK << 16) |
	       (MT_DFS_PWR_GAIN_OFFSET << 12) |
	       (MT_DFS_PWR_DOWN_TIME << 8) |
	       (MT_DFS_SYM_ROUND << 4) |
	       (MT_DFS_DELTA_DELAY & 0xf);
	mt76_wr(dev, MT_BBP(DFS, 2), data);

	data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
	mt76_wr(dev, MT_BBP(DFS, 3), data);

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		/* configure engine */
		mt76_wr(dev, MT_BBP(DFS, 0), i);

		/* detection mode + avg_len */
		data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
		       (radar_specs[i].mode & 0xf);
		mt76_wr(dev, MT_BBP(DFS, 4), data);

		/* dfs energy */
		data = ((radar_specs[i].e_high & 0x0fff) << 16) |
		       (radar_specs[i].e_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 5), data);

		/* dfs period */
		mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
		mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);

		/* dfs burst */
		mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
		mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);

		/* dfs width */
		data = ((radar_specs[i].w_high & 0x0fff) << 16) |
		       (radar_specs[i].w_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 14), data);

		/* dfs margins */
		data = (radar_specs[i].w_margin << 16) |
		       radar_specs[i].t_margin;
		mt76_wr(dev, MT_BBP(DFS, 15), data);

		/* dfs event expiration */
		mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);

		/* dfs pwr adj */
		mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
	}

	/* reset status */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
	mt76_wr(dev, MT_BBP(DFS, 36), 0x3);

	/* enable detection */
	mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
}

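/*
 * Re-derive the DFS detector gain (DFS_R31) from the current AGC_R4/AGC_R8
 * settings, presumably so that radar detection tracks VGA/gain changes.
 * The bit fields extracted from the AGC registers are taken as-is from the
 * original code; their exact meaning is not documented here.
 */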
void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
{
	u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;

	agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
	agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));

	val_r8 = (agc_r8 & 0x00007e00) >> 9;
	val_r4 = agc_r4 & ~0x1f000000;
	val_r4 += (((val_r8 + 1) >> 1) << 24);
	mt76_wr(dev, MT_BBP(AGC, 4), val_r4);

	dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
	dfs_r31 += val_r8;
	dfs_r31 -= (agc_r8 & 0x00000038) >> 3;
	dfs_r31 = (dfs_r31 << 16) | 0x00000307;
	mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);

	if (is_mt76x2(dev)) {
		mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* enable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);

void mt76x02_dfs_init_params(struct mt76x02_dev *dev)
{
	if (mt76_phy_dfs_state(&dev->mphy) > MT_DFS_STATE_DISABLED) {
		mt76x02_dfs_init_sw_detector(dev);
		mt76x02_dfs_set_bbp_params(dev);
		/* enable debug mode */
		mt76x02_dfs_set_capture_mode_ctrl(dev, true);

		mt76x02_irq_enable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 1);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* clear detector status */
		mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
		if (mt76_chip(&dev->mt76) == 0x7610 ||
		    mt76_chip(&dev->mt76) == 0x7630)
			mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081);
		else
			mt76_wr(dev, MT_BBP(IBI, 11), 0);

		mt76x02_irq_disable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 0);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params);

void mt76x02_dfs_init_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	INIT_LIST_HEAD(&dfs_pd->sequences);
	INIT_LIST_HEAD(&dfs_pd->seq_pool);
	dev->mt76.region = NL80211_DFS_UNSET;
	dfs_pd->last_sw_check = jiffies;
	tasklet_setup(&dfs_pd->dfs_tasklet, mt76x02_dfs_tasklet);
}

static void
mt76x02_dfs_set_domain(struct mt76x02_dev *dev,
		       enum nl80211_dfs_regions region)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	mutex_lock(&dev->mt76.mutex);
	if (dev->mt76.region != region) {
		tasklet_disable(&dfs_pd->dfs_tasklet);

		dev->ed_monitor = dev->ed_monitor_enabled &&
				  region == NL80211_DFS_ETSI;
		mt76x02_edcca_init(dev);

		dev->mt76.region = region;
		mt76x02_dfs_init_params(dev);
		tasklet_enable(&dfs_pd->dfs_tasklet);
	}
	mutex_unlock(&dev->mt76.mutex);
}

void mt76x02_regd_notifier(struct wiphy *wiphy,
			   struct regulatory_request *request)
{
	struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy);
	struct mt76x02_dev *dev = hw->priv;

	mt76x02_dfs_set_domain(dev, request->dfs_region);
}