1 // SPDX-License-Identifier: ISC
2 /* Copyright (C) 2019 MediaTek Inc.
4 * Author: Roy Luo <royluo@google.com>
5 * Ryder Lee <ryder.lee@mediatek.com>
8 #include <linux/firmware.h>
14 static bool prefer_offload_fw = true;
15 module_param(prefer_offload_fw, bool, 0644);
16 MODULE_PARM_DESC(prefer_offload_fw,
17 "Prefer client mode offload firmware (MT7663)");
/* Firmware image descriptors and download constants.
 * NOTE(review): struct bodies are elided in this view — layouts must be
 * confirmed against the full header/file.
 */
19 struct mt7615_patch_hdr {
27 struct mt7615_fw_trailer {
/* v3 (MT7663) image layout: a common tailer at the end of the blob plus
 * one per-region tailer for each download region. */
37 #define FW_V3_COMMON_TAILER_SIZE 36
38 #define FW_V3_REGION_TAILER_SIZE 40
/* Option flags passed to the FW_START command. */
39 #define FW_START_OVERRIDE BIT(0)
40 #define FW_START_DLYCAL BIT(1)
41 #define FW_START_WORKING_PDA_CR4 BIT(2)
43 struct mt7663_fw_trailer {
55 struct mt7663_fw_buf {
/* Per-chip ROM patch download addresses. */
65 #define MT7615_PATCH_ADDRESS 0x80000
66 #define MT7622_PATCH_ADDRESS 0x9c000
67 #define MT7663_PATCH_ADDRESS 0xdc000
/* Number of download regions in the N9 (WiFi MCU) and CR4 (offload MCU)
 * RAM images. */
69 #define N9_REGION_NUM 2
70 #define CR4_REGION_NUM 1
/* mt7615_mcu_fill_msg - prepend the MCU TX descriptor to an MCU command skb.
 *
 * Pushes either a unified (uni) or legacy MCU TXD in front of the message
 * payload, depending on the __MCU_CMD_FIELD_UNI flag in @cmd, and fills in
 * queue/port routing, length, command id and set/query attributes.
 * A sequence number is allocated from dev->mt76.mcu.msg_seq (4-bit,
 * presumably skipping 0 — the elided branch between the two assignments
 * should confirm) and, NOTE(review), is likely returned via @wait_seq.
 */
74 void mt7615_mcu_fill_msg(struct mt7615_dev *dev, struct sk_buff *skb,
75 int cmd, int *wait_seq)
77 int txd_len, mcu_cmd = FIELD_GET(__MCU_CMD_FIELD_ID, cmd);
78 struct mt7615_uni_txd *uni_txd;
79 struct mt7615_mcu_txd *mcu_txd;
80 u8 seq, q_idx, pkt_fmt;
84 /* TODO: make dynamic based on msg type */
85 dev->mt76.mcu.timeout = 20 * HZ;
87 seq = ++dev->mt76.mcu.msg_seq & 0xf;
89 seq = ++dev->mt76.mcu.msg_seq & 0xf;
/* TXD flavor/size depends on whether this is a unified command. */
93 txd_len = cmd & __MCU_CMD_FIELD_UNI ? sizeof(*uni_txd) : sizeof(*mcu_txd);
94 txd = (__le32 *)skb_push(skb, txd_len);
/* Firmware-scatter (download) frames go out on the FWDL queue; all other
 * commands use the regular MCU RX queue 0. */
96 if (cmd != MCU_CMD(FW_SCATTER)) {
97 q_idx = MT_TX_MCU_PORT_RX_Q0;
98 pkt_fmt = MT_TX_TYPE_CMD;
100 q_idx = MT_TX_MCU_PORT_RX_FWDL;
101 pkt_fmt = MT_TX_TYPE_FW;
/* TXD word 0: total length and MCU port/queue routing. */
104 val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len) |
105 FIELD_PREP(MT_TXD0_P_IDX, MT_TX_PORT_IDX_MCU) |
106 FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
107 txd[0] = cpu_to_le32(val);
/* TXD word 1: long format, command header, packet format. */
109 val = MT_TXD1_LONG_FORMAT |
110 FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_CMD) |
111 FIELD_PREP(MT_TXD1_PKT_FMT, pkt_fmt);
112 txd[1] = cpu_to_le32(val);
/* Unified command descriptor: host-to-N9, extended-ack option. */
114 if (cmd & __MCU_CMD_FIELD_UNI) {
115 uni_txd = (struct mt7615_uni_txd *)txd;
116 uni_txd->len = cpu_to_le16(skb->len - sizeof(uni_txd->txd));
117 uni_txd->option = MCU_CMD_UNI_EXT_ACK;
118 uni_txd->cid = cpu_to_le16(mcu_cmd);
119 uni_txd->s2d_index = MCU_S2D_H2N;
120 uni_txd->pkt_type = MCU_PKT_ID;
/* Legacy command descriptor. */
126 mcu_txd = (struct mt7615_mcu_txd *)txd;
127 mcu_txd->len = cpu_to_le16(skb->len - sizeof(mcu_txd->txd));
128 mcu_txd->pq_id = cpu_to_le16(MCU_PQ_ID(MT_TX_PORT_IDX_MCU, q_idx));
129 mcu_txd->s2d_index = MCU_S2D_H2N;
130 mcu_txd->pkt_type = MCU_PKT_ID;
132 mcu_txd->cid = mcu_cmd;
133 mcu_txd->ext_cid = FIELD_GET(__MCU_CMD_FIELD_EXT_ID, cmd);
/* Extended and CE commands carry an explicit set/query attribute;
 * extended commands additionally request an ext-cid ack. */
135 if (mcu_txd->ext_cid || (cmd & __MCU_CMD_FIELD_CE)) {
136 if (cmd & __MCU_CMD_FIELD_QUERY)
137 mcu_txd->set_query = MCU_Q_QUERY;
139 mcu_txd->set_query = MCU_Q_SET;
140 mcu_txd->ext_cid_ack = !!mcu_txd->ext_cid;
142 mcu_txd->set_query = MCU_Q_NA;
145 EXPORT_SYMBOL_GPL(mt7615_mcu_fill_msg);
/* mt7615_mcu_parse_response - decode an MCU event skb into a return code.
 *
 * Called by the mt76 MCU core once a response matching @seq arrives (a NULL
 * @skb — handled in an elided branch — indicates a timeout and is logged).
 * The response payload layout depends on the command: most commands return a
 * 32-bit status after the RX descriptor, RF register reads return the value
 * at offset 8, and register reads return an mt7615_mcu_reg_event.
 */
147 int mt7615_mcu_parse_response(struct mt76_dev *mdev, int cmd,
148 struct sk_buff *skb, int seq)
150 struct mt7615_mcu_rxd *rxd;
154 dev_err(mdev->dev, "Message %08x (seq %d) timeout\n",
159 rxd = (struct mt7615_mcu_rxd *)skb->data;
/* PATCH_SEM_CONTROL replies keep the last 4 bytes of the rxd as data. */
163 if (cmd == MCU_CMD(PATCH_SEM_CONTROL)) {
164 skb_pull(skb, sizeof(*rxd) - 4);
166 } else if (cmd == MCU_EXT_CMD(THERMAL_CTRL)) {
167 skb_pull(skb, sizeof(*rxd));
168 ret = le32_to_cpu(*(__le32 *)skb->data);
169 } else if (cmd == MCU_EXT_QUERY(RF_REG_ACCESS)) {
170 skb_pull(skb, sizeof(*rxd));
/* RF register value lives at offset 8 of the event payload. */
171 ret = le32_to_cpu(*(__le32 *)&skb->data[8]);
172 } else if (cmd == MCU_UNI_CMD(DEV_INFO_UPDATE) ||
173 cmd == MCU_UNI_CMD(BSS_INFO_UPDATE) ||
174 cmd == MCU_UNI_CMD(STA_REC_UPDATE) ||
175 cmd == MCU_UNI_CMD(HIF_CTRL) ||
176 cmd == MCU_UNI_CMD(OFFLOAD) ||
177 cmd == MCU_UNI_CMD(SUSPEND)) {
178 struct mt7615_mcu_uni_event *event;
180 skb_pull(skb, sizeof(*rxd));
181 event = (struct mt7615_mcu_uni_event *)skb->data;
182 ret = le32_to_cpu(event->status);
183 } else if (cmd == MCU_CE_QUERY(REG_READ)) {
184 struct mt7615_mcu_reg_event *event;
186 skb_pull(skb, sizeof(*rxd));
187 event = (struct mt7615_mcu_reg_event *)skb->data;
188 ret = (int)le32_to_cpu(event->val);
193 EXPORT_SYMBOL_GPL(mt7615_mcu_parse_response);
/* mt7615_mcu_send_message - mt76 .mcu_send_msg hook: build the TXD and queue
 * the command skb on the appropriate MCU queue.
 *
 * Queue selection (elided here) presumably depends on whether the MCU is
 * already running — firmware download traffic uses a different queue.
 */
196 mt7615_mcu_send_message(struct mt76_dev *mdev, struct sk_buff *skb,
199 struct mt7615_dev *dev = container_of(mdev, struct mt7615_dev, mt76);
200 enum mt76_mcuq_id qid;
202 mt7615_mcu_fill_msg(dev, skb, cmd, seq);
203 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mphy.state))
208 return mt76_tx_queue_skb_raw(dev, dev->mt76.q_mcu[qid], skb, 0);
/* mt7615_rf_rr - read an RF register via the MCU RF_REG_ACCESS query.
 * @wf: wifi stream index, @reg: RF register address.
 * The read value is extracted from the response by
 * mt7615_mcu_parse_response() (offset 8 of the event payload).
 */
211 u32 mt7615_rf_rr(struct mt7615_dev *dev, u32 wf, u32 reg)
218 .wifi_stream = cpu_to_le32(wf),
219 .address = cpu_to_le32(reg),
222 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_QUERY(RF_REG_ACCESS),
223 &req, sizeof(req), true);
/* mt7615_rf_wr - write @val to RF register @reg on stream @wf via the MCU.
 * Fire-and-forget: no response wait (last argument false).
 */
226 int mt7615_rf_wr(struct mt7615_dev *dev, u32 wf, u32 reg, u32 val)
233 .wifi_stream = cpu_to_le32(wf),
234 .address = cpu_to_le32(reg),
235 .data = cpu_to_le32(val),
238 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_REG_ACCESS),
239 &req, sizeof(req), false);
/* mt7622_trigger_hif_int - toggle the MT7622 AP-to-CONN wake interrupt.
 *
 * Only meaningful on MT7622 (no-op otherwise). Note the inverted polarity:
 * the AP2CONN_WAKE bit is set when @en is false and cleared when true.
 */
242 void mt7622_trigger_hif_int(struct mt7615_dev *dev, bool en)
244 if (!is_mt7622(&dev->mt76))
247 regmap_update_bits(dev->infracfg, MT_INFRACFG_MISC,
248 MT_INFRACFG_MISC_AP2CONN_WAKE,
249 !en * MT_INFRACFG_MISC_AP2CONN_WAKE);
251 EXPORT_SYMBOL_GPL(mt7622_trigger_hif_int);
/* mt7615_mcu_drv_pmctrl - claim "driver own" (wake firmware out of low
 * power so the host may access the chip).
 *
 * MT7663 clears firmware-own through a PCIe doorbell targeting the N9 eint
 * and polls a different status register; other chips write the LPCR host
 * register directly. MT7622 additionally needs the HIF wake interrupt
 * toggled around the poll. On success the PM state bit is cleared and doze
 * statistics are updated.
 */
253 static int mt7615_mcu_drv_pmctrl(struct mt7615_dev *dev)
255 struct mt76_phy *mphy = &dev->mt76.phy;
256 struct mt76_connac_pm *pm = &dev->pm;
257 struct mt76_dev *mdev = &dev->mt76;
261 if (is_mt7663(mdev)) {
262 /* Clear firmware own via N9 eint */
263 mt76_wr(dev, MT_PCIE_DOORBELL_PUSH, MT_CFG_LPCR_HOST_DRV_OWN);
264 mt76_poll(dev, MT_CONN_ON_MISC, MT_CFG_LPCR_HOST_FW_OWN, 0, 3000);
266 addr = MT_CONN_HIF_ON_LPCTL;
268 addr = MT_CFG_LPCR_HOST;
271 mt76_wr(dev, addr, MT_CFG_LPCR_HOST_DRV_OWN);
273 mt7622_trigger_hif_int(dev, true);
/* Wait up to 3s for the FW_OWN bit to clear. */
275 err = !mt76_poll_msec(dev, addr, MT_CFG_LPCR_HOST_FW_OWN, 0, 3000);
277 mt7622_trigger_hif_int(dev, false);
280 dev_err(mdev->dev, "driver own failed\n");
284 clear_bit(MT76_STATE_PM, &mphy->state);
/* Account the time spent dozing since the last doze event. */
286 pm->stats.last_wake_event = jiffies;
287 pm->stats.doze_time += pm->stats.last_wake_event -
288 pm->stats.last_doze_event;
/* mt7615_mcu_lp_drv_pmctrl - runtime-PM variant of driver-own acquisition
 * (used by the uni_update_ops MCU flavor).
 *
 * Retries the doorbell/poll handshake up to MT7615_DRV_OWN_RETRY_COUNT
 * times under pm->mutex; skips the handshake entirely if the device is not
 * currently in PM state. Updates doze statistics on success.
 */
293 static int mt7615_mcu_lp_drv_pmctrl(struct mt7615_dev *dev)
295 struct mt76_phy *mphy = &dev->mt76.phy;
296 struct mt76_connac_pm *pm = &dev->pm;
299 mutex_lock(&pm->mutex);
301 if (!test_bit(MT76_STATE_PM, &mphy->state))
304 for (i = 0; i < MT7615_DRV_OWN_RETRY_COUNT; i++) {
305 mt76_wr(dev, MT_PCIE_DOORBELL_PUSH, MT_CFG_LPCR_HOST_DRV_OWN);
306 if (mt76_poll_msec(dev, MT_CONN_HIF_ON_LPCTL,
307 MT_CFG_LPCR_HOST_FW_OWN, 0, 50))
/* All retries exhausted: give up and report the failure. */
311 if (i == MT7615_DRV_OWN_RETRY_COUNT) {
312 dev_err(dev->mt76.dev, "driver own failed\n");
316 clear_bit(MT76_STATE_PM, &mphy->state);
318 pm->stats.last_wake_event = jiffies;
319 pm->stats.doze_time += pm->stats.last_wake_event -
320 pm->stats.last_doze_event;
322 mutex_unlock(&pm->mutex);
327 static int mt7615_mcu_fw_pmctrl(struct mt7615_dev *dev)
329 struct mt76_phy *mphy = &dev->mt76.phy;
330 struct mt76_connac_pm *pm = &dev->pm;
334 mutex_lock(&pm->mutex);
336 if (mt76_connac_skip_fw_pmctrl(mphy, pm))
339 mt7622_trigger_hif_int(dev, true);
341 addr = is_mt7663(&dev->mt76) ? MT_CONN_HIF_ON_LPCTL : MT_CFG_LPCR_HOST;
342 mt76_wr(dev, addr, MT_CFG_LPCR_HOST_FW_OWN);
344 if (is_mt7622(&dev->mt76) &&
345 !mt76_poll_msec(dev, addr, MT_CFG_LPCR_HOST_FW_OWN,
346 MT_CFG_LPCR_HOST_FW_OWN, 3000)) {
347 dev_err(dev->mt76.dev, "Timeout for firmware own\n");
348 clear_bit(MT76_STATE_PM, &mphy->state);
352 mt7622_trigger_hif_int(dev, false);
354 pm->stats.last_doze_event = jiffies;
355 pm->stats.awake_time += pm->stats.last_doze_event -
356 pm->stats.last_wake_event;
359 mutex_unlock(&pm->mutex);
/* mt7615_mcu_csa_finish - interface iterator callback: complete the channel
 * switch announcement countdown on @vif. An elided guard presumably filters
 * which vifs finish (e.g. AP mode / matching hw) — confirm in full source.
 */
365 mt7615_mcu_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
368 ieee80211_csa_finish(vif);
/* mt7615_mcu_rx_csa_notify - handle a CSA-notify event from firmware.
 *
 * Resolves which phy the notifying omac belongs to (second phy for DBDC
 * when the ext phy owns the omac index) and finishes CSA on all active
 * interfaces of that phy. Events with an omac index beyond EXT_BSSID_MAX
 * are ignored.
 */
372 mt7615_mcu_rx_csa_notify(struct mt7615_dev *dev, struct sk_buff *skb)
374 struct mt7615_phy *ext_phy = mt7615_ext_phy(dev);
375 struct mt76_phy *mphy = &dev->mt76.phy;
376 struct mt7615_mcu_csa_notify *c;
378 c = (struct mt7615_mcu_csa_notify *)skb->data;
380 if (c->omac_idx > EXT_BSSID_MAX)
383 if (ext_phy && ext_phy->omac_mask & BIT_ULL(c->omac_idx))
384 mphy = dev->mt76.phy2;
386 ieee80211_iterate_active_interfaces_atomic(mphy->hw,
387 IEEE80211_IFACE_ITER_RESUME_ALL,
388 mt7615_mcu_csa_finish, mphy->hw);
/* mt7615_mcu_rx_radar_detected - handle an RDD (radar detect) report.
 *
 * Ignores reports that carry no detection flags unless a debugfs radar
 * pattern test is active (dev->radar_pattern.n_pulses). Routes the report
 * to the second phy for band 1 on DBDC, and only notifies mac80211 when
 * that phy is at least in CAC state.
 */
392 mt7615_mcu_rx_radar_detected(struct mt7615_dev *dev, struct sk_buff *skb)
394 struct mt76_phy *mphy = &dev->mt76.phy;
395 struct mt7615_mcu_rdd_report *r;
397 r = (struct mt7615_mcu_rdd_report *)skb->data;
399 if (!dev->radar_pattern.n_pulses && !r->long_detected &&
400 !r->constant_prf_detected && !r->staggered_prf_detected)
403 if (r->band_idx && dev->mt76.phy2)
404 mphy = dev->mt76.phy2;
406 if (mt76_phy_dfs_state(mphy) < MT_DFS_STATE_CAC)
409 ieee80211_radar_detected(mphy->hw);
/* mt7615_mcu_rx_log_message - forward a firmware log event to the wiphy
 * log. The s2d_index switch (cases elided) selects the source-MCU label
 * printed as "type"; the remainder of the event after the RX descriptor is
 * the message text.
 */
414 mt7615_mcu_rx_log_message(struct mt7615_dev *dev, struct sk_buff *skb)
416 struct mt7615_mcu_rxd *rxd = (struct mt7615_mcu_rxd *)skb->data;
417 const char *data = (char *)&rxd[1];
420 switch (rxd->s2d_index) {
432 wiphy_info(mt76_hw(dev)->wiphy, "%s: %.*s", type,
433 (int)(skb->len - sizeof(*rxd)), data);
/* mt7615_mcu_rx_ext_event - dispatch extended (ext_eid) unsolicited events
 * to their handlers: RDD reports, CSA notifications, and firmware log
 * messages.
 */
437 mt7615_mcu_rx_ext_event(struct mt7615_dev *dev, struct sk_buff *skb)
439 struct mt7615_mcu_rxd *rxd = (struct mt7615_mcu_rxd *)skb->data;
441 switch (rxd->ext_eid) {
442 case MCU_EXT_EVENT_RDD_REPORT:
443 mt7615_mcu_rx_radar_detected(dev, skb);
445 case MCU_EXT_EVENT_CSA_NOTIFY:
446 mt7615_mcu_rx_csa_notify(dev, skb);
448 case MCU_EXT_EVENT_FW_LOG_2_HOST:
449 mt7615_mcu_rx_log_message(dev, skb);
/* mt7615_mcu_scan_event - queue a scan-done/sched-scan event for deferred
 * processing by the phy's scan_work.
 *
 * Bit 7 of the sequence byte following the RX descriptor selects the
 * second phy on DBDC setups. The skb is appended to the phy's
 * scan_event_list under dev->mt76.lock and consumed by scan_work, which is
 * (re)armed with the MT7615_HW_SCAN_TIMEOUT delay.
 */
457 mt7615_mcu_scan_event(struct mt7615_dev *dev, struct sk_buff *skb)
459 u8 *seq_num = skb->data + sizeof(struct mt7615_mcu_rxd);
460 struct mt7615_phy *phy;
461 struct mt76_phy *mphy;
463 if (*seq_num & BIT(7) && dev->mt76.phy2)
464 mphy = dev->mt76.phy2;
466 mphy = &dev->mt76.phy;
468 phy = (struct mt7615_phy *)mphy->priv;
470 spin_lock_bh(&dev->mt76.lock);
471 __skb_queue_tail(&phy->scan_event_list, skb);
472 spin_unlock_bh(&dev->mt76.lock);
474 ieee80211_queue_delayed_work(mphy->hw, &phy->scan_work,
475 MT7615_HW_SCAN_TIMEOUT);
/* mt7615_mcu_roc_event - handle a remain-on-channel grant from firmware.
 *
 * Picks the phy from the event's dbdc_band flag, notifies mac80211 that the
 * channel is ready, wakes any waiter on phy->roc_wait, and arms roc_timer
 * for the granted interval (max_interval, in ms per the msecs_to_jiffies
 * conversion).
 */
479 mt7615_mcu_roc_event(struct mt7615_dev *dev, struct sk_buff *skb)
481 struct mt7615_roc_tlv *event;
482 struct mt7615_phy *phy;
483 struct mt76_phy *mphy;
486 skb_pull(skb, sizeof(struct mt7615_mcu_rxd));
487 event = (struct mt7615_roc_tlv *)skb->data;
489 if (event->dbdc_band && dev->mt76.phy2)
490 mphy = dev->mt76.phy2;
492 mphy = &dev->mt76.phy;
494 ieee80211_ready_on_channel(mphy->hw);
496 phy = (struct mt7615_phy *)mphy->priv;
497 phy->roc_grant = true;
498 wake_up(&phy->roc_wait);
500 duration = le32_to_cpu(event->max_interval);
501 mod_timer(&phy->roc_timer,
502 round_jiffies_up(jiffies + msecs_to_jiffies(duration)));
/* mt7615_mcu_beacon_loss_event - propagate a firmware beacon-loss event to
 * all active interfaces via the connac beacon-loss iterator.
 *
 * band_idx is hard-coded to 0 pending DBDC support (see comment), so the
 * phy2 branch is currently dead code kept for the future.
 */
506 mt7615_mcu_beacon_loss_event(struct mt7615_dev *dev, struct sk_buff *skb)
508 struct mt76_connac_beacon_loss_event *event;
509 struct mt76_phy *mphy;
510 u8 band_idx = 0; /* DBDC support */
512 skb_pull(skb, sizeof(struct mt7615_mcu_rxd));
513 event = (struct mt76_connac_beacon_loss_event *)skb->data;
514 if (band_idx && dev->mt76.phy2)
515 mphy = dev->mt76.phy2;
517 mphy = &dev->mt76.phy;
519 ieee80211_iterate_active_interfaces_atomic(mphy->hw,
520 IEEE80211_IFACE_ITER_RESUME_ALL,
521 mt76_connac_mcu_beacon_loss_iter,
/* mt7615_mcu_bss_event - handle a BSS absence/presence event: stop the
 * phy's TX queues while the BSS is absent, wake them when it returns.
 * band_idx is fixed at 0 until DBDC support lands (phy2 branch is inert).
 */
526 mt7615_mcu_bss_event(struct mt7615_dev *dev, struct sk_buff *skb)
528 struct mt76_connac_mcu_bss_event *event;
529 struct mt76_phy *mphy;
530 u8 band_idx = 0; /* DBDC support */
532 skb_pull(skb, sizeof(struct mt7615_mcu_rxd));
533 event = (struct mt76_connac_mcu_bss_event *)skb->data;
535 if (band_idx && dev->mt76.phy2)
536 mphy = dev->mt76.phy2;
538 mphy = &dev->mt76.phy;
540 if (event->is_absent)
541 ieee80211_stop_queues(mphy->hw);
543 ieee80211_wake_queues(mphy->hw);
/* mt7615_mcu_rx_unsolicited_event - top-level dispatcher for unsolicited
 * firmware events (by rxd->eid, with ext events delegated to
 * mt7615_mcu_rx_ext_event). The skb is presumably consumed/freed in the
 * elided tail — confirm in full source.
 */
547 mt7615_mcu_rx_unsolicited_event(struct mt7615_dev *dev, struct sk_buff *skb)
549 struct mt7615_mcu_rxd *rxd = (struct mt7615_mcu_rxd *)skb->data;
553 mt7615_mcu_rx_ext_event(dev, skb);
555 case MCU_EVENT_BSS_BEACON_LOSS:
556 mt7615_mcu_beacon_loss_event(dev, skb);
559 mt7615_mcu_roc_event(dev, skb);
561 case MCU_EVENT_SCHED_SCAN_DONE:
562 case MCU_EVENT_SCAN_DONE:
563 mt7615_mcu_scan_event(dev, skb);
565 case MCU_EVENT_BSS_ABSENCE:
566 mt7615_mcu_bss_event(dev, skb);
568 case MCU_EVENT_COREDUMP:
569 mt76_connac_mcu_coredump_event(&dev->mt76, skb,
/* mt7615_mcu_rx_event - classify an incoming MCU event skb.
 *
 * Events that firmware emits without a pending request (thermal, fw log,
 * assert dump, PS sync, beacon loss, scan done, BSS absence, coredump,
 * ROC, ...) go to the unsolicited dispatcher; everything else is treated as
 * a command response and handed to the mt76 MCU response machinery.
 */
578 void mt7615_mcu_rx_event(struct mt7615_dev *dev, struct sk_buff *skb)
580 struct mt7615_mcu_rxd *rxd = (struct mt7615_mcu_rxd *)skb->data;
582 if (rxd->ext_eid == MCU_EXT_EVENT_THERMAL_PROTECT ||
583 rxd->ext_eid == MCU_EXT_EVENT_FW_LOG_2_HOST ||
584 rxd->ext_eid == MCU_EXT_EVENT_ASSERT_DUMP ||
585 rxd->ext_eid == MCU_EXT_EVENT_PS_SYNC ||
586 rxd->eid == MCU_EVENT_BSS_BEACON_LOSS ||
587 rxd->eid == MCU_EVENT_SCHED_SCAN_DONE ||
588 rxd->eid == MCU_EVENT_BSS_ABSENCE ||
589 rxd->eid == MCU_EVENT_SCAN_DONE ||
590 rxd->eid == MCU_EVENT_COREDUMP ||
591 rxd->eid == MCU_EVENT_ROC ||
593 mt7615_mcu_rx_unsolicited_event(dev, skb);
595 mt76_mcu_rx_event(&dev->mt76, skb);
/* mt7615_mcu_muar_config - program a repeater-mode MUAR (multiple unicast
 * address register) entry for @vif via MCU_EXT_CMD(MUAR_UPDATE).
 *
 * @bssid selects the BSSID slot (odd index) instead of the own-address slot
 * for this repeater entry (index = idx * 2 + bssid). The mode field stays
 * enabled while any other repeater entry remains in use (mask of the upper
 * 32 bits of dev->omac_mask, excluding this entry).
 */
599 mt7615_mcu_muar_config(struct mt7615_dev *dev, struct ieee80211_vif *vif,
600 bool bssid, bool enable)
602 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
603 u32 idx = mvif->mt76.omac_idx - REPEATER_BSSID_START;
604 u32 mask = dev->omac_mask >> 32 & ~BIT(idx);
605 const u8 *addr = vif->addr;
617 .mode = !!mask || enable,
621 .index = idx * 2 + bssid,
625 addr = vif->bss_conf.bssid;
628 ether_addr_copy(req.addr, addr);
630 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(MUAR_UPDATE),
631 &req, sizeof(req), true);
/* mt7615_mcu_add_dev - (de)register a vif's device record with firmware via
 * MCU_EXT_CMD(DEV_INFO_UPDATE).
 *
 * Repeater omac indices are routed through the MUAR path instead. The
 * request carries a single DEV_INFO_ACTIVE TLV with the vif's MAC address
 * and band index.
 */
635 mt7615_mcu_add_dev(struct mt7615_phy *phy, struct ieee80211_vif *vif,
638 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
639 struct mt7615_dev *dev = phy->dev;
653 u8 omac_addr[ETH_ALEN];
657 .omac_idx = mvif->mt76.omac_idx,
658 .band_idx = mvif->mt76.band_idx,
659 .tlv_num = cpu_to_le16(1),
663 .tag = cpu_to_le16(DEV_INFO_ACTIVE),
664 .len = cpu_to_le16(sizeof(struct req_tlv)),
666 .band_idx = mvif->mt76.band_idx,
670 if (mvif->mt76.omac_idx >= REPEATER_BSSID_START)
671 return mt7615_mcu_muar_config(dev, vif, false, enable);
673 memcpy(data.tlv.omac_addr, vif->addr, ETH_ALEN);
674 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(DEV_INFO_UPDATE),
675 &data, sizeof(data), true);
/* mt7615_mcu_add_beacon_offload - upload the beacon template to firmware
 * for hardware beaconing (legacy MCU_EXT_CMD(BCN_OFFLOAD) path).
 *
 * Fetches a beacon template from mac80211, writes the TX descriptor plus
 * frame into the request buffer (hard cap: 512 bytes including TXD), and
 * records the TIM and CSA-countdown IE offsets so firmware can update them.
 * Beacons for the second band are tagged with the ext-phy HW queue.
 */
679 mt7615_mcu_add_beacon_offload(struct mt7615_dev *dev,
680 struct ieee80211_hw *hw,
681 struct ieee80211_vif *vif, bool enable)
683 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
684 struct mt76_wcid *wcid = &dev->mt76.global_wcid;
685 struct ieee80211_mutable_offsets offs;
686 struct ieee80211_tx_info *info;
693 u8 need_pre_tbtt_int;
699 /* bss color change */
703 .omac_idx = mvif->mt76.omac_idx,
705 .wlan_idx = wcid->idx,
706 .band_idx = mvif->mt76.band_idx,
713 skb = ieee80211_beacon_get_template(hw, vif, &offs);
/* Firmware beacon buffer is 512 bytes, TXD included. */
717 if (skb->len > 512 - MT_TXD_SIZE) {
718 dev_err(dev->mt76.dev, "Bcn size limit exceed\n");
723 if (mvif->mt76.band_idx) {
724 info = IEEE80211_SKB_CB(skb);
725 info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;
728 mt7615_mac_write_txwi(dev, (__le32 *)(req.pkt), skb, wcid, NULL,
730 memcpy(req.pkt + MT_TXD_SIZE, skb->data, skb->len);
731 req.pkt_len = cpu_to_le16(MT_TXD_SIZE + skb->len);
732 req.tim_ie_pos = cpu_to_le16(MT_TXD_SIZE + offs.tim_offset);
733 if (offs.cntdwn_counter_offs[0]) {
/* Point firmware at the CSA IE (counter offset - 4 = IE header). */
736 csa_offs = MT_TXD_SIZE + offs.cntdwn_counter_offs[0] - 4;
737 req.csa_ie_pos = cpu_to_le16(csa_offs);
738 req.csa_cnt = skb->data[offs.cntdwn_counter_offs[0]];
743 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(BCN_OFFLOAD), &req,
/* mt7615_mcu_ctrl_pm_state - thin wrapper delegating PM state control to
 * the shared connac helper (legacy ops .set_pm_state hook). */
748 mt7615_mcu_ctrl_pm_state(struct mt7615_dev *dev, int band, int state)
750 return mt76_connac_mcu_set_pm(&dev->mt76, band, state);
/* mt7615_mcu_add_bss - build and send an extended BSS_INFO_UPDATE with
 * OMAC, basic, and (for extended-BSSID omacs) ext TLVs.
 *
 * Repeater omac indices additionally program a MUAR BSSID entry first.
 * NOTE(review): the mt7615_mcu_muar_config() return value is not checked
 * here — likely intentional best-effort, but worth confirming.
 */
754 mt7615_mcu_add_bss(struct mt7615_phy *phy, struct ieee80211_vif *vif,
755 struct ieee80211_sta *sta, bool enable)
757 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
758 struct mt7615_dev *dev = phy->dev;
761 if (mvif->mt76.omac_idx >= REPEATER_BSSID_START)
762 mt7615_mcu_muar_config(dev, vif, true, enable);
764 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76, NULL);
769 mt76_connac_mcu_bss_omac_tlv(skb, vif);
771 mt76_connac_mcu_bss_basic_tlv(skb, vif, sta, phy->mt76,
772 mvif->sta.wcid.idx, enable);
774 if (enable && mvif->mt76.omac_idx >= EXT_BSSID_START &&
775 mvif->mt76.omac_idx < REPEATER_BSSID_START)
776 mt76_connac_mcu_bss_ext_tlv(skb, &mvif->mt76);
778 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
779 MCU_EXT_CMD(BSS_INFO_UPDATE), true);
/* mt7615_mcu_wtbl_tx_ba - TX block-ack session setup/teardown for the
 * firmware-v1 (WTBL-based) flavor.
 *
 * Order matters: the WTBL update (hardware table) is sent first, then the
 * STA record update — the reverse of the RX path below.
 */
783 mt7615_mcu_wtbl_tx_ba(struct mt7615_dev *dev,
784 struct ieee80211_ampdu_params *params,
787 struct mt7615_sta *msta = (struct mt7615_sta *)params->sta->drv_priv;
788 struct mt7615_vif *mvif = msta->vif;
789 struct wtbl_req_hdr *wtbl_hdr;
790 struct sk_buff *skb = NULL;
793 wtbl_hdr = mt76_connac_mcu_alloc_wtbl_req(&dev->mt76, &msta->wcid,
794 WTBL_SET, NULL, &skb);
795 if (IS_ERR(wtbl_hdr))
796 return PTR_ERR(wtbl_hdr);
798 mt76_connac_mcu_wtbl_ba_tlv(&dev->mt76, skb, params, enable, true,
801 err = mt76_mcu_skb_send_msg(&dev->mt76, skb,
802 MCU_EXT_CMD(WTBL_UPDATE), true);
806 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
811 mt76_connac_mcu_sta_ba_tlv(skb, params, enable, true);
813 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
814 MCU_EXT_CMD(STA_REC_UPDATE), true);
/* mt7615_mcu_wtbl_rx_ba - RX block-ack session setup/teardown for the
 * firmware-v1 (WTBL-based) flavor.
 *
 * Mirrors the TX path but in the opposite order: STA record first, WTBL
 * second — and the WTBL update is skipped entirely on teardown or on STA
 * record failure (err < 0 || !enable).
 */
818 mt7615_mcu_wtbl_rx_ba(struct mt7615_dev *dev,
819 struct ieee80211_ampdu_params *params,
822 struct mt7615_sta *msta = (struct mt7615_sta *)params->sta->drv_priv;
823 struct mt7615_vif *mvif = msta->vif;
824 struct wtbl_req_hdr *wtbl_hdr;
828 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
833 mt76_connac_mcu_sta_ba_tlv(skb, params, enable, false);
835 err = mt76_mcu_skb_send_msg(&dev->mt76, skb,
836 MCU_EXT_CMD(STA_REC_UPDATE), true);
837 if (err < 0 || !enable)
841 wtbl_hdr = mt76_connac_mcu_alloc_wtbl_req(&dev->mt76, &msta->wcid,
842 WTBL_SET, NULL, &skb);
843 if (IS_ERR(wtbl_hdr))
844 return PTR_ERR(wtbl_hdr);
846 mt76_connac_mcu_wtbl_ba_tlv(&dev->mt76, skb, params, enable, false,
849 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
850 MCU_EXT_CMD(WTBL_UPDATE), true);
/* mt7615_mcu_wtbl_sta_add - add or remove a station for the firmware-v1
 * flavor by sending two messages: a STA record (sskb) and a WTBL update
 * (wskb).
 *
 * The send order flips with @enable: on enable the WTBL (reset-and-set) is
 * programmed before the STA record; on disable the STA record is torn down
 * first. A NULL @sta means the vif's own self-peer entry (mvif->sta).
 */
854 mt7615_mcu_wtbl_sta_add(struct mt7615_phy *phy, struct ieee80211_vif *vif,
855 struct ieee80211_sta *sta, bool enable)
857 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
858 struct sk_buff *skb, *sskb, *wskb = NULL;
859 struct mt7615_dev *dev = phy->dev;
860 struct wtbl_req_hdr *wtbl_hdr;
861 struct mt7615_sta *msta;
864 msta = sta ? (struct mt7615_sta *)sta->drv_priv : &mvif->sta;
866 sskb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
869 return PTR_ERR(sskb);
871 mt76_connac_mcu_sta_basic_tlv(sskb, vif, sta, enable, true);
873 mt76_connac_mcu_sta_tlv(phy->mt76, sskb, sta, vif, 0,
874 MT76_STA_INFO_STATE_ASSOC);
876 wtbl_hdr = mt76_connac_mcu_alloc_wtbl_req(&dev->mt76, &msta->wcid,
877 WTBL_RESET_AND_SET, NULL,
879 if (IS_ERR(wtbl_hdr))
880 return PTR_ERR(wtbl_hdr);
883 mt76_connac_mcu_wtbl_generic_tlv(&dev->mt76, wskb, vif, sta,
886 mt76_connac_mcu_wtbl_ht_tlv(&dev->mt76, wskb, sta,
887 NULL, wtbl_hdr, true, true);
888 mt76_connac_mcu_wtbl_hdr_trans_tlv(wskb, vif, &msta->wcid,
/* First message: WTBL on enable, STA record on disable. */
892 cmd = enable ? MCU_EXT_CMD(WTBL_UPDATE) : MCU_EXT_CMD(STA_REC_UPDATE);
893 skb = enable ? wskb : sskb;
895 err = mt76_mcu_skb_send_msg(&dev->mt76, skb, cmd, true);
897 skb = enable ? sskb : wskb;
/* Second message: the other of the pair, in the complementary order. */
903 cmd = enable ? MCU_EXT_CMD(STA_REC_UPDATE) : MCU_EXT_CMD(WTBL_UPDATE);
904 skb = enable ? sskb : wskb;
906 return mt76_mcu_skb_send_msg(&dev->mt76, skb, cmd, true);
/* mt7615_mcu_wtbl_update_hdr_trans - enable/refresh RX header translation
 * (802.11 -> 802.3 decap offload) for @sta via the shared connac WTBL
 * helper. */
910 mt7615_mcu_wtbl_update_hdr_trans(struct mt7615_dev *dev,
911 struct ieee80211_vif *vif,
912 struct ieee80211_sta *sta)
914 return mt76_connac_mcu_wtbl_update_hdr_trans(&dev->mt76, vif, sta);
/* MCU op table for firmware v1 (MT7615 with legacy firmware): station state
 * is programmed through direct WTBL_UPDATE commands alongside STA records.
 * Selected in mt7615_load_n9() when is_mt7615() and FW V1. */
917 static const struct mt7615_mcu_ops wtbl_update_ops = {
918 .add_beacon_offload = mt7615_mcu_add_beacon_offload,
919 .set_pm_state = mt7615_mcu_ctrl_pm_state,
920 .add_dev_info = mt7615_mcu_add_dev,
921 .add_bss_info = mt7615_mcu_add_bss,
922 .add_tx_ba = mt7615_mcu_wtbl_tx_ba,
923 .add_rx_ba = mt7615_mcu_wtbl_rx_ba,
924 .sta_add = mt7615_mcu_wtbl_sta_add,
925 .set_drv_ctrl = mt7615_mcu_drv_pmctrl,
926 .set_fw_ctrl = mt7615_mcu_fw_pmctrl,
927 .set_sta_decap_offload = mt7615_mcu_wtbl_update_hdr_trans,
/* mt7615_mcu_sta_ba - block-ack session setup/teardown for the firmware-v2
 * flavor: a single STA_REC_UPDATE message carrying both the STA BA TLV and
 * an embedded WTBL BA TLV (nested under a STA_REC_WTBL container TLV).
 * @tx selects TX (originator) vs RX (recipient) BA.
 */
931 mt7615_mcu_sta_ba(struct mt7615_dev *dev,
932 struct ieee80211_ampdu_params *params,
933 bool enable, bool tx)
935 struct mt7615_sta *msta = (struct mt7615_sta *)params->sta->drv_priv;
936 struct mt7615_vif *mvif = msta->vif;
937 struct wtbl_req_hdr *wtbl_hdr;
938 struct tlv *sta_wtbl;
941 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
946 mt76_connac_mcu_sta_ba_tlv(skb, params, enable, tx);
948 sta_wtbl = mt76_connac_mcu_add_tlv(skb, STA_REC_WTBL, sizeof(struct tlv));
950 wtbl_hdr = mt76_connac_mcu_alloc_wtbl_req(&dev->mt76, &msta->wcid,
951 WTBL_SET, sta_wtbl, &skb);
952 if (IS_ERR(wtbl_hdr))
953 return PTR_ERR(wtbl_hdr);
955 mt76_connac_mcu_wtbl_ba_tlv(&dev->mt76, skb, params, enable, tx,
958 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
959 MCU_EXT_CMD(STA_REC_UPDATE), true);
/* mt7615_mcu_sta_tx_ba - TX BA wrapper over mt7615_mcu_sta_ba(). */
963 mt7615_mcu_sta_tx_ba(struct mt7615_dev *dev,
964 struct ieee80211_ampdu_params *params,
967 return mt7615_mcu_sta_ba(dev, params, enable, true);
/* mt7615_mcu_sta_rx_ba - RX BA wrapper over mt7615_mcu_sta_ba(). */
971 mt7615_mcu_sta_rx_ba(struct mt7615_dev *dev,
972 struct ieee80211_ampdu_params *params,
975 return mt7615_mcu_sta_ba(dev, params, enable, false);
/* __mt7615_mcu_add_sta - common station add/remove: populate a
 * mt76_sta_cmd_info and delegate to the shared connac STA command builder.
 * @cmd selects legacy vs unified STA_REC_UPDATE; @offload_fw marks the
 * offload firmware flavor. NULL @sta resolves to the vif self-peer wcid.
 */
979 __mt7615_mcu_add_sta(struct mt76_phy *phy, struct ieee80211_vif *vif,
980 struct ieee80211_sta *sta, bool enable, int cmd,
983 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
984 struct mt76_sta_cmd_info info = {
987 .offload_fw = offload_fw,
993 info.wcid = sta ? (struct mt76_wcid *)sta->drv_priv : &mvif->sta.wcid;
994 return mt76_connac_mcu_sta_cmd(phy, &info);
/* mt7615_mcu_add_sta - firmware-v2 .sta_add hook: extended (non-uni)
 * STA_REC_UPDATE, no offload firmware. */
998 mt7615_mcu_add_sta(struct mt7615_phy *phy, struct ieee80211_vif *vif,
999 struct ieee80211_sta *sta, bool enable)
1001 return __mt7615_mcu_add_sta(phy->mt76, vif, sta, enable,
1002 MCU_EXT_CMD(STA_REC_UPDATE), false);
/* mt7615_mcu_sta_update_hdr_trans - firmware-v2 RX decap offload update,
 * issued through a STA record (extended command) rather than a raw WTBL
 * write. */
1006 mt7615_mcu_sta_update_hdr_trans(struct mt7615_dev *dev,
1007 struct ieee80211_vif *vif,
1008 struct ieee80211_sta *sta)
1010 struct mt7615_sta *msta = (struct mt7615_sta *)sta->drv_priv;
1012 return mt76_connac_mcu_sta_update_hdr_trans(&dev->mt76,
1014 MCU_EXT_CMD(STA_REC_UPDATE));
/* MCU op table for firmware v2 (non-MT7615 chips with extended-command
 * firmware): station state goes through STA records with embedded WTBL
 * TLVs. Selected in mt7615_load_n9() when !is_mt7615(). */
1017 static const struct mt7615_mcu_ops sta_update_ops = {
1018 .add_beacon_offload = mt7615_mcu_add_beacon_offload,
1019 .set_pm_state = mt7615_mcu_ctrl_pm_state,
1020 .add_dev_info = mt7615_mcu_add_dev,
1021 .add_bss_info = mt7615_mcu_add_bss,
1022 .add_tx_ba = mt7615_mcu_sta_tx_ba,
1023 .add_rx_ba = mt7615_mcu_sta_rx_ba,
1024 .sta_add = mt7615_mcu_add_sta,
1025 .set_drv_ctrl = mt7615_mcu_drv_pmctrl,
1026 .set_fw_ctrl = mt7615_mcu_fw_pmctrl,
1027 .set_sta_decap_offload = mt7615_mcu_sta_update_hdr_trans,
/* mt7615_mcu_uni_ctrl_pm_state - unified-command .set_pm_state hook.
 * Body elided in this view — presumably a no-op or connac delegate;
 * confirm in full source. */
1031 mt7615_mcu_uni_ctrl_pm_state(struct mt7615_dev *dev, int band, int state)
/* mt7615_mcu_uni_add_beacon_offload - unified-command beacon template
 * upload: a BSS_INFO_UPDATE carrying a UNI_BSS_INFO_BCN_CONTENT TLV with
 * the TXD + beacon frame, TIM and CSA IE positions.
 *
 * Same 512-byte (TXD included) size cap as the legacy path. The enable
 * semantics (0 disable / 1 enable / 2 probe-resp update) and payload format
 * flags are documented inline in the struct definition.
 */
1037 mt7615_mcu_uni_add_beacon_offload(struct mt7615_dev *dev,
1038 struct ieee80211_hw *hw,
1039 struct ieee80211_vif *vif,
1042 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
1043 struct mt76_wcid *wcid = &dev->mt76.global_wcid;
1044 struct ieee80211_mutable_offsets offs;
1050 struct bcn_content_tlv {
1056 /* 0: disable beacon offload
1057 * 1: enable beacon offload
1058 * 2: update probe respond offload
1061 /* 0: legacy format (TXD + payload)
1062 * 1: only cap field IE
1067 } __packed beacon_tlv;
1070 .bss_idx = mvif->mt76.idx,
1073 .tag = cpu_to_le16(UNI_BSS_INFO_BCN_CONTENT),
1074 .len = cpu_to_le16(sizeof(struct bcn_content_tlv)),
1078 struct sk_buff *skb;
1083 skb = ieee80211_beacon_get_template(mt76_hw(dev), vif, &offs);
1087 if (skb->len > 512 - MT_TXD_SIZE) {
1088 dev_err(dev->mt76.dev, "beacon size limit exceed\n");
1093 mt7615_mac_write_txwi(dev, (__le32 *)(req.beacon_tlv.pkt), skb,
1094 wcid, NULL, 0, NULL, true);
1095 memcpy(req.beacon_tlv.pkt + MT_TXD_SIZE, skb->data, skb->len);
1096 req.beacon_tlv.pkt_len = cpu_to_le16(MT_TXD_SIZE + skb->len);
1097 req.beacon_tlv.tim_ie_pos = cpu_to_le16(MT_TXD_SIZE + offs.tim_offset);
1099 if (offs.cntdwn_counter_offs[0]) {
/* CSA IE header sits 4 bytes before the countdown counter. */
1102 csa_offs = MT_TXD_SIZE + offs.cntdwn_counter_offs[0] - 4;
1103 req.beacon_tlv.csa_ie_pos = cpu_to_le16(csa_offs);
1108 return mt76_mcu_send_msg(&dev->mt76, MCU_UNI_CMD(BSS_INFO_UPDATE),
1109 &req, sizeof(req), true);
/* mt7615_mcu_uni_add_dev - unified .add_dev_info hook: delegate to the
 * shared connac helper with the vif self-peer wcid. */
1113 mt7615_mcu_uni_add_dev(struct mt7615_phy *phy, struct ieee80211_vif *vif,
1116 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
1118 return mt76_connac_mcu_uni_add_dev(phy->mt76, vif, &mvif->sta.wcid,
/* mt7615_mcu_uni_add_bss - unified .add_bss_info hook: delegate to the
 * shared connac helper with the vif self-peer wcid. */
1123 mt7615_mcu_uni_add_bss(struct mt7615_phy *phy, struct ieee80211_vif *vif,
1124 struct ieee80211_sta *sta, bool enable)
1126 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
1128 return mt76_connac_mcu_uni_add_bss(phy->mt76, vif, &mvif->sta.wcid,
/* mt7615_mcu_uni_add_sta - unified .sta_add hook: unified STA_REC_UPDATE
 * with offload_fw = true (offload firmware flavor). */
1133 mt7615_mcu_uni_add_sta(struct mt7615_phy *phy, struct ieee80211_vif *vif,
1134 struct ieee80211_sta *sta, bool enable)
1136 return __mt7615_mcu_add_sta(phy->mt76, vif, sta, enable,
1137 MCU_UNI_CMD(STA_REC_UPDATE), true);
/* mt7615_mcu_uni_tx_ba - unified TX BA setup/teardown via the shared
 * connac STA BA helper. */
1141 mt7615_mcu_uni_tx_ba(struct mt7615_dev *dev,
1142 struct ieee80211_ampdu_params *params,
1145 struct mt7615_sta *sta = (struct mt7615_sta *)params->sta->drv_priv;
1147 return mt76_connac_mcu_sta_ba(&dev->mt76, &sta->vif->mt76, params,
1148 MCU_UNI_CMD(STA_REC_UPDATE), enable,
/* mt7615_mcu_uni_rx_ba - unified RX BA setup/teardown.
 *
 * Two-step: first a STA record with the BA TLV; then, only when enabling
 * and the first message succeeded, a second STA record carrying the WTBL
 * BA TLV nested under a STA_REC_WTBL container.
 */
1153 mt7615_mcu_uni_rx_ba(struct mt7615_dev *dev,
1154 struct ieee80211_ampdu_params *params,
1157 struct mt7615_sta *msta = (struct mt7615_sta *)params->sta->drv_priv;
1158 struct mt7615_vif *mvif = msta->vif;
1159 struct wtbl_req_hdr *wtbl_hdr;
1160 struct tlv *sta_wtbl;
1161 struct sk_buff *skb;
1164 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
1167 return PTR_ERR(skb);
1169 mt76_connac_mcu_sta_ba_tlv(skb, params, enable, false);
1171 err = mt76_mcu_skb_send_msg(&dev->mt76, skb,
1172 MCU_UNI_CMD(STA_REC_UPDATE), true);
1173 if (err < 0 || !enable)
1176 skb = mt76_connac_mcu_alloc_sta_req(&dev->mt76, &mvif->mt76,
1179 return PTR_ERR(skb);
1181 sta_wtbl = mt76_connac_mcu_add_tlv(skb, STA_REC_WTBL,
1182 sizeof(struct tlv));
1184 wtbl_hdr = mt76_connac_mcu_alloc_wtbl_req(&dev->mt76, &msta->wcid,
1185 WTBL_SET, sta_wtbl, &skb);
1186 if (IS_ERR(wtbl_hdr))
1187 return PTR_ERR(wtbl_hdr);
1189 mt76_connac_mcu_wtbl_ba_tlv(&dev->mt76, skb, params, enable, false,
1190 sta_wtbl, wtbl_hdr);
1192 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
1193 MCU_UNI_CMD(STA_REC_UPDATE), true);
/* mt7615_mcu_sta_uni_update_hdr_trans - unified-command RX decap offload
 * update for @sta, via a unified STA_REC_UPDATE. */
1197 mt7615_mcu_sta_uni_update_hdr_trans(struct mt7615_dev *dev,
1198 struct ieee80211_vif *vif,
1199 struct ieee80211_sta *sta)
1201 struct mt7615_sta *msta = (struct mt7615_sta *)sta->drv_priv;
1203 return mt76_connac_mcu_sta_update_hdr_trans(&dev->mt76,
1205 MCU_UNI_CMD(STA_REC_UPDATE));
/* MCU op table for the unified-command firmware (MT7663 offload flavor):
 * everything goes through MCU_UNI_CMD messages, and driver-own uses the
 * retrying low-power variant (mt7615_mcu_lp_drv_pmctrl). */
1208 static const struct mt7615_mcu_ops uni_update_ops = {
1209 .add_beacon_offload = mt7615_mcu_uni_add_beacon_offload,
1210 .set_pm_state = mt7615_mcu_uni_ctrl_pm_state,
1211 .add_dev_info = mt7615_mcu_uni_add_dev,
1212 .add_bss_info = mt7615_mcu_uni_add_bss,
1213 .add_tx_ba = mt7615_mcu_uni_tx_ba,
1214 .add_rx_ba = mt7615_mcu_uni_rx_ba,
1215 .sta_add = mt7615_mcu_uni_add_sta,
1216 .set_drv_ctrl = mt7615_mcu_lp_drv_pmctrl,
1217 .set_fw_ctrl = mt7615_mcu_fw_pmctrl,
1218 .set_sta_decap_offload = mt7615_mcu_sta_uni_update_hdr_trans,
/* mt7615_mcu_restart - ask the MCU to re-enter firmware-download state
 * (RESTART_DL_REQ), waiting for the ack. */
1221 int mt7615_mcu_restart(struct mt76_dev *dev)
1223 return mt76_mcu_send_msg(dev, MCU_CMD(RESTART_DL_REQ), NULL, 0, true);
1225 EXPORT_SYMBOL_GPL(mt7615_mcu_restart);
/* mt7615_load_patch - download the ROM patch image @name to @addr.
 *
 * Sequence: request firmware blob -> validate size -> take the patch
 * semaphore (PATCH_NOT_DL_SEM_SUCCESS means the patch must be downloaded;
 * other elided cases skip or fail) -> init download -> scatter the payload
 * after the patch header -> start the patch -> release the semaphore.
 * The firmware blob is released on all paths via the elided out label.
 */
1227 static int mt7615_load_patch(struct mt7615_dev *dev, u32 addr, const char *name)
1229 const struct mt7615_patch_hdr *hdr;
1230 const struct firmware *fw = NULL;
1233 ret = firmware_request_nowarn(&fw, name, dev->mt76.dev);
1237 if (!fw || !fw->data || fw->size < sizeof(*hdr)) {
1238 dev_err(dev->mt76.dev, "Invalid firmware\n");
1243 sem = mt76_connac_mcu_patch_sem_ctrl(&dev->mt76, true);
1247 case PATCH_NOT_DL_SEM_SUCCESS:
1250 dev_err(dev->mt76.dev, "Failed to get patch semaphore\n");
1255 hdr = (const struct mt7615_patch_hdr *)(fw->data);
1257 dev_info(dev->mt76.dev, "HW/SW Version: 0x%x, Build Time: %.16s\n",
1258 be32_to_cpu(hdr->hw_sw_ver), hdr->build_date);
1260 len = fw->size - sizeof(*hdr);
1262 ret = mt76_connac_mcu_init_download(&dev->mt76, addr, len,
1265 dev_err(dev->mt76.dev, "Download request failed\n");
1269 ret = mt76_mcu_send_firmware(&dev->mt76, MCU_CMD(FW_SCATTER),
1270 fw->data + sizeof(*hdr), len);
1272 dev_err(dev->mt76.dev, "Failed to send firmware to device\n");
1276 ret = mt76_connac_mcu_start_patch(&dev->mt76);
1278 dev_err(dev->mt76.dev, "Failed to start patch\n");
1281 sem = mt76_connac_mcu_patch_sem_ctrl(&dev->mt76, false);
1283 case PATCH_REL_SEM_SUCCESS:
1287 dev_err(dev->mt76.dev, "Failed to release patch semaphore\n");
1292 release_firmware(fw);
/* mt7615_mcu_send_ram_firmware - download all regions of a RAM image.
 *
 * @hdr points at an array of per-region trailers (1 region for CR4, 2 for
 * N9); each region is init-downloaded to its address with a download mode
 * derived from its feature_set, then scattered from @data. The region
 * length includes the trailing image CRC (IMG_CRC_LEN). The offset advance
 * per iteration is elided but presumably offset += len.
 */
1298 mt7615_mcu_send_ram_firmware(struct mt7615_dev *dev,
1299 const struct mt7615_fw_trailer *hdr,
1300 const u8 *data, bool is_cr4)
1302 int n_region = is_cr4 ? CR4_REGION_NUM : N9_REGION_NUM;
1303 int err, i, offset = 0;
1304 u32 len, addr, mode;
1306 for (i = 0; i < n_region; i++) {
1307 mode = mt76_connac_mcu_gen_dl_mode(&dev->mt76,
1308 hdr[i].feature_set, is_cr4);
1309 len = le32_to_cpu(hdr[i].len) + IMG_CRC_LEN;
1310 addr = le32_to_cpu(hdr[i].addr);
1312 err = mt76_connac_mcu_init_download(&dev->mt76, addr, len,
1315 dev_err(dev->mt76.dev, "Download request failed\n");
1319 err = mt76_mcu_send_firmware(&dev->mt76, MCU_CMD(FW_SCATTER),
1320 data + offset, len);
1322 dev_err(dev->mt76.dev, "Failed to send firmware to device\n");
/* mt7615_load_n9 - load the N9 (WiFi MCU) RAM firmware @name.
 *
 * Region trailers sit at the end of the blob (N9_REGION_NUM entries);
 * after scattering the regions the firmware is started at the first
 * trailer's address. Also records the fw version string into the wiphy and
 * selects the MCU op table / fw_ver by chip: v2/sta_update_ops for
 * non-MT7615 chips, v1/wtbl_update_ops for MT7615.
 */
1332 static int mt7615_load_n9(struct mt7615_dev *dev, const char *name)
1334 const struct mt7615_fw_trailer *hdr;
1335 const struct firmware *fw;
1338 ret = request_firmware(&fw, name, dev->mt76.dev);
1342 if (!fw || !fw->data || fw->size < N9_REGION_NUM * sizeof(*hdr)) {
1343 dev_err(dev->mt76.dev, "Invalid firmware\n");
1348 hdr = (const struct mt7615_fw_trailer *)(fw->data + fw->size -
1349 N9_REGION_NUM * sizeof(*hdr));
1351 dev_info(dev->mt76.dev, "N9 Firmware Version: %.10s, Build Time: %.15s\n",
1352 hdr->fw_ver, hdr->build_date);
1354 ret = mt7615_mcu_send_ram_firmware(dev, hdr, fw->data, false);
1358 ret = mt76_connac_mcu_start_firmware(&dev->mt76,
1359 le32_to_cpu(hdr->addr),
1362 dev_err(dev->mt76.dev, "Failed to start N9 firmware\n");
1366 snprintf(dev->mt76.hw->wiphy->fw_version,
1367 sizeof(dev->mt76.hw->wiphy->fw_version),
1368 "%.10s-%.15s", hdr->fw_ver, hdr->build_date);
1370 if (!is_mt7615(&dev->mt76)) {
1371 dev->fw_ver = MT7615_FIRMWARE_V2;
1372 dev->mcu_ops = &sta_update_ops;
1374 dev->fw_ver = MT7615_FIRMWARE_V1;
1375 dev->mcu_ops = &wtbl_update_ops;
1379 release_firmware(fw);
/* mt7615_load_cr4 - load the CR4 (packet-offload MCU) RAM firmware @name.
 * Same trailer-at-end layout as the N9 path (CR4_REGION_NUM entries);
 * started with the WORKING_PDA_CR4 option at address 0.
 */
1383 static int mt7615_load_cr4(struct mt7615_dev *dev, const char *name)
1385 const struct mt7615_fw_trailer *hdr;
1386 const struct firmware *fw;
1389 ret = request_firmware(&fw, name, dev->mt76.dev);
1393 if (!fw || !fw->data || fw->size < CR4_REGION_NUM * sizeof(*hdr)) {
1394 dev_err(dev->mt76.dev, "Invalid firmware\n");
1399 hdr = (const struct mt7615_fw_trailer *)(fw->data + fw->size -
1400 CR4_REGION_NUM * sizeof(*hdr));
1402 dev_info(dev->mt76.dev, "CR4 Firmware Version: %.10s, Build Time: %.15s\n",
1403 hdr->fw_ver, hdr->build_date);
1405 ret = mt7615_mcu_send_ram_firmware(dev, hdr, fw->data, true);
1409 ret = mt76_connac_mcu_start_firmware(&dev->mt76, 0,
1410 FW_START_WORKING_PDA_CR4);
1412 dev_err(dev->mt76.dev, "Failed to start CR4 firmware\n");
1417 release_firmware(fw);
/* mt7615_load_ram - load both RAM images for MT7615: N9 first, then CR4. */
1422 static int mt7615_load_ram(struct mt7615_dev *dev)
1426 ret = mt7615_load_n9(dev, MT7615_FIRMWARE_N9);
1430 return mt7615_load_cr4(dev, MT7615_FIRMWARE_CR4);
/* mt7615_load_firmware - full MT7615 boot: verify the chip is in
 * firmware-download state, load the ROM patch and both RAM images, then
 * poll (500ms) for the firmware-ready state. */
1433 static int mt7615_load_firmware(struct mt7615_dev *dev)
1438 val = mt76_get_field(dev, MT_TOP_MISC2, MT_TOP_MISC2_FW_STATE);
1440 if (val != FW_STATE_FW_DOWNLOAD) {
1441 dev_err(dev->mt76.dev, "Firmware is not ready for download\n");
1445 ret = mt7615_load_patch(dev, MT7615_PATCH_ADDRESS, MT7615_ROM_PATCH);
1449 ret = mt7615_load_ram(dev);
1453 if (!mt76_poll_msec(dev, MT_TOP_MISC2, MT_TOP_MISC2_FW_STATE,
1454 FIELD_PREP(MT_TOP_MISC2_FW_STATE,
1455 FW_STATE_RDY), 500)) {
1456 dev_err(dev->mt76.dev, "Timeout for initializing firmware\n");
/* mt7622_load_firmware - MT7622 boot: bypass the TX scheduler during
 * download, load the MT7622 patch and N9 image (no CR4 on this chip), then
 * poll (1.5s) for NORMAL_TRX state and restore the scheduler. Uses the
 * MT_TOP_OFF_RSV state register rather than MT_TOP_MISC2. */
1463 static int mt7622_load_firmware(struct mt7615_dev *dev)
1468 mt76_set(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_BYPASS_TX_SCH);
1470 val = mt76_get_field(dev, MT_TOP_OFF_RSV, MT_TOP_OFF_RSV_FW_STATE);
1471 if (val != FW_STATE_FW_DOWNLOAD) {
1472 dev_err(dev->mt76.dev, "Firmware is not ready for download\n");
1476 ret = mt7615_load_patch(dev, MT7622_PATCH_ADDRESS, MT7622_ROM_PATCH);
1480 ret = mt7615_load_n9(dev, MT7622_FIRMWARE_N9);
1484 if (!mt76_poll_msec(dev, MT_TOP_OFF_RSV, MT_TOP_OFF_RSV_FW_STATE,
1485 FIELD_PREP(MT_TOP_OFF_RSV_FW_STATE,
1486 FW_STATE_NORMAL_TRX), 1500)) {
1487 dev_err(dev->mt76.dev, "Timeout for initializing firmware\n");
1491 mt76_clear(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_BYPASS_TX_SCH);
/* Configure firmware-log forwarding to the host via the FW_LOG_2_HOST
 * extended MCU command; @ctrl selects the log destination/level. */
1496 int mt7615_mcu_fw_log_2_host(struct mt7615_dev *dev, u8 ctrl)
1505 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(FW_LOG_2_HOST),
1506 &data, sizeof(data), true);
/* Ask the firmware to apply its cached calibration data (CAL_CACHE
 * command with cache_enable set); no response wait (last arg false). */
1509 static int mt7615_mcu_cal_cache_apply(struct mt7615_dev *dev)
1515 .cache_enable = true
1518 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(CAL_CACHE), &data,
1519 sizeof(data), false);
/* Download the MT7663 N9 RAM firmware. The v3 image carries a common tailer
 * plus one 40-byte tailer per region; each region is scattered to the MCU
 * and the firmware is finally started at the region-provided override
 * address (if any).
 * NOTE(review): some error-path lines appear elided in this excerpt. */
1522 static int mt7663_load_n9(struct mt7615_dev *dev, const char *name)
1524 u32 offset = 0, override_addr = 0, flag = FW_START_DLYCAL;
1525 const struct mt7663_fw_trailer *hdr;
1526 const struct mt7663_fw_buf *buf;
1527 const struct firmware *fw;
1528 const u8 *base_addr;
1531 ret = request_firmware(&fw, name, dev->mt76.dev);
1535 if (!fw || !fw->data || fw->size < FW_V3_COMMON_TAILER_SIZE) {
1536 dev_err(dev->mt76.dev, "Invalid firmware\n");
/* Common tailer (version, build date, region count) at end of image. */
1541 hdr = (const struct mt7663_fw_trailer *)(fw->data + fw->size -
1542 FW_V3_COMMON_TAILER_SIZE);
1544 dev_info(dev->mt76.dev, "N9 Firmware Version: %.10s, Build Time: %.15s\n",
1545 hdr->fw_ver, hdr->build_date);
1546 dev_info(dev->mt76.dev, "Region number: 0x%x\n", hdr->n_region);
1548 base_addr = fw->data + fw->size - FW_V3_COMMON_TAILER_SIZE;
1549 for (i = 0; i < hdr->n_region; i++) {
/* Region tailers are stacked backwards in front of the common tailer. */
1550 u32 shift = (hdr->n_region - i) * FW_V3_REGION_TAILER_SIZE;
1551 u32 len, addr, mode;
1553 dev_info(dev->mt76.dev, "Parsing tailer Region: %d\n", i);
1555 buf = (const struct mt7663_fw_buf *)(base_addr - shift);
1556 mode = mt76_connac_mcu_gen_dl_mode(&dev->mt76,
1557 buf->feature_set, false);
1558 addr = le32_to_cpu(buf->img_dest_addr);
1559 len = le32_to_cpu(buf->img_size);
1561 ret = mt76_connac_mcu_init_download(&dev->mt76, addr, len,
1564 dev_err(dev->mt76.dev, "Download request failed\n");
1568 ret = mt76_mcu_send_firmware(&dev->mt76, MCU_CMD(FW_SCATTER),
1569 fw->data + offset, len);
1571 dev_err(dev->mt76.dev, "Failed to send firmware\n");
1575 offset += le32_to_cpu(buf->img_size);
/* A region flagged with a valid RAM entry supplies the start address. */
1576 if (buf->feature_set & DL_MODE_VALID_RAM_ENTRY) {
1577 override_addr = le32_to_cpu(buf->img_dest_addr);
1578 dev_info(dev->mt76.dev, "Region %d, override_addr = 0x%08x\n",
1584 flag |= FW_START_OVERRIDE;
1586 dev_info(dev->mt76.dev, "override_addr = 0x%08x, option = %d\n",
1587 override_addr, flag);
1589 ret = mt76_connac_mcu_start_firmware(&dev->mt76, override_addr, flag);
1591 dev_err(dev->mt76.dev, "Failed to start N9 firmware\n");
/* Expose "fw_ver-build_date" through wiphy for ethtool/userspace. */
1595 snprintf(dev->mt76.hw->wiphy->fw_version,
1596 sizeof(dev->mt76.hw->wiphy->fw_version),
1597 "%.10s-%.15s", hdr->fw_ver, hdr->build_date);
1600 release_firmware(fw);
/* Pick and load the MT7663 ROM patch. The offload-capable patch is tried
 * first unless the prefer_offload_fw module parameter is cleared; on
 * failure the other variant is used as fallback. The chosen patch also
 * selects the matching N9 firmware name, fw_ver and mcu_ops. */
1606 mt7663_load_rom_patch(struct mt7615_dev *dev, const char **n9_firmware)
1608 const char *selected_rom, *secondary_rom = MT7663_ROM_PATCH;
1609 const char *primary_rom = MT7663_OFFLOAD_ROM_PATCH;
1612 if (!prefer_offload_fw) {
1613 secondary_rom = MT7663_OFFLOAD_ROM_PATCH;
1614 primary_rom = MT7663_ROM_PATCH;
1616 selected_rom = primary_rom;
1618 ret = mt7615_load_patch(dev, MT7663_PATCH_ADDRESS, primary_rom);
1620 dev_info(dev->mt76.dev, "%s not found, switching to %s",
1621 primary_rom, secondary_rom);
1622 ret = mt7615_load_patch(dev, MT7663_PATCH_ADDRESS,
1625 dev_err(dev->mt76.dev, "failed to load %s",
1629 selected_rom = secondary_rom;
/* Offload patch => v3 firmware + unified command ops; otherwise v2. */
1632 if (!strcmp(selected_rom, MT7663_OFFLOAD_ROM_PATCH)) {
1633 *n9_firmware = MT7663_OFFLOAD_FIRMWARE_N9;
1634 dev->fw_ver = MT7615_FIRMWARE_V3;
1635 dev->mcu_ops = &uni_update_ops;
1637 *n9_firmware = MT7663_FIRMWARE_N9;
1638 dev->fw_ver = MT7615_FIRMWARE_V2;
1639 dev->mcu_ops = &sta_update_ops;
/* Core MT7663 firmware load: skip if the N9 already reports ready, load
 * the ROM patch and matching N9 image, then poll (1.5 s) for readiness.
 * Exported for use by the USB/SDIO variants of the driver. */
1645 int __mt7663_load_firmware(struct mt7615_dev *dev)
1647 const char *n9_firmware;
1650 ret = mt76_get_field(dev, MT_CONN_ON_MISC, MT_TOP_MISC2_FW_N9_RDY);
1652 dev_dbg(dev->mt76.dev, "Firmware is already download\n");
1656 ret = mt7663_load_rom_patch(dev, &n9_firmware);
1660 ret = mt7663_load_n9(dev, n9_firmware);
1664 if (!mt76_poll_msec(dev, MT_CONN_ON_MISC, MT_TOP_MISC2_FW_N9_RDY,
1665 MT_TOP_MISC2_FW_N9_RDY, 1500)) {
/* On timeout re-read the FW state field for the error path diagnostics. */
1666 ret = mt76_get_field(dev, MT_CONN_ON_MISC,
1667 MT7663_TOP_MISC2_FW_STATE);
1668 dev_err(dev->mt76.dev, "Timeout for initializing firmware\n");
/* Advertise WoWLAN only when the offload firmware is in use. */
1673 if (mt7615_firmware_offload(dev))
1674 dev->mt76.hw->wiphy->wowlan = &mt76_connac_wowlan_support;
1675 #endif /* CONFIG_PM */
1677 dev_dbg(dev->mt76.dev, "Firmware init done\n")
/* PCIe wrapper around __mt7663_load_firmware(): bypass the TX scheduler
 * while downloading, restore it afterwards. */
1683 static int mt7663_load_firmware(struct mt7615_dev *dev)
1687 mt76_set(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_BYPASS_TX_SCH);
1689 ret = __mt7663_load_firmware(dev);
1693 mt76_clear(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_BYPASS_TX_SCH);
/* MCU initialization entry point: install the mcu_ops vtable, take driver
 * PM control, load the chip-specific firmware, drain the FWDL queue and
 * mark the MCU running. Finishes by disabling fw-log forwarding. */
1698 int mt7615_mcu_init(struct mt7615_dev *dev)
1700 static const struct mt76_mcu_ops mt7615_mcu_ops = {
1701 .headroom = sizeof(struct mt7615_mcu_txd),
1702 .mcu_skb_send_msg = mt7615_mcu_send_message,
1703 .mcu_parse_response = mt7615_mcu_parse_response,
1704 .mcu_restart = mt7615_mcu_restart,
1708 dev->mt76.mcu_ops = &mt7615_mcu_ops,
1710 ret = mt7615_mcu_drv_pmctrl(dev);
/* Firmware loader differs per chip generation. */
1714 switch (mt76_chip(&dev->mt76)) {
1716 ret = mt7622_load_firmware(dev);
1719 ret = mt7663_load_firmware(dev);
1722 ret = mt7615_load_firmware(dev);
1728 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_FWDL], false);
1729 dev_dbg(dev->mt76.dev, "Firmware init done\n");
1730 set_bit(MT76_STATE_MCU_RUNNING, &dev->mphy.state);
/* DBDC parts apply the firmware calibration cache after load. */
1732 if (dev->dbdc_support) {
1733 ret = mt7615_mcu_cal_cache_apply(dev);
1738 return mt7615_mcu_fw_log_2_host(dev, 0);
/* MCU teardown: restart the MCU, hand firmware control back to the MCU
 * side, and purge any pending MCU response skbs. */
1742 void mt7615_mcu_exit(struct mt7615_dev *dev)
1744 __mt76_mcu_restart(&dev->mt76);
1745 mt7615_mcu_set_fw_ctrl(dev);
1746 skb_queue_purge(&dev->mt76.mcu.res_q);
1748 EXPORT_SYMBOL_GPL(mt7615_mcu_exit);
/* Push the (chip-specific slice of the) EEPROM to the firmware via the
 * EFUSE_BUFFER_MODE command. Start offset and length depend on chip;
 * MT7663 additionally uses content_format 1. */
1750 int mt7615_mcu_set_eeprom(struct mt7615_dev *dev)
1756 } __packed req_hdr = {
1759 u8 *eep = (u8 *)dev->mt76.eeprom.data;
1760 struct sk_buff *skb;
1761 int eep_len, offset;
1763 switch (mt76_chip(&dev->mt76)) {
1765 eep_len = MT7622_EE_MAX - MT_EE_NIC_CONF_0;
1766 offset = MT_EE_NIC_CONF_0;
1769 eep_len = MT7663_EE_MAX - MT_EE_CHIP_ID;
1770 req_hdr.content_format = 1;
1771 offset = MT_EE_CHIP_ID;
1774 eep_len = MT7615_EE_MAX - MT_EE_NIC_CONF_0;
1775 offset = MT_EE_NIC_CONF_0;
1779 req_hdr.len = cpu_to_le16(eep_len);
/* Message = fixed header + raw EEPROM payload. */
1781 skb = mt76_mcu_msg_alloc(&dev->mt76, NULL, sizeof(req_hdr) + eep_len);
1785 skb_put_data(skb, &req_hdr, sizeof(req_hdr));
1786 skb_put_data(skb, eep + offset, eep_len);
1788 return mt76_mcu_skb_send_msg(&dev->mt76, skb,
1789 MCU_EXT_CMD(EFUSE_BUFFER_MODE), true);
/* Program EDCA/WMM parameters for one hardware queue via EDCA_UPDATE.
 * CW values are sent as exponents (fls of the mac80211 CW value);
 * cw_max defaults to 2^10 - see the hedged note below. */
1792 int mt7615_mcu_set_wmm(struct mt7615_dev *dev, u8 queue,
1793 const struct ieee80211_tx_queue_params *params)
1795 #define WMM_AIFS_SET BIT(0)
1796 #define WMM_CW_MIN_SET BIT(1)
1797 #define WMM_CW_MAX_SET BIT(2)
1798 #define WMM_TXOP_SET BIT(3)
1799 #define WMM_PARAM_SET (WMM_AIFS_SET | WMM_CW_MIN_SET | \
1800 WMM_CW_MAX_SET | WMM_TXOP_SET)
1813 .valid = WMM_PARAM_SET,
1814 .aifs = params->aifs,
/* NOTE(review): default cw_max exponent is 10; overridden below when
 * params->cw_max is set - guard lines appear elided in this excerpt. */
1816 .cw_max = cpu_to_le16(10),
1817 .txop = cpu_to_le16(params->txop),
1821 req.cw_min = fls(params->cw_min);
1823 req.cw_max = cpu_to_le16(fls(params->cw_max));
1825 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(EDCA_UPDATE),
1826 &req, sizeof(req), true);
/* Configure dual-band-dual-concurrent (DBDC) band assignment: build a
 * table mapping BSS/MBSS/MU/BF/WMM/MGMT resources to band 0 or 1 based on
 * the ext phy's omac mask, then send it via DBDC_CTRL. */
1829 int mt7615_mcu_set_dbdc(struct mt7615_dev *dev)
1831 struct mt7615_phy *ext_phy = mt7615_ext_phy(dev);
1842 struct dbdc_entry entry[64];
1844 .enable = !!ext_phy,
/* Append one table entry and advance the running count. */
1851 #define ADD_DBDC_ENTRY(_type, _idx, _band) \
1853 req.entry[req.num].type = _type; \
1854 req.entry[req.num].index = _idx; \
1855 req.entry[req.num++].band = _band; \
/* BSS slots 0-3: band follows the ext phy's omac ownership bits. */
1858 for (i = 0; i < 4; i++) {
1859 bool band = !!(ext_phy->omac_mask & BIT_ULL(i));
1861 ADD_DBDC_ENTRY(DBDC_TYPE_BSS, i, band);
/* MBSS slots: omac bits start at 0x11. */
1864 for (i = 0; i < 14; i++) {
1865 bool band = !!(ext_phy->omac_mask & BIT_ULL(0x11 + i));
1867 ADD_DBDC_ENTRY(DBDC_TYPE_MBSS, i, band);
1870 ADD_DBDC_ENTRY(DBDC_TYPE_MU, 0, 1);
1872 for (i = 0; i < 3; i++)
1873 ADD_DBDC_ENTRY(DBDC_TYPE_BF, i, 1);
/* WMM sets 0/1 -> band 0, 2/3 -> band 1; mgmt queues split likewise. */
1875 ADD_DBDC_ENTRY(DBDC_TYPE_WMM, 0, 0);
1876 ADD_DBDC_ENTRY(DBDC_TYPE_WMM, 1, 0);
1877 ADD_DBDC_ENTRY(DBDC_TYPE_WMM, 2, 1);
1878 ADD_DBDC_ENTRY(DBDC_TYPE_WMM, 3, 1);
1880 ADD_DBDC_ENTRY(DBDC_TYPE_MGMT, 0, 0);
1881 ADD_DBDC_ENTRY(DBDC_TYPE_MGMT, 1, 1);
1884 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(DBDC_CTRL), &req,
/* Reset the entire firmware WTBL (station hardware table) via a
 * WTBL_UPDATE request with operation WTBL_RESET_ALL. */
1888 int mt7615_mcu_del_wtbl_all(struct mt7615_dev *dev)
1890 struct wtbl_req_hdr req = {
1891 .operation = WTBL_RESET_ALL,
1894 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(WTBL_UPDATE),
1895 &req, sizeof(req), true);
/* Set the FCC5 radar long-pulse minimum number threshold (tag 0x1 of the
 * SET_RADAR_TH extended command). */
1898 int mt7615_mcu_set_fcc5_lpn(struct mt7615_dev *dev, int val)
1904 .tag = cpu_to_le16(0x1),
1905 .min_lpn = cpu_to_le16(val),
1908 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(SET_RADAR_TH),
1909 &req, sizeof(req), true);
/* Push global radar pulse thresholds (widths, powers, PRIs) to the
 * firmware detector - tag 0x3 of SET_RADAR_TH. Field-by-field copy from
 * the driver's mt7615_dfs_pulse via the __req_field helper macro. */
1912 int mt7615_mcu_set_pulse_th(struct mt7615_dev *dev,
1913 const struct mt7615_dfs_pulse *pulse)
1917 __le32 max_width; /* us */
1918 __le32 max_pwr; /* dbm */
1919 __le32 min_pwr; /* dbm */
1920 __le32 min_stgr_pri; /* us */
1921 __le32 max_stgr_pri; /* us */
1922 __le32 min_cr_pri; /* us */
1923 __le32 max_cr_pri; /* us */
1925 .tag = cpu_to_le16(0x3),
1926 #define __req_field(field) .field = cpu_to_le32(pulse->field)
1927 __req_field(max_width),
1928 __req_field(max_pwr),
1929 __req_field(min_pwr),
1930 __req_field(min_stgr_pri),
1931 __req_field(max_stgr_pri),
1932 __req_field(min_cr_pri),
1933 __req_field(max_cr_pri),
1937 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(SET_RADAR_TH),
1938 &req, sizeof(req), true);
/* Program one radar-pattern descriptor (indexed by @index) into the
 * firmware detector - tag 0x2 of SET_RADAR_TH. u8 fields are copied
 * verbatim, u32 fields converted to little-endian. */
1941 int mt7615_mcu_set_radar_th(struct mt7615_dev *dev, int index,
1942 const struct mt7615_dfs_pattern *pattern)
1962 .tag = cpu_to_le16(0x2),
1963 .radar_type = cpu_to_le16(index),
1964 #define __req_field_u8(field) .field = pattern->field
1965 #define __req_field_u32(field) .field = cpu_to_le32(pattern->field)
1966 __req_field_u8(enb),
1967 __req_field_u8(stgr),
1968 __req_field_u8(min_crpn),
1969 __req_field_u8(max_crpn),
1970 __req_field_u8(min_crpr),
1971 __req_field_u8(min_pw),
1972 __req_field_u8(max_pw),
1973 __req_field_u32(min_pri),
1974 __req_field_u32(max_pri),
1975 __req_field_u8(min_crbn),
1976 __req_field_u8(max_crbn),
1977 __req_field_u8(min_stgpn),
1978 __req_field_u8(max_stgpn),
1979 __req_field_u8(min_stgpr),
1980 #undef __req_field_u8
1981 #undef __req_field_u32
1984 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(SET_RADAR_TH),
1985 &req, sizeof(req), true);
/* Debug/test helper: emit a synthetic radar pulse train (from
 * dev->radar_pattern) to the firmware via SET_RDD_PATTERN. Pulse start
 * times are spaced by the configured period from boottime "now". */
1988 int mt7615_mcu_rdd_send_pattern(struct mt7615_dev *dev)
1999 .pulse_num = dev->radar_pattern.n_pulses,
2001 u32 start_time = ktime_to_ms(ktime_get_boottime());
/* Reject patterns that would overflow the fixed request array. */
2004 if (dev->radar_pattern.n_pulses > ARRAY_SIZE(req.pattern))
2007 /* TODO: add some noise here */
2008 for (i = 0; i < dev->radar_pattern.n_pulses; i++) {
2009 u32 ts = start_time + i * dev->radar_pattern.period;
2011 req.pattern[i].width = cpu_to_le16(dev->radar_pattern.width);
2012 req.pattern[i].power = cpu_to_le16(dev->radar_pattern.power);
2013 req.pattern[i].start_time = cpu_to_le32(ts);
2016 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(SET_RDD_PATTERN),
2017 &req, sizeof(req), false);
/* Fill the per-rate TX power SKU table @sku for the channel-info command.
 * The regulatory/SAR-limited power is computed first, then each SKU slot
 * is looked up from struct mt76_power_limits via an offset mapping table;
 * trailing entries hold per-NSS power deltas. On MT7663 the firmware only
 * takes a single flat power value. */
2020 static void mt7615_mcu_set_txpower_sku(struct mt7615_phy *phy, u8 *sku)
2022 struct mt76_phy *mphy = phy->mt76;
2023 struct ieee80211_hw *hw = mphy->hw;
2024 struct mt76_power_limits limits;
2025 s8 *limits_array = (s8 *)&limits;
2026 int n_chains = hweight8(mphy->antenna_mask);
/* hw power_level is in dBm; firmware wants 0.5 dB units. */
2027 int tx_power = hw->conf.power_level * 2;
/* Map each MT_SKU_* rate slot to its byte offset inside
 * struct mt76_power_limits, so limits can be indexed linearly. */
2029 static const u8 sku_mapping[] = {
2030 #define SKU_FIELD(_type, _field) \
2031 [MT_SKU_##_type] = offsetof(struct mt76_power_limits, _field)
2032 SKU_FIELD(CCK_1_2, cck[0]),
2033 SKU_FIELD(CCK_55_11, cck[2]),
2034 SKU_FIELD(OFDM_6_9, ofdm[0]),
2035 SKU_FIELD(OFDM_12_18, ofdm[2]),
2036 SKU_FIELD(OFDM_24_36, ofdm[4]),
2037 SKU_FIELD(OFDM_48, ofdm[6]),
2038 SKU_FIELD(OFDM_54, ofdm[7]),
2039 SKU_FIELD(HT20_0_8, mcs[0][0]),
2040 SKU_FIELD(HT20_32, ofdm[0]),
2041 SKU_FIELD(HT20_1_2_9_10, mcs[0][1]),
2042 SKU_FIELD(HT20_3_4_11_12, mcs[0][3]),
2043 SKU_FIELD(HT20_5_13, mcs[0][5]),
2044 SKU_FIELD(HT20_6_14, mcs[0][6]),
2045 SKU_FIELD(HT20_7_15, mcs[0][7]),
2046 SKU_FIELD(HT40_0_8, mcs[1][0]),
2047 SKU_FIELD(HT40_32, ofdm[0]),
2048 SKU_FIELD(HT40_1_2_9_10, mcs[1][1]),
2049 SKU_FIELD(HT40_3_4_11_12, mcs[1][3]),
2050 SKU_FIELD(HT40_5_13, mcs[1][5]),
2051 SKU_FIELD(HT40_6_14, mcs[1][6]),
2052 SKU_FIELD(HT40_7_15, mcs[1][7]),
2053 SKU_FIELD(VHT20_0, mcs[0][0]),
2054 SKU_FIELD(VHT20_1_2, mcs[0][1]),
2055 SKU_FIELD(VHT20_3_4, mcs[0][3]),
2056 SKU_FIELD(VHT20_5_6, mcs[0][5]),
2057 SKU_FIELD(VHT20_7, mcs[0][7]),
2058 SKU_FIELD(VHT20_8, mcs[0][8]),
2059 SKU_FIELD(VHT20_9, mcs[0][9]),
2060 SKU_FIELD(VHT40_0, mcs[1][0]),
2061 SKU_FIELD(VHT40_1_2, mcs[1][1]),
2062 SKU_FIELD(VHT40_3_4, mcs[1][3]),
2063 SKU_FIELD(VHT40_5_6, mcs[1][5]),
2064 SKU_FIELD(VHT40_7, mcs[1][7]),
2065 SKU_FIELD(VHT40_8, mcs[1][8]),
2066 SKU_FIELD(VHT40_9, mcs[1][9]),
2067 SKU_FIELD(VHT80_0, mcs[2][0]),
2068 SKU_FIELD(VHT80_1_2, mcs[2][1]),
2069 SKU_FIELD(VHT80_3_4, mcs[2][3]),
2070 SKU_FIELD(VHT80_5_6, mcs[2][5]),
2071 SKU_FIELD(VHT80_7, mcs[2][7]),
2072 SKU_FIELD(VHT80_8, mcs[2][8]),
2073 SKU_FIELD(VHT80_9, mcs[2][9]),
2074 SKU_FIELD(VHT160_0, mcs[3][0]),
2075 SKU_FIELD(VHT160_1_2, mcs[3][1]),
2076 SKU_FIELD(VHT160_3_4, mcs[3][3]),
2077 SKU_FIELD(VHT160_5_6, mcs[3][5]),
2078 SKU_FIELD(VHT160_7, mcs[3][7]),
2079 SKU_FIELD(VHT160_8, mcs[3][8]),
2080 SKU_FIELD(VHT160_9, mcs[3][9]),
/* Apply SAR cap and subtract the multi-chain combining delta. */
2084 tx_power = mt76_get_sar_power(mphy, mphy->chandef.chan, tx_power);
2085 tx_power -= mt76_tx_power_nss_delta(n_chains);
2086 tx_power = mt76_get_rate_power_limits(mphy, mphy->chandef.chan,
2088 mphy->txpower_cur = tx_power;
2090 if (is_mt7663(mphy->dev)) {
/* MT7663 firmware: one flat power value for all rate slots. */
2091 memset(sku, tx_power, MT_SKU_4SS_DELTA + 1);
2095 for (i = 0; i < MT_SKU_1SS_DELTA; i++)
2096 sku[i] = limits_array[sku_mapping[i]];
/* Per-stream delta entries (1SS..4SS) follow the rate table. */
2098 for (i = 0; i < 4; i++) {
2101 if (i < n_chains - 1)
2102 delta = mt76_tx_power_nss_delta(n_chains) -
2103 mt76_tx_power_nss_delta(i + 1);
2104 sku[MT_SKU_1SS_DELTA + i] = delta;
/* Translate an nl80211 channel width into the firmware CMD_CBW_* code
 * via a lookup table; out-of-range widths fall through to the guard. */
2108 static u8 mt7615_mcu_chan_bw(struct cfg80211_chan_def *chandef)
2110 static const u8 width_to_bw[] = {
2111 [NL80211_CHAN_WIDTH_40] = CMD_CBW_40MHZ,
2112 [NL80211_CHAN_WIDTH_80] = CMD_CBW_80MHZ,
2113 [NL80211_CHAN_WIDTH_80P80] = CMD_CBW_8080MHZ,
2114 [NL80211_CHAN_WIDTH_160] = CMD_CBW_160MHZ,
2115 [NL80211_CHAN_WIDTH_5] = CMD_CBW_5MHZ,
2116 [NL80211_CHAN_WIDTH_10] = CMD_CBW_10MHZ,
2117 [NL80211_CHAN_WIDTH_20] = CMD_CBW_20MHZ,
2118 [NL80211_CHAN_WIDTH_20_NOHT] = CMD_CBW_20MHZ,
2121 if (chandef->width >= ARRAY_SIZE(width_to_bw))
2124 return width_to_bw[chandef->width];
/* Build and send a channel-switch request (@cmd selects the exact MCU
 * command, e.g. SET_RX_PATH): control/center channels, stream counts,
 * bandwidth, switch reason and the per-rate TX power SKU table. */
2127 int mt7615_mcu_set_chan_info(struct mt7615_phy *phy, int cmd)
2129 struct mt7615_dev *dev = phy->dev;
2130 struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
2131 int freq1 = chandef->center_freq1, freq2 = chandef->center_freq2;
2140 /* for 80+80 only */
2145 __le32 outband_freq;
2151 .control_chan = chandef->chan->hw_value,
2152 .center_chan = ieee80211_frequency_to_channel(freq1),
2153 .tx_streams = hweight8(phy->mt76->antenna_mask),
2154 .rx_streams_mask = phy->mt76->chainmask,
2155 .center_chan2 = ieee80211_frequency_to_channel(freq2),
/* Switch reason: scans bypass DPD calibration; radar-required channels
 * use the DFS path; monitor/rx-path updates go through the normal path. */
2158 if (cmd == MCU_EXT_CMD(SET_RX_PATH) ||
2159 dev->mt76.hw->conf.flags & IEEE80211_CONF_MONITOR)
2160 req.switch_reason = CH_SWITCH_NORMAL;
2161 else if (phy->mt76->hw->conf.flags & IEEE80211_CONF_OFFCHANNEL)
2162 req.switch_reason = CH_SWITCH_SCAN_BYPASS_DPD;
2163 else if (!cfg80211_reg_can_beacon(phy->mt76->hw->wiphy, chandef,
2165 req.switch_reason = CH_SWITCH_DFS;
2167 req.switch_reason = CH_SWITCH_NORMAL;
2169 req.band_idx = phy != &dev->phy;
2170 req.bw = mt7615_mcu_chan_bw(chandef);
/* Testmode uses a fixed max (0x3f) power table instead of SKU limits. */
2172 if (mt76_testmode_enabled(phy->mt76))
2173 memset(req.txpower_sku, 0x3f, 49);
2175 mt7615_mcu_set_txpower_sku(phy, req.txpower_sku);
2177 return mt76_mcu_send_msg(&dev->mt76, cmd, &req, sizeof(req), true);
/* Query the chip temperature from the firmware via THERMAL_CTRL;
 * the temperature is returned through the MCU response path. */
2180 int mt7615_mcu_get_temperature(struct mt7615_dev *dev)
2187 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(THERMAL_CTRL),
2188 &req, sizeof(req), true);
/* Set one ATE (testmode) parameter in the firmware: @param identifier,
 * @test_mode enable flag and 32-bit @val, sent via ATE_CTRL without
 * waiting for a response. */
2191 int mt7615_mcu_set_test_param(struct mt7615_dev *dev, u8 param, bool test_mode,
2203 .test_mode_en = test_mode,
2205 .value = cpu_to_le32(val),
2208 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL),
2209 &req, sizeof(req), false);
/* Enable/disable per-rate TX power (SKU) limiting for one band via the
 * TX_POWER_FEATURE_CTRL extended command. */
2212 int mt7615_mcu_set_sku_en(struct mt7615_phy *phy, bool enable)
2214 struct mt7615_dev *dev = phy->dev;
/* band_idx 1 = the ext (second) phy. */
2222 .band_idx = phy != &dev->phy,
2223 .sku_enable = enable,
2226 return mt76_mcu_send_msg(&dev->mt76,
2227 MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
2228 &req, sizeof(req), true);
/* Linear search for @cur in @freqs (length @n_freqs); returns its index
 * (not-found return value is in an elided line of this excerpt). */
2231 static int mt7615_find_freq_idx(const u16 *freqs, int n_freqs, u16 cur)
2235 for (i = 0; i < n_freqs; i++)
2236 if (cur == freqs[i])
/* Map a center frequency + bandwidth to the RX DCOC calibration-data slot
 * index stored in EEPROM. Wide channels (>= 40 MHz) are first snapped to
 * a bw40 anchor frequency; 2 GHz entries occupy the slots after the 5 GHz
 * freq_list (offset_2g). */
2242 static int mt7615_dcoc_freq_idx(u16 freq, u8 bw)
2244 static const u16 freq_list[] = {
2245 4980, 5805, 5905, 5190,
2246 5230, 5270, 5310, 5350,
2247 5390, 5430, 5470, 5510,
2248 5550, 5590, 5630, 5670,
2249 5710, 5755, 5795, 5835,
2250 5875, 5210, 5290, 5370,
2251 5450, 5530, 5610, 5690,
/* Anchor frequencies used to align 40+ MHz channels. */
2254 static const u16 freq_bw40[] = {
2255 5190, 5230, 5270, 5310,
2256 5350, 5390, 5430, 5470,
2257 5510, 5550, 5590, 5630,
2258 5670, 5710, 5755, 5795,
2261 int offset_2g = ARRAY_SIZE(freq_list);
2268 return offset_2g + 1;
2270 return offset_2g + 2;
2272 return offset_2g + 3;
2276 case NL80211_CHAN_WIDTH_80:
2277 case NL80211_CHAN_WIDTH_80P80:
2278 case NL80211_CHAN_WIDTH_160:
2281 idx = mt7615_find_freq_idx(freq_bw40, ARRAY_SIZE(freq_bw40),
2284 freq = freq_bw40[idx];
2288 idx = mt7615_find_freq_idx(freq_bw40, ARRAY_SIZE(freq_bw40),
2291 freq = freq_bw40[idx];
2295 case NL80211_CHAN_WIDTH_40:
2296 idx = mt7615_find_freq_idx(freq_bw40, ARRAY_SIZE(freq_bw40),
2305 return mt7615_find_freq_idx(freq_list, ARRAY_SIZE(freq_list), freq);
/* Apply RX DCOC calibration for the current channel: if flash/EEPROM
 * holds pre-computed data for this frequency slot, send it; otherwise
 * fall back to firmware runtime calibration (RXDCOC_CAL command).
 * 80+80/160 MHz channels are calibrated in two passes (is_freq2). */
2308 int mt7615_mcu_apply_rx_dcoc(struct mt7615_phy *phy)
2310 struct mt7615_dev *dev = phy->dev;
2311 struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
2312 int freq2 = chandef->center_freq2;
2316 u8 runtime_calibration;
2329 __le32 sx0_i_lna[4];
2330 __le32 sx0_q_lna[4];
2332 __le32 sx2_i_lna[4];
2333 __le32 sx2_q_lna[4];
2338 .bw = mt7615_mcu_chan_bw(chandef),
2339 .band = chandef->center_freq1 > 4000,
2340 .dbdc_en = !!dev->mt76.phy2,
2342 u16 center_freq = chandef->center_freq1;
2344 u8 *eep = dev->mt76.eeprom.data;
/* No flash RX-cal data -> nothing to apply from EEPROM. */
2346 if (!(eep[MT_EE_CALDATA_FLASH] & MT_EE_CALDATA_FLASH_RX_CAL))
/* 160 MHz is handled as two 80 MHz segments around the center. */
2349 if (chandef->width == NL80211_CHAN_WIDTH_160) {
2350 freq2 = center_freq + 40;
2355 req.runtime_calibration = 1;
2356 freq_idx = mt7615_dcoc_freq_idx(center_freq, chandef->width);
/* Valid slot found: copy stored DCOC data, skip runtime calibration. */
2360 memcpy(req.dcoc_data, eep + MT7615_EEPROM_DCOC_OFFSET +
2361 freq_idx * MT7615_EEPROM_DCOC_SIZE,
2362 sizeof(req.dcoc_data));
2363 req.runtime_calibration = 0;
2366 req.center_freq = cpu_to_le16(center_freq);
2367 ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RXDCOC_CAL), &req,
/* Second pass for the upper segment of 80+80 / 160 MHz channels. */
2370 if ((chandef->width == NL80211_CHAN_WIDTH_80P80 ||
2371 chandef->width == NL80211_CHAN_WIDTH_160) && !req.is_freq2) {
2372 req.is_freq2 = true;
2373 center_freq = freq2;
/* Map a center frequency + bandwidth to the TX DPD calibration-data slot
 * index in EEPROM; 2 GHz slots follow the 5 GHz freq_list (offset_2g).
 * Non-20 MHz channels are matched against the list before the exact
 * lookup. */
2380 static int mt7615_dpd_freq_idx(u16 freq, u8 bw)
2382 static const u16 freq_list[] = {
2383 4920, 4940, 4960, 4980,
2384 5040, 5060, 5080, 5180,
2385 5200, 5220, 5240, 5260,
2386 5280, 5300, 5320, 5340,
2387 5360, 5380, 5400, 5420,
2388 5440, 5460, 5480, 5500,
2389 5520, 5540, 5560, 5580,
2390 5600, 5620, 5640, 5660,
2391 5680, 5700, 5720, 5745,
2392 5765, 5785, 5805, 5825,
2393 5845, 5865, 5885, 5905
2395 int offset_2g = ARRAY_SIZE(freq_list);
2402 return offset_2g + 1;
2404 return offset_2g + 2;
2407 if (bw != NL80211_CHAN_WIDTH_20) {
2408 idx = mt7615_find_freq_idx(freq_list, ARRAY_SIZE(freq_list),
2413 idx = mt7615_find_freq_idx(freq_list, ARRAY_SIZE(freq_list),
2419 return mt7615_find_freq_idx(freq_list, ARRAY_SIZE(freq_list), freq);
/* Apply TX DPD (digital pre-distortion) calibration for the current
 * channel, mirroring mt7615_mcu_apply_rx_dcoc(): use flash/EEPROM data
 * when available, otherwise request firmware runtime calibration
 * (TXDPD_CAL). 80+80/160 MHz channels run a second pass (is_freq2). */
2423 int mt7615_mcu_apply_tx_dpd(struct mt7615_phy *phy)
2425 struct mt7615_dev *dev = phy->dev;
2426 struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
2427 int freq2 = chandef->center_freq2;
2431 u8 runtime_calibration;
2459 .bw = mt7615_mcu_chan_bw(chandef),
2460 .band = chandef->center_freq1 > 4000,
2461 .dbdc_en = !!dev->mt76.phy2,
2463 u16 center_freq = chandef->center_freq1;
2465 u8 *eep = dev->mt76.eeprom.data;
/* No flash TX-DPD data present -> nothing to apply from EEPROM. */
2467 if (!(eep[MT_EE_CALDATA_FLASH] & MT_EE_CALDATA_FLASH_TX_DPD))
2470 if (chandef->width == NL80211_CHAN_WIDTH_160) {
2471 freq2 = center_freq + 40;
2476 req.runtime_calibration = 1;
2477 freq_idx = mt7615_dpd_freq_idx(center_freq, chandef->width);
/* Valid slot found: copy stored DPD data, skip runtime calibration. */
2481 memcpy(&req.dpd_data, eep + MT7615_EEPROM_TXDPD_OFFSET +
2482 freq_idx * MT7615_EEPROM_TXDPD_SIZE,
2483 sizeof(req.dpd_data));
2484 req.runtime_calibration = 0;
2487 req.center_freq = cpu_to_le16(center_freq);
2488 ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(TXDPD_CAL),
2489 &req, sizeof(req), true);
/* Second pass for the upper segment of 80+80 / 160 MHz channels. */
2491 if ((chandef->width == NL80211_CHAN_WIDTH_80P80 ||
2492 chandef->width == NL80211_CHAN_WIDTH_160) && !req.is_freq2) {
2493 req.is_freq2 = true;
2494 center_freq = freq2;
/* Exclude EAPOL frames (ethertype ETH_P_PAE) from hardware RX header
 * translation so they reach the host as raw 802.11 (RX_HDR_TRANS cmd). */
2501 int mt7615_mcu_set_rx_hdr_trans_blacklist(struct mt7615_dev *dev)
2514 .etype = cpu_to_le16(ETH_P_PAE),
2517 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RX_HDR_TRANS),
2518 &req, sizeof(req), false);
/* Configure offload-firmware BSS power management for a STA interface:
 * always sends SET_BSS_ABORT first, then (when enabling) SET_BSS_CONNECTED
 * with AID, DTIM period and beacon interval. Non-STA vifs are ignored. */
2521 int mt7615_mcu_set_bss_pm(struct mt7615_dev *dev, struct ieee80211_vif *vif,
2524 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
2529 __le16 bcn_interval;
2532 u8 bmc_delivered_ac;
2533 u8 bmc_triggered_ac;
2536 .bss_idx = mvif->mt76.idx,
2537 .aid = cpu_to_le16(vif->bss_conf.aid),
2538 .dtim_period = vif->bss_conf.dtim_period,
2539 .bcn_interval = cpu_to_le16(vif->bss_conf.beacon_int),
2545 .bss_idx = mvif->mt76.idx,
2549 if (vif->type != NL80211_IFTYPE_STATION)
2552 err = mt76_mcu_send_msg(&dev->mt76, MCU_CE_CMD(SET_BSS_ABORT),
2553 &req_hdr, sizeof(req_hdr), false);
/* Abort-only when disabling; proceed to "connected" when enabling. */
2554 if (err < 0 || !enable)
2557 return mt76_mcu_send_msg(&dev->mt76, MCU_CE_CMD(SET_BSS_CONNECTED),
2558 &req, sizeof(req), false);
2561 int mt7615_mcu_set_roc(struct mt7615_phy *phy, struct ieee80211_vif *vif,
2562 struct ieee80211_channel *chan, int duration)
2564 struct mt7615_vif *mvif = (struct mt7615_vif *)vif->drv_priv;
2565 struct mt7615_dev *dev = phy->dev;
2566 struct mt7615_roc_tlv req = {
2567 .bss_idx = mvif->mt76.idx,
2569 .max_interval = cpu_to_le32(duration),
2570 .primary_chan = chan ? chan->hw_value : 0,
2571 .band = chan ? chan->band : 0,
2575 phy->roc_grant = false;
2577 return mt76_mcu_send_msg(&dev->mt76, MCU_CE_CMD(SET_ROC),
2578 &req, sizeof(req), false);