
Lines matching full:dev — a grep-style listing of every source line containing "dev" in the MediaTek WED (Wireless Ethernet Dispatch) driver. Each entry gives the source-file line number, the matched line, and the enclosing function; for declarations, the trailing tag records whether the matched "dev" is a struct member, a function argument, or a local variable.

49 	struct net_device *dev;  member
53 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wed_m32() argument
55 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
59 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_set() argument
61 return wed_m32(dev, reg, 0, mask); in wed_set()
65 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_clr() argument
67 return wed_m32(dev, reg, mask, 0); in wed_clr()
71 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wdma_m32() argument
73 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val); in wdma_m32()
77 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_set() argument
79 wdma_m32(dev, reg, 0, mask); in wdma_set()
83 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_clr() argument
85 wdma_m32(dev, reg, mask, 0); in wdma_clr()
89 wifi_r32(struct mtk_wed_device *dev, u32 reg) in wifi_r32() argument
91 return readl(dev->wlan.base + reg); in wifi_r32()
95 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val) in wifi_w32() argument
97 writel(val, dev->wlan.base + reg); in wifi_w32()
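The three accessor families above follow one layering: wed_*() goes through a regmap, while wdma_*() and wifi_*() are plain MMIO; in each family, set()/clr() are thin wrappers over a masked read-modify-write m32(reg, mask, val) that clears the bits in mask and then ORs in val. Note that wed_m32() passes mask | val as the regmap_update_bits() mask, so bits being set need not also be listed in mask. A minimal userspace model of the pattern (a plain variable stands in for a register; all names here are illustrative, not from the driver):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t fake_reg;                     /* stands in for a device register */

    static uint32_t r32(void)       { return fake_reg; }
    static void     w32(uint32_t v) { fake_reg = v; }

    /* read-modify-write: clear 'mask', then OR in 'val' */
    static void m32(uint32_t mask, uint32_t val)
    {
            w32((r32() & ~mask) | val);
    }

    static void set_bits(uint32_t m) { m32(0, m); }  /* wed_set()/wdma_set() analogue */
    static void clr_bits(uint32_t m) { m32(m, 0); }  /* wed_clr()/wdma_clr() analogue */

    int main(void)
    {
            fake_reg = 0xf0f0;
            set_bits(0x000f);                  /* 0xf0f0 -> 0xf0ff */
            clr_bits(0xf000);                  /* 0xf0ff -> 0x00ff */
            printf("%#x\n", fake_reg);         /* prints 0xff */
            return 0;
    }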
101 mtk_wed_read_reset(struct mtk_wed_device *dev) in mtk_wed_read_reset() argument
103 return wed_r32(dev, MTK_WED_RESET); in mtk_wed_read_reset()
107 mtk_wdma_read_reset(struct mtk_wed_device *dev) in mtk_wdma_read_reset() argument
109 return wdma_r32(dev, MTK_WDMA_GLO_CFG); in mtk_wdma_read_reset()
113 mtk_wdma_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_rx_reset() argument
118 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN); in mtk_wdma_rx_reset()
119 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_rx_reset()
122 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
124 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX); in mtk_wdma_rx_reset()
125 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_rx_reset()
127 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
128 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
131 wdma_w32(dev, in mtk_wdma_rx_reset()
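mtk_wdma_rx_reset() shows the driver's standard quiesce recipe: clear the DMA-enable bit, poll with readx_poll_timeout() from <linux/iopoll.h> until the hardware reports idle, then bounce the reset-index register. The macro repeatedly calls its first argument with the second (here mtk_wdma_read_reset(dev)), storing the result in status, until the condition holds or the timeout expires. A sketch assuming the busy bit is MTK_WDMA_GLO_CFG_RX_DMA_BUSY; the helper name and delay constants are illustrative:

    #include <linux/iopoll.h>

    static int wdma_rx_quiesce(struct mtk_wed_device *dev)
    {
            u32 status;
            int ret;

            wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN);
            /* re-read MTK_WDMA_GLO_CFG until the busy bit drops */
            ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status,
                                     !(status & MTK_WDMA_GLO_CFG_RX_DMA_BUSY),
                                     50, 10000);    /* 50us step, 10ms budget (assumed) */
            if (ret)
                    dev_err(dev->hw->dev, "rx reset failed\n");
            return ret;
    }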
139 mtk_wdma_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_tx_reset() argument
144 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wdma_tx_reset()
145 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_tx_reset()
147 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
149 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX); in mtk_wdma_tx_reset()
150 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_tx_reset()
152 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
153 wdma_w32(dev, in mtk_wdma_tx_reset()
158 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask) in mtk_wed_reset() argument
162 wed_w32(dev, MTK_WED_RESET, mask); in mtk_wed_reset()
163 if (readx_poll_timeout(mtk_wed_read_reset, dev, status, in mtk_wed_reset()
169 mtk_wed_wo_read_status(struct mtk_wed_device *dev) in mtk_wed_wo_read_status() argument
171 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS); in mtk_wed_wo_read_status()
175 mtk_wed_wo_reset(struct mtk_wed_device *dev) in mtk_wed_wo_reset() argument
177 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
182 mtk_wdma_tx_reset(dev); in mtk_wed_wo_reset()
183 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_wo_reset()
190 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val, in mtk_wed_wo_reset()
193 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
198 switch (dev->hw->index) { in mtk_wed_wo_reset()
225 struct mtk_wed_device *dev; in mtk_wed_fe_reset() local
231 dev = hw->wed_dev; in mtk_wed_fe_reset()
232 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
236 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
238 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
252 struct mtk_wed_device *dev; in mtk_wed_fe_reset_complete() local
257 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
258 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
261 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
268 mtk_wed_assign(struct mtk_wed_device *dev) in mtk_wed_assign() argument
273 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
274 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
296 hw->wed_dev = dev; in mtk_wed_assign()
301 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_tx_buffer_alloc() argument
306 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
311 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
318 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
319 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
321 desc = dma_alloc_coherent(dev->hw->dev, ring_size * sizeof(*desc), in mtk_wed_tx_buffer_alloc()
326 dev->tx_buf_ring.desc = desc; in mtk_wed_tx_buffer_alloc()
327 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
339 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
341 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
347 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
357 txd_size = dev->wlan.init_buf(buf, buf_phys, token++); in mtk_wed_tx_buffer_alloc()
362 if (dev->hw->version == 1) in mtk_wed_tx_buffer_alloc()
380 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
388 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_tx_buffer() argument
390 struct mtk_wdma_desc *desc = dev->tx_buf_ring.desc; in mtk_wed_free_tx_buffer()
391 void **page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
401 for (i = 0, page_idx = 0; i < dev->tx_buf_ring.size; in mtk_wed_free_tx_buffer()
410 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_free_tx_buffer()
415 dma_free_coherent(dev->hw->dev, dev->tx_buf_ring.size * sizeof(*desc), in mtk_wed_free_tx_buffer()
416 desc, dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
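mtk_wed_tx_buffer_alloc()/mtk_wed_free_tx_buffer() mix the two kernel DMA styles: the descriptor ring is coherent memory (dma_alloc_coherent(), permanently visible to both CPU and device), while the buffer pages are streaming mappings that must be handed back and forth with the sync calls before the CPU or the device may touch them. A condensed sketch of the per-page lifecycle (error unwinding trimmed; the helper name is hypothetical and the bidirectional DMA direction is an assumption based on the sync pattern in the listing):

    #include <linux/dma-mapping.h>

    static int my_page_setup(struct device *dmadev, struct page *page)
    {
            dma_addr_t phys;

            phys = dma_map_page(dmadev, page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
            if (dma_mapping_error(dmadev, phys))
                    return -ENOMEM;

            /* ownership to the CPU while it fills in buffer descriptors... */
            dma_sync_single_for_cpu(dmadev, phys, PAGE_SIZE, DMA_BIDIRECTIONAL);
            /* ... CPU writes into page_address(page) here ... */

            /* ...then back to the device before DMA may begin */
            dma_sync_single_for_device(dmadev, phys, PAGE_SIZE, DMA_BIDIRECTIONAL);
            return 0;
    }

On teardown the order inverts: dma_unmap_page() each page, then dma_free_coherent() the ring, exactly as mtk_wed_free_tx_buffer() does above.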
423 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_alloc() argument
428 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
429 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
430 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
435 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
436 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
437 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
443 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_rx_buffer() argument
445 struct mtk_rxbm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
450 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
451 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
452 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
456 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_hw_init() argument
458 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD, in mtk_wed_rx_buffer_hw_init()
459 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
460 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
461 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL | in mtk_wed_rx_buffer_hw_init()
462 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
463 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH, in mtk_wed_rx_buffer_hw_init()
465 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_buffer_hw_init()
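The writes in mtk_wed_rx_buffer_hw_init() use FIELD_PREP() from <linux/bitfield.h>: given a compile-time-constant mask, it shifts a value into that field's position, so several fields can be ORed into a single register write. A small illustration with hypothetical masks (the real MTK_WED_RX_BM_* layouts live in the driver's register header, which is not part of this listing):

    #include <linux/bitfield.h>
    #include <linux/bits.h>

    #define MY_RX_DMAD_SDL0   GENMASK(13, 0)    /* buffer length field */
    #define MY_RX_SW_TAIL     GENMASK(31, 16)   /* software tail index */

    u32 rx_size = 1920, rx_npkt = 128;          /* example values */
    u32 val = FIELD_PREP(MY_RX_DMAD_SDL0, rx_size) |
              FIELD_PREP(MY_RX_SW_TAIL, rx_npkt);
    /* FIELD_GET(MY_RX_SW_TAIL, val) recovers rx_npkt on the way back */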
469 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring) in mtk_wed_free_ring() argument
474 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
479 mtk_wed_free_rx_rings(struct mtk_wed_device *dev) in mtk_wed_free_rx_rings() argument
481 mtk_wed_free_rx_buffer(dev); in mtk_wed_free_rx_rings()
482 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
486 mtk_wed_free_tx_rings(struct mtk_wed_device *dev) in mtk_wed_free_tx_rings() argument
490 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
491 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
492 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
493 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
497 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en) in mtk_wed_set_ext_int() argument
501 if (dev->hw->version == 1) in mtk_wed_set_ext_int()
509 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
512 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0); in mtk_wed_set_ext_int()
513 wed_r32(dev, MTK_WED_EXT_INT_MASK); in mtk_wed_set_ext_int()
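The bare wed_r32() at the end of mtk_wed_set_ext_int() is not a leftover: reading the register back flushes the posted MMIO write, guaranteeing the new mask has reached the device before the caller proceeds. The same write-then-readback idiom recurs elsewhere in the listing (e.g. MTK_WED_PCIE_INT_CTRL, MTK_WED_EXT_INT_MASK1/2):

    wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0);
    wed_r32(dev, MTK_WED_EXT_INT_MASK);   /* readback: flush the posted write */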
517 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable) in mtk_wed_set_512_support() argument
520 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
521 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
524 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
526 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
532 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, int idx) in mtk_wed_check_wfdma_rx_fill() argument
537 if (!(dev->rx_ring[idx].flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_check_wfdma_rx_fill()
543 cur_idx = wed_r32(dev, in mtk_wed_check_wfdma_rx_fill()
553 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
557 val = wifi_r32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base) | in mtk_wed_check_wfdma_rx_fill()
559 wifi_w32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, val); in mtk_wed_check_wfdma_rx_fill()
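The tail of mtk_wed_check_wfdma_rx_fill() pokes a register in the WLAN core itself. wifi_r32()/wifi_w32() expect an offset relative to the ioremapped dev->wlan.base, but dev->wlan.wpdma_rx_glo is recorded as a physical address, so the driver rebases it by subtracting dev->wlan.phy_base first. Schematically (the enable-bit name below is illustrative, not the driver's):

    /* physical address -> offset inside the mapped WLAN window */
    u32 off = dev->wlan.wpdma_rx_glo - dev->wlan.phy_base;
    u32 val = wifi_r32(dev, off) | MY_WPDMA_RX_DMA_EN_BIT;
    wifi_w32(dev, off, val);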
563 mtk_wed_dma_disable(struct mtk_wed_device *dev) in mtk_wed_dma_disable() argument
565 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
569 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_dma_disable()
571 wed_clr(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_disable()
575 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
580 if (dev->hw->version == 1) { in mtk_wed_dma_disable()
581 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
582 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
585 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
589 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_disable()
591 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_disable()
595 mtk_wed_set_512_support(dev, false); in mtk_wed_dma_disable()
599 mtk_wed_stop(struct mtk_wed_device *dev) in mtk_wed_stop() argument
601 mtk_wed_set_ext_int(dev, false); in mtk_wed_stop()
603 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0); in mtk_wed_stop()
604 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0); in mtk_wed_stop()
605 wdma_w32(dev, MTK_WDMA_INT_MASK, 0); in mtk_wed_stop()
606 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0); in mtk_wed_stop()
607 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, 0); in mtk_wed_stop()
609 if (dev->hw->version == 1) in mtk_wed_stop()
612 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0); in mtk_wed_stop()
613 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0); in mtk_wed_stop()
617 mtk_wed_deinit(struct mtk_wed_device *dev) in mtk_wed_deinit() argument
619 mtk_wed_stop(dev); in mtk_wed_deinit()
620 mtk_wed_dma_disable(dev); in mtk_wed_deinit()
622 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
628 if (dev->hw->version == 1) in mtk_wed_deinit()
631 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
638 __mtk_wed_detach(struct mtk_wed_device *dev) in __mtk_wed_detach() argument
640 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
642 mtk_wed_deinit(dev); in __mtk_wed_detach()
644 mtk_wdma_rx_reset(dev); in __mtk_wed_detach()
645 mtk_wed_reset(dev, MTK_WED_RESET_WED); in __mtk_wed_detach()
646 mtk_wed_free_tx_buffer(dev); in __mtk_wed_detach()
647 mtk_wed_free_tx_rings(dev); in __mtk_wed_detach()
649 if (mtk_wed_get_rx_capa(dev)) { in __mtk_wed_detach()
651 mtk_wed_wo_reset(dev); in __mtk_wed_detach()
652 mtk_wed_free_rx_rings(dev); in __mtk_wed_detach()
657 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
660 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
667 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
668 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
670 memset(dev, 0, sizeof(*dev)); in __mtk_wed_detach()
677 mtk_wed_detach(struct mtk_wed_device *dev) in mtk_wed_detach() argument
680 __mtk_wed_detach(dev); in mtk_wed_detach()
686 mtk_wed_bus_init(struct mtk_wed_device *dev) in mtk_wed_bus_init() argument
688 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
690 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
700 wed_w32(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
704 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
707 wed_r32(dev, MTK_WED_PCIE_INT_CTRL); in mtk_wed_bus_init()
709 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, PCIE_BASE_ADDR0 | 0x180); in mtk_wed_bus_init()
710 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, PCIE_BASE_ADDR0 | 0x184); in mtk_wed_bus_init()
713 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24)); in mtk_wed_bus_init()
714 wed_r32(dev, MTK_WED_PCIE_INT_TRIGGER); in mtk_wed_bus_init()
717 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
722 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_bus_init()
732 mtk_wed_set_wpdma(struct mtk_wed_device *dev) in mtk_wed_set_wpdma() argument
734 if (dev->hw->version == 1) { in mtk_wed_set_wpdma()
735 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
737 mtk_wed_bus_init(dev); in mtk_wed_set_wpdma()
739 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
740 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
741 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
742 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
743 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
744 wed_w32(dev, MTK_WED_WPDMA_RX_RING, dev->wlan.wpdma_rx); in mtk_wed_set_wpdma()
749 mtk_wed_hw_init_early(struct mtk_wed_device *dev) in mtk_wed_hw_init_early() argument
753 mtk_wed_deinit(dev); in mtk_wed_hw_init_early()
754 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_hw_init_early()
755 mtk_wed_set_wpdma(dev); in mtk_wed_hw_init_early()
763 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set); in mtk_wed_hw_init_early()
765 if (dev->hw->version == 1) { in mtk_wed_hw_init_early()
766 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
768 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_hw_init_early()
773 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset); in mtk_wed_hw_init_early()
774 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset); in mtk_wed_hw_init_early()
775 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_hw_init_early()
776 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
778 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
779 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT); in mtk_wed_hw_init_early()
780 wed_w32(dev, MTK_WED_WDMA_OFFSET0, in mtk_wed_hw_init_early()
786 wed_w32(dev, MTK_WED_WDMA_OFFSET1, in mtk_wed_hw_init_early()
795 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_rro_ring_alloc() argument
798 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
812 mtk_wed_rro_alloc(struct mtk_wed_device *dev) in mtk_wed_rro_alloc() argument
818 index = of_property_match_string(dev->hw->node, "memory-region-names", in mtk_wed_rro_alloc()
823 np = of_parse_phandle(dev->hw->node, "memory-region", index); in mtk_wed_rro_alloc()
833 dev->rro.miod_phys = rmem->base; in mtk_wed_rro_alloc()
834 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
836 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
841 mtk_wed_rro_cfg(struct mtk_wed_device *dev) in mtk_wed_rro_cfg() argument
843 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
872 mtk_wed_rro_hw_init(struct mtk_wed_device *dev) in mtk_wed_rro_hw_init() argument
874 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG, in mtk_wed_rro_hw_init()
880 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
881 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1, in mtk_wed_rro_hw_init()
883 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
884 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1, in mtk_wed_rro_hw_init()
886 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0); in mtk_wed_rro_hw_init()
887 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
889 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rro_hw_init()
893 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rro_hw_init()
894 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
895 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rro_hw_init()
899 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev) in mtk_wed_route_qm_hw_init() argument
901 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_route_qm_hw_init()
905 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM)) in mtk_wed_route_qm_hw_init()
910 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
911 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT); in mtk_wed_route_qm_hw_init()
912 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_route_qm_hw_init()
913 FIELD_PREP(MTK_WED_RTQM_TXDMAD_FPORT, 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
914 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
916 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_route_qm_hw_init()
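MTK_WED_RESET_RX_ROUTE_QM is a self-clearing reset: mtk_wed_route_qm_hw_init() writes the bit and then polls MTK_WED_RESET until the hardware drops it again, which is the completion signal. A reconstruction of the visible loop (the sleep interval is a guess):

    wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM);

    for (;;) {
            usleep_range(100, 200);
            if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM))
                    break;    /* hw cleared the bit: block is out of reset */
    }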
920 mtk_wed_hw_init(struct mtk_wed_device *dev) in mtk_wed_hw_init() argument
922 if (dev->init_done) in mtk_wed_hw_init()
925 dev->init_done = true; in mtk_wed_hw_init()
926 mtk_wed_set_ext_int(dev, false); in mtk_wed_hw_init()
927 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
930 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
934 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
936 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE); in mtk_wed_hw_init()
938 if (dev->hw->version == 1) { in mtk_wed_hw_init()
939 wed_w32(dev, MTK_WED_TX_BM_TKID, in mtk_wed_hw_init()
941 dev->wlan.token_start) | in mtk_wed_hw_init()
943 dev->wlan.token_start + in mtk_wed_hw_init()
944 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
945 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
949 wed_w32(dev, MTK_WED_TX_BM_TKID_V2, in mtk_wed_hw_init()
951 dev->wlan.token_start) | in mtk_wed_hw_init()
953 dev->wlan.token_start + in mtk_wed_hw_init()
954 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
955 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
958 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
961 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
963 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
964 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR, in mtk_wed_hw_init()
969 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_hw_init()
971 if (dev->hw->version == 1) { in mtk_wed_hw_init()
972 wed_set(dev, MTK_WED_CTRL, in mtk_wed_hw_init()
976 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE); in mtk_wed_hw_init()
978 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_hw_init()
981 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_hw_init()
983 mtk_wed_rx_buffer_hw_init(dev); in mtk_wed_hw_init()
984 mtk_wed_rro_hw_init(dev); in mtk_wed_hw_init()
985 mtk_wed_route_qm_hw_init(dev); in mtk_wed_hw_init()
988 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE); in mtk_wed_hw_init()
1012 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_check_busy() argument
1014 return !!(wed_r32(dev, reg) & mask); in mtk_wed_check_busy()
1018 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_poll_busy() argument
1025 timeout, false, dev, reg, mask); in mtk_wed_poll_busy()
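Most of mtk_wed_poll_busy() is elided by the match filter; from the visible tail it wraps mtk_wed_check_busy() in read_poll_timeout() from <linux/iopoll.h>, the iopoll variant that polls an arbitrary function with explicit arguments (here dev, reg, mask) rather than a register address. A plausible reconstruction (delay constants assumed; function name made hypothetical):

    #include <linux/iopoll.h>

    static int my_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
    {
            int sleep = 15000;              /* us between polls (assumed) */
            int timeout = 100 * sleep;      /* overall budget */
            u32 val;

            /* keep calling mtk_wed_check_busy(dev, reg, mask) until it
             * returns false or the timeout expires */
            return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
                                     timeout, false, dev, reg, mask);
    }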
1029 mtk_wed_rx_reset(struct mtk_wed_device *dev) in mtk_wed_rx_reset() argument
1031 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1041 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN); in mtk_wed_rx_reset()
1042 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1045 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_rx_reset()
1046 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV); in mtk_wed_rx_reset()
1048 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1052 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1055 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1059 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_rx_reset()
1063 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rx_reset()
1064 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1067 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM); in mtk_wed_rx_reset()
1069 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rx_reset()
1072 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rx_reset()
1076 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_rx_reset()
1077 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1080 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1082 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_rx_reset()
1086 mtk_wdma_tx_reset(dev); in mtk_wed_rx_reset()
1089 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN); in mtk_wed_rx_reset()
1090 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1092 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV); in mtk_wed_rx_reset()
1095 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_rx_reset()
1097 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN); in mtk_wed_rx_reset()
1099 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA); in mtk_wed_rx_reset()
1101 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_rx_reset()
1104 wed_set(dev, MTK_WED_RESET_IDX, in mtk_wed_rx_reset()
1107 wed_set(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_RX); in mtk_wed_rx_reset()
1108 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_rx_reset()
1112 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_reset()
1113 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1115 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM); in mtk_wed_rx_reset()
1126 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1127 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1130 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1133 mtk_wed_free_rx_buffer(dev); in mtk_wed_rx_reset()
1139 mtk_wed_reset_dma(struct mtk_wed_device *dev) in mtk_wed_reset_dma() argument
1145 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1146 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1149 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1154 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN); in mtk_wed_reset_dma()
1155 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_reset_dma()
1158 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA); in mtk_wed_reset_dma()
1160 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_TX); in mtk_wed_reset_dma()
1161 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1165 busy = !!mtk_wdma_rx_reset(dev); in mtk_wed_reset_dma()
1166 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_reset_dma()
1168 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1172 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT); in mtk_wed_reset_dma()
1173 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV); in mtk_wed_reset_dma()
1175 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1177 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1179 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1182 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1187 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN); in mtk_wed_reset_dma()
1190 val = wed_r32(dev, MTK_WED_TX_BM_INTF); in mtk_wed_reset_dma()
1195 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT); in mtk_wed_reset_dma()
1196 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN); in mtk_wed_reset_dma()
1197 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_reset_dma()
1200 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1202 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1206 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1210 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_reset_dma()
1211 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV); in mtk_wed_reset_dma()
1212 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV); in mtk_wed_reset_dma()
1214 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, in mtk_wed_reset_dma()
1217 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1220 dev->init_done = false; in mtk_wed_reset_dma()
1221 if (dev->hw->version == 1) in mtk_wed_reset_dma()
1225 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX); in mtk_wed_reset_dma()
1226 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1229 mtk_wed_rx_reset(dev); in mtk_wed_reset_dma()
1233 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_ring_alloc() argument
1236 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1249 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_rx_ring_setup() argument
1252 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_rx_ring_setup()
1255 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1258 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1259 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_rx_ring_setup()
1263 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1265 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1267 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_rx_ring_setup()
1269 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1271 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1278 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_tx_ring_setup() argument
1281 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_tx_ring_setup()
1284 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1287 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1288 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_tx_ring_setup()
1292 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1294 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1296 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1297 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1303 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1305 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1307 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX, in mtk_wed_wdma_tx_ring_setup()
1309 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX, in mtk_wed_wdma_tx_ring_setup()
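Ring bring-up is uniform across the driver: each descriptor ring is published to the hardware through four registers at fixed offsets from the ring's register block — base (DMA address of the descriptor array), count (number of entries), CPU (producer) index, and DMA (consumer) index, with both indices zeroed on (re)initialisation. The writes above do this twice, once on the WDMA block and once on WED's mirror of the ring. The annotated shape (helper name hypothetical):

    static void my_ring_publish(struct mtk_wed_device *dev, int idx,
                                struct mtk_wed_ring *ring)
    {
            wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE,
                     ring->desc_phys);                /* desc[0] bus address */
            wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT,
                     MTK_WED_WDMA_RING_SIZE);         /* ring entries */
            wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
            wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0);
    }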
1317 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb, in mtk_wed_ppe_check() argument
1320 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1332 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1336 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_configure_irq() argument
1341 wed_set(dev, MTK_WED_CTRL, in mtk_wed_configure_irq()
1347 if (dev->hw->version == 1) { in mtk_wed_configure_irq()
1348 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, in mtk_wed_configure_irq()
1351 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, in mtk_wed_configure_irq()
1355 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask); in mtk_wed_configure_irq()
1360 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX, in mtk_wed_configure_irq()
1366 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
1368 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
1371 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE, in mtk_wed_configure_irq()
1375 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
1377 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX, in mtk_wed_configure_irq()
1383 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
1385 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
1387 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask); in mtk_wed_configure_irq()
1388 wed_set(dev, MTK_WED_WDMA_INT_CTRL, in mtk_wed_configure_irq()
1390 dev->wdma_idx)); in mtk_wed_configure_irq()
1393 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask); in mtk_wed_configure_irq()
1395 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask); in mtk_wed_configure_irq()
1396 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask); in mtk_wed_configure_irq()
1397 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_configure_irq()
1398 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_configure_irq()
1402 mtk_wed_dma_enable(struct mtk_wed_device *dev) in mtk_wed_dma_enable() argument
1404 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, MTK_WED_WPDMA_INT_CTRL_SUBRT_ADV); in mtk_wed_dma_enable()
1406 wed_set(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_enable()
1409 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1412 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1415 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1420 if (dev->hw->version == 1) { in mtk_wed_dma_enable()
1421 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1426 wed_set(dev, MTK_WED_WPDMA_CTRL, in mtk_wed_dma_enable()
1429 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1433 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1437 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1441 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_enable()
1448 mtk_wed_check_wfdma_rx_fill(dev, i); in mtk_wed_dma_enable()
1453 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_start() argument
1457 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev)) in mtk_wed_start()
1460 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
1461 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
1462 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false); in mtk_wed_start()
1464 mtk_wed_hw_init(dev); in mtk_wed_start()
1465 mtk_wed_configure_irq(dev, irq_mask); in mtk_wed_start()
1467 mtk_wed_set_ext_int(dev, true); in mtk_wed_start()
1469 if (dev->hw->version == 1) { in mtk_wed_start()
1470 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
1472 dev->hw->index); in mtk_wed_start()
1474 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
1475 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
1478 wed_w32(dev, MTK_WED_EXT_INT_MASK1, in mtk_wed_start()
1480 wed_w32(dev, MTK_WED_EXT_INT_MASK2, in mtk_wed_start()
1483 wed_r32(dev, MTK_WED_EXT_INT_MASK1); in mtk_wed_start()
1484 wed_r32(dev, MTK_WED_EXT_INT_MASK2); in mtk_wed_start()
1486 if (mtk_wed_rro_cfg(dev)) in mtk_wed_start()
1491 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
1493 mtk_wed_dma_enable(dev); in mtk_wed_start()
1494 dev->running = true; in mtk_wed_start()
1498 mtk_wed_attach(struct mtk_wed_device *dev) in mtk_wed_attach() argument
1508 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
1509 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
1520 hw = mtk_wed_assign(dev); in mtk_wed_attach()
1527 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
1528 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
1529 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
1533 dev->hw = hw; in mtk_wed_attach()
1534 dev->dev = hw->dev; in mtk_wed_attach()
1535 dev->irq = hw->irq; in mtk_wed_attach()
1536 dev->wdma_idx = hw->index; in mtk_wed_attach()
1537 dev->version = hw->version; in mtk_wed_attach()
1539 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
1540 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
1541 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
1543 ret = mtk_wed_tx_buffer_alloc(dev); in mtk_wed_attach()
1547 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_attach()
1548 ret = mtk_wed_rro_alloc(dev); in mtk_wed_attach()
1553 mtk_wed_hw_init_early(dev); in mtk_wed_attach()
1558 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
1563 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
1564 __mtk_wed_detach(dev); in mtk_wed_attach()
1573 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_tx_ring_setup() argument
1576 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
1590 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
1593 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE, in mtk_wed_tx_ring_setup()
1597 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_tx_ring_setup()
1605 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
1606 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE); in mtk_wed_tx_ring_setup()
1607 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
1609 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_tx_ring_setup()
1611 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_tx_ring_setup()
1613 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
1619 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_txfree_ring_setup() argument
1621 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
1622 int i, index = dev->hw->version == 1; in mtk_wed_txfree_ring_setup()
1635 wed_w32(dev, MTK_WED_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
1636 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
1643 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_rx_ring_setup() argument
1646 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
1648 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
1651 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE, in mtk_wed_rx_ring_setup()
1655 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_rx_ring_setup()
1664 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
1665 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE); in mtk_wed_rx_ring_setup()
1667 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rx_ring_setup()
1669 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rx_ring_setup()
1676 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_get() argument
1680 if (dev->hw->version == 1) in mtk_wed_irq_get()
1688 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS); in mtk_wed_irq_get()
1689 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val); in mtk_wed_irq_get()
1691 if (!dev->hw->num_flows) in mtk_wed_irq_get()
1694 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
1696 val = wed_r32(dev, MTK_WED_INT_STATUS); in mtk_wed_irq_get()
1698 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */ in mtk_wed_irq_get()
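mtk_wed_irq_get() reads MTK_WED_INT_STATUS and immediately writes the value back; per the source's own /* ACK */ comment the status bits are write-1-to-clear, so echoing the read acknowledges exactly the interrupts that were observed and no others. The pattern, assuming the elided line filters the status by the caller's mask:

    u32 val = wed_r32(dev, MTK_WED_INT_STATUS);
    val &= mask;                            /* only the sources the caller asked about */
    wed_w32(dev, MTK_WED_INT_STATUS, val);  /* write-1-to-clear ACK */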
1704 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_set_mask() argument
1706 if (!dev->running) in mtk_wed_irq_set_mask()
1709 mtk_wed_set_ext_int(dev, !!mask); in mtk_wed_irq_set_mask()
1710 wed_w32(dev, MTK_WED_INT_MASK, mask); in mtk_wed_irq_set_mask()
1771 if (!tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
1781 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev, in mtk_wed_setup_tc_block() argument
1801 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
1812 priv->dev = dev; in mtk_wed_setup_tc_block()
1813 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL); in mtk_wed_setup_tc_block()
1824 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
1840 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
1851 return mtk_wed_setup_tc_block(hw, dev, type_data); in mtk_wed_setup_tc()
1878 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
1891 get_device(&pdev->dev); in mtk_wed_add_hw()
1914 hw->dev = &pdev->dev; in mtk_wed_add_hw()
1948 put_device(&pdev->dev); in mtk_wed_add_hw()
1970 put_device(hw->dev); in mtk_wed_exit()