7 * Copyright (C) 2005-2016 Broadcom Corporation.
8 * Copyright (C) 2016-2017 Broadcom Limited.
14 * Copyright (C) 2000-2016 Broadcom Corporation.
15 * Copyright (C) 2016-2017 Broadcom Ltd.
52 #include <linux/dma-mapping.h>
56 #include <linux/hwmon-sysfs.h>
94 _tg3_flag(TG3_FLAG_##flag, (tp)->tg3_flags)
96 _tg3_flag_set(TG3_FLAG_##flag, (tp)->tg3_flags)
98 _tg3_flag_clear(TG3_FLAG_##flag, (tp)->tg3_flags)
124 * and dev->tx_timeout() should be called to fix the problem
147 /* Do not place this n-ring entries value into the tp struct itself,
151 * replace things like '% foo' with '& (foo - 1)'.
155 #define TG3_DEF_TX_RING_PENDING (TG3_TX_RING_SIZE - 1)
162 (sizeof(struct tg3_rx_buffer_desc) * (tp->rx_ret_ring_mask + 1))
165 #define NEXT_TX(N) (((N) + 1) & (TG3_TX_RING_SIZE - 1))
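The comment above is exactly why NEXT_TX() masks with (TG3_TX_RING_SIZE - 1) rather than using '%': for a power-of-two ring size the two are equivalent and the mask avoids a division on the hot path. A minimal standalone sketch of the idea (the DEMO_* names and the 512-entry size are illustrative, not the driver's definitions):

#include <assert.h>
#include <stdint.h>

#define DEMO_TX_RING_SIZE 512	/* must be a power of two */
#define DEMO_NEXT_TX(n)   (((n) + 1) & (DEMO_TX_RING_SIZE - 1))

int main(void)
{
	uint32_t last = DEMO_TX_RING_SIZE - 1;

	/* wraps back to slot 0 without a division */
	assert(DEMO_NEXT_TX(last) == 0);
	/* identical to the modulo form for any index */
	assert(DEMO_NEXT_TX(123) == (123 + 1) % DEMO_TX_RING_SIZE);
	return 0;
}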
198 #define TG3_RX_COPY_THRESH(tp) ((tp)->rx_copy_thresh)
202 #define TG3_RX_OFFSET(tp) ((tp)->rx_offset)
208 #define TG3_TX_WAKEUP_THRESH(tnapi) ((tnapi)->tx_pending / 4)
233 static int tg3_debug = -1; /* -1 == use TG3_DEF_MSG_ENABLE as value */
355 {PCI_DEVICE(0x10cf, 0x11a2)}, /* Fujitsu 1000base-SX with BCM5703SKHB */
473 writel(val, tp->regs + off); in tg3_write32()
478 return readl(tp->regs + off); in tg3_read32()
483 writel(val, tp->aperegs + off); in tg3_ape_write32()
488 return readl(tp->aperegs + off); in tg3_ape_read32()
495 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_write_indirect_reg32()
496 pci_write_config_dword(tp->pdev, TG3PCI_REG_BASE_ADDR, off); in tg3_write_indirect_reg32()
497 pci_write_config_dword(tp->pdev, TG3PCI_REG_DATA, val); in tg3_write_indirect_reg32()
498 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_write_indirect_reg32()
503 writel(val, tp->regs + off); in tg3_write_flush_reg32()
504 readl(tp->regs + off); in tg3_write_flush_reg32()
512 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_read_indirect_reg32()
513 pci_write_config_dword(tp->pdev, TG3PCI_REG_BASE_ADDR, off); in tg3_read_indirect_reg32()
514 pci_read_config_dword(tp->pdev, TG3PCI_REG_DATA, &val); in tg3_read_indirect_reg32()
515 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_read_indirect_reg32()
524 pci_write_config_dword(tp->pdev, TG3PCI_RCV_RET_RING_CON_IDX + in tg3_write_indirect_mbox()
529 pci_write_config_dword(tp->pdev, TG3PCI_STD_RING_PROD_IDX + in tg3_write_indirect_mbox()
534 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_write_indirect_mbox()
535 pci_write_config_dword(tp->pdev, TG3PCI_REG_BASE_ADDR, off + 0x5600); in tg3_write_indirect_mbox()
536 pci_write_config_dword(tp->pdev, TG3PCI_REG_DATA, val); in tg3_write_indirect_mbox()
537 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_write_indirect_mbox()
544 pci_write_config_dword(tp->pdev, TG3PCI_MISC_LOCAL_CTRL, in tg3_write_indirect_mbox()
545 tp->grc_local_ctrl|GRC_LCLCTRL_CLEARINT); in tg3_write_indirect_mbox()
554 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_read_indirect_mbox()
555 pci_write_config_dword(tp->pdev, TG3PCI_REG_BASE_ADDR, off + 0x5600); in tg3_read_indirect_mbox()
556 pci_read_config_dword(tp->pdev, TG3PCI_REG_DATA, &val); in tg3_read_indirect_mbox()
557 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_read_indirect_mbox()
569 /* Non-posted methods */ in _tw32_flush()
570 tp->write32(tp, off, val); in _tw32_flush()
576 tp->read32(tp, off); in _tw32_flush()
587 tp->write32_mbox(tp, off, val); in tw32_mailbox_flush()
591 tp->read32_mbox(tp, off); in tw32_mailbox_flush()
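Both flush helpers above rely on the same PCI property: a memory-mapped write may be posted (buffered) by bridges, but a read on the same path cannot complete until earlier writes have reached the device, so reading the register back acts as a barrier. A kernel-style sketch of the pattern, assuming an MMIO region already mapped at 'regs' (the helper name is illustrative):

#include <linux/io.h>
#include <linux/types.h>

/* Sketch only: force a posted MMIO write out to the device by issuing a
 * read on the same bus path before returning. */
static void demo_write_flush(void __iomem *regs, unsigned long off, u32 val)
{
	writel(val, regs + off);	/* may sit in a posting buffer */
	readl(regs + off);		/* read-back drains the buffer */
}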
596 void __iomem *mbox = tp->regs + off; in tg3_write32_tx_mbox()
607 return readl(tp->regs + off + GRCMBOX_BASE); in tg3_read32_mbox_5906()
612 writel(val, tp->regs + off + GRCMBOX_BASE); in tg3_write32_mbox_5906()
615 #define tw32_mailbox(reg, val) tp->write32_mbox(tp, reg, val)
617 #define tw32_rx_mbox(reg, val) tp->write32_rx_mbox(tp, reg, val)
618 #define tw32_tx_mbox(reg, val) tp->write32_tx_mbox(tp, reg, val)
619 #define tr32_mailbox(reg) tp->read32_mbox(tp, reg)
621 #define tw32(reg, val) tp->write32(tp, reg, val)
624 #define tr32(reg) tp->read32(tp, reg)
634 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_write_mem()
636 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, off); in tg3_write_mem()
637 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_DATA, val); in tg3_write_mem()
640 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, 0); in tg3_write_mem()
648 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_write_mem()
661 spin_lock_irqsave(&tp->indirect_lock, flags); in tg3_read_mem()
663 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, off); in tg3_read_mem()
664 pci_read_config_dword(tp->pdev, TG3PCI_MEM_WIN_DATA, val); in tg3_read_mem()
667 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, 0); in tg3_read_mem()
675 spin_unlock_irqrestore(&tp->indirect_lock, flags); in tg3_read_mem()
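tg3_write_mem() and tg3_read_mem() reach NIC-internal memory through a window in PCI configuration space: the target address goes into a base-address config register, the data moves through a data config register, and the window is closed again afterwards. The indirect_lock is held across the whole sequence so another CPU cannot interleave its own address/data pair. A condensed, schematic version of the write side (register names are the driver's; the helper itself is illustrative):

#include <linux/pci.h>
#include <linux/spinlock.h>
#include "tg3.h"	/* struct tg3, TG3PCI_MEM_WIN_* (assumed available) */

static void demo_indirect_mem_write(struct tg3 *tp, u32 off, u32 val)
{
	unsigned long flags;

	spin_lock_irqsave(&tp->indirect_lock, flags);
	pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, off);
	pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_DATA, val);
	/* always close the window so stray config cycles cannot land in
	 * NIC memory */
	pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, 0);
	spin_unlock_irqrestore(&tp->indirect_lock, flags);
}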
698 if (!tp->pci_fn) in tg3_ape_lock_init()
701 bit = 1 << tp->pci_fn; in tg3_ape_lock_init()
724 if (!tp->pci_fn) in tg3_ape_lock()
727 bit = 1 << tp->pci_fn; in tg3_ape_lock()
736 return -EINVAL; in tg3_ape_lock()
756 if (pci_channel_offline(tp->pdev)) in tg3_ape_lock()
765 ret = -EBUSY; in tg3_ape_lock()
785 if (!tp->pci_fn) in tg3_ape_unlock()
788 bit = 1 << tp->pci_fn; in tg3_ape_unlock()
814 return -EBUSY; in tg3_ape_event_lock()
823 timeout_us -= (timeout_us > 10) ? 10 : timeout_us; in tg3_ape_event_lock()
826 return timeout_us ? 0 : -EBUSY; in tg3_ape_event_lock()
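The countdown above subtracts at most 10 us per pass but clamps at zero so the unsigned counter never wraps, and the final line maps "timer expired" onto -EBUSY. A standalone illustration of that saturating countdown (the step size and the completion callback are stand-ins):

#include <stdio.h>

static int demo_poll(unsigned int timeout_us, int (*done)(void))
{
	while (timeout_us) {
		if (done())
			return 0;
		/* udelay(10) would sit here in the driver */
		timeout_us -= (timeout_us > 10) ? 10 : timeout_us;
	}
	return -1;	/* stands in for -EBUSY */
}

static int never_done(void) { return 0; }

int main(void)
{
	printf("%d\n", demo_poll(25, never_done));	/* prints -1 */
	return 0;
}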
857 return -ENODEV; in tg3_ape_scratchpad_read()
861 return -EAGAIN; in tg3_ape_scratchpad_read()
873 len -= length; in tg3_ape_scratchpad_read()
877 return -EAGAIN; in tg3_ape_scratchpad_read()
898 return -EAGAIN; in tg3_ape_scratchpad_read()
900 for (i = 0; length; i += 4, length -= 4) { in tg3_ape_scratchpad_read()
918 return -EAGAIN; in tg3_ape_send_event()
922 return -EAGAIN; in tg3_ape_send_event()
948 tg3_ape_write32(tp, TG3_APE_HOST_HEARTBEAT_COUNT, tp->ape_hb++); in tg3_ape_driver_state_change()
965 if (device_may_wakeup(&tp->pdev->dev) && in tg3_ape_driver_state_change()
991 time_before(jiffies, tp->ape_hb_jiffies + interval)) in tg3_send_ape_heartbeat()
994 tg3_ape_write32(tp, TG3_APE_HOST_HEARTBEAT_COUNT, tp->ape_hb++); in tg3_send_ape_heartbeat()
995 tp->ape_hb_jiffies = jiffies; in tg3_send_ape_heartbeat()
1003 (tp->misc_host_ctrl | MISC_HOST_CTRL_MASK_PCI_INT)); in tg3_disable_ints()
1004 for (i = 0; i < tp->irq_max; i++) in tg3_disable_ints()
1005 tw32_mailbox_f(tp->napi[i].int_mbox, 0x00000001); in tg3_disable_ints()
1012 tp->irq_sync = 0; in tg3_enable_ints()
1016 (tp->misc_host_ctrl & ~MISC_HOST_CTRL_MASK_PCI_INT)); in tg3_enable_ints()
1018 tp->coal_now = tp->coalesce_mode | HOSTCC_MODE_ENABLE; in tg3_enable_ints()
1019 for (i = 0; i < tp->irq_cnt; i++) { in tg3_enable_ints()
1020 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_enable_ints()
1022 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_enable_ints()
1024 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_enable_ints()
1026 tp->coal_now |= tnapi->coal_now; in tg3_enable_ints()
1031 (tp->napi[0].hw_status->status & SD_STATUS_UPDATED)) in tg3_enable_ints()
1032 tw32(GRC_LOCAL_CTRL, tp->grc_local_ctrl | GRC_LCLCTRL_SETINT); in tg3_enable_ints()
1034 tw32(HOSTCC_MODE, tp->coal_now); in tg3_enable_ints()
1036 tp->coal_now &= ~(tp->napi[0].coal_now | tp->napi[1].coal_now); in tg3_enable_ints()
1041 struct tg3 *tp = tnapi->tp; in tg3_has_work()
1042 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_has_work()
1047 if (sblk->status & SD_STATUS_LINK_CHG) in tg3_has_work()
1052 if (sblk->idx[0].tx_consumer != tnapi->tx_cons) in tg3_has_work()
1056 if (tnapi->rx_rcb_prod_idx && in tg3_has_work()
1057 *(tnapi->rx_rcb_prod_idx) != tnapi->rx_rcb_ptr) in tg3_has_work()
1070 struct tg3 *tp = tnapi->tp; in tg3_int_reenable()
1072 tw32_mailbox(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_int_reenable()
1079 tw32(HOSTCC_MODE, tp->coalesce_mode | in tg3_int_reenable()
1080 HOSTCC_MODE_ENABLE | tnapi->coal_now); in tg3_int_reenable()
1097 tp->pci_clock_ctrl = clock_ctrl; in tg3_switch_clocks()
1125 if ((tp->mi_mode & MAC_MI_MODE_AUTO_POLL) != 0) { in __tg3_readphy()
1127 (tp->mi_mode & ~MAC_MI_MODE_AUTO_POLL)); in __tg3_readphy()
1131 tg3_ape_lock(tp, tp->phy_ape_lock); in __tg3_readphy()
1153 loops -= 1; in __tg3_readphy()
1156 ret = -EBUSY; in __tg3_readphy()
1162 if ((tp->mi_mode & MAC_MI_MODE_AUTO_POLL) != 0) { in __tg3_readphy()
1163 tw32_f(MAC_MI_MODE, tp->mi_mode); in __tg3_readphy()
1167 tg3_ape_unlock(tp, tp->phy_ape_lock); in __tg3_readphy()
1174 return __tg3_readphy(tp, tp->phy_addr, reg, val); in tg3_readphy()
1184 if ((tp->phy_flags & TG3_PHYFLG_IS_FET) && in __tg3_writephy()
1188 if ((tp->mi_mode & MAC_MI_MODE_AUTO_POLL) != 0) { in __tg3_writephy()
1190 (tp->mi_mode & ~MAC_MI_MODE_AUTO_POLL)); in __tg3_writephy()
1194 tg3_ape_lock(tp, tp->phy_ape_lock); in __tg3_writephy()
1214 loops -= 1; in __tg3_writephy()
1217 ret = -EBUSY; in __tg3_writephy()
1221 if ((tp->mi_mode & MAC_MI_MODE_AUTO_POLL) != 0) { in __tg3_writephy()
1222 tw32_f(MAC_MI_MODE, tp->mi_mode); in __tg3_writephy()
1226 tg3_ape_unlock(tp, tp->phy_ape_lock); in __tg3_writephy()
1233 return __tg3_writephy(tp, tp->phy_addr, reg, val); in tg3_writephy()
1363 return -EBUSY; in tg3_bmcr_reset()
1366 while (limit--) { in tg3_bmcr_reset()
1369 return -EBUSY; in tg3_bmcr_reset()
1378 return -EBUSY; in tg3_bmcr_reset()
1385 struct tg3 *tp = bp->priv; in tg3_mdio_read()
1388 spin_lock_bh(&tp->lock); in tg3_mdio_read()
1391 val = -EIO; in tg3_mdio_read()
1393 spin_unlock_bh(&tp->lock); in tg3_mdio_read()
1400 struct tg3 *tp = bp->priv; in tg3_mdio_write()
1403 spin_lock_bh(&tp->lock); in tg3_mdio_write()
1406 ret = -EIO; in tg3_mdio_write()
1408 spin_unlock_bh(&tp->lock); in tg3_mdio_write()
1418 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_mdio_config_5785()
1419 switch (phydev->drv->phy_id & phydev->drv->phy_id_mask) { in tg3_mdio_config_5785()
1437 if (phydev->interface != PHY_INTERFACE_MODE_RGMII) { in tg3_mdio_config_5785()
1496 tp->mi_mode &= ~MAC_MI_MODE_AUTO_POLL; in tg3_mdio_start()
1497 tw32_f(MAC_MI_MODE, tp->mi_mode); in tg3_mdio_start()
1514 tp->phy_addr = tp->pci_fn + 1; in tg3_mdio_init()
1522 tp->phy_addr += 7; in tg3_mdio_init()
1526 addr = ssb_gige_get_phyaddr(tp->pdev); in tg3_mdio_init()
1529 tp->phy_addr = addr; in tg3_mdio_init()
1531 tp->phy_addr = TG3_PHY_MII_ADDR; in tg3_mdio_init()
1538 tp->mdio_bus = mdiobus_alloc(); in tg3_mdio_init()
1539 if (tp->mdio_bus == NULL) in tg3_mdio_init()
1540 return -ENOMEM; in tg3_mdio_init()
1542 tp->mdio_bus->name = "tg3 mdio bus"; in tg3_mdio_init()
1543 snprintf(tp->mdio_bus->id, MII_BUS_ID_SIZE, "%x", pci_dev_id(tp->pdev)); in tg3_mdio_init()
1544 tp->mdio_bus->priv = tp; in tg3_mdio_init()
1545 tp->mdio_bus->parent = &tp->pdev->dev; in tg3_mdio_init()
1546 tp->mdio_bus->read = &tg3_mdio_read; in tg3_mdio_init()
1547 tp->mdio_bus->write = &tg3_mdio_write; in tg3_mdio_init()
1548 tp->mdio_bus->phy_mask = ~(1 << tp->phy_addr); in tg3_mdio_init()
1558 i = mdiobus_register(tp->mdio_bus); in tg3_mdio_init()
1560 dev_warn(&tp->pdev->dev, "mdiobus_reg failed (0x%x)\n", i); in tg3_mdio_init()
1561 mdiobus_free(tp->mdio_bus); in tg3_mdio_init()
1565 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_mdio_init()
1567 if (!phydev || !phydev->drv) { in tg3_mdio_init()
1568 dev_warn(&tp->pdev->dev, "No PHY devices\n"); in tg3_mdio_init()
1569 mdiobus_unregister(tp->mdio_bus); in tg3_mdio_init()
1570 mdiobus_free(tp->mdio_bus); in tg3_mdio_init()
1571 return -ENODEV; in tg3_mdio_init()
1574 switch (phydev->drv->phy_id & phydev->drv->phy_id_mask) { in tg3_mdio_init()
1576 phydev->interface = PHY_INTERFACE_MODE_GMII; in tg3_mdio_init()
1577 phydev->dev_flags |= PHY_BRCM_AUTO_PWRDWN_ENABLE; in tg3_mdio_init()
1581 phydev->dev_flags |= PHY_BRCM_CLEAR_RGMII_MODE | in tg3_mdio_init()
1587 phydev->interface = PHY_INTERFACE_MODE_RGMII; in tg3_mdio_init()
1591 phydev->interface = PHY_INTERFACE_MODE_MII; in tg3_mdio_init()
1592 phydev->dev_flags |= PHY_BRCM_AUTO_PWRDWN_ENABLE; in tg3_mdio_init()
1593 tp->phy_flags |= TG3_PHYFLG_IS_FET; in tg3_mdio_init()
1609 mdiobus_unregister(tp->mdio_bus); in tg3_mdio_fini()
1610 mdiobus_free(tp->mdio_bus); in tg3_mdio_fini()
1614 /* tp->lock is held. */
1623 tp->last_event_jiffies = jiffies; in tg3_generate_fw_event()
1628 /* tp->lock is held. */
1636 time_remain = (long)(tp->last_event_jiffies + 1 + in tg3_wait_for_event_ack()
1637 usecs_to_jiffies(TG3_FW_EVENT_TIMEOUT_USEC)) - in tg3_wait_for_event_ack()
1651 if (pci_channel_offline(tp->pdev)) in tg3_wait_for_event_ack()
1658 /* tp->lock is held. */
1678 if (!(tp->phy_flags & TG3_PHYFLG_MII_SERDES)) { in tg3_phy_gather_ump_data()
1693 /* tp->lock is held. */
1715 /* tp->lock is held. */
1731 /* tp->lock is held. */
1760 /* tp->lock is held. */
1781 /* tp->lock is held. */
1825 if (pci_channel_offline(tp->pdev)) in tg3_poll_fw()
1826 return -ENODEV; in tg3_poll_fw()
1830 return -ENODEV; in tg3_poll_fw()
1838 if (pci_channel_offline(tp->pdev)) { in tg3_poll_fw()
1841 netdev_info(tp->dev, "No firmware running\n"); in tg3_poll_fw()
1858 netdev_info(tp->dev, "No firmware running\n"); in tg3_poll_fw()
1873 if (!netif_carrier_ok(tp->dev)) { in tg3_link_report()
1874 netif_info(tp, link, tp->dev, "Link is down\n"); in tg3_link_report()
1877 netdev_info(tp->dev, "Link is up at %d Mbps, %s duplex\n", in tg3_link_report()
1878 (tp->link_config.active_speed == SPEED_1000 ? in tg3_link_report()
1880 (tp->link_config.active_speed == SPEED_100 ? in tg3_link_report()
1882 (tp->link_config.active_duplex == DUPLEX_FULL ? in tg3_link_report()
1885 netdev_info(tp->dev, "Flow control is %s for TX and %s for RX\n", in tg3_link_report()
1886 (tp->link_config.active_flowctrl & FLOW_CTRL_TX) ? in tg3_link_report()
1888 (tp->link_config.active_flowctrl & FLOW_CTRL_RX) ? in tg3_link_report()
1891 if (tp->phy_flags & TG3_PHYFLG_EEE_CAP) in tg3_link_report()
1892 netdev_info(tp->dev, "EEE is %s\n", in tg3_link_report()
1893 tp->setlpicnt ? "enabled" : "disabled"); in tg3_link_report()
1898 tp->link_up = netif_carrier_ok(tp->dev); in tg3_link_report()
1965 u32 old_rx_mode = tp->rx_mode; in tg3_setup_flow_control()
1966 u32 old_tx_mode = tp->tx_mode; in tg3_setup_flow_control()
1969 autoneg = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr)->autoneg; in tg3_setup_flow_control()
1971 autoneg = tp->link_config.autoneg; in tg3_setup_flow_control()
1974 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) in tg3_setup_flow_control()
1979 flowctrl = tp->link_config.flowctrl; in tg3_setup_flow_control()
1981 tp->link_config.active_flowctrl = flowctrl; in tg3_setup_flow_control()
1984 tp->rx_mode |= RX_MODE_FLOW_CTRL_ENABLE; in tg3_setup_flow_control()
1986 tp->rx_mode &= ~RX_MODE_FLOW_CTRL_ENABLE; in tg3_setup_flow_control()
1988 if (old_rx_mode != tp->rx_mode) in tg3_setup_flow_control()
1989 tw32_f(MAC_RX_MODE, tp->rx_mode); in tg3_setup_flow_control()
1992 tp->tx_mode |= TX_MODE_FLOW_CTRL_ENABLE; in tg3_setup_flow_control()
1994 tp->tx_mode &= ~TX_MODE_FLOW_CTRL_ENABLE; in tg3_setup_flow_control()
1996 if (old_tx_mode != tp->tx_mode) in tg3_setup_flow_control()
1997 tw32_f(MAC_TX_MODE, tp->tx_mode); in tg3_setup_flow_control()
2005 struct phy_device *phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_adjust_link()
2007 spin_lock_bh(&tp->lock); in tg3_adjust_link()
2009 mac_mode = tp->mac_mode & ~(MAC_MODE_PORT_MODE_MASK | in tg3_adjust_link()
2012 oldflowctrl = tp->link_config.active_flowctrl; in tg3_adjust_link()
2014 if (phydev->link) { in tg3_adjust_link()
2018 if (phydev->speed == SPEED_100 || phydev->speed == SPEED_10) in tg3_adjust_link()
2020 else if (phydev->speed == SPEED_1000 || in tg3_adjust_link()
2026 if (phydev->duplex == DUPLEX_HALF) in tg3_adjust_link()
2030 tp->link_config.flowctrl); in tg3_adjust_link()
2032 if (phydev->pause) in tg3_adjust_link()
2034 if (phydev->asym_pause) in tg3_adjust_link()
2042 if (mac_mode != tp->mac_mode) { in tg3_adjust_link()
2043 tp->mac_mode = mac_mode; in tg3_adjust_link()
2044 tw32_f(MAC_MODE, tp->mac_mode); in tg3_adjust_link()
2049 if (phydev->speed == SPEED_10) in tg3_adjust_link()
2057 if (phydev->speed == SPEED_1000 && phydev->duplex == DUPLEX_HALF) in tg3_adjust_link()
2068 if (phydev->link != tp->old_link || in tg3_adjust_link()
2069 phydev->speed != tp->link_config.active_speed || in tg3_adjust_link()
2070 phydev->duplex != tp->link_config.active_duplex || in tg3_adjust_link()
2071 oldflowctrl != tp->link_config.active_flowctrl) in tg3_adjust_link()
2074 tp->old_link = phydev->link; in tg3_adjust_link()
2075 tp->link_config.active_speed = phydev->speed; in tg3_adjust_link()
2076 tp->link_config.active_duplex = phydev->duplex; in tg3_adjust_link()
2078 spin_unlock_bh(&tp->lock); in tg3_adjust_link()
2088 if (tp->phy_flags & TG3_PHYFLG_IS_CONNECTED) in tg3_phy_init()
2094 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_phy_init()
2097 phydev = phy_connect(tp->dev, phydev_name(phydev), in tg3_phy_init()
2098 tg3_adjust_link, phydev->interface); in tg3_phy_init()
2100 dev_err(&tp->pdev->dev, "Could not attach to PHY\n"); in tg3_phy_init()
2105 switch (phydev->interface) { in tg3_phy_init()
2108 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_init()
2119 phy_disconnect(mdiobus_get_phy(tp->mdio_bus, tp->phy_addr)); in tg3_phy_init()
2120 return -EINVAL; in tg3_phy_init()
2123 tp->phy_flags |= TG3_PHYFLG_IS_CONNECTED; in tg3_phy_init()
2134 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_phy_start()
2137 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_phy_start()
2139 if (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) { in tg3_phy_start()
2140 tp->phy_flags &= ~TG3_PHYFLG_IS_LOW_POWER; in tg3_phy_start()
2141 phydev->speed = tp->link_config.speed; in tg3_phy_start()
2142 phydev->duplex = tp->link_config.duplex; in tg3_phy_start()
2143 phydev->autoneg = tp->link_config.autoneg; in tg3_phy_start()
2145 phydev->advertising, tp->link_config.advertising); in tg3_phy_start()
2155 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_phy_stop()
2158 phy_stop(mdiobus_get_phy(tp->mdio_bus, tp->phy_addr)); in tg3_phy_stop()
2163 if (tp->phy_flags & TG3_PHYFLG_IS_CONNECTED) { in tg3_phy_fini()
2164 phy_disconnect(mdiobus_get_phy(tp->mdio_bus, tp->phy_addr)); in tg3_phy_fini()
2165 tp->phy_flags &= ~TG3_PHYFLG_IS_CONNECTED; in tg3_phy_fini()
2174 if (tp->phy_flags & TG3_PHYFLG_IS_FET) in tg3_phy_set_extloopbk()
2177 if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5401) { in tg3_phy_set_extloopbk()
2178 /* Cannot do read-modify-write on 5401 */ in tg3_phy_set_extloopbk()
2225 (tp->phy_flags & TG3_PHYFLG_MII_SERDES))) in tg3_phy_toggle_apd()
2228 if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_phy_toggle_apd()
2255 (tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) in tg3_phy_toggle_automdix()
2258 if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_phy_toggle_automdix()
2296 if (tp->phy_flags & TG3_PHYFLG_NO_ETH_WIRE_SPEED) in tg3_phy_set_wirespeed()
2309 if (!tp->phy_otp) in tg3_phy_apply_otp()
2312 otp = tp->phy_otp; in tg3_phy_apply_otp()
2345 struct ethtool_keee *dest = &tp->eee; in tg3_eee_pull_config()
2347 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) in tg3_eee_pull_config()
2359 dest->eee_active = 1; in tg3_eee_pull_config()
2361 dest->eee_active = 0; in tg3_eee_pull_config()
2366 mii_eee_cap1_mod_linkmode_t(dest->lp_advertised, val); in tg3_eee_pull_config()
2371 dest->eee_enabled = !!val; in tg3_eee_pull_config()
2372 mii_eee_cap1_mod_linkmode_t(dest->advertised, val); in tg3_eee_pull_config()
2376 dest->tx_lpi_enabled = !!(val & TG3_CPMU_EEEMD_LPI_IN_TX); in tg3_eee_pull_config()
2379 dest->tx_lpi_timer = tr32(TG3_CPMU_EEE_DBTMR1) & 0xffff; in tg3_eee_pull_config()
2386 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) in tg3_phy_eee_adjust()
2389 tp->setlpicnt = 0; in tg3_phy_eee_adjust()
2391 if (tp->link_config.autoneg == AUTONEG_ENABLE && in tg3_phy_eee_adjust()
2393 tp->link_config.active_duplex == DUPLEX_FULL && in tg3_phy_eee_adjust()
2394 (tp->link_config.active_speed == SPEED_100 || in tg3_phy_eee_adjust()
2395 tp->link_config.active_speed == SPEED_1000)) { in tg3_phy_eee_adjust()
2398 if (tp->link_config.active_speed == SPEED_1000) in tg3_phy_eee_adjust()
2406 if (tp->eee.eee_active) in tg3_phy_eee_adjust()
2407 tp->setlpicnt = 2; in tg3_phy_eee_adjust()
2410 if (!tp->setlpicnt) { in tg3_phy_eee_adjust()
2426 if (tp->link_config.active_speed == SPEED_1000 && in tg3_phy_eee_enable()
2445 while (limit--) { in tg3_wait_macro_done()
2454 return -EBUSY; in tg3_wait_macro_done()
2483 return -EBUSY; in tg3_phy_write_and_check_testpat()
2491 return -EBUSY; in tg3_phy_write_and_check_testpat()
2497 return -EBUSY; in tg3_phy_write_and_check_testpat()
2507 return -EBUSY; in tg3_phy_write_and_check_testpat()
2517 return -EBUSY; in tg3_phy_write_and_check_testpat()
2539 return -EBUSY; in tg3_phy_reset_chanpat()
2567 /* Set full-duplex, 1000 mbps. */ in tg3_phy_reset_5703_4_5()
2588 } while (--retries); in tg3_phy_reset_5703_4_5()
2615 netif_carrier_off(tp->dev); in tg3_carrier_off()
2616 tp->link_up = false; in tg3_carrier_off()
2622 netdev_warn(tp->dev, in tg3_warn_mgmt_link_flap()
2623 "Management side-band traffic will be interrupted during phy settings change\n"); in tg3_warn_mgmt_link_flap()
2627 * link unless the FORCE argument is non-zero.
2642 return -EBUSY; in tg3_phy_reset()
2644 if (netif_running(tp->dev) && tp->link_up) { in tg3_phy_reset()
2645 netif_carrier_off(tp->dev); in tg3_phy_reset()
2690 (tp->phy_flags & TG3_PHYFLG_MII_SERDES)) in tg3_phy_reset()
2695 if (tp->phy_flags & TG3_PHYFLG_ENABLE_APD) in tg3_phy_reset()
2701 if ((tp->phy_flags & TG3_PHYFLG_ADC_BUG) && in tg3_phy_reset()
2708 if (tp->phy_flags & TG3_PHYFLG_5704_A0_BUG) { in tg3_phy_reset()
2713 if (tp->phy_flags & TG3_PHYFLG_BER_BUG) { in tg3_phy_reset()
2720 } else if (tp->phy_flags & TG3_PHYFLG_JITTER_BUG) { in tg3_phy_reset()
2723 if (tp->phy_flags & TG3_PHYFLG_ADJUST_TRIM) { in tg3_phy_reset()
2736 if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5401) { in tg3_phy_reset()
2737 /* Cannot do read-modify-write on 5401 */ in tg3_phy_reset()
2740 /* Set bit 14 with read-modify-write to preserve other bits */ in tg3_phy_reset()
2796 shift = TG3_APE_GPIO_MSG_SHIFT + 4 * tp->pci_fn; in tg3_set_function_status()
2818 return -EIO; in tg3_pwrsrc_switch_to_vmain()
2822 tw32_wait_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl, in tg3_pwrsrc_switch_to_vmain()
2827 tw32_wait_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl, in tg3_pwrsrc_switch_to_vmain()
2843 grc_local_ctrl = tp->grc_local_ctrl | GRC_LCLCTRL_GPIO_OE1; in tg3_pwrsrc_die_with_vmain()
2865 tw32_wait_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl | in tg3_pwrsrc_switch_to_vaux()
2872 } else if (tp->pdev->device == PCI_DEVICE_ID_TIGON3_5761 || in tg3_pwrsrc_switch_to_vaux()
2873 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5761S) { in tg3_pwrsrc_switch_to_vaux()
2874 /* The 5761 non-e device swaps GPIO 0 and GPIO 2. */ in tg3_pwrsrc_switch_to_vaux()
2880 tp->grc_local_ctrl; in tg3_pwrsrc_switch_to_vaux()
2898 tw32_wait_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl | in tg3_pwrsrc_switch_to_vaux()
2904 no_gpio2 = tp->nic_sram_data_cfg & in tg3_pwrsrc_switch_to_vaux()
2917 tp->grc_local_ctrl | grc_local_ctrl, in tg3_pwrsrc_switch_to_vaux()
2923 tp->grc_local_ctrl | grc_local_ctrl, in tg3_pwrsrc_switch_to_vaux()
2929 tp->grc_local_ctrl | grc_local_ctrl, in tg3_pwrsrc_switch_to_vaux()
2976 if (tp->pdev_peer && tp->pdev_peer != tp->pdev) { in tg3_frob_aux_power()
2979 dev_peer = pci_get_drvdata(tp->pdev_peer); in tg3_frob_aux_power()
3006 if (tp->led_ctrl == LED_CTRL_MODE_PHY_2) in tg3_5700_link_polarity()
3008 else if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5411) { in tg3_5700_link_polarity()
3024 if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) in tg3_phy_power_bug()
3028 if (!tp->pci_fn) in tg3_phy_power_bug()
3033 if ((tp->phy_flags & TG3_PHYFLG_PHY_SERDES) && in tg3_phy_power_bug()
3034 !tp->pci_fn) in tg3_phy_power_bug()
3047 if ((tp->phy_flags & TG3_PHYFLG_MII_SERDES) && in tg3_phy_led_bug()
3048 !tp->pci_fn) in tg3_phy_led_bug()
3060 if (tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN) in tg3_power_down_phy()
3063 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) { in tg3_power_down_phy()
3082 } else if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_power_down_phy()
3130 /* tp->lock is held. */
3136 if (tp->nvram_lock_cnt == 0) { in tg3_nvram_lock()
3145 return -ENODEV; in tg3_nvram_lock()
3148 tp->nvram_lock_cnt++; in tg3_nvram_lock()
3153 /* tp->lock is held. */
3157 if (tp->nvram_lock_cnt > 0) in tg3_nvram_unlock()
3158 tp->nvram_lock_cnt--; in tg3_nvram_unlock()
3159 if (tp->nvram_lock_cnt == 0) in tg3_nvram_unlock()
3164 /* tp->lock is held. */
3174 /* tp->lock is held. */
3191 return -EINVAL; in tg3_nvram_read_using_eeprom()
3211 return -EBUSY; in tg3_nvram_read_using_eeprom()
3240 return -EBUSY; in tg3_nvram_exec_cmd()
3251 (tp->nvram_jedecnum == JEDEC_ATMEL)) in tg3_nvram_phys_addr()
3253 addr = ((addr / tp->nvram_pagesize) << in tg3_nvram_phys_addr()
3255 (addr % tp->nvram_pagesize); in tg3_nvram_phys_addr()
3266 (tp->nvram_jedecnum == JEDEC_ATMEL)) in tg3_nvram_logical_addr()
3269 tp->nvram_pagesize) + in tg3_nvram_logical_addr()
3270 (addr & ((1 << ATMEL_AT45DB0X1B_PAGE_POS) - 1)); in tg3_nvram_logical_addr()
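This pair of helpers converts between a flat byte offset and the page/offset form that buffered Atmel parts use: tg3_nvram_phys_addr() divides by the (non-power-of-two) page size, shifts the page index into the page-number field and keeps the remainder as the in-page offset, while tg3_nvram_logical_addr() undoes it. A standalone worked example of the forward direction, assuming the AT45DB0x1B values of a 264-byte page and a page field starting at bit 9:

#include <assert.h>
#include <stdint.h>

#define DEMO_PAGESIZE 264	/* assumed buffered-flash page size */
#define DEMO_PAGE_POS 9		/* assumed bit position of the page number */

static uint32_t demo_phys_addr(uint32_t addr)
{
	return ((addr / DEMO_PAGESIZE) << DEMO_PAGE_POS) +
	       (addr % DEMO_PAGESIZE);
}

int main(void)
{
	/* offset 1000 = page 3, byte 208 -> (3 << 9) + 208 = 1744 */
	assert(demo_phys_addr(1000) == 1744);
	return 0;
}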
3279 * machine, the 32-bit value will be byteswapped.
3291 return -EINVAL; in tg3_nvram_read()
3364 rc = -EBUSY; in tg3_nvram_write_block_using_eeprom()
3377 u32 pagesize = tp->nvram_pagesize; in tg3_nvram_write_block_unbuffered()
3378 u32 pagemask = pagesize - 1; in tg3_nvram_write_block_unbuffered()
3384 return -ENOMEM; in tg3_nvram_write_block_unbuffered()
3406 len -= size; in tg3_nvram_write_block_unbuffered()
3410 offset = offset + (pagesize - page_off); in tg3_nvram_write_block_unbuffered()
3452 else if (j == (pagesize - 4)) in tg3_nvram_write_block_unbuffered()
3484 page_off = offset % tp->nvram_pagesize; in tg3_nvram_write_block_buffered()
3492 if (page_off == (tp->nvram_pagesize - 4)) in tg3_nvram_write_block_buffered()
3495 if (i == (len - 4)) in tg3_nvram_write_block_buffered()
3505 (tp->nvram_jedecnum == JEDEC_ST) && in tg3_nvram_write_block_buffered()
3532 tw32_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl & in tg3_nvram_write_block()
3569 tw32_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl); in tg3_nvram_write_block()
3581 /* tp->lock is held. */
3592 if (pci_channel_offline(tp->pdev)) in tg3_pause_cpu()
3593 return -EBUSY; in tg3_pause_cpu()
3596 return (i == iters) ? -EBUSY : 0; in tg3_pause_cpu()
3599 /* tp->lock is held. */
3611 /* tp->lock is held. */
3617 /* tp->lock is held. */
3624 /* tp->lock is held. */
3630 /* tp->lock is held. */
3657 netdev_err(tp->dev, "%s timed out, %s CPU\n", in tg3_halt_cpu()
3659 return -ENODEV; in tg3_halt_cpu()
3677 * tp->fw->size minus headers. in tg3_fw_data_len()
3687 if (tp->fw_len == 0xffffffff) in tg3_fw_data_len()
3688 fw_len = be32_to_cpu(fw_hdr->len); in tg3_fw_data_len()
3690 fw_len = tp->fw->size; in tg3_fw_data_len()
3692 return (fw_len - TG3_FW_HDR_LEN) / sizeof(u32); in tg3_fw_data_len()
3695 /* tp->lock is held. */
3702 int total_len = tp->fw->size; in tg3_load_firmware_cpu()
3705 netdev_err(tp->dev, in tg3_load_firmware_cpu()
3708 return -EINVAL; in tg3_load_firmware_cpu()
3736 total_len -= TG3_FW_HDR_LEN; in tg3_load_firmware_cpu()
3744 (be32_to_cpu(fw_hdr->base_addr) & 0xffff) + in tg3_load_firmware_cpu()
3748 total_len -= be32_to_cpu(fw_hdr->len); in tg3_load_firmware_cpu()
3752 ((void *)fw_hdr + be32_to_cpu(fw_hdr->len)); in tg3_load_firmware_cpu()
3761 /* tp->lock is held. */
3779 return (i == iters) ? -EBUSY : 0; in tg3_pause_cpu_and_set_pc()
3782 /* tp->lock is held. */
3788 fw_hdr = (struct tg3_firmware_hdr *)tp->fw->data; in tg3_load_5701_a0_firmware_fix()
3792 length = end_address_of_bss - start_address_of_text. in tg3_load_5701_a0_firmware_fix()
3810 be32_to_cpu(fw_hdr->base_addr)); in tg3_load_5701_a0_firmware_fix()
3812 netdev_err(tp->dev, "%s fails to set RX CPU PC, is %08x " in tg3_load_5701_a0_firmware_fix()
3815 be32_to_cpu(fw_hdr->base_addr)); in tg3_load_5701_a0_firmware_fix()
3816 return -ENODEV; in tg3_load_5701_a0_firmware_fix()
3841 netdev_err(tp->dev, "Boot code not ready for service patches\n"); in tg3_validate_rxcpu_state()
3842 return -EBUSY; in tg3_validate_rxcpu_state()
3847 netdev_warn(tp->dev, in tg3_validate_rxcpu_state()
3849 return -EEXIST; in tg3_validate_rxcpu_state()
3855 /* tp->lock is held. */
3866 if (!tp->fw) in tg3_load_57766_firmware()
3871 * data to be written to non-contiguous locations. in tg3_load_57766_firmware()
3883 fw_hdr = (struct tg3_firmware_hdr *)tp->fw->data; in tg3_load_57766_firmware()
3884 if (be32_to_cpu(fw_hdr->base_addr) != TG3_57766_FW_BASE_ADDR) in tg3_load_57766_firmware()
3896 /* tp->lock is held. */
3906 fw_hdr = (struct tg3_firmware_hdr *)tp->fw->data; in tg3_load_tso_firmware()
3910 length = end_address_of_bss - start_address_of_text. in tg3_load_tso_firmware()
3914 cpu_scratch_size = tp->fw_len; in tg3_load_tso_firmware()
3933 be32_to_cpu(fw_hdr->base_addr)); in tg3_load_tso_firmware()
3935 netdev_err(tp->dev, in tg3_load_tso_firmware()
3938 be32_to_cpu(fw_hdr->base_addr)); in tg3_load_tso_firmware()
3939 return -ENODEV; in tg3_load_tso_firmware()
3946 /* tp->lock is held. */
3960 index -= 4; in __tg3_set_one_mac_addr()
3966 /* tp->lock is held. */
3975 __tg3_set_one_mac_addr(tp, tp->dev->dev_addr, i); in __tg3_set_mac_addr()
3981 __tg3_set_one_mac_addr(tp, tp->dev->dev_addr, i); in __tg3_set_mac_addr()
3984 addr_high = (tp->dev->dev_addr[0] + in __tg3_set_mac_addr()
3985 tp->dev->dev_addr[1] + in __tg3_set_mac_addr()
3986 tp->dev->dev_addr[2] + in __tg3_set_mac_addr()
3987 tp->dev->dev_addr[3] + in __tg3_set_mac_addr()
3988 tp->dev->dev_addr[4] + in __tg3_set_mac_addr()
3989 tp->dev->dev_addr[5]) & in __tg3_set_mac_addr()
4000 pci_write_config_dword(tp->pdev, in tg3_enable_register_access()
4001 TG3PCI_MISC_HOST_CTRL, tp->misc_host_ctrl); in tg3_enable_register_access()
4010 err = pci_set_power_state(tp->pdev, PCI_D0); in tg3_power_up()
4015 netdev_err(tp->dev, "Transition to D0 failed\n"); in tg3_power_up()
4032 pcie_capability_set_word(tp->pdev, PCI_EXP_LNKCTL, in tg3_power_down_prepare()
4039 device_should_wake = device_may_wakeup(&tp->pdev->dev) && in tg3_power_down_prepare()
4044 if ((tp->phy_flags & TG3_PHYFLG_IS_CONNECTED) && in tg3_power_down_prepare()
4045 !(tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER)) { in tg3_power_down_prepare()
4050 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_power_down_prepare()
4052 tp->phy_flags |= TG3_PHYFLG_IS_LOW_POWER; in tg3_power_down_prepare()
4054 tp->link_config.speed = phydev->speed; in tg3_power_down_prepare()
4055 tp->link_config.duplex = phydev->duplex; in tg3_power_down_prepare()
4056 tp->link_config.autoneg = phydev->autoneg; in tg3_power_down_prepare()
4058 &tp->link_config.advertising, in tg3_power_down_prepare()
4059 phydev->advertising); in tg3_power_down_prepare()
4083 linkmode_copy(phydev->advertising, advertising); in tg3_power_down_prepare()
4086 phyid = phydev->drv->phy_id & phydev->drv->phy_id_mask; in tg3_power_down_prepare()
4098 if (!(tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER)) in tg3_power_down_prepare()
4099 tp->phy_flags |= TG3_PHYFLG_IS_LOW_POWER; in tg3_power_down_prepare()
4101 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) in tg3_power_down_prepare()
4130 if (!(tp->phy_flags & TG3_PHYFLG_PHY_SERDES)) { in tg3_power_down_prepare()
4132 !(tp->phy_flags & TG3_PHYFLG_IS_FET)) { in tg3_power_down_prepare()
4141 if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) in tg3_power_down_prepare()
4143 else if (tp->phy_flags & in tg3_power_down_prepare()
4145 if (tp->link_config.active_speed == SPEED_1000) in tg3_power_down_prepare()
4152 mac_mode |= tp->mac_mode & MAC_MODE_LINK_POLARITY; in tg3_power_down_prepare()
4166 tw32(MAC_LED_CTRL, tp->led_ctrl); in tg3_power_down_prepare()
4190 base_val = tp->pci_clock_ctrl; in tg3_power_down_prepare()
4217 tw32_wait_f(TG3PCI_CLOCK_CTRL, tp->pci_clock_ctrl | newbits1, in tg3_power_down_prepare()
4220 tw32_wait_f(TG3PCI_CLOCK_CTRL, tp->pci_clock_ctrl | newbits2, in tg3_power_down_prepare()
4236 tp->pci_clock_ctrl | newbits3, 40); in tg3_power_down_prepare()
4272 pci_wake_from_d3(tp->pdev, tg3_flag(tp, WOL_ENABLE)); in tg3_power_down()
4273 pci_set_power_state(tp->pdev, PCI_D3hot); in tg3_power_down()
4310 if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_aux_stat_to_speed_duplex()
4336 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_autoneg_cfg()
4348 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) in tg3_phy_autoneg_cfg()
4358 if (!tp->eee.eee_enabled) in tg3_phy_autoneg_cfg()
4363 mii_eee_cap1_mod_linkmode_t(tp->eee.advertised, val); in tg3_phy_autoneg_cfg()
4398 if (tp->link_config.autoneg == AUTONEG_ENABLE || in tg3_phy_copper_begin()
4399 (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER)) { in tg3_phy_copper_begin()
4402 if ((tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) && in tg3_phy_copper_begin()
4403 !(tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN)) { in tg3_phy_copper_begin()
4409 if (tp->phy_flags & TG3_PHYFLG_1G_ON_VAUX_OK) { in tg3_phy_copper_begin()
4410 if (!(tp->phy_flags & in tg3_phy_copper_begin()
4418 adv = tp->link_config.advertising; in tg3_phy_copper_begin()
4419 if (tp->phy_flags & TG3_PHYFLG_10_100_ONLY) in tg3_phy_copper_begin()
4423 fc = tp->link_config.flowctrl; in tg3_phy_copper_begin()
4428 if ((tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) && in tg3_phy_copper_begin()
4429 (tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN)) { in tg3_phy_copper_begin()
4443 tp->link_config.active_speed = tp->link_config.speed; in tg3_phy_copper_begin()
4444 tp->link_config.active_duplex = tp->link_config.duplex; in tg3_phy_copper_begin()
4455 switch (tp->link_config.speed) { in tg3_phy_copper_begin()
4469 if (tp->link_config.duplex == DUPLEX_FULL) in tg3_phy_copper_begin()
4503 tp->link_config.autoneg = AUTONEG_DISABLE; in tg3_phy_pull_config()
4504 tp->link_config.advertising = 0; in tg3_phy_pull_config()
4507 err = -EIO; in tg3_phy_pull_config()
4511 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) in tg3_phy_pull_config()
4514 tp->link_config.speed = SPEED_10; in tg3_phy_pull_config()
4517 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) in tg3_phy_pull_config()
4520 tp->link_config.speed = SPEED_100; in tg3_phy_pull_config()
4523 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_pull_config()
4524 tp->link_config.speed = SPEED_1000; in tg3_phy_pull_config()
4533 tp->link_config.duplex = DUPLEX_FULL; in tg3_phy_pull_config()
4535 tp->link_config.duplex = DUPLEX_HALF; in tg3_phy_pull_config()
4537 tp->link_config.flowctrl = FLOW_CTRL_RX | FLOW_CTRL_TX; in tg3_phy_pull_config()
4543 tp->link_config.autoneg = AUTONEG_ENABLE; in tg3_phy_pull_config()
4544 tp->link_config.advertising = ADVERTISED_Autoneg; in tg3_phy_pull_config()
4547 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) { in tg3_phy_pull_config()
4555 tp->link_config.advertising |= adv | ADVERTISED_TP; in tg3_phy_pull_config()
4557 tp->link_config.flowctrl = tg3_decode_flowctrl_1000T(val); in tg3_phy_pull_config()
4559 tp->link_config.advertising |= ADVERTISED_FIBRE; in tg3_phy_pull_config()
4562 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_pull_config()
4565 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) { in tg3_phy_pull_config()
4577 tp->link_config.flowctrl = adv; in tg3_phy_pull_config()
4583 tp->link_config.advertising |= adv; in tg3_phy_pull_config()
4613 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) in tg3_phy_eee_config_ok()
4618 if (tp->eee.eee_enabled) { in tg3_phy_eee_config_ok()
4619 if (!linkmode_equal(tp->eee.advertised, eee.advertised) || in tg3_phy_eee_config_ok()
4620 tp->eee.tx_lpi_timer != eee.tx_lpi_timer || in tg3_phy_eee_config_ok()
4621 tp->eee.tx_lpi_enabled != eee.tx_lpi_enabled) in tg3_phy_eee_config_ok()
4636 advertising = tp->link_config.advertising; in tg3_phy_copper_an_config_ok()
4640 if (tp->link_config.active_duplex == DUPLEX_FULL) { in tg3_phy_copper_an_config_ok()
4641 tgtadv |= mii_advertise_flowctrl(tp->link_config.flowctrl); in tg3_phy_copper_an_config_ok()
4651 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_copper_an_config_ok()
4680 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_copper_fetch_rmtadv()
4693 tp->link_config.rmt_adv = lpeth; in tg3_phy_copper_fetch_rmtadv()
4700 if (curr_link_up != tp->link_up) { in tg3_test_and_report_link_chg()
4702 netif_carrier_on(tp->dev); in tg3_test_and_report_link_chg()
4704 netif_carrier_off(tp->dev); in tg3_test_and_report_link_chg()
4705 if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) in tg3_test_and_report_link_chg()
4706 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_test_and_report_link_chg()
4743 (tp->eee.tx_lpi_enabled ? TG3_CPMU_EEEMD_LPI_IN_TX : 0) | in tg3_setup_eee()
4753 tw32_f(TG3_CPMU_EEE_MODE, tp->eee.eee_enabled ? val : 0); in tg3_setup_eee()
4757 (tp->eee.tx_lpi_timer & 0xffff)); in tg3_setup_eee()
4775 if ((tp->mi_mode & MAC_MI_MODE_AUTO_POLL) != 0) { in tg3_setup_copper_phy()
4777 (tp->mi_mode & ~MAC_MI_MODE_AUTO_POLL)); in tg3_setup_copper_phy()
4783 /* Some third-party PHYs need to be reset on link going in tg3_setup_copper_phy()
4789 tp->link_up) { in tg3_setup_copper_phy()
4798 if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5401) { in tg3_setup_copper_phy()
4819 if ((tp->phy_id & TG3_PHY_ID_REV_MASK) == in tg3_setup_copper_phy()
4822 tp->link_config.active_speed == SPEED_1000) { in tg3_setup_copper_phy()
4843 if (tp->phy_flags & TG3_PHYFLG_USE_MI_INTERRUPT) in tg3_setup_copper_phy()
4845 else if (!(tp->phy_flags & TG3_PHYFLG_IS_FET)) in tg3_setup_copper_phy()
4850 if (tp->led_ctrl == LED_CTRL_MODE_PHY_1) in tg3_setup_copper_phy()
4860 tp->phy_flags &= ~TG3_PHYFLG_MDIX_STATE; in tg3_setup_copper_phy()
4861 tp->link_config.rmt_adv = 0; in tg3_setup_copper_phy()
4863 if (tp->phy_flags & TG3_PHYFLG_CAPACITIVE_COUPLING) { in tg3_setup_copper_phy()
4912 tp->link_config.active_speed = current_speed; in tg3_setup_copper_phy()
4913 tp->link_config.active_duplex = current_duplex; in tg3_setup_copper_phy()
4915 if (tp->link_config.autoneg == AUTONEG_ENABLE) { in tg3_setup_copper_phy()
4929 (tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN) && in tg3_setup_copper_phy()
4936 tp->link_config.speed == current_speed && in tg3_setup_copper_phy()
4937 tp->link_config.duplex == current_duplex) { in tg3_setup_copper_phy()
4943 tp->link_config.active_duplex == DUPLEX_FULL) { in tg3_setup_copper_phy()
4946 if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_setup_copper_phy()
4955 tp->phy_flags |= TG3_PHYFLG_MDIX_STATE; in tg3_setup_copper_phy()
4962 if (!current_link_up || (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER)) { in tg3_setup_copper_phy()
4970 tp->link_config.active_speed = current_speed; in tg3_setup_copper_phy()
4971 tp->link_config.active_duplex = current_duplex; in tg3_setup_copper_phy()
4976 (tp->mac_mode & MAC_MODE_PORT_INT_LPBACK)) in tg3_setup_copper_phy()
4980 tp->mac_mode &= ~MAC_MODE_PORT_MODE_MASK; in tg3_setup_copper_phy()
4982 if (tp->link_config.active_speed == SPEED_100 || in tg3_setup_copper_phy()
4983 tp->link_config.active_speed == SPEED_10) in tg3_setup_copper_phy()
4984 tp->mac_mode |= MAC_MODE_PORT_MODE_MII; in tg3_setup_copper_phy()
4986 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_setup_copper_phy()
4987 } else if (tp->phy_flags & TG3_PHYFLG_IS_FET) in tg3_setup_copper_phy()
4988 tp->mac_mode |= MAC_MODE_PORT_MODE_MII; in tg3_setup_copper_phy()
4990 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_setup_copper_phy()
4999 if (tp->link_config.active_speed == SPEED_10) in tg3_setup_copper_phy()
5001 else if (tp->link_config.active_speed == SPEED_100) in tg3_setup_copper_phy()
5004 else if (tp->link_config.active_speed == SPEED_1000) in tg3_setup_copper_phy()
5012 tp->mac_mode &= ~MAC_MODE_HALF_DUPLEX; in tg3_setup_copper_phy()
5013 if (tp->link_config.active_duplex == DUPLEX_HALF) in tg3_setup_copper_phy()
5014 tp->mac_mode |= MAC_MODE_HALF_DUPLEX; in tg3_setup_copper_phy()
5018 tg3_5700_link_polarity(tp, tp->link_config.active_speed)) in tg3_setup_copper_phy()
5019 tp->mac_mode |= MAC_MODE_LINK_POLARITY; in tg3_setup_copper_phy()
5021 tp->mac_mode &= ~MAC_MODE_LINK_POLARITY; in tg3_setup_copper_phy()
5027 if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5411 && in tg3_setup_copper_phy()
5029 tp->mi_mode |= MAC_MI_MODE_AUTO_POLL; in tg3_setup_copper_phy()
5030 tw32_f(MAC_MI_MODE, tp->mi_mode); in tg3_setup_copper_phy()
5034 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_copper_phy()
5049 tp->link_config.active_speed == SPEED_1000 && in tg3_setup_copper_phy()
5063 if (tp->link_config.active_speed == SPEED_100 || in tg3_setup_copper_phy()
5064 tp->link_config.active_speed == SPEED_10) in tg3_setup_copper_phy()
5065 pcie_capability_clear_word(tp->pdev, PCI_EXP_LNKCTL, in tg3_setup_copper_phy()
5068 pcie_capability_set_word(tp->pdev, PCI_EXP_LNKCTL, in tg3_setup_copper_phy()
5137 #define ANEG_FAILED -1
5149 if (ap->state == ANEG_STATE_UNKNOWN) { in tg3_fiber_aneg_smachine()
5150 ap->rxconfig = 0; in tg3_fiber_aneg_smachine()
5151 ap->link_time = 0; in tg3_fiber_aneg_smachine()
5152 ap->cur_time = 0; in tg3_fiber_aneg_smachine()
5153 ap->ability_match_cfg = 0; in tg3_fiber_aneg_smachine()
5154 ap->ability_match_count = 0; in tg3_fiber_aneg_smachine()
5155 ap->ability_match = 0; in tg3_fiber_aneg_smachine()
5156 ap->idle_match = 0; in tg3_fiber_aneg_smachine()
5157 ap->ack_match = 0; in tg3_fiber_aneg_smachine()
5159 ap->cur_time++; in tg3_fiber_aneg_smachine()
5164 if (rx_cfg_reg != ap->ability_match_cfg) { in tg3_fiber_aneg_smachine()
5165 ap->ability_match_cfg = rx_cfg_reg; in tg3_fiber_aneg_smachine()
5166 ap->ability_match = 0; in tg3_fiber_aneg_smachine()
5167 ap->ability_match_count = 0; in tg3_fiber_aneg_smachine()
5169 if (++ap->ability_match_count > 1) { in tg3_fiber_aneg_smachine()
5170 ap->ability_match = 1; in tg3_fiber_aneg_smachine()
5171 ap->ability_match_cfg = rx_cfg_reg; in tg3_fiber_aneg_smachine()
5175 ap->ack_match = 1; in tg3_fiber_aneg_smachine()
5177 ap->ack_match = 0; in tg3_fiber_aneg_smachine()
5179 ap->idle_match = 0; in tg3_fiber_aneg_smachine()
5181 ap->idle_match = 1; in tg3_fiber_aneg_smachine()
5182 ap->ability_match_cfg = 0; in tg3_fiber_aneg_smachine()
5183 ap->ability_match_count = 0; in tg3_fiber_aneg_smachine()
5184 ap->ability_match = 0; in tg3_fiber_aneg_smachine()
5185 ap->ack_match = 0; in tg3_fiber_aneg_smachine()
5190 ap->rxconfig = rx_cfg_reg; in tg3_fiber_aneg_smachine()
5193 switch (ap->state) { in tg3_fiber_aneg_smachine()
5195 if (ap->flags & (MR_AN_ENABLE | MR_RESTART_AN)) in tg3_fiber_aneg_smachine()
5196 ap->state = ANEG_STATE_AN_ENABLE; in tg3_fiber_aneg_smachine()
5200 ap->flags &= ~(MR_AN_COMPLETE | MR_PAGE_RX); in tg3_fiber_aneg_smachine()
5201 if (ap->flags & MR_AN_ENABLE) { in tg3_fiber_aneg_smachine()
5202 ap->link_time = 0; in tg3_fiber_aneg_smachine()
5203 ap->cur_time = 0; in tg3_fiber_aneg_smachine()
5204 ap->ability_match_cfg = 0; in tg3_fiber_aneg_smachine()
5205 ap->ability_match_count = 0; in tg3_fiber_aneg_smachine()
5206 ap->ability_match = 0; in tg3_fiber_aneg_smachine()
5207 ap->idle_match = 0; in tg3_fiber_aneg_smachine()
5208 ap->ack_match = 0; in tg3_fiber_aneg_smachine()
5210 ap->state = ANEG_STATE_RESTART_INIT; in tg3_fiber_aneg_smachine()
5212 ap->state = ANEG_STATE_DISABLE_LINK_OK; in tg3_fiber_aneg_smachine()
5217 ap->link_time = ap->cur_time; in tg3_fiber_aneg_smachine()
5218 ap->flags &= ~(MR_NP_LOADED); in tg3_fiber_aneg_smachine()
5219 ap->txconfig = 0; in tg3_fiber_aneg_smachine()
5221 tp->mac_mode |= MAC_MODE_SEND_CONFIGS; in tg3_fiber_aneg_smachine()
5222 tw32_f(MAC_MODE, tp->mac_mode); in tg3_fiber_aneg_smachine()
5226 ap->state = ANEG_STATE_RESTART; in tg3_fiber_aneg_smachine()
5230 delta = ap->cur_time - ap->link_time; in tg3_fiber_aneg_smachine()
5232 ap->state = ANEG_STATE_ABILITY_DETECT_INIT; in tg3_fiber_aneg_smachine()
5242 ap->flags &= ~(MR_TOGGLE_TX); in tg3_fiber_aneg_smachine()
5243 ap->txconfig = ANEG_CFG_FD; in tg3_fiber_aneg_smachine()
5244 flowctrl = tg3_advert_flowctrl_1000X(tp->link_config.flowctrl); in tg3_fiber_aneg_smachine()
5246 ap->txconfig |= ANEG_CFG_PS1; in tg3_fiber_aneg_smachine()
5248 ap->txconfig |= ANEG_CFG_PS2; in tg3_fiber_aneg_smachine()
5249 tw32(MAC_TX_AUTO_NEG, ap->txconfig); in tg3_fiber_aneg_smachine()
5250 tp->mac_mode |= MAC_MODE_SEND_CONFIGS; in tg3_fiber_aneg_smachine()
5251 tw32_f(MAC_MODE, tp->mac_mode); in tg3_fiber_aneg_smachine()
5254 ap->state = ANEG_STATE_ABILITY_DETECT; in tg3_fiber_aneg_smachine()
5258 if (ap->ability_match != 0 && ap->rxconfig != 0) in tg3_fiber_aneg_smachine()
5259 ap->state = ANEG_STATE_ACK_DETECT_INIT; in tg3_fiber_aneg_smachine()
5263 ap->txconfig |= ANEG_CFG_ACK; in tg3_fiber_aneg_smachine()
5264 tw32(MAC_TX_AUTO_NEG, ap->txconfig); in tg3_fiber_aneg_smachine()
5265 tp->mac_mode |= MAC_MODE_SEND_CONFIGS; in tg3_fiber_aneg_smachine()
5266 tw32_f(MAC_MODE, tp->mac_mode); in tg3_fiber_aneg_smachine()
5269 ap->state = ANEG_STATE_ACK_DETECT; in tg3_fiber_aneg_smachine()
5273 if (ap->ack_match != 0) { in tg3_fiber_aneg_smachine()
5274 if ((ap->rxconfig & ~ANEG_CFG_ACK) == in tg3_fiber_aneg_smachine()
5275 (ap->ability_match_cfg & ~ANEG_CFG_ACK)) { in tg3_fiber_aneg_smachine()
5276 ap->state = ANEG_STATE_COMPLETE_ACK_INIT; in tg3_fiber_aneg_smachine()
5278 ap->state = ANEG_STATE_AN_ENABLE; in tg3_fiber_aneg_smachine()
5280 } else if (ap->ability_match != 0 && in tg3_fiber_aneg_smachine()
5281 ap->rxconfig == 0) { in tg3_fiber_aneg_smachine()
5282 ap->state = ANEG_STATE_AN_ENABLE; in tg3_fiber_aneg_smachine()
5287 if (ap->rxconfig & ANEG_CFG_INVAL) { in tg3_fiber_aneg_smachine()
5291 ap->flags &= ~(MR_LP_ADV_FULL_DUPLEX | in tg3_fiber_aneg_smachine()
5300 if (ap->rxconfig & ANEG_CFG_FD) in tg3_fiber_aneg_smachine()
5301 ap->flags |= MR_LP_ADV_FULL_DUPLEX; in tg3_fiber_aneg_smachine()
5302 if (ap->rxconfig & ANEG_CFG_HD) in tg3_fiber_aneg_smachine()
5303 ap->flags |= MR_LP_ADV_HALF_DUPLEX; in tg3_fiber_aneg_smachine()
5304 if (ap->rxconfig & ANEG_CFG_PS1) in tg3_fiber_aneg_smachine()
5305 ap->flags |= MR_LP_ADV_SYM_PAUSE; in tg3_fiber_aneg_smachine()
5306 if (ap->rxconfig & ANEG_CFG_PS2) in tg3_fiber_aneg_smachine()
5307 ap->flags |= MR_LP_ADV_ASYM_PAUSE; in tg3_fiber_aneg_smachine()
5308 if (ap->rxconfig & ANEG_CFG_RF1) in tg3_fiber_aneg_smachine()
5309 ap->flags |= MR_LP_ADV_REMOTE_FAULT1; in tg3_fiber_aneg_smachine()
5310 if (ap->rxconfig & ANEG_CFG_RF2) in tg3_fiber_aneg_smachine()
5311 ap->flags |= MR_LP_ADV_REMOTE_FAULT2; in tg3_fiber_aneg_smachine()
5312 if (ap->rxconfig & ANEG_CFG_NP) in tg3_fiber_aneg_smachine()
5313 ap->flags |= MR_LP_ADV_NEXT_PAGE; in tg3_fiber_aneg_smachine()
5315 ap->link_time = ap->cur_time; in tg3_fiber_aneg_smachine()
5317 ap->flags ^= (MR_TOGGLE_TX); in tg3_fiber_aneg_smachine()
5318 if (ap->rxconfig & 0x0008) in tg3_fiber_aneg_smachine()
5319 ap->flags |= MR_TOGGLE_RX; in tg3_fiber_aneg_smachine()
5320 if (ap->rxconfig & ANEG_CFG_NP) in tg3_fiber_aneg_smachine()
5321 ap->flags |= MR_NP_RX; in tg3_fiber_aneg_smachine()
5322 ap->flags |= MR_PAGE_RX; in tg3_fiber_aneg_smachine()
5324 ap->state = ANEG_STATE_COMPLETE_ACK; in tg3_fiber_aneg_smachine()
5329 if (ap->ability_match != 0 && in tg3_fiber_aneg_smachine()
5330 ap->rxconfig == 0) { in tg3_fiber_aneg_smachine()
5331 ap->state = ANEG_STATE_AN_ENABLE; in tg3_fiber_aneg_smachine()
5334 delta = ap->cur_time - ap->link_time; in tg3_fiber_aneg_smachine()
5336 if (!(ap->flags & (MR_LP_ADV_NEXT_PAGE))) { in tg3_fiber_aneg_smachine()
5337 ap->state = ANEG_STATE_IDLE_DETECT_INIT; in tg3_fiber_aneg_smachine()
5339 if ((ap->txconfig & ANEG_CFG_NP) == 0 && in tg3_fiber_aneg_smachine()
5340 !(ap->flags & MR_NP_RX)) { in tg3_fiber_aneg_smachine()
5341 ap->state = ANEG_STATE_IDLE_DETECT_INIT; in tg3_fiber_aneg_smachine()
5350 ap->link_time = ap->cur_time; in tg3_fiber_aneg_smachine()
5351 tp->mac_mode &= ~MAC_MODE_SEND_CONFIGS; in tg3_fiber_aneg_smachine()
5352 tw32_f(MAC_MODE, tp->mac_mode); in tg3_fiber_aneg_smachine()
5355 ap->state = ANEG_STATE_IDLE_DETECT; in tg3_fiber_aneg_smachine()
5360 if (ap->ability_match != 0 && in tg3_fiber_aneg_smachine()
5361 ap->rxconfig == 0) { in tg3_fiber_aneg_smachine()
5362 ap->state = ANEG_STATE_AN_ENABLE; in tg3_fiber_aneg_smachine()
5365 delta = ap->cur_time - ap->link_time; in tg3_fiber_aneg_smachine()
5368 ap->state = ANEG_STATE_LINK_OK; in tg3_fiber_aneg_smachine()
5373 ap->flags |= (MR_AN_COMPLETE | MR_LINK_OK); in tg3_fiber_aneg_smachine()
5403 tmp = tp->mac_mode & ~MAC_MODE_PORT_MODE_MASK; in fiber_autoneg()
5407 tw32_f(MAC_MODE, tp->mac_mode | MAC_MODE_SEND_CONFIGS); in fiber_autoneg()
5423 tp->mac_mode &= ~MAC_MODE_SEND_CONFIGS; in fiber_autoneg()
5424 tw32_f(MAC_MODE, tp->mac_mode); in fiber_autoneg()
5462 /* Enable auto-lock and comdet, select txclk for tx. */ in tg3_init_bcm8002()
5507 /* preserve bits 0-11,13,14 for signal pre-emphasis */ in tg3_setup_fiber_hw_autoneg()
5508 /* preserve bits 20-23 for voltage regulator */ in tg3_setup_fiber_hw_autoneg()
5514 if (tp->link_config.autoneg != AUTONEG_ENABLE) { in tg3_setup_fiber_hw_autoneg()
5535 /* Want auto-negotiation. */ in tg3_setup_fiber_hw_autoneg()
5538 flowctrl = tg3_advert_flowctrl_1000X(tp->link_config.flowctrl); in tg3_setup_fiber_hw_autoneg()
5545 if ((tp->phy_flags & TG3_PHYFLG_PARALLEL_DETECT) && in tg3_setup_fiber_hw_autoneg()
5546 tp->serdes_counter && in tg3_setup_fiber_hw_autoneg()
5550 tp->serdes_counter--; in tg3_setup_fiber_hw_autoneg()
5561 tp->serdes_counter = SERDES_AN_TIMEOUT_5704S; in tg3_setup_fiber_hw_autoneg()
5562 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_setup_fiber_hw_autoneg()
5582 tp->link_config.rmt_adv = in tg3_setup_fiber_hw_autoneg()
5587 tp->serdes_counter = 0; in tg3_setup_fiber_hw_autoneg()
5588 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_setup_fiber_hw_autoneg()
5590 if (tp->serdes_counter) in tg3_setup_fiber_hw_autoneg()
5591 tp->serdes_counter--; in tg3_setup_fiber_hw_autoneg()
5607 /* Link parallel detection - link is up */ in tg3_setup_fiber_hw_autoneg()
5615 tp->phy_flags |= in tg3_setup_fiber_hw_autoneg()
5617 tp->serdes_counter = in tg3_setup_fiber_hw_autoneg()
5624 tp->serdes_counter = SERDES_AN_TIMEOUT_5704S; in tg3_setup_fiber_hw_autoneg()
5625 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_setup_fiber_hw_autoneg()
5639 if (tp->link_config.autoneg == AUTONEG_ENABLE) { in tg3_setup_fiber_by_hand()
5656 tp->link_config.rmt_adv = in tg3_setup_fiber_by_hand()
5686 tw32_f(MAC_MODE, (tp->mac_mode | MAC_MODE_SEND_CONFIGS)); in tg3_setup_fiber_by_hand()
5689 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_by_hand()
5706 orig_pause_cfg = tp->link_config.active_flowctrl; in tg3_setup_fiber_phy()
5707 orig_active_speed = tp->link_config.active_speed; in tg3_setup_fiber_phy()
5708 orig_active_duplex = tp->link_config.active_duplex; in tg3_setup_fiber_phy()
5711 tp->link_up && in tg3_setup_fiber_phy()
5728 tp->mac_mode &= ~(MAC_MODE_PORT_MODE_MASK | MAC_MODE_HALF_DUPLEX); in tg3_setup_fiber_phy()
5729 tp->mac_mode |= MAC_MODE_PORT_MODE_TBI; in tg3_setup_fiber_phy()
5730 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_phy()
5733 if (tp->phy_id == TG3_PHY_ID_BCM8002) in tg3_setup_fiber_phy()
5740 tp->link_config.rmt_adv = 0; in tg3_setup_fiber_phy()
5748 tp->napi[0].hw_status->status = in tg3_setup_fiber_phy()
5750 (tp->napi[0].hw_status->status & ~SD_STATUS_LINK_CHG)); in tg3_setup_fiber_phy()
5765 if (tp->link_config.autoneg == AUTONEG_ENABLE && in tg3_setup_fiber_phy()
5766 tp->serdes_counter == 0) { in tg3_setup_fiber_phy()
5767 tw32_f(MAC_MODE, (tp->mac_mode | in tg3_setup_fiber_phy()
5770 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_phy()
5775 tp->link_config.active_speed = SPEED_1000; in tg3_setup_fiber_phy()
5776 tp->link_config.active_duplex = DUPLEX_FULL; in tg3_setup_fiber_phy()
5777 tw32(MAC_LED_CTRL, (tp->led_ctrl | in tg3_setup_fiber_phy()
5781 tp->link_config.active_speed = SPEED_UNKNOWN; in tg3_setup_fiber_phy()
5782 tp->link_config.active_duplex = DUPLEX_UNKNOWN; in tg3_setup_fiber_phy()
5783 tw32(MAC_LED_CTRL, (tp->led_ctrl | in tg3_setup_fiber_phy()
5789 u32 now_pause_cfg = tp->link_config.active_flowctrl; in tg3_setup_fiber_phy()
5791 orig_active_speed != tp->link_config.active_speed || in tg3_setup_fiber_phy()
5792 orig_active_duplex != tp->link_config.active_duplex) in tg3_setup_fiber_phy()
5816 tp->mac_mode &= ~MAC_MODE_PORT_MODE_MASK; in tg3_setup_fiber_mii_phy()
5819 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_setup_fiber_mii_phy()
5824 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_setup_fiber_mii_phy()
5827 tp->mac_mode |= MAC_MODE_PORT_MODE_MII; in tg3_setup_fiber_mii_phy()
5830 tp->mac_mode |= MAC_MODE_PORT_MODE_MII; in tg3_setup_fiber_mii_phy()
5839 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_mii_phy()
5847 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_setup_fiber_mii_phy()
5848 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_mii_phy()
5856 tp->link_config.rmt_adv = 0; in tg3_setup_fiber_mii_phy()
5869 if ((tp->link_config.autoneg == AUTONEG_ENABLE) && !force_reset && in tg3_setup_fiber_mii_phy()
5870 (tp->phy_flags & TG3_PHYFLG_PARALLEL_DETECT)) { in tg3_setup_fiber_mii_phy()
5872 } else if (tp->link_config.autoneg == AUTONEG_ENABLE) { in tg3_setup_fiber_mii_phy()
5881 newadv |= tg3_advert_flowctrl_1000X(tp->link_config.flowctrl); in tg3_setup_fiber_mii_phy()
5882 newadv |= ethtool_adv_to_mii_adv_x(tp->link_config.advertising); in tg3_setup_fiber_mii_phy()
5890 tp->serdes_counter = SERDES_AN_TIMEOUT_5714S; in tg3_setup_fiber_mii_phy()
5891 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_setup_fiber_mii_phy()
5901 if (tp->link_config.duplex == DUPLEX_FULL) in tg3_setup_fiber_mii_phy()
5911 if (tp->link_up) { in tg3_setup_fiber_mii_phy()
5935 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_setup_fiber_mii_phy()
5963 tp->link_config.rmt_adv = in tg3_setup_fiber_mii_phy()
5977 tp->mac_mode &= ~MAC_MODE_HALF_DUPLEX; in tg3_setup_fiber_mii_phy()
5978 if (tp->link_config.active_duplex == DUPLEX_HALF) in tg3_setup_fiber_mii_phy()
5979 tp->mac_mode |= MAC_MODE_HALF_DUPLEX; in tg3_setup_fiber_mii_phy()
5981 tw32_f(MAC_MODE, tp->mac_mode); in tg3_setup_fiber_mii_phy()
5986 tp->link_config.active_speed = current_speed; in tg3_setup_fiber_mii_phy()
5987 tp->link_config.active_duplex = current_duplex; in tg3_setup_fiber_mii_phy()
5995 if (tp->serdes_counter) { in tg3_serdes_parallel_detect()
5997 tp->serdes_counter--; in tg3_serdes_parallel_detect()
6001 if (!tp->link_up && in tg3_serdes_parallel_detect()
6002 (tp->link_config.autoneg == AUTONEG_ENABLE)) { in tg3_serdes_parallel_detect()
6028 tp->phy_flags |= TG3_PHYFLG_PARALLEL_DETECT; in tg3_serdes_parallel_detect()
6031 } else if (tp->link_up && in tg3_serdes_parallel_detect()
6032 (tp->link_config.autoneg == AUTONEG_ENABLE) && in tg3_serdes_parallel_detect()
6033 (tp->phy_flags & TG3_PHYFLG_PARALLEL_DETECT)) { in tg3_serdes_parallel_detect()
6047 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_serdes_parallel_detect()
6058 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) in tg3_setup_phy()
6060 else if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) in tg3_setup_phy()
6089 if (tp->link_config.active_speed == SPEED_1000 && in tg3_setup_phy()
6090 tp->link_config.active_duplex == DUPLEX_HALF) in tg3_setup_phy()
6098 if (tp->link_up) { in tg3_setup_phy()
6100 tp->coal.stats_block_coalesce_usecs); in tg3_setup_phy()
6108 if (!tp->link_up) in tg3_setup_phy()
6110 tp->pwrmgmt_thresh; in tg3_setup_phy()
6119 /* tp->lock must be held */
6132 /* tp->lock must be held */
6149 info->so_timestamping = SOF_TIMESTAMPING_TX_SOFTWARE; in tg3_get_ts_info()
6152 info->so_timestamping |= SOF_TIMESTAMPING_TX_HARDWARE | in tg3_get_ts_info()
6157 if (tp->ptp_clock) in tg3_get_ts_info()
6158 info->phc_index = ptp_clock_index(tp->ptp_clock); in tg3_get_ts_info()
6160 info->tx_types = (1 << HWTSTAMP_TX_OFF) | (1 << HWTSTAMP_TX_ON); in tg3_get_ts_info()
6162 info->rx_filters = (1 << HWTSTAMP_FILTER_NONE) | in tg3_get_ts_info()
6202 tp->ptp_adjust += delta; in tg3_ptp_adjtime()
6216 ns += tp->ptp_adjust; in tg3_ptp_gettimex()
6234 tp->ptp_adjust = 0; in tg3_ptp_settime()
6247 switch (rq->type) { in tg3_ptp_enable()
6250 if (rq->perout.flags) in tg3_ptp_enable()
6251 return -EOPNOTSUPP; in tg3_ptp_enable()
6253 if (rq->perout.index != 0) in tg3_ptp_enable()
6254 return -EINVAL; in tg3_ptp_enable()
6263 nsec = rq->perout.start.sec * 1000000000ULL + in tg3_ptp_enable()
6264 rq->perout.start.nsec; in tg3_ptp_enable()
6266 if (rq->perout.period.sec || rq->perout.period.nsec) { in tg3_ptp_enable()
6267 netdev_warn(tp->dev, in tg3_ptp_enable()
6268 "Device supports only a one-shot timesync output, period must be 0\n"); in tg3_ptp_enable()
6269 rval = -EINVAL; in tg3_ptp_enable()
6274 netdev_warn(tp->dev, in tg3_ptp_enable()
6276 rval = -EINVAL; in tg3_ptp_enable()
6300 return -EOPNOTSUPP; in tg3_ptp_enable()
6307 timestamp->hwtstamp = ns_to_ktime((hwclock & TG3_TSTAMP_MASK) + in tg3_hwclock_to_timestamp()
6308 tp->ptp_adjust); in tg3_hwclock_to_timestamp()
6323 if (tp->ptp_txts_retrycnt > 2) in tg3_ptp_ts_aux_work()
6328 if (hwclock != tp->pre_tx_ts) { in tg3_ptp_ts_aux_work()
6330 skb_tstamp_tx(tp->tx_tstamp_skb, &timestamp); in tg3_ptp_ts_aux_work()
6333 tp->ptp_txts_retrycnt++; in tg3_ptp_ts_aux_work()
6336 dev_consume_skb_any(tp->tx_tstamp_skb); in tg3_ptp_ts_aux_work()
6337 tp->tx_tstamp_skb = NULL; in tg3_ptp_ts_aux_work()
6338 tp->ptp_txts_retrycnt = 0; in tg3_ptp_ts_aux_work()
6339 tp->pre_tx_ts = 0; in tg3_ptp_ts_aux_work()
6340 return -1; in tg3_ptp_ts_aux_work()
6360 /* tp->lock must be held */
6368 tp->ptp_adjust = 0; in tg3_ptp_init()
6369 tp->ptp_info = tg3_ptp_caps; in tg3_ptp_init()
6372 /* tp->lock must be held */
6378 tg3_refclk_write(tp, ktime_to_ns(ktime_get_real()) + tp->ptp_adjust); in tg3_ptp_resume()
6379 tp->ptp_adjust = 0; in tg3_ptp_resume()
6384 if (!tg3_flag(tp, PTP_CAPABLE) || !tp->ptp_clock) in tg3_ptp_fini()
6387 ptp_clock_unregister(tp->ptp_clock); in tg3_ptp_fini()
6388 tp->ptp_clock = NULL; in tg3_ptp_fini()
6389 tp->ptp_adjust = 0; in tg3_ptp_fini()
6390 dev_consume_skb_any(tp->tx_tstamp_skb); in tg3_ptp_fini()
6391 tp->tx_tstamp_skb = NULL; in tg3_ptp_fini()
6396 return tp->irq_sync; in tg3_irq_sync()
6466 if (tp->pdev->error_state != pci_channel_io_normal) { in tg3_dump_state()
6467 netdev_err(tp->dev, "PCI channel ERROR!\n"); in tg3_dump_state()
6487 netdev_err(tp->dev, "0x%08x: 0x%08x, 0x%08x, 0x%08x, 0x%08x\n", in tg3_dump_state()
6494 for (i = 0; i < tp->irq_cnt; i++) { in tg3_dump_state()
6495 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_dump_state()
6498 netdev_err(tp->dev, in tg3_dump_state()
6501 tnapi->hw_status->status, in tg3_dump_state()
6502 tnapi->hw_status->status_tag, in tg3_dump_state()
6503 tnapi->hw_status->rx_jumbo_consumer, in tg3_dump_state()
6504 tnapi->hw_status->rx_consumer, in tg3_dump_state()
6505 tnapi->hw_status->rx_mini_consumer, in tg3_dump_state()
6506 tnapi->hw_status->idx[0].rx_producer, in tg3_dump_state()
6507 tnapi->hw_status->idx[0].tx_consumer); in tg3_dump_state()
6509 netdev_err(tp->dev, in tg3_dump_state()
6512 tnapi->last_tag, tnapi->last_irq_tag, in tg3_dump_state()
6513 tnapi->tx_prod, tnapi->tx_cons, tnapi->tx_pending, in tg3_dump_state()
6514 tnapi->rx_rcb_ptr, in tg3_dump_state()
6515 tnapi->prodring.rx_std_prod_idx, in tg3_dump_state()
6516 tnapi->prodring.rx_std_cons_idx, in tg3_dump_state()
6517 tnapi->prodring.rx_jmb_prod_idx, in tg3_dump_state()
6518 tnapi->prodring.rx_jmb_cons_idx); in tg3_dump_state()
6522 /* This is called whenever we suspect that the system chipset is re-
6531 tp->write32_tx_mbox == tg3_write_indirect_mbox); in tg3_tx_recover()
6533 netdev_warn(tp->dev, in tg3_tx_recover()
6534 "The system may be re-ordering memory-mapped I/O " in tg3_tx_recover()
6546 return tnapi->tx_pending - in tg3_tx_avail()
6547 ((tnapi->tx_prod - tnapi->tx_cons) & (TG3_TX_RING_SIZE - 1)); in tg3_tx_avail()
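tg3_tx_avail() above relies on the TX ring size being a power of two: the in-flight count is (prod - cons) masked with (size - 1), which stays correct across index wrap-around, and the result is subtracted from the configured pending depth. A minimal stand-alone sketch of the same arithmetic, under illustrative names rather than the driver's own helpers:

/* Sketch: free entries in a power-of-two ring. 'pending' is the depth the
 * caller configured (at most ring_size - 1); the mask handles wrap-around.
 */
static unsigned int ring_space(unsigned int prod, unsigned int cons,
			       unsigned int ring_size, unsigned int pending)
{
	unsigned int used = (prod - cons) & (ring_size - 1);

	return pending - used;
}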
6550 /* Tigon3 never reports partial packet sends. So we do not
6556 struct tg3 *tp = tnapi->tp; in tg3_tx()
6557 u32 hw_idx = tnapi->hw_status->idx[0].tx_consumer; in tg3_tx()
6558 u32 sw_idx = tnapi->tx_cons; in tg3_tx()
6560 int index = tnapi - tp->napi; in tg3_tx()
6564 index--; in tg3_tx()
6566 txq = netdev_get_tx_queue(tp->dev, index); in tg3_tx()
6569 struct tg3_tx_ring_info *ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6571 struct sk_buff *skb = ri->skb; in tg3_tx()
6579 if (tnapi->tx_ring[sw_idx].len_flags & TXD_FLAG_HWTSTAMP) { in tg3_tx()
6584 if (hwclock != tp->pre_tx_ts) { in tg3_tx()
6587 tp->pre_tx_ts = 0; in tg3_tx()
6589 tp->tx_tstamp_skb = skb; in tg3_tx()
6594 dma_unmap_single(&tp->pdev->dev, dma_unmap_addr(ri, mapping), in tg3_tx()
6597 ri->skb = NULL; in tg3_tx()
6599 while (ri->fragmented) { in tg3_tx()
6600 ri->fragmented = false; in tg3_tx()
6602 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6607 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in tg3_tx()
6608 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6609 if (unlikely(ri->skb != NULL || sw_idx == hw_idx)) in tg3_tx()
6612 dma_unmap_page(&tp->pdev->dev, in tg3_tx()
6614 skb_frag_size(&skb_shinfo(skb)->frags[i]), in tg3_tx()
6617 while (ri->fragmented) { in tg3_tx()
6618 ri->fragmented = false; in tg3_tx()
6620 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6627 bytes_compl += skb->len; in tg3_tx()
6632 ptp_schedule_worker(tp->ptp_clock, 0); in tg3_tx()
6642 tnapi->tx_cons = sw_idx; in tg3_tx()
6674 if (!ri->data) in tg3_rx_data_free()
6677 dma_unmap_single(&tp->pdev->dev, dma_unmap_addr(ri, mapping), map_sz, in tg3_rx_data_free()
6679 tg3_frag_free(skb_size <= PAGE_SIZE, ri->data); in tg3_rx_data_free()
6680 ri->data = NULL; in tg3_rx_data_free()
6707 dest_idx = dest_idx_unmasked & tp->rx_std_ring_mask; in tg3_alloc_rx_data()
6708 desc = &tpr->rx_std[dest_idx]; in tg3_alloc_rx_data()
6709 map = &tpr->rx_std_buffers[dest_idx]; in tg3_alloc_rx_data()
6710 data_size = tp->rx_pkt_map_sz; in tg3_alloc_rx_data()
6714 dest_idx = dest_idx_unmasked & tp->rx_jmb_ring_mask; in tg3_alloc_rx_data()
6715 desc = &tpr->rx_jmb[dest_idx].std; in tg3_alloc_rx_data()
6716 map = &tpr->rx_jmb_buffers[dest_idx]; in tg3_alloc_rx_data()
6721 return -EINVAL; in tg3_alloc_rx_data()
6740 return -ENOMEM; in tg3_alloc_rx_data()
6742 mapping = dma_map_single(&tp->pdev->dev, data + TG3_RX_OFFSET(tp), in tg3_alloc_rx_data()
6744 if (unlikely(dma_mapping_error(&tp->pdev->dev, mapping))) { in tg3_alloc_rx_data()
6746 return -EIO; in tg3_alloc_rx_data()
6749 map->data = data; in tg3_alloc_rx_data()
6752 desc->addr_hi = ((u64)mapping >> 32); in tg3_alloc_rx_data()
6753 desc->addr_lo = ((u64)mapping & 0xffffffff); in tg3_alloc_rx_data()
6767 struct tg3 *tp = tnapi->tp; in tg3_recycle_rx()
6770 struct tg3_rx_prodring_set *spr = &tp->napi[0].prodring; in tg3_recycle_rx()
6775 dest_idx = dest_idx_unmasked & tp->rx_std_ring_mask; in tg3_recycle_rx()
6776 dest_desc = &dpr->rx_std[dest_idx]; in tg3_recycle_rx()
6777 dest_map = &dpr->rx_std_buffers[dest_idx]; in tg3_recycle_rx()
6778 src_desc = &spr->rx_std[src_idx]; in tg3_recycle_rx()
6779 src_map = &spr->rx_std_buffers[src_idx]; in tg3_recycle_rx()
6783 dest_idx = dest_idx_unmasked & tp->rx_jmb_ring_mask; in tg3_recycle_rx()
6784 dest_desc = &dpr->rx_jmb[dest_idx].std; in tg3_recycle_rx()
6785 dest_map = &dpr->rx_jmb_buffers[dest_idx]; in tg3_recycle_rx()
6786 src_desc = &spr->rx_jmb[src_idx].std; in tg3_recycle_rx()
6787 src_map = &spr->rx_jmb_buffers[src_idx]; in tg3_recycle_rx()
6794 dest_map->data = src_map->data; in tg3_recycle_rx()
6797 dest_desc->addr_hi = src_desc->addr_hi; in tg3_recycle_rx()
6798 dest_desc->addr_lo = src_desc->addr_lo; in tg3_recycle_rx()
6805 src_map->data = NULL; in tg3_recycle_rx()
6808 /* The RX ring scheme is composed of multiple rings which post fresh
6818 * Each ring the host uses to post buffers to the chip is described
6820 * it is first placed into the on-chip ram. When the packet's length
6827 * to the buffer post rings, and only the chip writes to the rx status
6828 * rings, then cache lines never move beyond shared-modified state.
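The comment above describes a split in which the host only ever writes the buffer-post (producer) rings and the chip only ever writes the return rings. The host-side refill step this implies is, in outline, the loop sketched below; the struct and the two callbacks are placeholders for illustration, not driver APIs.

/* Minimal model of a host-refilled producer ring (size is a power of two). */
struct rx_ring_model {
	unsigned int prod, cons, mask;		/* mask = ring_size - 1 */
};

static void refill_ring(struct rx_ring_model *r,
			int (*alloc_buf)(unsigned int idx),
			void (*post_prod)(unsigned int idx))
{
	while (((r->prod + 1) & r->mask) != r->cons) {	/* keep one slot free */
		if (alloc_buf(r->prod) < 0)
			break;				/* allocation failed: retry later */
		r->prod = (r->prod + 1) & r->mask;
	}
	post_prod(r->prod);				/* one mailbox write per batch */
}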
6834 struct tg3 *tp = tnapi->tp; in tg3_rx()
6837 u32 sw_idx = tnapi->rx_rcb_ptr; in tg3_rx()
6840 struct tg3_rx_prodring_set *tpr = &tnapi->prodring; in tg3_rx()
6842 hw_idx = *(tnapi->rx_rcb_prod_idx); in tg3_rx()
6850 std_prod_idx = tpr->rx_std_prod_idx; in tg3_rx()
6851 jmb_prod_idx = tpr->rx_jmb_prod_idx; in tg3_rx()
6854 struct tg3_rx_buffer_desc *desc = &tnapi->rx_rcb[sw_idx]; in tg3_rx()
6862 desc_idx = desc->opaque & RXD_OPAQUE_INDEX_MASK; in tg3_rx()
6863 opaque_key = desc->opaque & RXD_OPAQUE_RING_MASK; in tg3_rx()
6865 ri = &tp->napi[0].prodring.rx_std_buffers[desc_idx]; in tg3_rx()
6867 data = ri->data; in tg3_rx()
6871 ri = &tp->napi[0].prodring.rx_jmb_buffers[desc_idx]; in tg3_rx()
6873 data = ri->data; in tg3_rx()
6880 if (desc->err_vlan & RXD_ERR_MASK) { in tg3_rx()
6886 tnapi->rx_dropped++; in tg3_rx()
6891 len = ((desc->idx_len & RXD_LEN_MASK) >> RXD_LEN_SHIFT) - in tg3_rx()
6894 if ((desc->type_flags & RXD_FLAG_PTPSTAT_MASK) == in tg3_rx()
6896 (desc->type_flags & RXD_FLAG_PTPSTAT_MASK) == in tg3_rx()
6911 dma_unmap_single(&tp->pdev->dev, dma_addr, skb_size, in tg3_rx()
6919 ri->data = NULL; in tg3_rx()
6934 skb = netdev_alloc_skb(tp->dev, in tg3_rx()
6940 dma_sync_single_for_cpu(&tp->pdev->dev, dma_addr, len, in tg3_rx()
6942 memcpy(skb->data, in tg3_rx()
6945 dma_sync_single_for_device(&tp->pdev->dev, dma_addr, in tg3_rx()
6954 if ((tp->dev->features & NETIF_F_RXCSUM) && in tg3_rx()
6955 (desc->type_flags & RXD_FLAG_TCPUDP_CSUM) && in tg3_rx()
6956 (((desc->ip_tcp_csum & RXD_TCPCSUM_MASK) in tg3_rx()
6958 skb->ip_summed = CHECKSUM_UNNECESSARY; in tg3_rx()
6962 skb->protocol = eth_type_trans(skb, tp->dev); in tg3_rx()
6964 if (len > (tp->dev->mtu + ETH_HLEN) && in tg3_rx()
6965 skb->protocol != htons(ETH_P_8021Q) && in tg3_rx()
6966 skb->protocol != htons(ETH_P_8021AD)) { in tg3_rx()
6971 if (desc->type_flags & RXD_FLAG_VLAN && in tg3_rx()
6972 !(tp->rx_mode & RX_MODE_KEEP_VLAN_TAG)) in tg3_rx()
6974 desc->err_vlan & RXD_VLAN_MASK); in tg3_rx()
6976 napi_gro_receive(&tnapi->napi, skb); in tg3_rx()
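The small-packet branch above syncs the mapped buffer to the CPU, copies the payload into a freshly allocated skb, and syncs the buffer back so it can stay posted to the hardware; only larger packets hand the DMA buffer itself up the stack. A self-contained sketch of that copy pattern follows; the function and its parameters are illustrative, not the driver's.

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Illustrative small-packet copy: the RX buffer stays mapped and posted,
 * only its payload is duplicated into a new skb for the stack.
 */
static struct sk_buff *copy_small_packet(struct device *dev, struct net_device *ndev,
					 void *buf, dma_addr_t map, unsigned int len)
{
	struct sk_buff *skb = netdev_alloc_skb(ndev, len + NET_IP_ALIGN);

	if (!skb)
		return NULL;
	skb_reserve(skb, NET_IP_ALIGN);

	dma_sync_single_for_cpu(dev, map, len, DMA_FROM_DEVICE);
	skb_put_data(skb, buf, len);
	dma_sync_single_for_device(dev, map, len, DMA_FROM_DEVICE);

	return skb;
}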
6979 budget--; in tg3_rx()
6984 if (unlikely(rx_std_posted >= tp->rx_std_max_post)) { in tg3_rx()
6985 tpr->rx_std_prod_idx = std_prod_idx & in tg3_rx()
6986 tp->rx_std_ring_mask; in tg3_rx()
6988 tpr->rx_std_prod_idx); in tg3_rx()
6994 sw_idx &= tp->rx_ret_ring_mask; in tg3_rx()
6998 hw_idx = *(tnapi->rx_rcb_prod_idx); in tg3_rx()
7004 tnapi->rx_rcb_ptr = sw_idx; in tg3_rx()
7005 tw32_rx_mbox(tnapi->consmbox, sw_idx); in tg3_rx()
7013 tpr->rx_std_prod_idx = std_prod_idx & in tg3_rx()
7014 tp->rx_std_ring_mask; in tg3_rx()
7016 tpr->rx_std_prod_idx); in tg3_rx()
7019 tpr->rx_jmb_prod_idx = jmb_prod_idx & in tg3_rx()
7020 tp->rx_jmb_ring_mask; in tg3_rx()
7022 tpr->rx_jmb_prod_idx); in tg3_rx()
7030 tpr->rx_std_prod_idx = std_prod_idx & tp->rx_std_ring_mask; in tg3_rx()
7031 tpr->rx_jmb_prod_idx = jmb_prod_idx & tp->rx_jmb_ring_mask; in tg3_rx()
7033 if (tnapi != &tp->napi[1]) { in tg3_rx()
7034 tp->rx_refill = true; in tg3_rx()
7035 napi_schedule(&tp->napi[1].napi); in tg3_rx()
7046 struct tg3_hw_status *sblk = tp->napi[0].hw_status; in tg3_poll_link()
7048 if (sblk->status & SD_STATUS_LINK_CHG) { in tg3_poll_link()
7049 sblk->status = SD_STATUS_UPDATED | in tg3_poll_link()
7050 (sblk->status & ~SD_STATUS_LINK_CHG); in tg3_poll_link()
7051 spin_lock(&tp->lock); in tg3_poll_link()
7061 spin_unlock(&tp->lock); in tg3_poll_link()
7074 src_prod_idx = spr->rx_std_prod_idx; in tg3_rx_prodring_xfer()
7081 if (spr->rx_std_cons_idx == src_prod_idx) in tg3_rx_prodring_xfer()
7084 if (spr->rx_std_cons_idx < src_prod_idx) in tg3_rx_prodring_xfer()
7085 cpycnt = src_prod_idx - spr->rx_std_cons_idx; in tg3_rx_prodring_xfer()
7087 cpycnt = tp->rx_std_ring_mask + 1 - in tg3_rx_prodring_xfer()
7088 spr->rx_std_cons_idx; in tg3_rx_prodring_xfer()
7091 tp->rx_std_ring_mask + 1 - dpr->rx_std_prod_idx); in tg3_rx_prodring_xfer()
7093 si = spr->rx_std_cons_idx; in tg3_rx_prodring_xfer()
7094 di = dpr->rx_std_prod_idx; in tg3_rx_prodring_xfer()
7097 if (dpr->rx_std_buffers[i].data) { in tg3_rx_prodring_xfer()
7098 cpycnt = i - di; in tg3_rx_prodring_xfer()
7099 err = -ENOSPC; in tg3_rx_prodring_xfer()
7113 memcpy(&dpr->rx_std_buffers[di], in tg3_rx_prodring_xfer()
7114 &spr->rx_std_buffers[si], in tg3_rx_prodring_xfer()
7119 sbd = &spr->rx_std[si]; in tg3_rx_prodring_xfer()
7120 dbd = &dpr->rx_std[di]; in tg3_rx_prodring_xfer()
7121 dbd->addr_hi = sbd->addr_hi; in tg3_rx_prodring_xfer()
7122 dbd->addr_lo = sbd->addr_lo; in tg3_rx_prodring_xfer()
7125 spr->rx_std_cons_idx = (spr->rx_std_cons_idx + cpycnt) & in tg3_rx_prodring_xfer()
7126 tp->rx_std_ring_mask; in tg3_rx_prodring_xfer()
7127 dpr->rx_std_prod_idx = (dpr->rx_std_prod_idx + cpycnt) & in tg3_rx_prodring_xfer()
7128 tp->rx_std_ring_mask; in tg3_rx_prodring_xfer()
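The copy count used above is the smaller of the contiguous run still unread on the source ring and the contiguous run still free on the destination ring, so each memcpy stays inside one wrap. As a worked example with an assumed 512-entry ring (mask 511): source cons = 500 and source prod = 20 leaves 512 - 500 = 12 contiguous source entries, and a destination producer at 508 leaves 512 - 508 = 4 free before the wrap, so cpycnt becomes min(12, 4) = 4 and the remainder is picked up on a later pass of the surrounding loop.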
7132 src_prod_idx = spr->rx_jmb_prod_idx; in tg3_rx_prodring_xfer()
7139 if (spr->rx_jmb_cons_idx == src_prod_idx) in tg3_rx_prodring_xfer()
7142 if (spr->rx_jmb_cons_idx < src_prod_idx) in tg3_rx_prodring_xfer()
7143 cpycnt = src_prod_idx - spr->rx_jmb_cons_idx; in tg3_rx_prodring_xfer()
7145 cpycnt = tp->rx_jmb_ring_mask + 1 - in tg3_rx_prodring_xfer()
7146 spr->rx_jmb_cons_idx; in tg3_rx_prodring_xfer()
7149 tp->rx_jmb_ring_mask + 1 - dpr->rx_jmb_prod_idx); in tg3_rx_prodring_xfer()
7151 si = spr->rx_jmb_cons_idx; in tg3_rx_prodring_xfer()
7152 di = dpr->rx_jmb_prod_idx; in tg3_rx_prodring_xfer()
7155 if (dpr->rx_jmb_buffers[i].data) { in tg3_rx_prodring_xfer()
7156 cpycnt = i - di; in tg3_rx_prodring_xfer()
7157 err = -ENOSPC; in tg3_rx_prodring_xfer()
7171 memcpy(&dpr->rx_jmb_buffers[di], in tg3_rx_prodring_xfer()
7172 &spr->rx_jmb_buffers[si], in tg3_rx_prodring_xfer()
7177 sbd = &spr->rx_jmb[si].std; in tg3_rx_prodring_xfer()
7178 dbd = &dpr->rx_jmb[di].std; in tg3_rx_prodring_xfer()
7179 dbd->addr_hi = sbd->addr_hi; in tg3_rx_prodring_xfer()
7180 dbd->addr_lo = sbd->addr_lo; in tg3_rx_prodring_xfer()
7183 spr->rx_jmb_cons_idx = (spr->rx_jmb_cons_idx + cpycnt) & in tg3_rx_prodring_xfer()
7184 tp->rx_jmb_ring_mask; in tg3_rx_prodring_xfer()
7185 dpr->rx_jmb_prod_idx = (dpr->rx_jmb_prod_idx + cpycnt) & in tg3_rx_prodring_xfer()
7186 tp->rx_jmb_ring_mask; in tg3_rx_prodring_xfer()
7194 struct tg3 *tp = tnapi->tp; in tg3_poll_work()
7197 if (tnapi->hw_status->idx[0].tx_consumer != tnapi->tx_cons) { in tg3_poll_work()
7203 if (!tnapi->rx_rcb_prod_idx) in tg3_poll_work()
7208 * code synchronizes with tg3->napi.poll() in tg3_poll_work()
7210 if (*(tnapi->rx_rcb_prod_idx) != tnapi->rx_rcb_ptr) in tg3_poll_work()
7211 work_done += tg3_rx(tnapi, budget - work_done); in tg3_poll_work()
7213 if (tg3_flag(tp, ENABLE_RSS) && tnapi == &tp->napi[1]) { in tg3_poll_work()
7214 struct tg3_rx_prodring_set *dpr = &tp->napi[0].prodring; in tg3_poll_work()
7216 u32 std_prod_idx = dpr->rx_std_prod_idx; in tg3_poll_work()
7217 u32 jmb_prod_idx = dpr->rx_jmb_prod_idx; in tg3_poll_work()
7219 tp->rx_refill = false; in tg3_poll_work()
7220 for (i = 1; i <= tp->rxq_cnt; i++) in tg3_poll_work()
7222 &tp->napi[i].prodring); in tg3_poll_work()
7226 if (std_prod_idx != dpr->rx_std_prod_idx) in tg3_poll_work()
7228 dpr->rx_std_prod_idx); in tg3_poll_work()
7230 if (jmb_prod_idx != dpr->rx_jmb_prod_idx) in tg3_poll_work()
7232 dpr->rx_jmb_prod_idx); in tg3_poll_work()
7235 tw32_f(HOSTCC_MODE, tp->coal_now); in tg3_poll_work()
7243 if (!test_and_set_bit(TG3_FLAG_RESET_TASK_PENDING, tp->tg3_flags)) in tg3_reset_task_schedule()
7244 schedule_work(&tp->reset_task); in tg3_reset_task_schedule()
7249 if (test_and_clear_bit(TG3_FLAG_RESET_TASK_PENDING, tp->tg3_flags)) in tg3_reset_task_cancel()
7250 cancel_work_sync(&tp->reset_task); in tg3_reset_task_cancel()
7257 struct tg3 *tp = tnapi->tp; in tg3_poll_msix()
7259 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_poll_msix()
7270 /* tp->last_tag is used in tg3_int_reenable() below in tg3_poll_msix()
7274 tnapi->last_tag = sblk->status_tag; in tg3_poll_msix()
7275 tnapi->last_irq_tag = tnapi->last_tag; in tg3_poll_msix()
7279 if (likely(sblk->idx[0].tx_consumer == tnapi->tx_cons && in tg3_poll_msix()
7280 *(tnapi->rx_rcb_prod_idx) == tnapi->rx_rcb_ptr)) { in tg3_poll_msix()
7285 if (tnapi == &tp->napi[1] && tp->rx_refill) in tg3_poll_msix()
7290 tw32_mailbox(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_poll_msix()
7295 if (unlikely(tnapi == &tp->napi[1] && tp->rx_refill)) { in tg3_poll_msix()
7296 tw32(HOSTCC_MODE, tp->coalesce_mode | in tg3_poll_msix()
7298 tnapi->coal_now); in tg3_poll_msix()
7325 netdev_err(tp->dev, "FLOW Attention error. Resetting chip.\n"); in tg3_process_error()
7330 netdev_err(tp->dev, "MSI Status error. Resetting chip.\n"); in tg3_process_error()
7335 netdev_err(tp->dev, "DMA Status error. Resetting chip.\n"); in tg3_process_error()
7351 struct tg3 *tp = tnapi->tp; in tg3_poll()
7353 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_poll()
7356 if (sblk->status & SD_STATUS_ERROR) in tg3_poll()
7370 /* tp->last_tag is used in tg3_int_reenable() below in tg3_poll()
7374 tnapi->last_tag = sblk->status_tag; in tg3_poll()
7375 tnapi->last_irq_tag = tnapi->last_tag; in tg3_poll()
7378 sblk->status &= ~SD_STATUS_UPDATED; in tg3_poll()
7401 for (i = tp->irq_cnt - 1; i >= 0; i--) in tg3_napi_disable()
7402 napi_disable(&tp->napi[i].napi); in tg3_napi_disable()
7409 for (i = 0; i < tp->irq_cnt; i++) in tg3_napi_enable()
7410 napi_enable(&tp->napi[i].napi); in tg3_napi_enable()
7417 netif_napi_add(tp->dev, &tp->napi[0].napi, tg3_poll); in tg3_napi_init()
7418 for (i = 1; i < tp->irq_cnt; i++) in tg3_napi_init()
7419 netif_napi_add(tp->dev, &tp->napi[i].napi, tg3_poll_msix); in tg3_napi_init()
7426 for (i = 0; i < tp->irq_cnt; i++) in tg3_napi_fini()
7427 netif_napi_del(&tp->napi[i].napi); in tg3_napi_fini()
7432 netif_trans_update(tp->dev); /* prevent tx timeout */ in tg3_netif_stop()
7434 netif_carrier_off(tp->dev); in tg3_netif_stop()
7435 netif_tx_disable(tp->dev); in tg3_netif_stop()
7438 /* tp->lock must be held */
7447 netif_tx_wake_all_queues(tp->dev); in tg3_netif_start()
7449 if (tp->link_up) in tg3_netif_start()
7450 netif_carrier_on(tp->dev); in tg3_netif_start()
7453 tp->napi[0].hw_status->status |= SD_STATUS_UPDATED; in tg3_netif_start()
7458 __releases(tp->lock) in tg3_irq_quiesce()
7459 __acquires(tp->lock) in tg3_irq_quiesce()
7463 BUG_ON(tp->irq_sync); in tg3_irq_quiesce()
7465 tp->irq_sync = 1; in tg3_irq_quiesce()
7468 spin_unlock_bh(&tp->lock); in tg3_irq_quiesce()
7470 for (i = 0; i < tp->irq_cnt; i++) in tg3_irq_quiesce()
7471 synchronize_irq(tp->napi[i].irq_vec); in tg3_irq_quiesce()
7473 spin_lock_bh(&tp->lock); in tg3_irq_quiesce()
7477 * If irq_sync is non-zero, then the IRQ handler must be synchronized
7483 spin_lock_bh(&tp->lock); in tg3_full_lock()
7490 spin_unlock_bh(&tp->lock); in tg3_full_unlock()
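tg3_irq_quiesce() above, together with the full-lock helpers, is the usual fence-off pattern: raise irq_sync so the handlers stop doing work, drop the lock so a handler already running can finish, synchronize_irq() on every vector, then retake the lock. A generic sketch of that pattern under invented names:

#include <linux/interrupt.h>
#include <linux/spinlock.h>

struct quiesce_model {
	spinlock_t lock;
	int irq_sync;		/* handlers bail out while this is non-zero */
	int nvecs;
	unsigned int *irqs;	/* one Linux IRQ number per vector */
};

/* Call with q->lock held; the lock is released while waiting so that any
 * handler already past its irq_sync check can run to completion.
 */
static void quiesce_irqs(struct quiesce_model *q)
{
	int i;

	q->irq_sync = 1;

	spin_unlock_bh(&q->lock);
	for (i = 0; i < q->nvecs; i++)
		synchronize_irq(q->irqs[i]);
	spin_lock_bh(&q->lock);
}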
7493 /* One-shot MSI handler - Chip automatically disables interrupt
7499 struct tg3 *tp = tnapi->tp; in tg3_msi_1shot()
7501 prefetch(tnapi->hw_status); in tg3_msi_1shot()
7502 if (tnapi->rx_rcb) in tg3_msi_1shot()
7503 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_msi_1shot()
7506 napi_schedule(&tnapi->napi); in tg3_msi_1shot()
7511 /* MSI ISR - No need to check for interrupt sharing and no need to
7512 * flush status block and interrupt mailbox. PCI ordering rules
7518 struct tg3 *tp = tnapi->tp; in tg3_msi()
7520 prefetch(tnapi->hw_status); in tg3_msi()
7521 if (tnapi->rx_rcb) in tg3_msi()
7522 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_msi()
7524 * Writing any value to intr-mbox-0 clears PCI INTA# and in tg3_msi()
7525 * chip-internal interrupt pending events. in tg3_msi()
7526 * Writing non-zero to intr-mbox-0 additionally tells the in tg3_msi()
7527 * NIC to stop sending us irqs, engaging "in-intr-handler" in tg3_msi()
7530 tw32_mailbox(tnapi->int_mbox, 0x00000001); in tg3_msi()
7532 napi_schedule(&tnapi->napi); in tg3_msi()
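tg3_msi() above shows the usual deferral shape: one mailbox write acks the interrupt and asks the chip to hold further ones, and all real work happens later in the NAPI poll routine. A hedged sketch of a handler with that shape, with the device-specific mailbox write abstracted behind a callback:

#include <linux/interrupt.h>
#include <linux/netdevice.h>

struct vec_model {
	struct napi_struct napi;
	void (*ack_and_mask)(struct vec_model *v);	/* placeholder for the mailbox write */
};

static irqreturn_t msi_handler(int irq, void *dev_id)
{
	struct vec_model *v = dev_id;

	v->ack_and_mask(v);		/* stop further interrupts until NAPI re-arms them */
	napi_schedule(&v->napi);	/* do RX/TX completion in softirq context */
	return IRQ_HANDLED;
}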
7540 struct tg3 *tp = tnapi->tp; in tg3_interrupt()
7541 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_interrupt()
7549 if (unlikely(!(sblk->status & SD_STATUS_UPDATED))) { in tg3_interrupt()
7558 * Writing any value to intr-mbox-0 clears PCI INTA# and in tg3_interrupt()
7559 * chip-internal interrupt pending events. in tg3_interrupt()
7560 * Writing non-zero to intr-mbox-0 additionally tells the in tg3_interrupt()
7561 * NIC to stop sending us irqs, engaging "in-intr-handler" in tg3_interrupt()
7564 * Flush the mailbox to de-assert the IRQ immediately to prevent in tg3_interrupt()
7571 sblk->status &= ~SD_STATUS_UPDATED; in tg3_interrupt()
7573 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_interrupt()
7574 napi_schedule(&tnapi->napi); in tg3_interrupt()
7576 /* No work, shared interrupt perhaps? re-enable in tg3_interrupt()
7589 struct tg3 *tp = tnapi->tp; in tg3_interrupt_tagged()
7590 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_interrupt_tagged()
7598 if (unlikely(sblk->status_tag == tnapi->last_irq_tag)) { in tg3_interrupt_tagged()
7607 * writing any value to intr-mbox-0 clears PCI INTA# and in tg3_interrupt_tagged()
7608 * chip-internal interrupt pending events. in tg3_interrupt_tagged()
7609 * writing non-zero to intr-mbox-0 additionally tells the in tg3_interrupt_tagged()
7610 * NIC to stop sending us irqs, engaging "in-intr-handler" in tg3_interrupt_tagged()
7613 * Flush the mailbox to de-assert the IRQ immediately to prevent in tg3_interrupt_tagged()
7625 tnapi->last_irq_tag = sblk->status_tag; in tg3_interrupt_tagged()
7630 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_interrupt_tagged()
7632 napi_schedule(&tnapi->napi); in tg3_interrupt_tagged()
7642 struct tg3 *tp = tnapi->tp; in tg3_test_isr()
7643 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_test_isr()
7645 if ((sblk->status & SD_STATUS_UPDATED) || in tg3_test_isr()
7662 for (i = 0; i < tp->irq_cnt; i++) in tg3_poll_controller()
7663 tg3_interrupt(tp->napi[i].irq_vec, &tp->napi[i]); in tg3_poll_controller()
7701 /* Test for DMA addresses > 40-bit */
7718 txbd->addr_hi = ((u64) mapping >> 32); in tg3_tx_set_bd()
7719 txbd->addr_lo = ((u64) mapping & 0xffffffff); in tg3_tx_set_bd()
7720 txbd->len_flags = (len << TXD_LEN_SHIFT) | (flags & 0x0000ffff); in tg3_tx_set_bd()
7721 txbd->vlan_tag = (mss << TXD_MSS_SHIFT) | (vlan << TXD_VLAN_TAG_SHIFT); in tg3_tx_set_bd()
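tg3_tx_set_bd() splits the 64-bit DMA address across two 32-bit descriptor words and packs length/flags and MSS/VLAN into the remaining fields. The stand-alone illustration below uses a generic 16/16 split; the real TXD_*_SHIFT layout is hardware-defined and not reproduced here.

#include <linux/types.h>

struct txbd_model {
	u32 addr_hi;
	u32 addr_lo;
	u32 len_flags;		/* illustrative: length in the high 16 bits, flags in the low 16 */
	u32 vlan_mss;		/* illustrative: mss in the high 16 bits, vlan in the low 16 */
};

static void fill_txbd(struct txbd_model *bd, u64 dma, u32 len, u32 flags,
		      u32 mss, u32 vlan)
{
	bd->addr_hi   = (u32)(dma >> 32);
	bd->addr_lo   = (u32)(dma & 0xffffffff);
	bd->len_flags = (len << 16) | (flags & 0xffff);
	bd->vlan_mss  = (mss << 16) | (vlan & 0xffff);
}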
7728 struct tg3 *tp = tnapi->tp; in tg3_tx_frag_set()
7743 if (tp->dma_limit) { in tg3_tx_frag_set()
7746 while (len > tp->dma_limit && *budget) { in tg3_tx_frag_set()
7747 u32 frag_len = tp->dma_limit; in tg3_tx_frag_set()
7748 len -= tp->dma_limit; in tg3_tx_frag_set()
7752 len += tp->dma_limit / 2; in tg3_tx_frag_set()
7753 frag_len = tp->dma_limit / 2; in tg3_tx_frag_set()
7756 tnapi->tx_buffers[*entry].fragmented = true; in tg3_tx_frag_set()
7758 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7760 *budget -= 1; in tg3_tx_frag_set()
7769 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7771 *budget -= 1; in tg3_tx_frag_set()
7775 tnapi->tx_buffers[prvidx].fragmented = false; in tg3_tx_frag_set()
7779 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7791 struct tg3_tx_ring_info *txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7793 skb = txb->skb; in tg3_tx_skb_unmap()
7794 txb->skb = NULL; in tg3_tx_skb_unmap()
7796 dma_unmap_single(&tnapi->tp->pdev->dev, dma_unmap_addr(txb, mapping), in tg3_tx_skb_unmap()
7799 while (txb->fragmented) { in tg3_tx_skb_unmap()
7800 txb->fragmented = false; in tg3_tx_skb_unmap()
7802 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7806 const skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in tg3_tx_skb_unmap()
7809 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7811 dma_unmap_page(&tnapi->tp->pdev->dev, in tg3_tx_skb_unmap()
7815 while (txb->fragmented) { in tg3_tx_skb_unmap()
7816 txb->fragmented = false; in tg3_tx_skb_unmap()
7818 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7823 /* Workaround 4GB and 40-bit hardware DMA bugs. */
7829 struct tg3 *tp = tnapi->tp; in tigon3_dma_hwbug_workaround()
7837 int more_headroom = 4 - ((unsigned long)skb->data & 3); in tigon3_dma_hwbug_workaround()
7845 ret = -1; in tigon3_dma_hwbug_workaround()
7848 new_addr = dma_map_single(&tp->pdev->dev, new_skb->data, in tigon3_dma_hwbug_workaround()
7849 new_skb->len, DMA_TO_DEVICE); in tigon3_dma_hwbug_workaround()
7851 if (dma_mapping_error(&tp->pdev->dev, new_addr)) { in tigon3_dma_hwbug_workaround()
7853 ret = -1; in tigon3_dma_hwbug_workaround()
7859 tnapi->tx_buffers[*entry].skb = new_skb; in tigon3_dma_hwbug_workaround()
7860 dma_unmap_addr_set(&tnapi->tx_buffers[*entry], in tigon3_dma_hwbug_workaround()
7864 new_skb->len, base_flags, in tigon3_dma_hwbug_workaround()
7866 tg3_tx_skb_unmap(tnapi, save_entry, -1); in tigon3_dma_hwbug_workaround()
7868 ret = -1; in tigon3_dma_hwbug_workaround()
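The workaround above copies the skb into a freshly allocated, better-aligned linear buffer whenever the original mapping would trip the hardware's addressing bugs. Two illustrative predicates for the kinds of conditions such workarounds guard against, a mapping that straddles a 4 GB boundary and one whose end does not fit in 40 bits, are sketched below; they are not the driver's exact tests.

#include <linux/types.h>

/* Does [dma, dma + len) cross a 4 GB boundary? */
static bool crosses_4g_boundary(u64 dma, unsigned int len)
{
	return ((dma ^ (dma + len - 1)) >> 32) != 0;
}

/* Does the end of the buffer exceed a 40-bit DMA address space? */
static bool exceeds_40bit_dma(u64 dma, unsigned int len)
{
	return (dma + len) >= ((u64)1 << 40);
}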
7880 /* Check if we will never have enough descriptors, in tg3_tso_bug_gso_check()
7883 return skb_shinfo(skb)->gso_segs < tnapi->tx_pending / 3; in tg3_tso_bug_gso_check()
7894 u32 frag_cnt_est = skb_shinfo(skb)->gso_segs * 3; in tg3_tso_bug()
7913 segs = skb_gso_segment(skb, tp->dev->features & in tg3_tso_bug()
7916 tnapi->tx_dropped++; in tg3_tso_bug()
7922 __tg3_start_xmit(seg, tp->dev); in tg3_tso_bug()
7937 int i = -1, would_hit_hwbug; in __tg3_start_xmit()
7948 tnapi = &tp->napi[skb_get_queue_mapping(skb)]; in __tg3_start_xmit()
7955 * and TX reclaim runs via tp->napi.poll inside of a software in __tg3_start_xmit()
7959 if (unlikely(budget <= (skb_shinfo(skb)->nr_frags + 1))) { in __tg3_start_xmit()
7970 entry = tnapi->tx_prod; in __tg3_start_xmit()
7973 mss = skb_shinfo(skb)->gso_size; in __tg3_start_xmit()
7983 hdr_len = skb_tcp_all_headers(skb) - ETH_HLEN; in __tg3_start_xmit()
7988 if (skb->protocol == htons(ETH_P_8021Q) || in __tg3_start_xmit()
7989 skb->protocol == htons(ETH_P_8021AD)) { in __tg3_start_xmit()
8002 ip_csum = iph->check; in __tg3_start_xmit()
8003 ip_tot_len = iph->tot_len; in __tg3_start_xmit()
8004 iph->check = 0; in __tg3_start_xmit()
8005 iph->tot_len = htons(mss + hdr_len); in __tg3_start_xmit()
8012 tcp_csum = tcph->check; in __tg3_start_xmit()
8017 tcph->check = 0; in __tg3_start_xmit()
8020 tcph->check = ~csum_tcpudp_magic(iph->saddr, iph->daddr, in __tg3_start_xmit()
8033 if (tcp_opt_len || iph->ihl > 5) { in __tg3_start_xmit()
8036 tsflags = (iph->ihl - 5) + (tcp_opt_len >> 2); in __tg3_start_xmit()
8040 if (tcp_opt_len || iph->ihl > 5) { in __tg3_start_xmit()
8043 tsflags = (iph->ihl - 5) + (tcp_opt_len >> 2); in __tg3_start_xmit()
8047 } else if (skb->ip_summed == CHECKSUM_PARTIAL) { in __tg3_start_xmit()
8051 if (skb->protocol == htons(ETH_P_8021Q) || in __tg3_start_xmit()
8052 skb->protocol == htons(ETH_P_8021AD)) { in __tg3_start_xmit()
8061 !mss && skb->len > VLAN_ETH_FRAME_LEN) in __tg3_start_xmit()
8069 if ((unlikely(skb_shinfo(skb)->tx_flags & SKBTX_HW_TSTAMP)) && in __tg3_start_xmit()
8072 if (!tp->pre_tx_ts) { in __tg3_start_xmit()
8073 skb_shinfo(skb)->tx_flags |= SKBTX_IN_PROGRESS; in __tg3_start_xmit()
8075 tg3_read_tx_tstamp(tp, &tp->pre_tx_ts); in __tg3_start_xmit()
8082 mapping = dma_map_single(&tp->pdev->dev, skb->data, len, in __tg3_start_xmit()
8084 if (dma_mapping_error(&tp->pdev->dev, mapping)) in __tg3_start_xmit()
8088 tnapi->tx_buffers[entry].skb = skb; in __tg3_start_xmit()
8089 dma_unmap_addr_set(&tnapi->tx_buffers[entry], mapping, mapping); in __tg3_start_xmit()
8097 ((skb_shinfo(skb)->nr_frags == 0) ? TXD_FLAG_END : 0), in __tg3_start_xmit()
8100 } else if (skb_shinfo(skb)->nr_frags > 0) { in __tg3_start_xmit()
8111 last = skb_shinfo(skb)->nr_frags - 1; in __tg3_start_xmit()
8113 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in __tg3_start_xmit()
8116 mapping = skb_frag_dma_map(&tp->pdev->dev, frag, 0, in __tg3_start_xmit()
8119 tnapi->tx_buffers[entry].skb = NULL; in __tg3_start_xmit()
8120 dma_unmap_addr_set(&tnapi->tx_buffers[entry], mapping, in __tg3_start_xmit()
8122 if (dma_mapping_error(&tp->pdev->dev, mapping)) in __tg3_start_xmit()
8137 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod, i); in __tg3_start_xmit()
8144 iph->check = ip_csum; in __tg3_start_xmit()
8145 iph->tot_len = ip_tot_len; in __tg3_start_xmit()
8147 tcph->check = tcp_csum; in __tg3_start_xmit()
8154 entry = tnapi->tx_prod; in __tg3_start_xmit()
8162 netdev_tx_sent_queue(txq, skb->len); in __tg3_start_xmit()
8167 tnapi->tx_prod = entry; in __tg3_start_xmit()
8184 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod, --i); in __tg3_start_xmit()
8185 tnapi->tx_buffers[tnapi->tx_prod].skb = NULL; in __tg3_start_xmit()
8189 tnapi->tx_dropped++; in __tg3_start_xmit()
8218 tnapi = &tp->napi[skb_queue_mapping]; in tg3_start_xmit()
8223 tw32_tx_mbox(tnapi->prodmbox, tnapi->tx_prod); in tg3_start_xmit()
8232 tp->mac_mode &= ~(MAC_MODE_HALF_DUPLEX | in tg3_mac_loopback()
8235 tp->mac_mode |= MAC_MODE_PORT_INT_LPBACK; in tg3_mac_loopback()
8238 tp->mac_mode |= MAC_MODE_LINK_POLARITY; in tg3_mac_loopback()
8240 if (tp->phy_flags & TG3_PHYFLG_10_100_ONLY) in tg3_mac_loopback()
8241 tp->mac_mode |= MAC_MODE_PORT_MODE_MII; in tg3_mac_loopback()
8243 tp->mac_mode |= MAC_MODE_PORT_MODE_GMII; in tg3_mac_loopback()
8245 tp->mac_mode &= ~MAC_MODE_PORT_INT_LPBACK; in tg3_mac_loopback()
8248 (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) || in tg3_mac_loopback()
8250 tp->mac_mode &= ~MAC_MODE_LINK_POLARITY; in tg3_mac_loopback()
8253 tw32(MAC_MODE, tp->mac_mode); in tg3_mac_loopback()
8265 return -EIO; in tg3_phy_lpbk_set()
8276 if (tp->phy_flags & TG3_PHYFLG_IS_FET) { in tg3_phy_lpbk_set()
8286 if (!(tp->phy_flags & TG3_PHYFLG_IS_FET)) { in tg3_phy_lpbk_set()
8302 if (tp->phy_flags & TG3_PHYFLG_IS_FET) in tg3_phy_lpbk_set()
8307 if ((tp->phy_flags & TG3_PHYFLG_IS_FET) && in tg3_phy_lpbk_set()
8318 if ((tp->phy_flags & TG3_PHYFLG_MII_SERDES) && in tg3_phy_lpbk_set()
8322 tw32_f(MAC_RX_MODE, tp->rx_mode); in tg3_phy_lpbk_set()
8325 mac_mode = tp->mac_mode & in tg3_phy_lpbk_set()
8333 u32 masked_phy_id = tp->phy_id & TG3_PHY_ID_MASK; in tg3_phy_lpbk_set()
8355 if (tp->mac_mode & MAC_MODE_PORT_INT_LPBACK) in tg3_set_loopback()
8358 spin_lock_bh(&tp->lock); in tg3_set_loopback()
8360 netif_carrier_on(tp->dev); in tg3_set_loopback()
8361 spin_unlock_bh(&tp->lock); in tg3_set_loopback()
8364 if (!(tp->mac_mode & MAC_MODE_PORT_INT_LPBACK)) in tg3_set_loopback()
8367 spin_lock_bh(&tp->lock); in tg3_set_loopback()
8371 spin_unlock_bh(&tp->lock); in tg3_set_loopback()
8381 if (dev->mtu > ETH_DATA_LEN && tg3_flag(tp, 5780_CLASS)) in tg3_fix_features()
8389 netdev_features_t changed = dev->features ^ features; in tg3_set_features()
8402 if (tpr != &tp->napi[0].prodring) { in tg3_rx_prodring_free()
8403 for (i = tpr->rx_std_cons_idx; i != tpr->rx_std_prod_idx; in tg3_rx_prodring_free()
8404 i = (i + 1) & tp->rx_std_ring_mask) in tg3_rx_prodring_free()
8405 tg3_rx_data_free(tp, &tpr->rx_std_buffers[i], in tg3_rx_prodring_free()
8406 tp->rx_pkt_map_sz); in tg3_rx_prodring_free()
8409 for (i = tpr->rx_jmb_cons_idx; in tg3_rx_prodring_free()
8410 i != tpr->rx_jmb_prod_idx; in tg3_rx_prodring_free()
8411 i = (i + 1) & tp->rx_jmb_ring_mask) { in tg3_rx_prodring_free()
8412 tg3_rx_data_free(tp, &tpr->rx_jmb_buffers[i], in tg3_rx_prodring_free()
8420 for (i = 0; i <= tp->rx_std_ring_mask; i++) in tg3_rx_prodring_free()
8421 tg3_rx_data_free(tp, &tpr->rx_std_buffers[i], in tg3_rx_prodring_free()
8422 tp->rx_pkt_map_sz); in tg3_rx_prodring_free()
8425 for (i = 0; i <= tp->rx_jmb_ring_mask; i++) in tg3_rx_prodring_free()
8426 tg3_rx_data_free(tp, &tpr->rx_jmb_buffers[i], in tg3_rx_prodring_free()
8435 * end up in the driver. tp->{tx,}lock are held and thus
8443 tpr->rx_std_cons_idx = 0; in tg3_rx_prodring_alloc()
8444 tpr->rx_std_prod_idx = 0; in tg3_rx_prodring_alloc()
8445 tpr->rx_jmb_cons_idx = 0; in tg3_rx_prodring_alloc()
8446 tpr->rx_jmb_prod_idx = 0; in tg3_rx_prodring_alloc()
8448 if (tpr != &tp->napi[0].prodring) { in tg3_rx_prodring_alloc()
8449 memset(&tpr->rx_std_buffers[0], 0, in tg3_rx_prodring_alloc()
8451 if (tpr->rx_jmb_buffers) in tg3_rx_prodring_alloc()
8452 memset(&tpr->rx_jmb_buffers[0], 0, in tg3_rx_prodring_alloc()
8458 memset(tpr->rx_std, 0, TG3_RX_STD_RING_BYTES(tp)); in tg3_rx_prodring_alloc()
8462 tp->dev->mtu > ETH_DATA_LEN) in tg3_rx_prodring_alloc()
8464 tp->rx_pkt_map_sz = TG3_RX_DMA_TO_MAP_SZ(rx_pkt_dma_sz); in tg3_rx_prodring_alloc()
8470 for (i = 0; i <= tp->rx_std_ring_mask; i++) { in tg3_rx_prodring_alloc()
8473 rxd = &tpr->rx_std[i]; in tg3_rx_prodring_alloc()
8474 rxd->idx_len = rx_pkt_dma_sz << RXD_LEN_SHIFT; in tg3_rx_prodring_alloc()
8475 rxd->type_flags = (RXD_FLAG_END << RXD_FLAGS_SHIFT); in tg3_rx_prodring_alloc()
8476 rxd->opaque = (RXD_OPAQUE_RING_STD | in tg3_rx_prodring_alloc()
8481 for (i = 0; i < tp->rx_pending; i++) { in tg3_rx_prodring_alloc()
8486 netdev_warn(tp->dev, in tg3_rx_prodring_alloc()
8489 "successfully\n", i, tp->rx_pending); in tg3_rx_prodring_alloc()
8492 tp->rx_pending = i; in tg3_rx_prodring_alloc()
8500 memset(tpr->rx_jmb, 0, TG3_RX_JMB_RING_BYTES(tp)); in tg3_rx_prodring_alloc()
8505 for (i = 0; i <= tp->rx_jmb_ring_mask; i++) { in tg3_rx_prodring_alloc()
8508 rxd = &tpr->rx_jmb[i].std; in tg3_rx_prodring_alloc()
8509 rxd->idx_len = TG3_RX_JMB_DMA_SZ << RXD_LEN_SHIFT; in tg3_rx_prodring_alloc()
8510 rxd->type_flags = (RXD_FLAG_END << RXD_FLAGS_SHIFT) | in tg3_rx_prodring_alloc()
8512 rxd->opaque = (RXD_OPAQUE_RING_JUMBO | in tg3_rx_prodring_alloc()
8516 for (i = 0; i < tp->rx_jumbo_pending; i++) { in tg3_rx_prodring_alloc()
8521 netdev_warn(tp->dev, in tg3_rx_prodring_alloc()
8524 "successfully\n", i, tp->rx_jumbo_pending); in tg3_rx_prodring_alloc()
8527 tp->rx_jumbo_pending = i; in tg3_rx_prodring_alloc()
8537 return -ENOMEM; in tg3_rx_prodring_alloc()
8543 kfree(tpr->rx_std_buffers); in tg3_rx_prodring_fini()
8544 tpr->rx_std_buffers = NULL; in tg3_rx_prodring_fini()
8545 kfree(tpr->rx_jmb_buffers); in tg3_rx_prodring_fini()
8546 tpr->rx_jmb_buffers = NULL; in tg3_rx_prodring_fini()
8547 if (tpr->rx_std) { in tg3_rx_prodring_fini()
8548 dma_free_coherent(&tp->pdev->dev, TG3_RX_STD_RING_BYTES(tp), in tg3_rx_prodring_fini()
8549 tpr->rx_std, tpr->rx_std_mapping); in tg3_rx_prodring_fini()
8550 tpr->rx_std = NULL; in tg3_rx_prodring_fini()
8552 if (tpr->rx_jmb) { in tg3_rx_prodring_fini()
8553 dma_free_coherent(&tp->pdev->dev, TG3_RX_JMB_RING_BYTES(tp), in tg3_rx_prodring_fini()
8554 tpr->rx_jmb, tpr->rx_jmb_mapping); in tg3_rx_prodring_fini()
8555 tpr->rx_jmb = NULL; in tg3_rx_prodring_fini()
8562 tpr->rx_std_buffers = kzalloc(TG3_RX_STD_BUFF_RING_SIZE(tp), in tg3_rx_prodring_init()
8564 if (!tpr->rx_std_buffers) in tg3_rx_prodring_init()
8565 return -ENOMEM; in tg3_rx_prodring_init()
8567 tpr->rx_std = dma_alloc_coherent(&tp->pdev->dev, in tg3_rx_prodring_init()
8569 &tpr->rx_std_mapping, in tg3_rx_prodring_init()
8571 if (!tpr->rx_std) in tg3_rx_prodring_init()
8575 tpr->rx_jmb_buffers = kzalloc(TG3_RX_JMB_BUFF_RING_SIZE(tp), in tg3_rx_prodring_init()
8577 if (!tpr->rx_jmb_buffers) in tg3_rx_prodring_init()
8580 tpr->rx_jmb = dma_alloc_coherent(&tp->pdev->dev, in tg3_rx_prodring_init()
8582 &tpr->rx_jmb_mapping, in tg3_rx_prodring_init()
8584 if (!tpr->rx_jmb) in tg3_rx_prodring_init()
8592 return -ENOMEM; in tg3_rx_prodring_init()
8599 * end up in the driver. tp->{tx,}lock is not held and we are not
8606 for (j = 0; j < tp->irq_cnt; j++) { in tg3_free_rings()
8607 struct tg3_napi *tnapi = &tp->napi[j]; in tg3_free_rings()
8609 tg3_rx_prodring_free(tp, &tnapi->prodring); in tg3_free_rings()
8611 if (!tnapi->tx_buffers) in tg3_free_rings()
8615 struct sk_buff *skb = tnapi->tx_buffers[i].skb; in tg3_free_rings()
8621 skb_shinfo(skb)->nr_frags - 1); in tg3_free_rings()
8625 netdev_tx_reset_queue(netdev_get_tx_queue(tp->dev, j)); in tg3_free_rings()
8633 * end up in the driver. tp->{tx,}lock are held and thus
8643 for (i = 0; i < tp->irq_cnt; i++) { in tg3_init_rings()
8644 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_init_rings()
8646 tnapi->last_tag = 0; in tg3_init_rings()
8647 tnapi->last_irq_tag = 0; in tg3_init_rings()
8648 tnapi->hw_status->status = 0; in tg3_init_rings()
8649 tnapi->hw_status->status_tag = 0; in tg3_init_rings()
8650 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_init_rings()
8652 tnapi->tx_prod = 0; in tg3_init_rings()
8653 tnapi->tx_cons = 0; in tg3_init_rings()
8654 if (tnapi->tx_ring) in tg3_init_rings()
8655 memset(tnapi->tx_ring, 0, TG3_TX_RING_BYTES); in tg3_init_rings()
8657 tnapi->rx_rcb_ptr = 0; in tg3_init_rings()
8658 if (tnapi->rx_rcb) in tg3_init_rings()
8659 memset(tnapi->rx_rcb, 0, TG3_RX_RCB_RING_BYTES(tp)); in tg3_init_rings()
8661 if (tnapi->prodring.rx_std && in tg3_init_rings()
8662 tg3_rx_prodring_alloc(tp, &tnapi->prodring)) { in tg3_init_rings()
8664 return -ENOMEM; in tg3_init_rings()
8675 for (i = 0; i < tp->irq_max; i++) { in tg3_mem_tx_release()
8676 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_tx_release()
8678 if (tnapi->tx_ring) { in tg3_mem_tx_release()
8679 dma_free_coherent(&tp->pdev->dev, TG3_TX_RING_BYTES, in tg3_mem_tx_release()
8680 tnapi->tx_ring, tnapi->tx_desc_mapping); in tg3_mem_tx_release()
8681 tnapi->tx_ring = NULL; in tg3_mem_tx_release()
8684 kfree(tnapi->tx_buffers); in tg3_mem_tx_release()
8685 tnapi->tx_buffers = NULL; in tg3_mem_tx_release()
8692 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_mem_tx_acquire()
8700 for (i = 0; i < tp->txq_cnt; i++, tnapi++) { in tg3_mem_tx_acquire()
8701 tnapi->tx_buffers = kcalloc(TG3_TX_RING_SIZE, in tg3_mem_tx_acquire()
8704 if (!tnapi->tx_buffers) in tg3_mem_tx_acquire()
8707 tnapi->tx_ring = dma_alloc_coherent(&tp->pdev->dev, in tg3_mem_tx_acquire()
8709 &tnapi->tx_desc_mapping, in tg3_mem_tx_acquire()
8711 if (!tnapi->tx_ring) in tg3_mem_tx_acquire()
8719 return -ENOMEM; in tg3_mem_tx_acquire()
8726 for (i = 0; i < tp->irq_max; i++) { in tg3_mem_rx_release()
8727 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_rx_release()
8729 tg3_rx_prodring_fini(tp, &tnapi->prodring); in tg3_mem_rx_release()
8731 if (!tnapi->rx_rcb) in tg3_mem_rx_release()
8734 dma_free_coherent(&tp->pdev->dev, in tg3_mem_rx_release()
8736 tnapi->rx_rcb, in tg3_mem_rx_release()
8737 tnapi->rx_rcb_mapping); in tg3_mem_rx_release()
8738 tnapi->rx_rcb = NULL; in tg3_mem_rx_release()
8746 limit = tp->rxq_cnt; in tg3_mem_rx_acquire()
8755 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_rx_acquire()
8757 if (tg3_rx_prodring_init(tp, &tnapi->prodring)) in tg3_mem_rx_acquire()
8767 tnapi->rx_rcb = dma_alloc_coherent(&tp->pdev->dev, in tg3_mem_rx_acquire()
8769 &tnapi->rx_rcb_mapping, in tg3_mem_rx_acquire()
8771 if (!tnapi->rx_rcb) in tg3_mem_rx_acquire()
8779 return -ENOMEM; in tg3_mem_rx_acquire()
8790 for (i = 0; i < tp->irq_cnt; i++) { in tg3_free_consistent()
8791 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_free_consistent()
8793 if (tnapi->hw_status) { in tg3_free_consistent()
8794 dma_free_coherent(&tp->pdev->dev, TG3_HW_STATUS_SIZE, in tg3_free_consistent()
8795 tnapi->hw_status, in tg3_free_consistent()
8796 tnapi->status_mapping); in tg3_free_consistent()
8797 tnapi->hw_status = NULL; in tg3_free_consistent()
8804 /* tp->hw_stats can be referenced safely: in tg3_free_consistent()
8806 * 2. or under tp->lock if TG3_FLAG_INIT_COMPLETE is set. in tg3_free_consistent()
8808 if (tp->hw_stats) { in tg3_free_consistent()
8809 dma_free_coherent(&tp->pdev->dev, sizeof(struct tg3_hw_stats), in tg3_free_consistent()
8810 tp->hw_stats, tp->stats_mapping); in tg3_free_consistent()
8811 tp->hw_stats = NULL; in tg3_free_consistent()
8823 tp->hw_stats = dma_alloc_coherent(&tp->pdev->dev, in tg3_alloc_consistent()
8825 &tp->stats_mapping, GFP_KERNEL); in tg3_alloc_consistent()
8826 if (!tp->hw_stats) in tg3_alloc_consistent()
8829 for (i = 0; i < tp->irq_cnt; i++) { in tg3_alloc_consistent()
8830 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_alloc_consistent()
8833 tnapi->hw_status = dma_alloc_coherent(&tp->pdev->dev, in tg3_alloc_consistent()
8835 &tnapi->status_mapping, in tg3_alloc_consistent()
8837 if (!tnapi->hw_status) in tg3_alloc_consistent()
8840 sblk = tnapi->hw_status; in tg3_alloc_consistent()
8853 prodptr = &sblk->idx[0].rx_producer; in tg3_alloc_consistent()
8856 prodptr = &sblk->rx_jumbo_consumer; in tg3_alloc_consistent()
8859 prodptr = &sblk->reserved; in tg3_alloc_consistent()
8862 prodptr = &sblk->rx_mini_consumer; in tg3_alloc_consistent()
8865 tnapi->rx_rcb_prod_idx = prodptr; in tg3_alloc_consistent()
8867 tnapi->rx_rcb_prod_idx = &sblk->idx[0].rx_producer; in tg3_alloc_consistent()
8878 return -ENOMEM; in tg3_alloc_consistent()
8884 * clears. tp->lock is held.
8913 if (pci_channel_offline(tp->pdev)) { in tg3_stop_block()
8914 dev_err(&tp->pdev->dev, in tg3_stop_block()
8918 return -ENODEV; in tg3_stop_block()
8928 dev_err(&tp->pdev->dev, in tg3_stop_block()
8931 return -ENODEV; in tg3_stop_block()
8937 /* tp->lock is held. */
8944 if (pci_channel_offline(tp->pdev)) { in tg3_abort_hw()
8945 tp->rx_mode &= ~(RX_MODE_ENABLE | TX_MODE_ENABLE); in tg3_abort_hw()
8946 tp->mac_mode &= ~MAC_MODE_TDE_ENABLE; in tg3_abort_hw()
8947 err = -ENODEV; in tg3_abort_hw()
8951 tp->rx_mode &= ~RX_MODE_ENABLE; in tg3_abort_hw()
8952 tw32_f(MAC_RX_MODE, tp->rx_mode); in tg3_abort_hw()
8970 tp->mac_mode &= ~MAC_MODE_TDE_ENABLE; in tg3_abort_hw()
8971 tw32_f(MAC_MODE, tp->mac_mode); in tg3_abort_hw()
8974 tp->tx_mode &= ~TX_MODE_ENABLE; in tg3_abort_hw()
8975 tw32_f(MAC_TX_MODE, tp->tx_mode); in tg3_abort_hw()
8983 dev_err(&tp->pdev->dev, in tg3_abort_hw()
8986 err |= -ENODEV; in tg3_abort_hw()
9000 for (i = 0; i < tp->irq_cnt; i++) { in tg3_abort_hw()
9001 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_abort_hw()
9002 if (tnapi->hw_status) in tg3_abort_hw()
9003 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_abort_hw()
9012 pci_read_config_word(tp->pdev, PCI_COMMAND, &tp->pci_cmd); in tg3_save_pci_state()
9020 /* Re-enable indirect register accesses. */ in tg3_restore_pci_state()
9021 pci_write_config_dword(tp->pdev, TG3PCI_MISC_HOST_CTRL, in tg3_restore_pci_state()
9022 tp->misc_host_ctrl); in tg3_restore_pci_state()
9034 pci_write_config_dword(tp->pdev, TG3PCI_PCISTATE, val); in tg3_restore_pci_state()
9036 pci_write_config_word(tp->pdev, PCI_COMMAND, tp->pci_cmd); in tg3_restore_pci_state()
9039 pci_write_config_byte(tp->pdev, PCI_CACHE_LINE_SIZE, in tg3_restore_pci_state()
9040 tp->pci_cacheline_sz); in tg3_restore_pci_state()
9041 pci_write_config_byte(tp->pdev, PCI_LATENCY_TIMER, in tg3_restore_pci_state()
9042 tp->pci_lat_timer); in tg3_restore_pci_state()
9045 /* Make sure PCI-X relaxed ordering bit is clear. */ in tg3_restore_pci_state()
9049 pci_read_config_word(tp->pdev, tp->pcix_cap + PCI_X_CMD, in tg3_restore_pci_state()
9052 pci_write_config_word(tp->pdev, tp->pcix_cap + PCI_X_CMD, in tg3_restore_pci_state()
9064 pci_read_config_word(tp->pdev, in tg3_restore_pci_state()
9065 tp->msi_cap + PCI_MSI_FLAGS, in tg3_restore_pci_state()
9067 pci_write_config_word(tp->pdev, in tg3_restore_pci_state()
9068 tp->msi_cap + PCI_MSI_FLAGS, in tg3_restore_pci_state()
9119 /* tp->lock is held. */
9121 __releases(tp->lock) in tg3_chip_reset()
9122 __acquires(tp->lock) in tg3_chip_reset()
9128 if (!pci_device_is_present(tp->pdev)) in tg3_chip_reset()
9129 return -ENODEV; in tg3_chip_reset()
9138 tp->nvram_lock_cnt = 0; in tg3_chip_reset()
9156 write_op = tp->write32; in tg3_chip_reset()
9158 tp->write32 = tg3_write32; in tg3_chip_reset()
9167 for (i = 0; i < tp->irq_cnt; i++) { in tg3_chip_reset()
9168 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_chip_reset()
9169 if (tnapi->hw_status) { in tg3_chip_reset()
9170 tnapi->hw_status->status = 0; in tg3_chip_reset()
9171 tnapi->hw_status->status_tag = 0; in tg3_chip_reset()
9173 tnapi->last_tag = 0; in tg3_chip_reset()
9174 tnapi->last_irq_tag = 0; in tg3_chip_reset()
9180 for (i = 0; i < tp->irq_cnt; i++) in tg3_chip_reset()
9181 synchronize_irq(tp->napi[i].irq_vec); in tg3_chip_reset()
9227 tp->write32 = write_op; in tg3_chip_reset()
9250 pci_read_config_dword(tp->pdev, PCI_COMMAND, &val); in tg3_chip_reset()
9254 if (tg3_flag(tp, PCI_EXPRESS) && pci_is_pcie(tp->pdev)) { in tg3_chip_reset()
9265 pci_read_config_dword(tp->pdev, 0xc4, &cfg_val); in tg3_chip_reset()
9266 pci_write_config_dword(tp->pdev, 0xc4, in tg3_chip_reset()
9278 pcie_capability_clear_word(tp->pdev, PCI_EXP_DEVCTL, val16); in tg3_chip_reset()
9281 pcie_capability_write_word(tp->pdev, PCI_EXP_DEVSTA, in tg3_chip_reset()
9317 tw32(GRC_MODE, tp->grc_mode); in tg3_chip_reset()
9325 if ((tp->nic_sram_data_cfg & NIC_SRAM_DATA_CFG_MINI_PCI) != 0 && in tg3_chip_reset()
9327 tp->pci_clock_ctrl |= CLOCK_CTRL_CLKRUN_OENABLE; in tg3_chip_reset()
9329 tp->pci_clock_ctrl |= CLOCK_CTRL_FORCE_CLKRUN; in tg3_chip_reset()
9330 tw32(TG3PCI_CLOCK_CTRL, tp->pci_clock_ctrl); in tg3_chip_reset()
9333 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) { in tg3_chip_reset()
9334 tp->mac_mode = MAC_MODE_PORT_MODE_TBI; in tg3_chip_reset()
9335 val = tp->mac_mode; in tg3_chip_reset()
9336 } else if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) { in tg3_chip_reset()
9337 tp->mac_mode = MAC_MODE_PORT_MODE_GMII; in tg3_chip_reset()
9338 val = tp->mac_mode; in tg3_chip_reset()
9371 tp->phy_flags &= ~(TG3_PHYFLG_1G_ON_VAUX_OK | in tg3_chip_reset()
9382 tp->last_event_jiffies = jiffies; in tg3_chip_reset()
9388 tp->phy_flags |= TG3_PHYFLG_1G_ON_VAUX_OK; in tg3_chip_reset()
9390 tp->phy_flags |= TG3_PHYFLG_KEEP_LINK_ON_PWRDN; in tg3_chip_reset()
9401 /* tp->lock is held. */
9418 if (tp->hw_stats) { in tg3_halt()
9420 tg3_get_nstats(tp, &tp->net_stats_prev); in tg3_halt()
9421 tg3_get_estats(tp, &tp->estats_prev); in tg3_halt()
9424 memset(tp->hw_stats, 0, sizeof(struct tg3_hw_stats)); in tg3_halt()
9427 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_halt()
9429 tnapi->rx_dropped = 0; in tg3_halt()
9430 tnapi->tx_dropped = 0; in tg3_halt()
9444 if (!is_valid_ether_addr(addr->sa_data)) in tg3_set_mac_addr()
9445 return -EADDRNOTAVAIL; in tg3_set_mac_addr()
9447 eth_hw_addr_set(dev, addr->sa_data); in tg3_set_mac_addr()
9465 spin_lock_bh(&tp->lock); in tg3_set_mac_addr()
9468 spin_unlock_bh(&tp->lock); in tg3_set_mac_addr()
9473 /* tp->lock is held. */
9500 tw32(HOSTCC_TXCOL_TICKS, ec->tx_coalesce_usecs); in tg3_coal_tx_init()
9501 tw32(HOSTCC_TXMAX_FRAMES, ec->tx_max_coalesced_frames); in tg3_coal_tx_init()
9502 tw32(HOSTCC_TXCOAL_MAXF_INT, ec->tx_max_coalesced_frames_irq); in tg3_coal_tx_init()
9508 for (; i < tp->txq_cnt; i++) { in tg3_coal_tx_init()
9512 tw32(reg, ec->tx_coalesce_usecs); in tg3_coal_tx_init()
9514 tw32(reg, ec->tx_max_coalesced_frames); in tg3_coal_tx_init()
9516 tw32(reg, ec->tx_max_coalesced_frames_irq); in tg3_coal_tx_init()
9520 for (; i < tp->irq_max - 1; i++) { in tg3_coal_tx_init()
9530 u32 limit = tp->rxq_cnt; in tg3_coal_rx_init()
9533 tw32(HOSTCC_RXCOL_TICKS, ec->rx_coalesce_usecs); in tg3_coal_rx_init()
9534 tw32(HOSTCC_RXMAX_FRAMES, ec->rx_max_coalesced_frames); in tg3_coal_rx_init()
9535 tw32(HOSTCC_RXCOAL_MAXF_INT, ec->rx_max_coalesced_frames_irq); in tg3_coal_rx_init()
9536 limit--; in tg3_coal_rx_init()
9547 tw32(reg, ec->rx_coalesce_usecs); in tg3_coal_rx_init()
9549 tw32(reg, ec->rx_max_coalesced_frames); in tg3_coal_rx_init()
9551 tw32(reg, ec->rx_max_coalesced_frames_irq); in tg3_coal_rx_init()
9554 for (; i < tp->irq_max - 1; i++) { in tg3_coal_rx_init()
9567 u32 val = ec->stats_block_coalesce_usecs; in __tg3_set_coalesce()
9569 tw32(HOSTCC_RXCOAL_TICK_INT, ec->rx_coalesce_usecs_irq); in __tg3_set_coalesce()
9570 tw32(HOSTCC_TXCOAL_TICK_INT, ec->tx_coalesce_usecs_irq); in __tg3_set_coalesce()
9572 if (!tp->link_up) in __tg3_set_coalesce()
9579 /* tp->lock is held. */
9601 /* tp->lock is held. */
9610 for (; i < tp->irq_max; i++, txrcb += TG3_BDINFO_SIZE) { in tg3_tx_rcbs_init()
9611 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_tx_rcbs_init()
9613 if (!tnapi->tx_ring) in tg3_tx_rcbs_init()
9616 tg3_set_bdinfo(tp, txrcb, tnapi->tx_desc_mapping, in tg3_tx_rcbs_init()
9622 /* tp->lock is held. */
9645 /* tp->lock is held. */
9654 for (; i < tp->irq_max; i++, rxrcb += TG3_BDINFO_SIZE) { in tg3_rx_ret_rcbs_init()
9655 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_rx_ret_rcbs_init()
9657 if (!tnapi->rx_rcb) in tg3_rx_ret_rcbs_init()
9660 tg3_set_bdinfo(tp, rxrcb, tnapi->rx_rcb_mapping, in tg3_rx_ret_rcbs_init()
9661 (tp->rx_ret_ring_mask + 1) << in tg3_rx_ret_rcbs_init()
9666 /* tp->lock is held. */
9671 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_rings_reset()
9678 tw32_mailbox_f(tp->napi[0].int_mbox, 1); in tg3_rings_reset()
9679 tp->napi[0].chk_msi_cnt = 0; in tg3_rings_reset()
9680 tp->napi[0].last_rx_cons = 0; in tg3_rings_reset()
9681 tp->napi[0].last_tx_cons = 0; in tg3_rings_reset()
9685 for (i = 1; i < tp->irq_max; i++) { in tg3_rings_reset()
9686 tp->napi[i].tx_prod = 0; in tg3_rings_reset()
9687 tp->napi[i].tx_cons = 0; in tg3_rings_reset()
9689 tw32_mailbox(tp->napi[i].prodmbox, 0); in tg3_rings_reset()
9690 tw32_rx_mbox(tp->napi[i].consmbox, 0); in tg3_rings_reset()
9691 tw32_mailbox_f(tp->napi[i].int_mbox, 1); in tg3_rings_reset()
9692 tp->napi[i].chk_msi_cnt = 0; in tg3_rings_reset()
9693 tp->napi[i].last_rx_cons = 0; in tg3_rings_reset()
9694 tp->napi[i].last_tx_cons = 0; in tg3_rings_reset()
9697 tw32_mailbox(tp->napi[0].prodmbox, 0); in tg3_rings_reset()
9699 tp->napi[0].tx_prod = 0; in tg3_rings_reset()
9700 tp->napi[0].tx_cons = 0; in tg3_rings_reset()
9701 tw32_mailbox(tp->napi[0].prodmbox, 0); in tg3_rings_reset()
9702 tw32_rx_mbox(tp->napi[0].consmbox, 0); in tg3_rings_reset()
9705 /* Make sure the NIC-based send BD rings are disabled. */ in tg3_rings_reset()
9713 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_rings_reset()
9717 ((u64) tnapi->status_mapping >> 32)); in tg3_rings_reset()
9719 ((u64) tnapi->status_mapping & 0xffffffff)); in tg3_rings_reset()
9723 for (i = 1, tnapi++; i < tp->irq_cnt; i++, tnapi++) { in tg3_rings_reset()
9724 u64 mapping = (u64)tnapi->status_mapping; in tg3_rings_reset()
9730 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_rings_reset()
9753 nic_rep_thresh = min(bdcache_maxcnt / 2, tp->rx_std_max_post); in tg3_setup_rxbd_thresholds()
9754 host_rep_thresh = max_t(u32, tp->rx_pending / 8, 1); in tg3_setup_rxbd_thresholds()
9767 host_rep_thresh = max_t(u32, tp->rx_jumbo_pending / 8, 1); in tg3_setup_rxbd_thresholds()
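Both thresholds above are simple derived quantities: the NIC-side replenish threshold is capped at half the on-chip BD cache, and the host-side threshold is one eighth of the configured ring depth, never less than 1. Assuming, purely for illustration, rx_pending = 200 and a 64-entry BD cache, that gives min(64 / 2, rx_std_max_post) for the chip and max(200 / 8, 1) = 25 for the host; the jumbo ring gets the same one-eighth treatment from rx_jumbo_pending.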
9814 rx_mode = tp->rx_mode & ~(RX_MODE_PROMISC | in __tg3_set_rx_mode()
9825 if (dev->flags & IFF_PROMISC) { in __tg3_set_rx_mode()
9828 } else if (dev->flags & IFF_ALLMULTI) { in __tg3_set_rx_mode()
9843 crc = calc_crc(ha->addr, ETH_ALEN); in __tg3_set_rx_mode()
9858 } else if (!(dev->flags & IFF_PROMISC)) { in __tg3_set_rx_mode()
9864 __tg3_set_one_mac_addr(tp, ha->addr, in __tg3_set_rx_mode()
9870 if (rx_mode != tp->rx_mode) { in __tg3_set_rx_mode()
9871 tp->rx_mode = rx_mode; in __tg3_set_rx_mode()
9882 tp->rss_ind_tbl[i] = ethtool_rxfh_indir_default(i, qcnt); in tg3_rss_init_dflt_indir_tbl()
9892 if (tp->rxq_cnt == 1) { in tg3_rss_check_indir_tbl()
9893 memset(&tp->rss_ind_tbl[0], 0, sizeof(tp->rss_ind_tbl)); in tg3_rss_check_indir_tbl()
9899 if (tp->rss_ind_tbl[i] >= tp->rxq_cnt) in tg3_rss_check_indir_tbl()
9904 tg3_rss_init_dflt_indir_tbl(tp, tp->rxq_cnt); in tg3_rss_check_indir_tbl()
9913 u32 val = tp->rss_ind_tbl[i]; in tg3_rss_write_indir_tbl()
9917 val |= tp->rss_ind_tbl[i]; in tg3_rss_write_indir_tbl()
9932 /* tp->lock is held. */
9937 struct tg3_rx_prodring_set *tpr = &tp->napi[0].prodring; in tg3_reset_hw()
9948 if ((tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN) && in tg3_reset_hw()
9949 !(tp->phy_flags & TG3_PHYFLG_USER_CONFIGURED)) { in tg3_reset_hw()
9952 tp->phy_flags |= TG3_PHYFLG_USER_CONFIGURED; in tg3_reset_hw()
9956 if (tp->phy_flags & TG3_PHYFLG_EEE_CAP) in tg3_reset_hw()
10070 tp->pci_clock_ctrl |= CLOCK_CTRL_DELAY_PCI_GRANT; in tg3_reset_hw()
10071 tw32_f(TG3PCI_CLOCK_CTRL, tp->pci_clock_ctrl); in tg3_reset_hw()
10117 tw32(TG3PCI_DMA_RW_CTRL, val | tp->dma_rwctrl); in tg3_reset_hw()
10123 tw32(TG3PCI_DMA_RW_CTRL, tp->dma_rwctrl); in tg3_reset_hw()
10126 tp->grc_mode &= ~(GRC_MODE_HOST_SENDBDS | in tg3_reset_hw()
10130 tp->grc_mode |= GRC_MODE_HOST_SENDBDS; in tg3_reset_hw()
10132 /* Pseudo-header checksum is done by hardware logic and not in tg3_reset_hw()
10133 * the offload processors, so make the chip do the pseudo- in tg3_reset_hw()
10135 * convenient to do the pseudo-header checksum in software in tg3_reset_hw()
10138 tp->grc_mode |= GRC_MODE_NO_TX_PHDR_CSUM; in tg3_reset_hw()
10141 if (tp->rxptpctl) in tg3_reset_hw()
10143 tp->rxptpctl | TG3_RX_PTP_CTL_HWTS_INTERLOCK); in tg3_reset_hw()
10148 tw32(GRC_MODE, tp->grc_mode | val); in tg3_reset_hw()
10154 if (tp->pdev->subsystem_vendor == PCI_VENDOR_ID_DELL && in tg3_reset_hw()
10155 tp->pdev->subsystem_device == TG3PCI_SUBDEVICE_ID_DELL_5762) { in tg3_reset_hw()
10180 fw_len = tp->fw_len; in tg3_reset_hw()
10181 fw_len = (fw_len + (0x80 - 1)) & ~(0x80 - 1); in tg3_reset_hw()
10185 NIC_SRAM_MBUF_POOL_SIZE5705 - fw_len - 0xa00); in tg3_reset_hw()
10188 if (tp->dev->mtu <= ETH_DATA_LEN) { in tg3_reset_hw()
10190 tp->bufmgr_config.mbuf_read_dma_low_water); in tg3_reset_hw()
10192 tp->bufmgr_config.mbuf_mac_rx_low_water); in tg3_reset_hw()
10194 tp->bufmgr_config.mbuf_high_water); in tg3_reset_hw()
10197 tp->bufmgr_config.mbuf_read_dma_low_water_jumbo); in tg3_reset_hw()
10199 tp->bufmgr_config.mbuf_mac_rx_low_water_jumbo); in tg3_reset_hw()
10201 tp->bufmgr_config.mbuf_high_water_jumbo); in tg3_reset_hw()
10204 tp->bufmgr_config.dma_low_water); in tg3_reset_hw()
10206 tp->bufmgr_config.dma_high_water); in tg3_reset_hw()
10223 netdev_err(tp->dev, "%s cannot enable BUFMGR\n", __func__); in tg3_reset_hw()
10224 return -ENODEV; in tg3_reset_hw()
10250 ((u64) tpr->rx_std_mapping >> 32)); in tg3_reset_hw()
10252 ((u64) tpr->rx_std_mapping & 0xffffffff)); in tg3_reset_hw()
10270 ((u64) tpr->rx_jmb_mapping >> 32)); in tg3_reset_hw()
10272 ((u64) tpr->rx_jmb_mapping & 0xffffffff)); in tg3_reset_hw()
10298 tpr->rx_std_prod_idx = tp->rx_pending; in tg3_reset_hw()
10299 tw32_rx_mbox(TG3_RX_STD_PROD_IDX_REG, tpr->rx_std_prod_idx); in tg3_reset_hw()
10301 tpr->rx_jmb_prod_idx = in tg3_reset_hw()
10302 tg3_flag(tp, JUMBO_RING_ENABLE) ? tp->rx_jumbo_pending : 0; in tg3_reset_hw()
10303 tw32_rx_mbox(TG3_RX_JMB_PROD_IDX_REG, tpr->rx_jmb_prod_idx); in tg3_reset_hw()
10312 tp->dev->mtu + ETH_HLEN + ETH_FCS_LEN + VLAN_HLEN); in tg3_reset_hw()
10329 /* Receive rules. */ in tg3_reset_hw()
10366 tp->dma_limit = 0; in tg3_reset_hw()
10367 if (tp->dev->mtu <= ETH_DATA_LEN) { in tg3_reset_hw()
10369 tp->dma_limit = TG3_TX_BD_DMA_MAX_2K; in tg3_reset_hw()
10455 __tg3_set_coalesce(tp, &tp->coal); in tg3_reset_hw()
10463 ((u64) tp->stats_mapping >> 32)); in tg3_reset_hw()
10465 ((u64) tp->stats_mapping & 0xffffffff)); in tg3_reset_hw()
10479 tw32(HOSTCC_MODE, HOSTCC_MODE_ENABLE | tp->coalesce_mode); in tg3_reset_hw()
10486 if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) { in tg3_reset_hw()
10487 tp->phy_flags &= ~TG3_PHYFLG_PARALLEL_DETECT; in tg3_reset_hw()
10493 tp->mac_mode |= MAC_MODE_TXSTAT_ENABLE | MAC_MODE_RXSTAT_ENABLE | in tg3_reset_hw()
10497 tp->mac_mode |= MAC_MODE_APE_TX_EN | MAC_MODE_APE_RX_EN; in tg3_reset_hw()
10499 !(tp->phy_flags & TG3_PHYFLG_PHY_SERDES) && in tg3_reset_hw()
10501 tp->mac_mode |= MAC_MODE_LINK_POLARITY; in tg3_reset_hw()
10502 tw32_f(MAC_MODE, tp->mac_mode | MAC_MODE_RXSTAT_CLEAR | MAC_MODE_TXSTAT_CLEAR); in tg3_reset_hw()
10505 /* tp->grc_local_ctrl is partially set up during tg3_get_invariants(). in tg3_reset_hw()
10525 tp->grc_local_ctrl &= ~gpio_mask; in tg3_reset_hw()
10526 tp->grc_local_ctrl |= tr32(GRC_LOCAL_CTRL) & gpio_mask; in tg3_reset_hw()
10530 tp->grc_local_ctrl |= (GRC_LCLCTRL_GPIO_OE1 | in tg3_reset_hw()
10533 tw32_f(GRC_LOCAL_CTRL, tp->grc_local_ctrl); in tg3_reset_hw()
10539 if (tp->irq_cnt > 1) in tg3_reset_hw()
10582 pci_read_config_word(tp->pdev, tp->pcix_cap + PCI_X_CMD, in tg3_reset_hw()
10591 pci_write_config_word(tp->pdev, tp->pcix_cap + PCI_X_CMD, in tg3_reset_hw()
10658 tp->tx_mode = TX_MODE_ENABLE; in tg3_reset_hw()
10662 tp->tx_mode |= TX_MODE_MBUF_LOCKUP_FIX; in tg3_reset_hw()
10667 tp->tx_mode &= ~val; in tg3_reset_hw()
10668 tp->tx_mode |= tr32(MAC_TX_MODE) & val; in tg3_reset_hw()
10671 tw32_f(MAC_TX_MODE, tp->tx_mode); in tg3_reset_hw()
10685 tp->rx_mode = RX_MODE_ENABLE; in tg3_reset_hw()
10687 tp->rx_mode |= RX_MODE_IPV6_CSUM_ENABLE; in tg3_reset_hw()
10690 tp->rx_mode |= RX_MODE_IPV4_FRAG_FIX; in tg3_reset_hw()
10693 tp->rx_mode |= RX_MODE_RSS_ENABLE | in tg3_reset_hw()
10700 tw32_f(MAC_RX_MODE, tp->rx_mode); in tg3_reset_hw()
10703 tw32(MAC_LED_CTRL, tp->led_ctrl); in tg3_reset_hw()
10706 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) { in tg3_reset_hw()
10710 tw32_f(MAC_RX_MODE, tp->rx_mode); in tg3_reset_hw()
10713 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) { in tg3_reset_hw()
10715 !(tp->phy_flags & TG3_PHYFLG_SERDES_PREEMPHASIS)) { in tg3_reset_hw()
10717 /* only if the signal pre-emphasis bit is not set */ in tg3_reset_hw()
10737 (tp->phy_flags & TG3_PHYFLG_PHY_SERDES)) { in tg3_reset_hw()
10738 /* Use hardware link auto-negotiation */ in tg3_reset_hw()
10742 if ((tp->phy_flags & TG3_PHYFLG_MII_SERDES) && in tg3_reset_hw()
10748 tp->grc_local_ctrl &= ~GRC_LCLCTRL_USE_EXT_SIG_DETECT; in tg3_reset_hw()
10749 tp->grc_local_ctrl |= GRC_LCLCTRL_USE_SIG_DETECT; in tg3_reset_hw()
10750 tw32(GRC_LOCAL_CTRL, tp->grc_local_ctrl); in tg3_reset_hw()
10754 if (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) in tg3_reset_hw()
10755 tp->phy_flags &= ~TG3_PHYFLG_IS_LOW_POWER; in tg3_reset_hw()
10761 if (!(tp->phy_flags & TG3_PHYFLG_PHY_SERDES) && in tg3_reset_hw()
10762 !(tp->phy_flags & TG3_PHYFLG_IS_FET)) { in tg3_reset_hw()
10774 __tg3_set_rx_mode(tp->dev); in tg3_reset_hw()
10776 /* Initialize receive rules. */ in tg3_reset_hw()
10787 limit -= 4; in tg3_reset_hw()
10847 * packet processing. Invoked with tp->lock held.
10874 if (ocir->signature != TG3_OCIR_SIG_MAGIC || in tg3_sd_scan_scratchpad()
10875 !(ocir->version_flags & TG3_OCIR_FLAG_ACTIVE)) in tg3_sd_scan_scratchpad()
10888 spin_lock_bh(&tp->lock); in tg3_show_temp()
10889 tg3_ape_scratchpad_read(tp, &temperature, attr->index, in tg3_show_temp()
10891 spin_unlock_bh(&tp->lock); in tg3_show_temp()
10913 if (tp->hwmon_dev) { in tg3_hwmon_close()
10914 hwmon_device_unregister(tp->hwmon_dev); in tg3_hwmon_close()
10915 tp->hwmon_dev = NULL; in tg3_hwmon_close()
10923 struct pci_dev *pdev = tp->pdev; in tg3_hwmon_open()
10939 tp->hwmon_dev = hwmon_device_register_with_groups(&pdev->dev, "tg3", in tg3_hwmon_open()
10941 if (IS_ERR(tp->hwmon_dev)) { in tg3_hwmon_open()
10942 tp->hwmon_dev = NULL; in tg3_hwmon_open()
10943 dev_err(&pdev->dev, "Cannot register hwmon device, aborting\n"); in tg3_hwmon_open()
10954 (PSTAT)->low += __val; \
10955 if ((PSTAT)->low < __val) \
10956 (PSTAT)->high += 1; \
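The TG3_STAT_ADD32 fragments above accumulate a 32-bit hardware counter into a {high, low} 64-bit pair, detecting wrap-around by checking whether the low word ended up smaller than the value just added. A minimal standalone sketch of that carry-on-wrap technique (the struct and function names here are illustrative, not the driver's):

	#include <stdint.h>
	#include <stdio.h>

	struct stat64 { uint32_t high, low; };

	static void stat_add32(struct stat64 *s, uint32_t val)
	{
		s->low += val;
		if (s->low < val)	/* the 32-bit sum wrapped, so carry into the high word */
			s->high += 1;
	}

	int main(void)
	{
		struct stat64 s = { 0, 0xfffffff0u };

		stat_add32(&s, 0x20);	/* low wraps to 0x10, high becomes 1 */
		printf("0x%llx\n", ((unsigned long long)s.high << 32) | s.low);
		return 0;
	}

The same high << 32 | low merge is what the later get_stat64() fragment performs when the accumulated counter is reported.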
10961 struct tg3_hw_stats *sp = tp->hw_stats; in tg3_periodic_fetch_stats()
10963 if (!tp->link_up) in tg3_periodic_fetch_stats()
10966 TG3_STAT_ADD32(&sp->tx_octets, MAC_TX_STATS_OCTETS); in tg3_periodic_fetch_stats()
10967 TG3_STAT_ADD32(&sp->tx_collisions, MAC_TX_STATS_COLLISIONS); in tg3_periodic_fetch_stats()
10968 TG3_STAT_ADD32(&sp->tx_xon_sent, MAC_TX_STATS_XON_SENT); in tg3_periodic_fetch_stats()
10969 TG3_STAT_ADD32(&sp->tx_xoff_sent, MAC_TX_STATS_XOFF_SENT); in tg3_periodic_fetch_stats()
10970 TG3_STAT_ADD32(&sp->tx_mac_errors, MAC_TX_STATS_MAC_ERRORS); in tg3_periodic_fetch_stats()
10971 TG3_STAT_ADD32(&sp->tx_single_collisions, MAC_TX_STATS_SINGLE_COLLISIONS); in tg3_periodic_fetch_stats()
10972 TG3_STAT_ADD32(&sp->tx_mult_collisions, MAC_TX_STATS_MULT_COLLISIONS); in tg3_periodic_fetch_stats()
10973 TG3_STAT_ADD32(&sp->tx_deferred, MAC_TX_STATS_DEFERRED); in tg3_periodic_fetch_stats()
10974 TG3_STAT_ADD32(&sp->tx_excessive_collisions, MAC_TX_STATS_EXCESSIVE_COL); in tg3_periodic_fetch_stats()
10975 TG3_STAT_ADD32(&sp->tx_late_collisions, MAC_TX_STATS_LATE_COL); in tg3_periodic_fetch_stats()
10976 TG3_STAT_ADD32(&sp->tx_ucast_packets, MAC_TX_STATS_UCAST); in tg3_periodic_fetch_stats()
10977 TG3_STAT_ADD32(&sp->tx_mcast_packets, MAC_TX_STATS_MCAST); in tg3_periodic_fetch_stats()
10978 TG3_STAT_ADD32(&sp->tx_bcast_packets, MAC_TX_STATS_BCAST); in tg3_periodic_fetch_stats()
10980 (sp->tx_ucast_packets.low + sp->tx_mcast_packets.low + in tg3_periodic_fetch_stats()
10981 sp->tx_bcast_packets.low) > TG3_NUM_RDMA_CHANNELS)) { in tg3_periodic_fetch_stats()
10990 TG3_STAT_ADD32(&sp->rx_octets, MAC_RX_STATS_OCTETS); in tg3_periodic_fetch_stats()
10991 TG3_STAT_ADD32(&sp->rx_fragments, MAC_RX_STATS_FRAGMENTS); in tg3_periodic_fetch_stats()
10992 TG3_STAT_ADD32(&sp->rx_ucast_packets, MAC_RX_STATS_UCAST); in tg3_periodic_fetch_stats()
10993 TG3_STAT_ADD32(&sp->rx_mcast_packets, MAC_RX_STATS_MCAST); in tg3_periodic_fetch_stats()
10994 TG3_STAT_ADD32(&sp->rx_bcast_packets, MAC_RX_STATS_BCAST); in tg3_periodic_fetch_stats()
10995 TG3_STAT_ADD32(&sp->rx_fcs_errors, MAC_RX_STATS_FCS_ERRORS); in tg3_periodic_fetch_stats()
10996 TG3_STAT_ADD32(&sp->rx_align_errors, MAC_RX_STATS_ALIGN_ERRORS); in tg3_periodic_fetch_stats()
10997 TG3_STAT_ADD32(&sp->rx_xon_pause_rcvd, MAC_RX_STATS_XON_PAUSE_RECVD); in tg3_periodic_fetch_stats()
10998 TG3_STAT_ADD32(&sp->rx_xoff_pause_rcvd, MAC_RX_STATS_XOFF_PAUSE_RECVD); in tg3_periodic_fetch_stats()
10999 TG3_STAT_ADD32(&sp->rx_mac_ctrl_rcvd, MAC_RX_STATS_MAC_CTRL_RECVD); in tg3_periodic_fetch_stats()
11000 TG3_STAT_ADD32(&sp->rx_xoff_entered, MAC_RX_STATS_XOFF_ENTERED); in tg3_periodic_fetch_stats()
11001 TG3_STAT_ADD32(&sp->rx_frame_too_long_errors, MAC_RX_STATS_FRAME_TOO_LONG); in tg3_periodic_fetch_stats()
11002 TG3_STAT_ADD32(&sp->rx_jabbers, MAC_RX_STATS_JABBERS); in tg3_periodic_fetch_stats()
11003 TG3_STAT_ADD32(&sp->rx_undersize_packets, MAC_RX_STATS_UNDERSIZE); in tg3_periodic_fetch_stats()
11005 TG3_STAT_ADD32(&sp->rxbds_empty, RCVLPC_NO_RCV_BD_CNT); in tg3_periodic_fetch_stats()
11010 TG3_STAT_ADD32(&sp->rx_discards, RCVLPC_IN_DISCARDS_CNT); in tg3_periodic_fetch_stats()
11016 sp->rx_discards.low += val; in tg3_periodic_fetch_stats()
11017 if (sp->rx_discards.low < val) in tg3_periodic_fetch_stats()
11018 sp->rx_discards.high += 1; in tg3_periodic_fetch_stats()
11020 sp->mbuf_lwm_thresh_hit = sp->rx_discards; in tg3_periodic_fetch_stats()
11022 TG3_STAT_ADD32(&sp->rx_errors, RCVLPC_IN_ERRORS_CNT); in tg3_periodic_fetch_stats()
11029 for (i = 0; i < tp->irq_cnt; i++) { in tg3_chk_missed_msi()
11030 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_chk_missed_msi()
11033 if (tnapi->last_rx_cons == tnapi->rx_rcb_ptr && in tg3_chk_missed_msi()
11034 tnapi->last_tx_cons == tnapi->tx_cons) { in tg3_chk_missed_msi()
11035 if (tnapi->chk_msi_cnt < 1) { in tg3_chk_missed_msi()
11036 tnapi->chk_msi_cnt++; in tg3_chk_missed_msi()
11042 tnapi->chk_msi_cnt = 0; in tg3_chk_missed_msi()
11043 tnapi->last_rx_cons = tnapi->rx_rcb_ptr; in tg3_chk_missed_msi()
11044 tnapi->last_tx_cons = tnapi->tx_cons; in tg3_chk_missed_msi()
11052 spin_lock(&tp->lock); in tg3_timer()
11054 if (tp->irq_sync || tg3_flag(tp, RESET_TASK_PENDING)) { in tg3_timer()
11055 spin_unlock(&tp->lock); in tg3_timer()
11069 /* All of this garbage is because when using non-tagged in tg3_timer()
11073 if (tp->napi[0].hw_status->status & SD_STATUS_UPDATED) { in tg3_timer()
11075 tp->grc_local_ctrl | GRC_LCLCTRL_SETINT); in tg3_timer()
11077 tw32(HOSTCC_MODE, tp->coalesce_mode | in tg3_timer()
11082 spin_unlock(&tp->lock); in tg3_timer()
11089 if (!--tp->timer_counter) { in tg3_timer()
11093 if (tp->setlpicnt && !--tp->setlpicnt) in tg3_timer()
11103 if (tp->phy_flags & TG3_PHYFLG_USE_MI_INTERRUPT) { in tg3_timer()
11115 if (tp->link_up && in tg3_timer()
11119 if (!tp->link_up && in tg3_timer()
11125 if (!tp->serdes_counter) { in tg3_timer()
11127 (tp->mac_mode & in tg3_timer()
11130 tw32_f(MAC_MODE, tp->mac_mode); in tg3_timer()
11135 } else if ((tp->phy_flags & TG3_PHYFLG_MII_SERDES) && in tg3_timer()
11143 if (link_up != tp->link_up) in tg3_timer()
11147 tp->timer_counter = tp->timer_multiplier; in tg3_timer()
11167 if (!--tp->asf_counter) { in tg3_timer()
11179 tp->asf_counter = tp->asf_multiplier; in tg3_timer()
11185 spin_unlock(&tp->lock); in tg3_timer()
11188 tp->timer.expires = jiffies + tp->timer_offset; in tg3_timer()
11189 add_timer(&tp->timer); in tg3_timer()
11197 tp->timer_offset = HZ; in tg3_timer_init()
11199 tp->timer_offset = HZ / 10; in tg3_timer_init()
11201 BUG_ON(tp->timer_offset > HZ); in tg3_timer_init()
11203 tp->timer_multiplier = (HZ / tp->timer_offset); in tg3_timer_init()
11204 tp->asf_multiplier = (HZ / tp->timer_offset) * in tg3_timer_init()
11207 timer_setup(&tp->timer, tg3_timer, 0); in tg3_timer_init()
11212 tp->asf_counter = tp->asf_multiplier; in tg3_timer_start()
11213 tp->timer_counter = tp->timer_multiplier; in tg3_timer_start()
11215 tp->timer.expires = jiffies + tp->timer_offset; in tg3_timer_start()
11216 add_timer(&tp->timer); in tg3_timer_start()
11221 del_timer_sync(&tp->timer); in tg3_timer_stop()
11224 /* Restart hardware after configuration changes, self-test, etc.
11225 * Invoked with tp->lock held.
11228 __releases(tp->lock) in tg3_restart_hw()
11229 __acquires(tp->lock) in tg3_restart_hw()
11235 netdev_err(tp->dev, in tg3_restart_hw()
11236 "Failed to re-initialize device, aborting\n"); in tg3_restart_hw()
11240 tp->irq_sync = 0; in tg3_restart_hw()
11242 dev_close(tp->dev); in tg3_restart_hw()
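The __releases(tp->lock)/__acquires(tp->lock) annotations above mark a function that is entered with the lock held, drops it while the hardware is re-initialized, and retakes it before returning, so sparse can check the lock balance. A hedged userspace sketch of the same pattern (the pthread mutex and helper names are stand-ins, not the driver's code):

	#include <pthread.h>

	#ifndef __releases
	#define __releases(x)	/* expands to a sparse attribute in kernel builds */
	#define __acquires(x)
	#endif

	static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

	static void slow_reinit(void) { }	/* hypothetical blocking re-init step */

	static int restart_locked(void)
		__releases(lock) __acquires(lock)
	{
		pthread_mutex_unlock(&lock);	/* caller entered with the lock held */
		slow_reinit();			/* may sleep, so the lock stays dropped */
		pthread_mutex_lock(&lock);	/* promise restored before returning */
		return 0;
	}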
11256 if (tp->pcierr_recovery || !netif_running(tp->dev) || in tg3_reset_task()
11257 tp->pdev->error_state != pci_channel_io_normal) { in tg3_reset_task()
11273 tp->write32_tx_mbox = tg3_write32_tx_mbox; in tg3_reset_task()
11274 tp->write32_rx_mbox = tg3_write_flush_reg32; in tg3_reset_task()
11283 tp->irq_sync = 0; in tg3_reset_task()
11289 dev_close(tp->dev); in tg3_reset_task()
11306 struct tg3_napi *tnapi = &tp->napi[irq_num]; in tg3_request_irq()
11308 if (tp->irq_cnt == 1) in tg3_request_irq()
11309 name = tp->dev->name; in tg3_request_irq()
11311 name = &tnapi->irq_lbl[0]; in tg3_request_irq()
11312 if (tnapi->tx_buffers && tnapi->rx_rcb) in tg3_request_irq()
11314 "%s-txrx-%d", tp->dev->name, irq_num); in tg3_request_irq()
11315 else if (tnapi->tx_buffers) in tg3_request_irq()
11317 "%s-tx-%d", tp->dev->name, irq_num); in tg3_request_irq()
11318 else if (tnapi->rx_rcb) in tg3_request_irq()
11320 "%s-rx-%d", tp->dev->name, irq_num); in tg3_request_irq()
11323 "%s-%d", tp->dev->name, irq_num); in tg3_request_irq()
11324 name[IFNAMSIZ-1] = 0; in tg3_request_irq()
11339 return request_irq(tnapi->irq_vec, fn, flags, name, tnapi); in tg3_request_irq()
11344 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_test_interrupt()
11345 struct net_device *dev = tp->dev; in tg3_test_interrupt()
11350 return -ENODEV; in tg3_test_interrupt()
11354 free_irq(tnapi->irq_vec, tnapi); in tg3_test_interrupt()
11365 err = request_irq(tnapi->irq_vec, tg3_test_isr, in tg3_test_interrupt()
11366 IRQF_SHARED, dev->name, tnapi); in tg3_test_interrupt()
11370 tnapi->hw_status->status &= ~SD_STATUS_UPDATED; in tg3_test_interrupt()
11373 tw32_f(HOSTCC_MODE, tp->coalesce_mode | HOSTCC_MODE_ENABLE | in tg3_test_interrupt()
11374 tnapi->coal_now); in tg3_test_interrupt()
11379 int_mbox = tr32_mailbox(tnapi->int_mbox); in tg3_test_interrupt()
11389 tnapi->hw_status->status_tag != tnapi->last_tag) in tg3_test_interrupt()
11390 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_test_interrupt()
11397 free_irq(tnapi->irq_vec, tnapi); in tg3_test_interrupt()
11413 return -EIO; in tg3_test_interrupt()
11430 pci_read_config_word(tp->pdev, PCI_COMMAND, &pci_cmd); in tg3_test_msi()
11431 pci_write_config_word(tp->pdev, PCI_COMMAND, in tg3_test_msi()
11436 pci_write_config_word(tp->pdev, PCI_COMMAND, pci_cmd); in tg3_test_msi()
11442 if (err != -EIO) in tg3_test_msi()
11446 netdev_warn(tp->dev, "No interrupt was generated using MSI. Switching " in tg3_test_msi()
11450 free_irq(tp->napi[0].irq_vec, &tp->napi[0]); in tg3_test_msi()
11452 pci_disable_msi(tp->pdev); in tg3_test_msi()
11455 tp->napi[0].irq_vec = tp->pdev->irq; in tg3_test_msi()
11472 free_irq(tp->napi[0].irq_vec, &tp->napi[0]); in tg3_test_msi()
11481 if (request_firmware(&tp->fw, tp->fw_needed, &tp->pdev->dev)) { in tg3_request_firmware()
11482 netdev_err(tp->dev, "Failed to load firmware \"%s\"\n", in tg3_request_firmware()
11483 tp->fw_needed); in tg3_request_firmware()
11484 return -ENOENT; in tg3_request_firmware()
11487 fw_hdr = (struct tg3_firmware_hdr *)tp->fw->data; in tg3_request_firmware()
11494 tp->fw_len = be32_to_cpu(fw_hdr->len); /* includes bss */ in tg3_request_firmware()
11495 if (tp->fw_len < (tp->fw->size - TG3_FW_HDR_LEN)) { in tg3_request_firmware()
11496 netdev_err(tp->dev, "bogus length %d in \"%s\"\n", in tg3_request_firmware()
11497 tp->fw_len, tp->fw_needed); in tg3_request_firmware()
11498 release_firmware(tp->fw); in tg3_request_firmware()
11499 tp->fw = NULL; in tg3_request_firmware()
11500 return -EINVAL; in tg3_request_firmware()
11504 tp->fw_needed = NULL; in tg3_request_firmware()
11510 u32 irq_cnt = max(tp->rxq_cnt, tp->txq_cnt); in tg3_irq_count()
11514 * In multiqueue MSI-X mode, the first MSI-X vector in tg3_irq_count()
11518 irq_cnt = min_t(unsigned, irq_cnt + 1, tp->irq_max); in tg3_irq_count()
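The tg3_irq_count() fragments above start from the larger of the RX and TX queue counts and, in multiqueue MSI-X mode, reserve one extra vector while clamping to the hardware maximum. A small illustrative helper, not the driver's exact logic (which also consults feature flags):

	static unsigned int irq_count_sketch(unsigned int rxq, unsigned int txq,
					     unsigned int irq_max, int multiqueue)
	{
		unsigned int n = rxq > txq ? rxq : txq;

		if (multiqueue) {
			n += 1;			/* one extra vector, e.g. for slow-path events */
			if (n > irq_max)
				n = irq_max;	/* clamp to what the hardware supports */
		}
		return n;
	}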
11529 tp->txq_cnt = tp->txq_req; in tg3_enable_msix()
11530 tp->rxq_cnt = tp->rxq_req; in tg3_enable_msix()
11531 if (!tp->rxq_cnt) in tg3_enable_msix()
11532 tp->rxq_cnt = netif_get_num_default_rss_queues(); in tg3_enable_msix()
11533 if (tp->rxq_cnt > tp->rxq_max) in tg3_enable_msix()
11534 tp->rxq_cnt = tp->rxq_max; in tg3_enable_msix()
11536 /* Disable multiple TX rings by default. Simple round-robin hardware in tg3_enable_msix()
11540 if (!tp->txq_req) in tg3_enable_msix()
11541 tp->txq_cnt = 1; in tg3_enable_msix()
11543 tp->irq_cnt = tg3_irq_count(tp); in tg3_enable_msix()
11545 for (i = 0; i < tp->irq_max; i++) { in tg3_enable_msix()
11550 rc = pci_enable_msix_range(tp->pdev, msix_ent, 1, tp->irq_cnt); in tg3_enable_msix()
11553 } else if (rc < tp->irq_cnt) { in tg3_enable_msix()
11554 netdev_notice(tp->dev, "Requested %d MSI-X vectors, received %d\n", in tg3_enable_msix()
11555 tp->irq_cnt, rc); in tg3_enable_msix()
11556 tp->irq_cnt = rc; in tg3_enable_msix()
11557 tp->rxq_cnt = max(rc - 1, 1); in tg3_enable_msix()
11558 if (tp->txq_cnt) in tg3_enable_msix()
11559 tp->txq_cnt = min(tp->rxq_cnt, tp->txq_max); in tg3_enable_msix()
11562 for (i = 0; i < tp->irq_max; i++) in tg3_enable_msix()
11563 tp->napi[i].irq_vec = msix_ent[i].vector; in tg3_enable_msix()
11565 if (netif_set_real_num_rx_queues(tp->dev, tp->rxq_cnt)) { in tg3_enable_msix()
11566 pci_disable_msix(tp->pdev); in tg3_enable_msix()
11570 if (tp->irq_cnt == 1) in tg3_enable_msix()
11575 if (tp->txq_cnt > 1) in tg3_enable_msix()
11578 netif_set_real_num_tx_queues(tp->dev, tp->txq_cnt); in tg3_enable_msix()
11590 netdev_warn(tp->dev, in tg3_ints_init()
11597 else if (tg3_flag(tp, SUPPORT_MSI) && pci_enable_msi(tp->pdev) == 0) in tg3_ints_init()
11602 if (tg3_flag(tp, USING_MSIX) && tp->irq_cnt > 1) in tg3_ints_init()
11610 tp->irq_cnt = 1; in tg3_ints_init()
11611 tp->napi[0].irq_vec = tp->pdev->irq; in tg3_ints_init()
11614 if (tp->irq_cnt == 1) { in tg3_ints_init()
11615 tp->txq_cnt = 1; in tg3_ints_init()
11616 tp->rxq_cnt = 1; in tg3_ints_init()
11617 netif_set_real_num_tx_queues(tp->dev, 1); in tg3_ints_init()
11618 netif_set_real_num_rx_queues(tp->dev, 1); in tg3_ints_init()
11625 pci_disable_msix(tp->pdev); in tg3_ints_fini()
11627 pci_disable_msi(tp->pdev); in tg3_ints_fini()
11637 struct net_device *dev = tp->dev; in tg3_start()
11659 for (i = 0; i < tp->irq_cnt; i++) { in tg3_start()
11662 for (i--; i >= 0; i--) { in tg3_start()
11663 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_start()
11665 free_irq(tnapi->irq_vec, tnapi); in tg3_start()
11727 if (dev->features & NETIF_F_LOOPBACK) in tg3_start()
11728 tg3_set_loopback(dev, dev->features); in tg3_start()
11733 for (i = tp->irq_cnt - 1; i >= 0; i--) { in tg3_start()
11734 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_start()
11735 free_irq(tnapi->irq_vec, tnapi); in tg3_start()
11772 for (i = tp->irq_cnt - 1; i >= 0; i--) { in tg3_stop()
11773 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_stop()
11774 free_irq(tnapi->irq_vec, tnapi); in tg3_stop()
11789 if (tp->pcierr_recovery) { in tg3_open()
11792 return -EAGAIN; in tg3_open()
11795 if (tp->fw_needed) { in tg3_open()
11799 netdev_warn(tp->dev, "EEE capability disabled\n"); in tg3_open()
11800 tp->phy_flags &= ~TG3_PHYFLG_EEE_CAP; in tg3_open()
11801 } else if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) { in tg3_open()
11802 netdev_warn(tp->dev, "EEE capability restored\n"); in tg3_open()
11803 tp->phy_flags |= TG3_PHYFLG_EEE_CAP; in tg3_open()
11809 netdev_warn(tp->dev, "TSO capability disabled\n"); in tg3_open()
11812 netdev_notice(tp->dev, "TSO capability restored\n"); in tg3_open()
11831 !(tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN), in tg3_open()
11835 pci_set_power_state(tp->pdev, PCI_D3hot); in tg3_open()
11845 if (tp->pcierr_recovery) { in tg3_close()
11848 return -EAGAIN; in tg3_close()
11853 if (pci_device_is_present(tp->pdev)) { in tg3_close()
11863 return ((u64)val->high << 32) | ((u64)val->low); in get_stat64()
11868 struct tg3_hw_stats *hw_stats = tp->hw_stats; in tg3_calc_crc_errors()
11870 if (!(tp->phy_flags & TG3_PHYFLG_PHY_SERDES) && in tg3_calc_crc_errors()
11882 tp->phy_crc_errors += val; in tg3_calc_crc_errors()
11884 return tp->phy_crc_errors; in tg3_calc_crc_errors()
11887 return get_stat64(&hw_stats->rx_fcs_errors); in tg3_calc_crc_errors()
11891 estats->member = old_estats->member + \
11892 get_stat64(&hw_stats->member)
11896 struct tg3_ethtool_stats *old_estats = &tp->estats_prev; in tg3_get_estats()
11897 struct tg3_hw_stats *hw_stats = tp->hw_stats; in tg3_get_estats()
11980 struct rtnl_link_stats64 *old_stats = &tp->net_stats_prev; in tg3_get_nstats()
11981 struct tg3_hw_stats *hw_stats = tp->hw_stats; in tg3_get_nstats()
11986 stats->rx_packets = old_stats->rx_packets + in tg3_get_nstats()
11987 get_stat64(&hw_stats->rx_ucast_packets) + in tg3_get_nstats()
11988 get_stat64(&hw_stats->rx_mcast_packets) + in tg3_get_nstats()
11989 get_stat64(&hw_stats->rx_bcast_packets); in tg3_get_nstats()
11991 stats->tx_packets = old_stats->tx_packets + in tg3_get_nstats()
11992 get_stat64(&hw_stats->tx_ucast_packets) + in tg3_get_nstats()
11993 get_stat64(&hw_stats->tx_mcast_packets) + in tg3_get_nstats()
11994 get_stat64(&hw_stats->tx_bcast_packets); in tg3_get_nstats()
11996 stats->rx_bytes = old_stats->rx_bytes + in tg3_get_nstats()
11997 get_stat64(&hw_stats->rx_octets); in tg3_get_nstats()
11998 stats->tx_bytes = old_stats->tx_bytes + in tg3_get_nstats()
11999 get_stat64(&hw_stats->tx_octets); in tg3_get_nstats()
12001 stats->rx_errors = old_stats->rx_errors + in tg3_get_nstats()
12002 get_stat64(&hw_stats->rx_errors); in tg3_get_nstats()
12003 stats->tx_errors = old_stats->tx_errors + in tg3_get_nstats()
12004 get_stat64(&hw_stats->tx_errors) + in tg3_get_nstats()
12005 get_stat64(&hw_stats->tx_mac_errors) + in tg3_get_nstats()
12006 get_stat64(&hw_stats->tx_carrier_sense_errors) + in tg3_get_nstats()
12007 get_stat64(&hw_stats->tx_discards); in tg3_get_nstats()
12009 stats->multicast = old_stats->multicast + in tg3_get_nstats()
12010 get_stat64(&hw_stats->rx_mcast_packets); in tg3_get_nstats()
12011 stats->collisions = old_stats->collisions + in tg3_get_nstats()
12012 get_stat64(&hw_stats->tx_collisions); in tg3_get_nstats()
12014 stats->rx_length_errors = old_stats->rx_length_errors + in tg3_get_nstats()
12015 get_stat64(&hw_stats->rx_frame_too_long_errors) + in tg3_get_nstats()
12016 get_stat64(&hw_stats->rx_undersize_packets); in tg3_get_nstats()
12018 stats->rx_frame_errors = old_stats->rx_frame_errors + in tg3_get_nstats()
12019 get_stat64(&hw_stats->rx_align_errors); in tg3_get_nstats()
12020 stats->tx_aborted_errors = old_stats->tx_aborted_errors + in tg3_get_nstats()
12021 get_stat64(&hw_stats->tx_discards); in tg3_get_nstats()
12022 stats->tx_carrier_errors = old_stats->tx_carrier_errors + in tg3_get_nstats()
12023 get_stat64(&hw_stats->tx_carrier_sense_errors); in tg3_get_nstats()
12025 stats->rx_crc_errors = old_stats->rx_crc_errors + in tg3_get_nstats()
12028 stats->rx_missed_errors = old_stats->rx_missed_errors + in tg3_get_nstats()
12029 get_stat64(&hw_stats->rx_discards); in tg3_get_nstats()
12031 /* Aggregate per-queue counters. The per-queue counters are updated in tg3_get_nstats()
12032 * by a single writer, race-free. The result computed by this loop in tg3_get_nstats()
12039 rx_dropped = (unsigned long)(old_stats->rx_dropped); in tg3_get_nstats()
12040 tx_dropped = (unsigned long)(old_stats->tx_dropped); in tg3_get_nstats()
12042 for (i = 0; i < tp->irq_cnt; i++) { in tg3_get_nstats()
12043 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_get_nstats()
12045 rx_dropped += tnapi->rx_dropped; in tg3_get_nstats()
12046 tx_dropped += tnapi->tx_dropped; in tg3_get_nstats()
12049 stats->rx_dropped = rx_dropped; in tg3_get_nstats()
12050 stats->tx_dropped = tx_dropped; in tg3_get_nstats()
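The loop above folds the per-queue drop counters into the netdev totals; each counter has a single writer, so plain reads give a snapshot that is consistent enough for statistics. A sketch of the same aggregation pattern with illustrative types:

	#include <stddef.h>

	struct queue_stats { unsigned long rx_dropped, tx_dropped; };

	static void aggregate_drops(const struct queue_stats *q, size_t nqueues,
				    unsigned long *rx_total, unsigned long *tx_total)
	{
		size_t i;

		/* single-writer counters: no locking needed for a statistics read */
		for (i = 0; i < nqueues; i++) {
			*rx_total += q[i].rx_dropped;
			*tx_total += q[i].tx_dropped;
		}
	}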
12063 regs->version = 0; in tg3_get_regs()
12067 if (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) in tg3_get_regs()
12081 return tp->nvram_size; in tg3_get_eeprom_len()
12093 return -EINVAL; in tg3_get_eeprom()
12095 offset = eeprom->offset; in tg3_get_eeprom()
12096 len = eeprom->len; in tg3_get_eeprom()
12097 eeprom->len = 0; in tg3_get_eeprom()
12099 eeprom->magic = TG3_EEPROM_MAGIC; in tg3_get_eeprom()
12117 b_count = 4 - b_offset; in tg3_get_eeprom()
12122 ret = tg3_nvram_read_be32(tp, offset-b_offset, &val); in tg3_get_eeprom()
12126 len -= b_count; in tg3_get_eeprom()
12128 eeprom->len += b_count; in tg3_get_eeprom()
12132 pd = &data[eeprom->len]; in tg3_get_eeprom()
12133 for (i = 0; i < (len - (len & 3)); i += 4) { in tg3_get_eeprom()
12137 i -= 4; in tg3_get_eeprom()
12138 eeprom->len += i; in tg3_get_eeprom()
12144 eeprom->len += i; in tg3_get_eeprom()
12145 ret = -EINTR; in tg3_get_eeprom()
12151 eeprom->len += i; in tg3_get_eeprom()
12155 pd = &data[eeprom->len]; in tg3_get_eeprom()
12157 b_offset = offset + len - b_count; in tg3_get_eeprom()
12162 eeprom->len += b_count; in tg3_get_eeprom()
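The tg3_get_eeprom() fragments above serve an arbitrary (offset, len) request using word-aligned 32-bit NVRAM reads: a partial leading word, a run of whole words, then a partial trailing word. A hedged standalone sketch of that split (read_word32 is a stand-in for the NVRAM read helper, not a driver function):

	#include <stdint.h>
	#include <string.h>

	typedef int (*read_word32_t)(uint32_t aligned_off, uint8_t word[4]);

	static int read_range(read_word32_t read_word32, uint32_t offset,
			      uint32_t len, uint8_t *dst)
	{
		uint8_t word[4];
		uint32_t head = offset & 3;

		if (head) {			/* partial leading word */
			uint32_t n = 4 - head;

			if (n > len)
				n = len;
			if (read_word32(offset - head, word))
				return -1;
			memcpy(dst, word + head, n);
			dst += n;
			offset += n;
			len -= n;
		}
		while (len >= 4) {		/* whole aligned words */
			if (read_word32(offset, word))
				return -1;
			memcpy(dst, word, 4);
			dst += 4;
			offset += 4;
			len -= 4;
		}
		if (len) {			/* partial trailing word */
			if (read_word32(offset, word))
				return -1;
			memcpy(dst, word, len);
		}
		return 0;
	}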
12184 eeprom->magic != TG3_EEPROM_MAGIC) in tg3_set_eeprom()
12185 return -EINVAL; in tg3_set_eeprom()
12187 offset = eeprom->offset; in tg3_set_eeprom()
12188 len = eeprom->len; in tg3_set_eeprom()
12192 ret = tg3_nvram_read_be32(tp, offset-b_offset, &start); in tg3_set_eeprom()
12206 ret = tg3_nvram_read_be32(tp, offset+len-4, &end); in tg3_set_eeprom()
12215 return -ENOMEM; in tg3_set_eeprom()
12219 memcpy(buf+len-4, &end, 4); in tg3_set_eeprom()
12220 memcpy(buf + b_offset, data, eeprom->len); in tg3_set_eeprom()
12239 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_get_link_ksettings()
12240 return -EAGAIN; in tg3_get_link_ksettings()
12241 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_get_link_ksettings()
12249 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) in tg3_get_link_ksettings()
12253 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) { in tg3_get_link_ksettings()
12259 cmd->base.port = PORT_TP; in tg3_get_link_ksettings()
12262 cmd->base.port = PORT_FIBRE; in tg3_get_link_ksettings()
12264 ethtool_convert_legacy_u32_to_link_mode(cmd->link_modes.supported, in tg3_get_link_ksettings()
12267 advertising = tp->link_config.advertising; in tg3_get_link_ksettings()
12269 if (tp->link_config.flowctrl & FLOW_CTRL_RX) { in tg3_get_link_ksettings()
12270 if (tp->link_config.flowctrl & FLOW_CTRL_TX) { in tg3_get_link_ksettings()
12276 } else if (tp->link_config.flowctrl & FLOW_CTRL_TX) { in tg3_get_link_ksettings()
12280 ethtool_convert_legacy_u32_to_link_mode(cmd->link_modes.advertising, in tg3_get_link_ksettings()
12283 if (netif_running(dev) && tp->link_up) { in tg3_get_link_ksettings()
12284 cmd->base.speed = tp->link_config.active_speed; in tg3_get_link_ksettings()
12285 cmd->base.duplex = tp->link_config.active_duplex; in tg3_get_link_ksettings()
12287 cmd->link_modes.lp_advertising, in tg3_get_link_ksettings()
12288 tp->link_config.rmt_adv); in tg3_get_link_ksettings()
12290 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) { in tg3_get_link_ksettings()
12291 if (tp->phy_flags & TG3_PHYFLG_MDIX_STATE) in tg3_get_link_ksettings()
12292 cmd->base.eth_tp_mdix = ETH_TP_MDI_X; in tg3_get_link_ksettings()
12294 cmd->base.eth_tp_mdix = ETH_TP_MDI; in tg3_get_link_ksettings()
12297 cmd->base.speed = SPEED_UNKNOWN; in tg3_get_link_ksettings()
12298 cmd->base.duplex = DUPLEX_UNKNOWN; in tg3_get_link_ksettings()
12299 cmd->base.eth_tp_mdix = ETH_TP_MDI_INVALID; in tg3_get_link_ksettings()
12301 cmd->base.phy_address = tp->phy_addr; in tg3_get_link_ksettings()
12302 cmd->base.autoneg = tp->link_config.autoneg; in tg3_get_link_ksettings()
12310 u32 speed = cmd->base.speed; in tg3_set_link_ksettings()
12315 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_set_link_ksettings()
12316 return -EAGAIN; in tg3_set_link_ksettings()
12317 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_set_link_ksettings()
12321 if (cmd->base.autoneg != AUTONEG_ENABLE && in tg3_set_link_ksettings()
12322 cmd->base.autoneg != AUTONEG_DISABLE) in tg3_set_link_ksettings()
12323 return -EINVAL; in tg3_set_link_ksettings()
12325 if (cmd->base.autoneg == AUTONEG_DISABLE && in tg3_set_link_ksettings()
12326 cmd->base.duplex != DUPLEX_FULL && in tg3_set_link_ksettings()
12327 cmd->base.duplex != DUPLEX_HALF) in tg3_set_link_ksettings()
12328 return -EINVAL; in tg3_set_link_ksettings()
12331 cmd->link_modes.advertising); in tg3_set_link_ksettings()
12333 if (cmd->base.autoneg == AUTONEG_ENABLE) { in tg3_set_link_ksettings()
12338 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) in tg3_set_link_ksettings()
12342 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) in tg3_set_link_ksettings()
12352 return -EINVAL; in tg3_set_link_ksettings()
12363 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) { in tg3_set_link_ksettings()
12365 return -EINVAL; in tg3_set_link_ksettings()
12367 if (cmd->base.duplex != DUPLEX_FULL) in tg3_set_link_ksettings()
12368 return -EINVAL; in tg3_set_link_ksettings()
12372 return -EINVAL; in tg3_set_link_ksettings()
12378 tp->link_config.autoneg = cmd->base.autoneg; in tg3_set_link_ksettings()
12379 if (cmd->base.autoneg == AUTONEG_ENABLE) { in tg3_set_link_ksettings()
12380 tp->link_config.advertising = (advertising | in tg3_set_link_ksettings()
12382 tp->link_config.speed = SPEED_UNKNOWN; in tg3_set_link_ksettings()
12383 tp->link_config.duplex = DUPLEX_UNKNOWN; in tg3_set_link_ksettings()
12385 tp->link_config.advertising = 0; in tg3_set_link_ksettings()
12386 tp->link_config.speed = speed; in tg3_set_link_ksettings()
12387 tp->link_config.duplex = cmd->base.duplex; in tg3_set_link_ksettings()
12390 tp->phy_flags |= TG3_PHYFLG_USER_CONFIGURED; in tg3_set_link_ksettings()
12406 strscpy(info->driver, DRV_MODULE_NAME, sizeof(info->driver)); in tg3_get_drvinfo()
12407 strscpy(info->fw_version, tp->fw_ver, sizeof(info->fw_version)); in tg3_get_drvinfo()
12408 strscpy(info->bus_info, pci_name(tp->pdev), sizeof(info->bus_info)); in tg3_get_drvinfo()
12415 if (tg3_flag(tp, WOL_CAP) && device_can_wakeup(&tp->pdev->dev)) in tg3_get_wol()
12416 wol->supported = WAKE_MAGIC; in tg3_get_wol()
12418 wol->supported = 0; in tg3_get_wol()
12419 wol->wolopts = 0; in tg3_get_wol()
12420 if (tg3_flag(tp, WOL_ENABLE) && device_can_wakeup(&tp->pdev->dev)) in tg3_get_wol()
12421 wol->wolopts = WAKE_MAGIC; in tg3_get_wol()
12422 memset(&wol->sopass, 0, sizeof(wol->sopass)); in tg3_get_wol()
12428 struct device *dp = &tp->pdev->dev; in tg3_set_wol()
12430 if (wol->wolopts & ~WAKE_MAGIC) in tg3_set_wol()
12431 return -EINVAL; in tg3_set_wol()
12432 if ((wol->wolopts & WAKE_MAGIC) && in tg3_set_wol()
12434 return -EINVAL; in tg3_set_wol()
12436 device_set_wakeup_enable(dp, wol->wolopts & WAKE_MAGIC); in tg3_set_wol()
12449 return tp->msg_enable; in tg3_get_msglevel()
12455 tp->msg_enable = value; in tg3_set_msglevel()
12464 return -EAGAIN; in tg3_nway_reset()
12466 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) in tg3_nway_reset()
12467 return -EINVAL; in tg3_nway_reset()
12472 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_nway_reset()
12473 return -EAGAIN; in tg3_nway_reset()
12474 r = phy_start_aneg(mdiobus_get_phy(tp->mdio_bus, tp->phy_addr)); in tg3_nway_reset()
12478 spin_lock_bh(&tp->lock); in tg3_nway_reset()
12479 r = -EINVAL; in tg3_nway_reset()
12483 (tp->phy_flags & TG3_PHYFLG_PARALLEL_DETECT))) { in tg3_nway_reset()
12488 spin_unlock_bh(&tp->lock); in tg3_nway_reset()
12501 ering->rx_max_pending = tp->rx_std_ring_mask; in tg3_get_ringparam()
12503 ering->rx_jumbo_max_pending = tp->rx_jmb_ring_mask; in tg3_get_ringparam()
12505 ering->rx_jumbo_max_pending = 0; in tg3_get_ringparam()
12507 ering->tx_max_pending = TG3_TX_RING_SIZE - 1; in tg3_get_ringparam()
12509 ering->rx_pending = tp->rx_pending; in tg3_get_ringparam()
12511 ering->rx_jumbo_pending = tp->rx_jumbo_pending; in tg3_get_ringparam()
12513 ering->rx_jumbo_pending = 0; in tg3_get_ringparam()
12515 ering->tx_pending = tp->napi[0].tx_pending; in tg3_get_ringparam()
12527 if ((ering->rx_pending > tp->rx_std_ring_mask) || in tg3_set_ringparam()
12528 (ering->rx_jumbo_pending > tp->rx_jmb_ring_mask) || in tg3_set_ringparam()
12529 (ering->tx_pending > TG3_TX_RING_SIZE - 1) || in tg3_set_ringparam()
12530 (ering->tx_pending <= MAX_SKB_FRAGS) || in tg3_set_ringparam()
12532 (ering->tx_pending <= (MAX_SKB_FRAGS * 3)))) in tg3_set_ringparam()
12533 return -EINVAL; in tg3_set_ringparam()
12543 tp->rx_pending = ering->rx_pending; in tg3_set_ringparam()
12546 tp->rx_pending > 63) in tg3_set_ringparam()
12547 tp->rx_pending = 63; in tg3_set_ringparam()
12550 tp->rx_jumbo_pending = ering->rx_jumbo_pending; in tg3_set_ringparam()
12552 for (i = 0; i < tp->irq_max; i++) in tg3_set_ringparam()
12553 tp->napi[i].tx_pending = ering->tx_pending; in tg3_set_ringparam()
12580 epause->autoneg = !!tg3_flag(tp, PAUSE_AUTONEG); in tg3_get_pauseparam()
12582 if (tp->link_config.flowctrl & FLOW_CTRL_RX) in tg3_get_pauseparam()
12583 epause->rx_pause = 1; in tg3_get_pauseparam()
12585 epause->rx_pause = 0; in tg3_get_pauseparam()
12587 if (tp->link_config.flowctrl & FLOW_CTRL_TX) in tg3_get_pauseparam()
12588 epause->tx_pause = 1; in tg3_get_pauseparam()
12590 epause->tx_pause = 0; in tg3_get_pauseparam()
12599 if (tp->link_config.autoneg == AUTONEG_ENABLE) in tg3_set_pauseparam()
12605 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_set_pauseparam()
12608 return -EINVAL; in tg3_set_pauseparam()
12610 tp->link_config.flowctrl = 0; in tg3_set_pauseparam()
12611 phy_set_asym_pause(phydev, epause->rx_pause, epause->tx_pause); in tg3_set_pauseparam()
12612 if (epause->rx_pause) { in tg3_set_pauseparam()
12613 tp->link_config.flowctrl |= FLOW_CTRL_RX; in tg3_set_pauseparam()
12615 if (epause->tx_pause) { in tg3_set_pauseparam()
12616 tp->link_config.flowctrl |= FLOW_CTRL_TX; in tg3_set_pauseparam()
12618 } else if (epause->tx_pause) { in tg3_set_pauseparam()
12619 tp->link_config.flowctrl |= FLOW_CTRL_TX; in tg3_set_pauseparam()
12622 if (epause->autoneg) in tg3_set_pauseparam()
12627 if (tp->phy_flags & TG3_PHYFLG_IS_CONNECTED) { in tg3_set_pauseparam()
12628 if (phydev->autoneg) { in tg3_set_pauseparam()
12639 if (!epause->autoneg) in tg3_set_pauseparam()
12652 if (epause->autoneg) in tg3_set_pauseparam()
12656 if (epause->rx_pause) in tg3_set_pauseparam()
12657 tp->link_config.flowctrl |= FLOW_CTRL_RX; in tg3_set_pauseparam()
12659 tp->link_config.flowctrl &= ~FLOW_CTRL_RX; in tg3_set_pauseparam()
12660 if (epause->tx_pause) in tg3_set_pauseparam()
12661 tp->link_config.flowctrl |= FLOW_CTRL_TX; in tg3_set_pauseparam()
12663 tp->link_config.flowctrl &= ~FLOW_CTRL_TX; in tg3_set_pauseparam()
12681 tp->phy_flags |= TG3_PHYFLG_USER_CONFIGURED; in tg3_set_pauseparam()
12694 return -EOPNOTSUPP; in tg3_get_sset_count()
12699 u32 *rules __always_unused) in tg3_get_rxnfc()
12704 return -EOPNOTSUPP; in tg3_get_rxnfc()
12706 switch (info->cmd) { in tg3_get_rxnfc()
12708 if (netif_running(tp->dev)) in tg3_get_rxnfc()
12709 info->data = tp->rxq_cnt; in tg3_get_rxnfc()
12711 info->data = num_online_cpus(); in tg3_get_rxnfc()
12712 if (info->data > TG3_RSS_MAX_NUM_QS) in tg3_get_rxnfc()
12713 info->data = TG3_RSS_MAX_NUM_QS; in tg3_get_rxnfc()
12719 return -EOPNOTSUPP; in tg3_get_rxnfc()
12739 rxfh->hfunc = ETH_RSS_HASH_TOP; in tg3_get_rxfh()
12740 if (!rxfh->indir) in tg3_get_rxfh()
12744 rxfh->indir[i] = tp->rss_ind_tbl[i]; in tg3_get_rxfh()
12758 if (rxfh->key || in tg3_set_rxfh()
12759 (rxfh->hfunc != ETH_RSS_HASH_NO_CHANGE && in tg3_set_rxfh()
12760 rxfh->hfunc != ETH_RSS_HASH_TOP)) in tg3_set_rxfh()
12761 return -EOPNOTSUPP; in tg3_set_rxfh()
12763 if (!rxfh->indir) in tg3_set_rxfh()
12767 tp->rss_ind_tbl[i] = rxfh->indir[i]; in tg3_set_rxfh()
12788 channel->max_rx = tp->rxq_max; in tg3_get_channels()
12789 channel->max_tx = tp->txq_max; in tg3_get_channels()
12792 channel->rx_count = tp->rxq_cnt; in tg3_get_channels()
12793 channel->tx_count = tp->txq_cnt; in tg3_get_channels()
12795 if (tp->rxq_req) in tg3_get_channels()
12796 channel->rx_count = tp->rxq_req; in tg3_get_channels()
12798 channel->rx_count = min(deflt_qs, tp->rxq_max); in tg3_get_channels()
12800 if (tp->txq_req) in tg3_get_channels()
12801 channel->tx_count = tp->txq_req; in tg3_get_channels()
12803 channel->tx_count = min(deflt_qs, tp->txq_max); in tg3_get_channels()
12813 return -EOPNOTSUPP; in tg3_set_channels()
12815 if (channel->rx_count > tp->rxq_max || in tg3_set_channels()
12816 channel->tx_count > tp->txq_max) in tg3_set_channels()
12817 return -EINVAL; in tg3_set_channels()
12819 tp->rxq_req = channel->rx_count; in tg3_set_channels()
12820 tp->txq_req = channel->tx_count; in tg3_set_channels()
12874 tw32(MAC_LED_CTRL, tp->led_ctrl); in tg3_set_phys_id()
12886 if (tp->hw_stats) in tg3_get_ethtool_stats()
12932 /* The data is in little-endian format in NVRAM. in tg3_vpd_readblock()
12933 * Use the big-endian read routines to preserve in tg3_vpd_readblock()
12941 buf = pci_vpd_alloc(tp->pdev, vpdlen); in tg3_vpd_readblock()
12974 return -EIO; in tg3_test_nvram()
13001 return -EIO; in tg3_test_nvram()
13008 return -EIO; in tg3_test_nvram()
13012 return -ENOMEM; in tg3_test_nvram()
13014 err = -EIO; in tg3_test_nvram()
13046 err = -EIO; in tg3_test_nvram()
13080 err = -EIO; in tg3_test_nvram()
13093 err = -EIO; in tg3_test_nvram()
13109 return -ENOMEM; in tg3_test_nvram()
13127 if (!netif_running(tp->dev)) in tg3_test_link()
13128 return -ENODEV; in tg3_test_link()
13130 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) in tg3_test_link()
13136 if (tp->link_up) in tg3_test_link()
13143 return -EIO; in tg3_test_link()
13324 /* Determine the read-only value. */ in tg3_test_registers()
13327 /* Write zero to the register, then make sure the read-only bits in tg3_test_registers()
13334 /* Test the read-only and read/write bits. */ in tg3_test_registers()
13339 * make sure the read-only bits are not changed and the in tg3_test_registers()
13346 /* Test the read-only bits. */ in tg3_test_registers()
13361 netdev_err(tp->dev, in tg3_test_registers()
13364 return -EIO; in tg3_test_registers()
13380 return -EIO; in tg3_do_mem_test()
13490 struct tg3_rx_prodring_set *tpr = &tp->napi[0].prodring; in tg3_run_loopback()
13492 tnapi = &tp->napi[0]; in tg3_run_loopback()
13493 rnapi = &tp->napi[0]; in tg3_run_loopback()
13494 if (tp->irq_cnt > 1) { in tg3_run_loopback()
13496 rnapi = &tp->napi[1]; in tg3_run_loopback()
13498 tnapi = &tp->napi[1]; in tg3_run_loopback()
13500 coal_now = tnapi->coal_now | rnapi->coal_now; in tg3_run_loopback()
13502 err = -EIO; in tg3_run_loopback()
13505 skb = netdev_alloc_skb(tp->dev, tx_len); in tg3_run_loopback()
13507 return -ENOMEM; in tg3_run_loopback()
13510 memcpy(tx_data, tp->dev->dev_addr, ETH_ALEN); in tg3_run_loopback()
13525 val = tx_len - ETH_ALEN * 2 - sizeof(tg3_tso_header); in tg3_run_loopback()
13529 iph->tot_len = htons((u16)(mss + hdr_len)); in tg3_run_loopback()
13540 th->check = 0; in tg3_run_loopback()
13571 map = dma_map_single(&tp->pdev->dev, skb->data, tx_len, DMA_TO_DEVICE); in tg3_run_loopback()
13572 if (dma_mapping_error(&tp->pdev->dev, map)) { in tg3_run_loopback()
13574 return -EIO; in tg3_run_loopback()
13577 val = tnapi->tx_prod; in tg3_run_loopback()
13578 tnapi->tx_buffers[val].skb = skb; in tg3_run_loopback()
13579 dma_unmap_addr_set(&tnapi->tx_buffers[val], mapping, map); in tg3_run_loopback()
13581 tw32_f(HOSTCC_MODE, tp->coalesce_mode | HOSTCC_MODE_ENABLE | in tg3_run_loopback()
13582 rnapi->coal_now); in tg3_run_loopback()
13586 rx_start_idx = rnapi->hw_status->idx[0].rx_producer; in tg3_run_loopback()
13591 tnapi->tx_buffers[val].skb = NULL; in tg3_run_loopback()
13593 return -EIO; in tg3_run_loopback()
13596 tnapi->tx_prod++; in tg3_run_loopback()
13601 tw32_tx_mbox(tnapi->prodmbox, tnapi->tx_prod); in tg3_run_loopback()
13602 tr32_mailbox(tnapi->prodmbox); in tg3_run_loopback()
13608 tw32_f(HOSTCC_MODE, tp->coalesce_mode | HOSTCC_MODE_ENABLE | in tg3_run_loopback()
13613 tx_idx = tnapi->hw_status->idx[0].tx_consumer; in tg3_run_loopback()
13614 rx_idx = rnapi->hw_status->idx[0].rx_producer; in tg3_run_loopback()
13615 if ((tx_idx == tnapi->tx_prod) && in tg3_run_loopback()
13620 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod - 1, -1); in tg3_run_loopback()
13623 if (tx_idx != tnapi->tx_prod) in tg3_run_loopback()
13631 desc = &rnapi->rx_rcb[rx_start_idx++]; in tg3_run_loopback()
13632 desc_idx = desc->opaque & RXD_OPAQUE_INDEX_MASK; in tg3_run_loopback()
13633 opaque_key = desc->opaque & RXD_OPAQUE_RING_MASK; in tg3_run_loopback()
13635 if ((desc->err_vlan & RXD_ERR_MASK) != 0 && in tg3_run_loopback()
13636 (desc->err_vlan != RXD_ERR_ODD_NIBBLE_RCVD_MII)) in tg3_run_loopback()
13639 rx_len = ((desc->idx_len & RXD_LEN_MASK) >> RXD_LEN_SHIFT) in tg3_run_loopback()
13640 - ETH_FCS_LEN; in tg3_run_loopback()
13646 if (pktsz <= TG3_RX_STD_DMA_SZ - ETH_FCS_LEN) { in tg3_run_loopback()
13653 } else if ((desc->type_flags & RXD_FLAG_TCPUDP_CSUM) && in tg3_run_loopback()
13654 (desc->ip_tcp_csum & RXD_TCPCSUM_MASK) in tg3_run_loopback()
13660 rx_data = tpr->rx_std_buffers[desc_idx].data; in tg3_run_loopback()
13661 map = dma_unmap_addr(&tpr->rx_std_buffers[desc_idx], in tg3_run_loopback()
13664 rx_data = tpr->rx_jmb_buffers[desc_idx].data; in tg3_run_loopback()
13665 map = dma_unmap_addr(&tpr->rx_jmb_buffers[desc_idx], in tg3_run_loopback()
13670 dma_sync_single_for_cpu(&tp->pdev->dev, map, rx_len, in tg3_run_loopback()
13697 int err = -EIO; in tg3_test_loopback()
13701 if (tp->dma_limit) in tg3_test_loopback()
13702 jmb_pkt_sz = tp->dma_limit - ETH_HLEN; in tg3_test_loopback()
13704 eee_cap = tp->phy_flags & TG3_PHYFLG_EEE_CAP; in tg3_test_loopback()
13705 tp->phy_flags &= ~TG3_PHYFLG_EEE_CAP; in tg3_test_loopback()
13707 if (!netif_running(tp->dev)) { in tg3_test_loopback()
13733 /* HW errata - mac loopback fails in some cases on 5780. in tg3_test_loopback()
13752 if (!(tp->phy_flags & TG3_PHYFLG_PHY_SERDES) && in tg3_test_loopback()
13796 /* Re-enable gphy autopowerdown. */ in tg3_test_loopback()
13797 if (tp->phy_flags & TG3_PHYFLG_ENABLE_APD) in tg3_test_loopback()
13802 data[TG3_EXT_LOOPB_TEST]) ? -EIO : 0; in tg3_test_loopback()
13805 tp->phy_flags |= eee_cap; in tg3_test_loopback()
13814 bool doextlpbk = etest->flags & ETH_TEST_FL_EXTERNAL_LB; in tg3_self_test()
13816 if (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) { in tg3_self_test()
13818 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13828 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13832 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13835 if (etest->flags & ETH_TEST_FL_OFFLINE) { in tg3_self_test()
13853 if (tp->phy_flags & TG3_PHYFLG_MII_SERDES) in tg3_self_test()
13857 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13862 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13867 etest->flags |= ETH_TEST_FL_EXTERNAL_LB_DONE; in tg3_self_test()
13870 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13875 etest->flags |= ETH_TEST_FL_FAILED; in tg3_self_test()
13894 if (tp->phy_flags & TG3_PHYFLG_IS_LOW_POWER) in tg3_self_test()
13905 return -EOPNOTSUPP; in tg3_hwtstamp_set()
13907 if (copy_from_user(&stmpconf, ifr->ifr_data, sizeof(stmpconf))) in tg3_hwtstamp_set()
13908 return -EFAULT; in tg3_hwtstamp_set()
13912 return -ERANGE; in tg3_hwtstamp_set()
13916 tp->rxptpctl = 0; in tg3_hwtstamp_set()
13919 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V1_EN | in tg3_hwtstamp_set()
13923 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V1_EN | in tg3_hwtstamp_set()
13927 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V1_EN | in tg3_hwtstamp_set()
13931 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_EN | in tg3_hwtstamp_set()
13935 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L2_EN | in tg3_hwtstamp_set()
13939 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L4_EN | in tg3_hwtstamp_set()
13943 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_EN | in tg3_hwtstamp_set()
13947 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L2_EN | in tg3_hwtstamp_set()
13951 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L4_EN | in tg3_hwtstamp_set()
13955 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_EN | in tg3_hwtstamp_set()
13959 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L2_EN | in tg3_hwtstamp_set()
13963 tp->rxptpctl = TG3_RX_PTP_CTL_RX_PTP_V2_L4_EN | in tg3_hwtstamp_set()
13967 return -ERANGE; in tg3_hwtstamp_set()
13970 if (netif_running(dev) && tp->rxptpctl) in tg3_hwtstamp_set()
13972 tp->rxptpctl | TG3_RX_PTP_CTL_HWTS_INTERLOCK); in tg3_hwtstamp_set()
13979 return copy_to_user(ifr->ifr_data, &stmpconf, sizeof(stmpconf)) ? in tg3_hwtstamp_set()
13980 -EFAULT : 0; in tg3_hwtstamp_set()
13989 return -EOPNOTSUPP; in tg3_hwtstamp_get()
13995 switch (tp->rxptpctl) { in tg3_hwtstamp_get()
14037 return -ERANGE; in tg3_hwtstamp_get()
14040 return copy_to_user(ifr->ifr_data, &stmpconf, sizeof(stmpconf)) ? in tg3_hwtstamp_get()
14041 -EFAULT : 0; in tg3_hwtstamp_get()
14052 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) in tg3_ioctl()
14053 return -EAGAIN; in tg3_ioctl()
14054 phydev = mdiobus_get_phy(tp->mdio_bus, tp->phy_addr); in tg3_ioctl()
14060 data->phy_id = tp->phy_addr; in tg3_ioctl()
14066 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) in tg3_ioctl()
14070 return -EAGAIN; in tg3_ioctl()
14072 spin_lock_bh(&tp->lock); in tg3_ioctl()
14073 err = __tg3_readphy(tp, data->phy_id & 0x1f, in tg3_ioctl()
14074 data->reg_num & 0x1f, &mii_regval); in tg3_ioctl()
14075 spin_unlock_bh(&tp->lock); in tg3_ioctl()
14077 data->val_out = mii_regval; in tg3_ioctl()
14083 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) in tg3_ioctl()
14087 return -EAGAIN; in tg3_ioctl()
14089 spin_lock_bh(&tp->lock); in tg3_ioctl()
14090 err = __tg3_writephy(tp, data->phy_id & 0x1f, in tg3_ioctl()
14091 data->reg_num & 0x1f, data->val_in); in tg3_ioctl()
14092 spin_unlock_bh(&tp->lock); in tg3_ioctl()
14106 return -EOPNOTSUPP; in tg3_ioctl()
14116 memcpy(ec, &tp->coal, sizeof(*ec)); in tg3_get_coalesce()
14136 if ((ec->rx_coalesce_usecs > MAX_RXCOL_TICKS) || in tg3_set_coalesce()
14137 (!ec->rx_coalesce_usecs) || in tg3_set_coalesce()
14138 (ec->tx_coalesce_usecs > MAX_TXCOL_TICKS) || in tg3_set_coalesce()
14139 (!ec->tx_coalesce_usecs) || in tg3_set_coalesce()
14140 (ec->rx_max_coalesced_frames > MAX_RXMAX_FRAMES) || in tg3_set_coalesce()
14141 (ec->tx_max_coalesced_frames > MAX_TXMAX_FRAMES) || in tg3_set_coalesce()
14142 (ec->rx_coalesce_usecs_irq > max_rxcoal_tick_int) || in tg3_set_coalesce()
14143 (ec->tx_coalesce_usecs_irq > max_txcoal_tick_int) || in tg3_set_coalesce()
14144 (ec->rx_max_coalesced_frames_irq > MAX_RXCOAL_MAXF_INT) || in tg3_set_coalesce()
14145 (ec->tx_max_coalesced_frames_irq > MAX_TXCOAL_MAXF_INT) || in tg3_set_coalesce()
14146 (ec->stats_block_coalesce_usecs > max_stat_coal_ticks) || in tg3_set_coalesce()
14147 (ec->stats_block_coalesce_usecs < min_stat_coal_ticks)) in tg3_set_coalesce()
14148 return -EINVAL; in tg3_set_coalesce()
14151 tp->coal.rx_coalesce_usecs = ec->rx_coalesce_usecs; in tg3_set_coalesce()
14152 tp->coal.tx_coalesce_usecs = ec->tx_coalesce_usecs; in tg3_set_coalesce()
14153 tp->coal.rx_max_coalesced_frames = ec->rx_max_coalesced_frames; in tg3_set_coalesce()
14154 tp->coal.tx_max_coalesced_frames = ec->tx_max_coalesced_frames; in tg3_set_coalesce()
14155 tp->coal.rx_coalesce_usecs_irq = ec->rx_coalesce_usecs_irq; in tg3_set_coalesce()
14156 tp->coal.tx_coalesce_usecs_irq = ec->tx_coalesce_usecs_irq; in tg3_set_coalesce()
14157 tp->coal.rx_max_coalesced_frames_irq = ec->rx_max_coalesced_frames_irq; in tg3_set_coalesce()
14158 tp->coal.tx_max_coalesced_frames_irq = ec->tx_max_coalesced_frames_irq; in tg3_set_coalesce()
14159 tp->coal.stats_block_coalesce_usecs = ec->stats_block_coalesce_usecs; in tg3_set_coalesce()
14163 __tg3_set_coalesce(tp, &tp->coal); in tg3_set_coalesce()
14173 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) { in tg3_set_eee()
14174 netdev_warn(tp->dev, "Board does not support EEE!\n"); in tg3_set_eee()
14175 return -EOPNOTSUPP; in tg3_set_eee()
14178 if (!linkmode_equal(edata->advertised, tp->eee.advertised)) { in tg3_set_eee()
14179 netdev_warn(tp->dev, in tg3_set_eee()
14181 return -EINVAL; in tg3_set_eee()
14184 if (edata->tx_lpi_timer > TG3_CPMU_DBTMR1_LNKIDLE_MAX) { in tg3_set_eee()
14185 netdev_warn(tp->dev, in tg3_set_eee()
14188 return -EINVAL; in tg3_set_eee()
14191 tp->eee.eee_enabled = edata->eee_enabled; in tg3_set_eee()
14192 tp->eee.tx_lpi_enabled = edata->tx_lpi_enabled; in tg3_set_eee()
14193 tp->eee.tx_lpi_timer = edata->tx_lpi_timer; in tg3_set_eee()
14195 tp->phy_flags |= TG3_PHYFLG_USER_CONFIGURED; in tg3_set_eee()
14198 if (netif_running(tp->dev)) { in tg3_set_eee()
14212 if (!(tp->phy_flags & TG3_PHYFLG_EEE_CAP)) { in tg3_get_eee()
14213 netdev_warn(tp->dev, in tg3_get_eee()
14215 return -EOPNOTSUPP; in tg3_get_eee()
14218 *edata = tp->eee; in tg3_get_eee()
14269 spin_lock_bh(&tp->lock); in tg3_get_stats64()
14270 if (!tp->hw_stats || !tg3_flag(tp, INIT_COMPLETE)) { in tg3_get_stats64()
14271 *stats = tp->net_stats_prev; in tg3_get_stats64()
14272 spin_unlock_bh(&tp->lock); in tg3_get_stats64()
14277 spin_unlock_bh(&tp->lock); in tg3_get_stats64()
14295 WRITE_ONCE(dev->mtu, new_mtu); in tg3_set_mtu()
14381 tp->nvram_size = EEPROM_CHIP_SIZE; in tg3_get_eeprom_size()
14398 while (cursize < tp->nvram_size) { in tg3_get_eeprom_size()
14408 tp->nvram_size = cursize; in tg3_get_eeprom_size()
14427 * 16-bit value at offset 0xf2. The tg3_nvram_read() in tg3_get_nvram_size()
14431 * want will always reside in the lower 16-bits. in tg3_get_nvram_size()
14434 * opposite the endianness of the CPU. The 16-bit in tg3_get_nvram_size()
14437 tp->nvram_size = swab16((u16)(val & 0x0000ffff)) * 1024; in tg3_get_nvram_size()
14441 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_nvram_size()
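The comment and swab16() call above recover a size-in-kilobytes word that is stored with byte order opposite to the CPU's, then scale it to bytes. A minimal illustrative sketch (the helper names are not the driver's):

	#include <stdint.h>

	static uint16_t swab16_sketch(uint16_t v)
	{
		return (uint16_t)((v << 8) | (v >> 8));	/* swap the two bytes */
	}

	static uint32_t nvram_size_bytes(uint32_t raw_word)
	{
		/* the size lives in the low 16 bits of the word that was read */
		return (uint32_t)swab16_sketch((uint16_t)(raw_word & 0xffff)) * 1024;
	}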
14460 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_nvram_info()
14461 tp->nvram_pagesize = ATMEL_AT45DB0X1B_PAGE_SIZE; in tg3_get_nvram_info()
14465 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_nvram_info()
14466 tp->nvram_pagesize = ATMEL_AT25F512_PAGE_SIZE; in tg3_get_nvram_info()
14469 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_nvram_info()
14470 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_nvram_info()
14474 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_nvram_info()
14475 tp->nvram_pagesize = ST_M45PEX0_PAGE_SIZE; in tg3_get_nvram_info()
14479 tp->nvram_jedecnum = JEDEC_SAIFUN; in tg3_get_nvram_info()
14480 tp->nvram_pagesize = SAIFUN_SA25F0XX_PAGE_SIZE; in tg3_get_nvram_info()
14484 tp->nvram_jedecnum = JEDEC_SST; in tg3_get_nvram_info()
14485 tp->nvram_pagesize = SST_25VF0X0_PAGE_SIZE; in tg3_get_nvram_info()
14489 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_nvram_info()
14490 tp->nvram_pagesize = ATMEL_AT45DB0X1B_PAGE_SIZE; in tg3_get_nvram_info()
14499 tp->nvram_pagesize = 256; in tg3_nvram_get_pagesize()
14502 tp->nvram_pagesize = 512; in tg3_nvram_get_pagesize()
14505 tp->nvram_pagesize = 1024; in tg3_nvram_get_pagesize()
14508 tp->nvram_pagesize = 2048; in tg3_nvram_get_pagesize()
14511 tp->nvram_pagesize = 4096; in tg3_nvram_get_pagesize()
14514 tp->nvram_pagesize = 264; in tg3_nvram_get_pagesize()
14517 tp->nvram_pagesize = 528; in tg3_nvram_get_pagesize()
14535 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5752_nvram_info()
14539 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5752_nvram_info()
14546 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5752_nvram_info()
14556 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_5752_nvram_info()
14581 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5755_nvram_info()
14584 tp->nvram_pagesize = 264; in tg3_get_5755_nvram_info()
14587 tp->nvram_size = (protect ? 0x3e200 : in tg3_get_5755_nvram_info()
14590 tp->nvram_size = (protect ? 0x1f200 : in tg3_get_5755_nvram_info()
14593 tp->nvram_size = (protect ? 0x1f200 : in tg3_get_5755_nvram_info()
14599 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5755_nvram_info()
14602 tp->nvram_pagesize = 256; in tg3_get_5755_nvram_info()
14604 tp->nvram_size = (protect ? in tg3_get_5755_nvram_info()
14608 tp->nvram_size = (protect ? in tg3_get_5755_nvram_info()
14612 tp->nvram_size = (protect ? in tg3_get_5755_nvram_info()
14630 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5787_nvram_info()
14632 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_5787_nvram_info()
14641 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5787_nvram_info()
14644 tp->nvram_pagesize = 264; in tg3_get_5787_nvram_info()
14649 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5787_nvram_info()
14652 tp->nvram_pagesize = 256; in tg3_get_5787_nvram_info()
14679 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5761_nvram_info()
14683 tp->nvram_pagesize = 256; in tg3_get_5761_nvram_info()
14693 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5761_nvram_info()
14696 tp->nvram_pagesize = 256; in tg3_get_5761_nvram_info()
14701 tp->nvram_size = tr32(NVRAM_ADDR_LOCKOUT); in tg3_get_5761_nvram_info()
14708 tp->nvram_size = TG3_NVRAM_SIZE_2MB; in tg3_get_5761_nvram_info()
14714 tp->nvram_size = TG3_NVRAM_SIZE_1MB; in tg3_get_5761_nvram_info()
14720 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_5761_nvram_info()
14726 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_5761_nvram_info()
14734 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5906_nvram_info()
14736 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_5906_nvram_info()
14748 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_57780_nvram_info()
14750 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_57780_nvram_info()
14762 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_57780_nvram_info()
14770 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_57780_nvram_info()
14774 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_57780_nvram_info()
14778 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_57780_nvram_info()
14785 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_57780_nvram_info()
14791 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_57780_nvram_info()
14794 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_57780_nvram_info()
14797 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_57780_nvram_info()
14807 if (tp->nvram_pagesize != 264 && tp->nvram_pagesize != 528) in tg3_get_57780_nvram_info()
14821 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5717_nvram_info()
14823 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_5717_nvram_info()
14835 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5717_nvram_info()
14845 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_5717_nvram_info()
14848 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_5717_nvram_info()
14862 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5717_nvram_info()
14873 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_5717_nvram_info()
14876 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_5717_nvram_info()
14886 if (tp->nvram_pagesize != 264 && tp->nvram_pagesize != 528) in tg3_get_5717_nvram_info()
14909 tp->nvram_pagesize = 4096; in tg3_get_5720_nvram_info()
14910 tp->nvram_jedecnum = JEDEC_MACRONIX; in tg3_get_5720_nvram_info()
14915 tp->nvram_size = in tg3_get_5720_nvram_info()
14939 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5720_nvram_info()
14945 tp->nvram_pagesize = ATMEL_AT24C512_CHIP_SIZE; in tg3_get_5720_nvram_info()
14947 tp->nvram_pagesize = ATMEL_AT24C02_CHIP_SIZE; in tg3_get_5720_nvram_info()
14961 tp->nvram_jedecnum = JEDEC_ATMEL; in tg3_get_5720_nvram_info()
14969 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_5720_nvram_info()
14974 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_5720_nvram_info()
14978 tp->nvram_size = TG3_NVRAM_SIZE_1MB; in tg3_get_5720_nvram_info()
14982 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_5720_nvram_info()
15004 tp->nvram_jedecnum = JEDEC_ST; in tg3_get_5720_nvram_info()
15013 tp->nvram_size = TG3_NVRAM_SIZE_256KB; in tg3_get_5720_nvram_info()
15019 tp->nvram_size = TG3_NVRAM_SIZE_512KB; in tg3_get_5720_nvram_info()
15025 tp->nvram_size = TG3_NVRAM_SIZE_1MB; in tg3_get_5720_nvram_info()
15029 tp->nvram_size = TG3_NVRAM_SIZE_128KB; in tg3_get_5720_nvram_info()
15039 if (tp->nvram_pagesize != 264 && tp->nvram_pagesize != 528) in tg3_get_5720_nvram_info()
15082 netdev_warn(tp->dev, in tg3_nvram_init()
15089 tp->nvram_size = 0; in tg3_nvram_init()
15115 if (tp->nvram_size == 0) in tg3_nvram_init()
15204 tp->pdev->subsystem_vendor) && in tg3_lookup_by_subsys()
15206 tp->pdev->subsystem_device)) in tg3_lookup_by_subsys()
15216 tp->phy_id = TG3_PHY_ID_INVALID; in tg3_get_eeprom_hw_cfg()
15217 tp->led_ctrl = LED_CTRL_MODE_PHY_1; in tg3_get_eeprom_hw_cfg()
15234 device_set_wakeup_enable(&tp->pdev->dev, true); in tg3_get_eeprom_hw_cfg()
15247 tp->nic_sram_data_cfg = nic_cfg; in tg3_get_eeprom_hw_cfg()
15280 tp->phy_id = eeprom_phy_id; in tg3_get_eeprom_hw_cfg()
15283 tp->phy_flags |= TG3_PHYFLG_PHY_SERDES; in tg3_get_eeprom_hw_cfg()
15285 tp->phy_flags |= TG3_PHYFLG_MII_SERDES; in tg3_get_eeprom_hw_cfg()
15297 tp->led_ctrl = LED_CTRL_MODE_PHY_1; in tg3_get_eeprom_hw_cfg()
15301 tp->led_ctrl = LED_CTRL_MODE_PHY_2; in tg3_get_eeprom_hw_cfg()
15305 tp->led_ctrl = LED_CTRL_MODE_MAC; in tg3_get_eeprom_hw_cfg()
15312 tp->led_ctrl = LED_CTRL_MODE_PHY_1; in tg3_get_eeprom_hw_cfg()
15317 tp->led_ctrl = LED_CTRL_MODE_SHARED; in tg3_get_eeprom_hw_cfg()
15320 tp->led_ctrl |= (LED_CTRL_MODE_PHY_1 | in tg3_get_eeprom_hw_cfg()
15325 tp->led_ctrl |= LED_CTRL_BLINK_RATE_OVERRIDE | in tg3_get_eeprom_hw_cfg()
15331 tp->led_ctrl = LED_CTRL_MODE_SHASTA_MAC; in tg3_get_eeprom_hw_cfg()
15335 tp->led_ctrl = LED_CTRL_MODE_COMBO; in tg3_get_eeprom_hw_cfg()
15337 tp->led_ctrl |= (LED_CTRL_MODE_PHY_1 | in tg3_get_eeprom_hw_cfg()
15345 tp->pdev->subsystem_vendor == PCI_VENDOR_ID_DELL) in tg3_get_eeprom_hw_cfg()
15346 tp->led_ctrl = LED_CTRL_MODE_PHY_2; in tg3_get_eeprom_hw_cfg()
15349 tp->led_ctrl = LED_CTRL_MODE_PHY_1; in tg3_get_eeprom_hw_cfg()
15353 if ((tp->pdev->subsystem_vendor == in tg3_get_eeprom_hw_cfg()
15355 (tp->pdev->subsystem_device == 0x205a || in tg3_get_eeprom_hw_cfg()
15356 tp->pdev->subsystem_device == 0x2063)) in tg3_get_eeprom_hw_cfg()
15373 if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES && in tg3_get_eeprom_hw_cfg()
15380 device_set_wakeup_enable(&tp->pdev->dev, true); in tg3_get_eeprom_hw_cfg()
15384 tp->phy_flags |= TG3_PHYFLG_CAPACITIVE_COUPLING; in tg3_get_eeprom_hw_cfg()
15386 /* serdes signal pre-emphasis in register 0x590 set by */ in tg3_get_eeprom_hw_cfg()
15389 tp->phy_flags |= TG3_PHYFLG_SERDES_PREEMPHASIS; in tg3_get_eeprom_hw_cfg()
15395 tp->phy_flags |= TG3_PHYFLG_ENABLE_APD; in tg3_get_eeprom_hw_cfg()
15406 tp->phy_flags |= TG3_PHYFLG_KEEP_LINK_ON_PWRDN; in tg3_get_eeprom_hw_cfg()
15408 tp->phy_flags |= TG3_PHYFLG_1G_ON_VAUX_OK; in tg3_get_eeprom_hw_cfg()
15419 tp->phy_flags |= TG3_PHYFLG_DISABLE_1G_HD_ADV; in tg3_get_eeprom_hw_cfg()
15423 device_set_wakeup_enable(&tp->pdev->dev, in tg3_get_eeprom_hw_cfg()
15426 device_set_wakeup_capable(&tp->pdev->dev, false); in tg3_get_eeprom_hw_cfg()
15459 return -EBUSY; in tg3_ape_otp_read()
15478 return (val & OTP_STATUS_CMD_DONE) ? 0 : -EBUSY; in tg3_issue_otp_command()
15482 * configuration is a 32-bit value that straddles the alignment boundary.
15483 * We do two 32-bit reads and then shift and merge the results.
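The comment above describes a 32-bit value that straddles an alignment boundary and is rebuilt from two aligned 32-bit reads. A generic sketch of that shift-and-merge (which half of each word carries the value is chosen here purely for illustration; the driver's exact split may differ):

	#include <stdint.h>

	static uint32_t merge_straddling(uint32_t first_word, uint32_t second_word)
	{
		/* upper half of the first word becomes the low 16 bits,
		 * lower half of the second word becomes the high 16 bits */
		return (first_word >> 16) | (second_word << 16);
	}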
15515 if (!(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) { in tg3_phy_init_link_config()
15516 if (!(tp->phy_flags & TG3_PHYFLG_DISABLE_1G_HD_ADV)) in tg3_phy_init_link_config()
15521 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) in tg3_phy_init_link_config()
15530 tp->link_config.advertising = adv; in tg3_phy_init_link_config()
15531 tp->link_config.speed = SPEED_UNKNOWN; in tg3_phy_init_link_config()
15532 tp->link_config.duplex = DUPLEX_UNKNOWN; in tg3_phy_init_link_config()
15533 tp->link_config.autoneg = AUTONEG_ENABLE; in tg3_phy_init_link_config()
15534 tp->link_config.active_speed = SPEED_UNKNOWN; in tg3_phy_init_link_config()
15535 tp->link_config.active_duplex = DUPLEX_UNKNOWN; in tg3_phy_init_link_config()
15537 tp->old_link = -1; in tg3_phy_init_link_config()
15548 tp->link_config.flowctrl = FLOW_CTRL_TX | FLOW_CTRL_RX; in tg3_phy_probe()
15551 switch (tp->pci_fn) { in tg3_phy_probe()
15553 tp->phy_ape_lock = TG3_APE_LOCK_PHY0; in tg3_phy_probe()
15556 tp->phy_ape_lock = TG3_APE_LOCK_PHY1; in tg3_phy_probe()
15559 tp->phy_ape_lock = TG3_APE_LOCK_PHY2; in tg3_phy_probe()
15562 tp->phy_ape_lock = TG3_APE_LOCK_PHY3; in tg3_phy_probe()
15568 !(tp->phy_flags & TG3_PHYFLG_ANY_SERDES) && in tg3_phy_probe()
15569 !(tp->phy_flags & TG3_PHYFLG_10_100_ONLY)) in tg3_phy_probe()
15570 tp->phy_flags &= ~(TG3_PHYFLG_1G_ON_VAUX_OK | in tg3_phy_probe()
15585 * to either the hard-coded table based PHY_ID and failing in tg3_phy_probe()
15599 tp->phy_id = hw_phy_id; in tg3_phy_probe()
15601 tp->phy_flags |= TG3_PHYFLG_PHY_SERDES; in tg3_phy_probe()
15603 tp->phy_flags &= ~TG3_PHYFLG_PHY_SERDES; in tg3_phy_probe()
15605 if (tp->phy_id != TG3_PHY_ID_INVALID) { in tg3_phy_probe()
15617 tp->phy_id = p->phy_id; in tg3_phy_probe()
15626 return -ENODEV; in tg3_phy_probe()
15629 if (!tp->phy_id || in tg3_phy_probe()
15630 tp->phy_id == TG3_PHY_ID_BCM8002) in tg3_phy_probe()
15631 tp->phy_flags |= TG3_PHYFLG_PHY_SERDES; in tg3_phy_probe()
15635 if (!(tp->phy_flags & TG3_PHYFLG_ANY_SERDES) && in tg3_phy_probe()
15644 tp->phy_flags |= TG3_PHYFLG_EEE_CAP; in tg3_phy_probe()
15646 linkmode_zero(tp->eee.supported); in tg3_phy_probe()
15648 tp->eee.supported); in tg3_phy_probe()
15650 tp->eee.supported); in tg3_phy_probe()
15651 linkmode_copy(tp->eee.advertised, tp->eee.supported); in tg3_phy_probe()
15653 tp->eee.eee_enabled = 1; in tg3_phy_probe()
15654 tp->eee.tx_lpi_enabled = 1; in tg3_phy_probe()
15655 tp->eee.tx_lpi_timer = TG3_CPMU_DBTMR1_LNKIDLE_2047US; in tg3_phy_probe()
15660 if (!(tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN) && in tg3_phy_probe()
15661 !(tp->phy_flags & TG3_PHYFLG_ANY_SERDES) && in tg3_phy_probe()
15678 tg3_phy_autoneg_cfg(tp, tp->link_config.advertising, in tg3_phy_probe()
15679 tp->link_config.flowctrl); in tg3_phy_probe()
15687 if ((tp->phy_id & TG3_PHY_ID_MASK) == TG3_PHY_ID_BCM5401) { in tg3_phy_probe()
15721 memset(tp->fw_ver, 0, sizeof(tp->fw_ver)); in tg3_read_vpd()
15722 snprintf(tp->fw_ver, sizeof(tp->fw_ver), "%.*s bc ", len, vpd_data + i); in tg3_read_vpd()
15733 memcpy(tp->board_part_number, &vpd_data[i], len); in tg3_read_vpd()
15737 if (tp->board_part_number[0]) in tg3_read_vpd()
15742 if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717 || in tg3_read_vpd()
15743 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717_C) in tg3_read_vpd()
15744 strcpy(tp->board_part_number, "BCM5717"); in tg3_read_vpd()
15745 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_5718) in tg3_read_vpd()
15746 strcpy(tp->board_part_number, "BCM5718"); in tg3_read_vpd()
15750 if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57780) in tg3_read_vpd()
15751 strcpy(tp->board_part_number, "BCM57780"); in tg3_read_vpd()
15752 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57760) in tg3_read_vpd()
15753 strcpy(tp->board_part_number, "BCM57760"); in tg3_read_vpd()
15754 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57790) in tg3_read_vpd()
15755 strcpy(tp->board_part_number, "BCM57790"); in tg3_read_vpd()
15756 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57788) in tg3_read_vpd()
15757 strcpy(tp->board_part_number, "BCM57788"); in tg3_read_vpd()
15761 if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57761) in tg3_read_vpd()
15762 strcpy(tp->board_part_number, "BCM57761"); in tg3_read_vpd()
15763 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57765) in tg3_read_vpd()
15764 strcpy(tp->board_part_number, "BCM57765"); in tg3_read_vpd()
15765 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57781) in tg3_read_vpd()
15766 strcpy(tp->board_part_number, "BCM57781"); in tg3_read_vpd()
15767 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57785) in tg3_read_vpd()
15768 strcpy(tp->board_part_number, "BCM57785"); in tg3_read_vpd()
15769 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57791) in tg3_read_vpd()
15770 strcpy(tp->board_part_number, "BCM57791"); in tg3_read_vpd()
15771 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57795) in tg3_read_vpd()
15772 strcpy(tp->board_part_number, "BCM57795"); in tg3_read_vpd()
15776 if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57762) in tg3_read_vpd()
15777 strcpy(tp->board_part_number, "BCM57762"); in tg3_read_vpd()
15778 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57766) in tg3_read_vpd()
15779 strcpy(tp->board_part_number, "BCM57766"); in tg3_read_vpd()
15780 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57782) in tg3_read_vpd()
15781 strcpy(tp->board_part_number, "BCM57782"); in tg3_read_vpd()
15782 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57786) in tg3_read_vpd()
15783 strcpy(tp->board_part_number, "BCM57786"); in tg3_read_vpd()
15787 strcpy(tp->board_part_number, "BCM95906"); in tg3_read_vpd()
15790 strcpy(tp->board_part_number, "none"); in tg3_read_vpd()
15830 dst_off = strlen(tp->fw_ver); in tg3_read_bc_ver()
15833 if (TG3_VER_SIZE - dst_off < 16 || in tg3_read_bc_ver()
15837 offset = offset + ver_offset - start; in tg3_read_bc_ver()
15843 memcpy(tp->fw_ver + dst_off + i, &v, sizeof(v)); in tg3_read_bc_ver()
15854 snprintf(&tp->fw_ver[dst_off], TG3_VER_SIZE - dst_off, in tg3_read_bc_ver()
15872 snprintf(&tp->fw_ver[0], 32, "sb v%d.%02d", major, minor); in tg3_read_hwsb_ver()
15879 strncat(tp->fw_ver, "sb", TG3_VER_SIZE - strlen(tp->fw_ver) - 1); in tg3_read_sb_ver()
15919 offset = strlen(tp->fw_ver); in tg3_read_sb_ver()
15920 snprintf(&tp->fw_ver[offset], TG3_VER_SIZE - offset, in tg3_read_sb_ver()
15924 offset = strlen(tp->fw_ver); in tg3_read_sb_ver()
15925 if (offset < TG3_VER_SIZE - 1) in tg3_read_sb_ver()
15926 tp->fw_ver[offset] = 'a' + build - 1; in tg3_read_sb_ver()
15950 else if (tg3_nvram_read(tp, offset - 4, &start)) in tg3_read_mgmtfw_ver()
15958 offset += val - start; in tg3_read_mgmtfw_ver()
15960 vlen = strlen(tp->fw_ver); in tg3_read_mgmtfw_ver()
15962 tp->fw_ver[vlen++] = ','; in tg3_read_mgmtfw_ver()
15963 tp->fw_ver[vlen++] = ' '; in tg3_read_mgmtfw_ver()
15972 if (vlen > TG3_VER_SIZE - sizeof(v)) { in tg3_read_mgmtfw_ver()
15973 memcpy(&tp->fw_ver[vlen], &v, TG3_VER_SIZE - vlen); in tg3_read_mgmtfw_ver()
15977 memcpy(&tp->fw_ver[vlen], &v, sizeof(v)); in tg3_read_mgmtfw_ver()
16008 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_5725) in tg3_read_dash_ver()
16013 vlen = strlen(tp->fw_ver); in tg3_read_dash_ver()
16015 snprintf(&tp->fw_ver[vlen], TG3_VER_SIZE - vlen, " %s v%d.%d.%d.%d", in tg3_read_dash_ver()
16043 vlen = strlen(tp->fw_ver); in tg3_read_otp_ver()
16044 snprintf(&tp->fw_ver[vlen], TG3_VER_SIZE - vlen, " .%02d", ver); in tg3_read_otp_ver()
16053 if (tp->fw_ver[0] != 0) in tg3_read_fw_ver()
16057 strcat(tp->fw_ver, "sb"); in tg3_read_fw_ver()
16082 tp->fw_ver[TG3_VER_SIZE - 1] = 0; in tg3_read_fw_ver()
16105 unsigned int func, devnr = tp->pdev->devfn & ~7; in tg3_find_peer()
16108 peer = pci_get_slot(tp->pdev->bus, devnr | func); in tg3_find_peer()
16109 if (peer && peer != tp->pdev) in tg3_find_peer()
16113 /* 5704 can be configured in single-port mode, set peer to in tg3_find_peer()
16114 * tp->pdev in that case. in tg3_find_peer()
16117 peer = tp->pdev; in tg3_find_peer()
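The tg3_find_peer() lines above only hint at the lookup; a self-contained sketch of the same idea (illustrative name, simplified reference handling) scans the other functions in the same PCI slot and falls back to the device itself for single-port configurations:

static struct pci_dev *example_find_peer(struct pci_dev *pdev)
{
        unsigned int devnr = pdev->devfn & ~7;  /* function 0 of this slot */
        struct pci_dev *peer;
        unsigned int func;

        for (func = 0; func < 8; func++) {
                peer = pci_get_slot(pdev->bus, devnr | func);
                if (peer && peer != pdev)
                        return peer;            /* caller holds a reference */
                pci_dev_put(peer);              /* NULL-safe */
        }

        return pdev;    /* single-port mode: the peer is the device itself */
}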
16132 tp->pci_chip_rev_id = misc_ctrl_reg >> MISC_HOST_CTRL_CHIPREV_SHIFT; in tg3_detect_asic_rev()
16141 if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717 || in tg3_detect_asic_rev()
16142 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717_C || in tg3_detect_asic_rev()
16143 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5718 || in tg3_detect_asic_rev()
16144 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5719 || in tg3_detect_asic_rev()
16145 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5720 || in tg3_detect_asic_rev()
16146 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57767 || in tg3_detect_asic_rev()
16147 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57764 || in tg3_detect_asic_rev()
16148 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5762 || in tg3_detect_asic_rev()
16149 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5725 || in tg3_detect_asic_rev()
16150 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5727 || in tg3_detect_asic_rev()
16151 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57787) in tg3_detect_asic_rev()
16153 else if (tp->pdev->device == TG3PCI_DEVICE_TIGON3_57781 || in tg3_detect_asic_rev()
16154 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57785 || in tg3_detect_asic_rev()
16155 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57761 || in tg3_detect_asic_rev()
16156 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57765 || in tg3_detect_asic_rev()
16157 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57791 || in tg3_detect_asic_rev()
16158 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57795 || in tg3_detect_asic_rev()
16159 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57762 || in tg3_detect_asic_rev()
16160 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57766 || in tg3_detect_asic_rev()
16161 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57782 || in tg3_detect_asic_rev()
16162 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57786) in tg3_detect_asic_rev()
16167 pci_read_config_dword(tp->pdev, reg, &tp->pci_chip_rev_id); in tg3_detect_asic_rev()
16174 tp->pci_chip_rev_id = CHIPREV_ID_5752_A0; in tg3_detect_asic_rev()
16177 tp->pci_chip_rev_id = CHIPREV_ID_5720_A0; in tg3_detect_asic_rev()
16225 (tp->phy_flags & TG3_PHYFLG_IS_FET)) in tg3_10_100_only_device()
16228 if (ent->driver_data & TG3_DRV_DATA_FLAG_10_100_ONLY) { in tg3_10_100_only_device()
16230 if (ent->driver_data & TG3_DRV_DATA_FLAG_5705_10_100) in tg3_10_100_only_device()
16252 * workaround but turns MWI off all the time so never uses in tg3_get_invariants()
16255 pci_read_config_word(tp->pdev, PCI_COMMAND, &pci_cmd); in tg3_get_invariants()
16257 pci_write_config_word(tp->pdev, PCI_COMMAND, pci_cmd); in tg3_get_invariants()
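The two config-space accesses above bracket a line that is not shown; assuming it clears the Memory Write Invalidate bit, as the surrounding comment suggests, the full read-modify-write looks roughly like the sketch below (the kernel's pci_clear_mwi() helper does a similar job):

static void example_disable_mwi(struct pci_dev *pdev)
{
        u16 pci_cmd;

        /* Read-modify-write of the PCI command register to turn MWI off. */
        pci_read_config_word(pdev, PCI_COMMAND, &pci_cmd);
        pci_cmd &= ~PCI_COMMAND_INVALIDATE;
        pci_write_config_word(pdev, PCI_COMMAND, pci_cmd);
}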
16259 /* Important! -- Make sure register accesses are byteswapped in tg3_get_invariants()
16264 pci_read_config_dword(tp->pdev, TG3PCI_MISC_HOST_CTRL, in tg3_get_invariants()
16266 tp->misc_host_ctrl |= (misc_ctrl_reg & in tg3_get_invariants()
16268 pci_write_config_dword(tp->pdev, TG3PCI_MISC_HOST_CTRL, in tg3_get_invariants()
16269 tp->misc_host_ctrl); in tg3_get_invariants()
16279 * will drive special cycles with non-zero data during the in tg3_get_invariants()
16282 * non-zero address during special cycles. However, only in tg3_get_invariants()
16283 * these ICH bridges are known to drive non-zero addresses in tg3_get_invariants()
16310 while (pci_id->vendor != 0) { in tg3_get_invariants()
16311 bridge = pci_get_device(pci_id->vendor, pci_id->device, in tg3_get_invariants()
16317 if (pci_id->rev != PCI_ANY_ID) { in tg3_get_invariants()
16318 if (bridge->revision > pci_id->rev) in tg3_get_invariants()
16321 if (bridge->subordinate && in tg3_get_invariants()
16322 (bridge->subordinate->number == in tg3_get_invariants()
16323 tp->pdev->bus->number)) { in tg3_get_invariants()
16343 while (pci_id->vendor != 0) { in tg3_get_invariants()
16344 bridge = pci_get_device(pci_id->vendor, in tg3_get_invariants()
16345 pci_id->device, in tg3_get_invariants()
16351 if (bridge->subordinate && in tg3_get_invariants()
16352 (bridge->subordinate->number <= in tg3_get_invariants()
16353 tp->pdev->bus->number) && in tg3_get_invariants()
16354 (bridge->subordinate->busn_res.end >= in tg3_get_invariants()
16355 tp->pdev->bus->number)) { in tg3_get_invariants()
16364 * DMA addresses > 40-bit. This bridge may have other additional in tg3_get_invariants()
16365 * 57xx devices behind it in some 4-port NIC designs for example. in tg3_get_invariants()
16366 * Any tg3 device found behind the bridge will also need the 40-bit in tg3_get_invariants()
16371 tp->msi_cap = tp->pdev->msi_cap; in tg3_get_invariants()
16379 if (bridge && bridge->subordinate && in tg3_get_invariants()
16380 (bridge->subordinate->number <= in tg3_get_invariants()
16381 tp->pdev->bus->number) && in tg3_get_invariants()
16382 (bridge->subordinate->busn_res.end >= in tg3_get_invariants()
16383 tp->pdev->bus->number)) { in tg3_get_invariants()
16393 tp->pdev_peer = tg3_find_peer(tp); in tg3_get_invariants()
16415 tp->fw_needed = FIRMWARE_TG3TSO5; in tg3_get_invariants()
16417 tp->fw_needed = FIRMWARE_TG3TSO; in tg3_get_invariants()
16433 tp->fw_needed = NULL; in tg3_get_invariants()
16437 tp->fw_needed = FIRMWARE_TG3; in tg3_get_invariants()
16440 tp->fw_needed = FIRMWARE_TG357766; in tg3_get_invariants()
16442 tp->irq_max = 1; in tg3_get_invariants()
16450 tp->pdev_peer == tp->pdev)) in tg3_get_invariants()
16460 tp->irq_max = TG3_IRQ_MAX_VECS; in tg3_get_invariants()
16464 tp->txq_max = 1; in tg3_get_invariants()
16465 tp->rxq_max = 1; in tg3_get_invariants()
16466 if (tp->irq_max > 1) { in tg3_get_invariants()
16467 tp->rxq_max = TG3_RSS_MAX_NUM_QS; in tg3_get_invariants()
16472 tp->txq_max = tp->irq_max - 1; in tg3_get_invariants()
16480 tp->dma_limit = TG3_TX_BD_DMA_MAX_4K; in tg3_get_invariants()
16497 pci_read_config_dword(tp->pdev, TG3PCI_PCISTATE, in tg3_get_invariants()
16500 if (pci_is_pcie(tp->pdev)) { in tg3_get_invariants()
16505 pcie_capability_read_word(tp->pdev, PCI_EXP_LNKCTL, &lnkctl); in tg3_get_invariants()
16527 tp->pcix_cap = pci_find_capability(tp->pdev, PCI_CAP_ID_PCIX); in tg3_get_invariants()
16528 if (!tp->pcix_cap) { in tg3_get_invariants()
16529 dev_err(&tp->pdev->dev, in tg3_get_invariants()
16530 "Cannot find PCI-X capability, aborting\n"); in tg3_get_invariants()
16531 return -EIO; in tg3_get_invariants()
16548 pci_read_config_byte(tp->pdev, PCI_CACHE_LINE_SIZE, in tg3_get_invariants()
16549 &tp->pci_cacheline_sz); in tg3_get_invariants()
16550 pci_read_config_byte(tp->pdev, PCI_LATENCY_TIMER, in tg3_get_invariants()
16551 &tp->pci_lat_timer); in tg3_get_invariants()
16553 tp->pci_lat_timer < 64) { in tg3_get_invariants()
16554 tp->pci_lat_timer = 64; in tg3_get_invariants()
16555 pci_write_config_byte(tp->pdev, PCI_LATENCY_TIMER, in tg3_get_invariants()
16556 tp->pci_lat_timer); in tg3_get_invariants()
16559 /* Important! -- It is critical that the PCI-X hw workaround in tg3_get_invariants()
16568 /* If we are in PCI-X mode, enable register write workaround. in tg3_get_invariants()
16582 pci_read_config_dword(tp->pdev, in tg3_get_invariants()
16583 tp->pdev->pm_cap + PCI_PM_CTRL, in tg3_get_invariants()
16587 pci_write_config_dword(tp->pdev, in tg3_get_invariants()
16588 tp->pdev->pm_cap + PCI_PM_CTRL, in tg3_get_invariants()
16592 pci_read_config_word(tp->pdev, PCI_COMMAND, &pci_cmd); in tg3_get_invariants()
16594 pci_write_config_word(tp->pdev, PCI_COMMAND, pci_cmd); in tg3_get_invariants()
16603 /* Chip-specific fixup from Broadcom driver */ in tg3_get_invariants()
16607 pci_write_config_dword(tp->pdev, TG3PCI_PCISTATE, pci_state_reg); in tg3_get_invariants()
16611 tp->read32 = tg3_read32; in tg3_get_invariants()
16612 tp->write32 = tg3_write32; in tg3_get_invariants()
16613 tp->read32_mbox = tg3_read32; in tg3_get_invariants()
16614 tp->write32_mbox = tg3_write32; in tg3_get_invariants()
16615 tp->write32_tx_mbox = tg3_write32; in tg3_get_invariants()
16616 tp->write32_rx_mbox = tg3_write32; in tg3_get_invariants()
16620 tp->write32 = tg3_write_indirect_reg32; in tg3_get_invariants()
16631 tp->write32 = tg3_write_flush_reg32; in tg3_get_invariants()
16635 tp->write32_tx_mbox = tg3_write32_tx_mbox; in tg3_get_invariants()
16637 tp->write32_rx_mbox = tg3_write_flush_reg32; in tg3_get_invariants()
16641 tp->read32 = tg3_read_indirect_reg32; in tg3_get_invariants()
16642 tp->write32 = tg3_write_indirect_reg32; in tg3_get_invariants()
16643 tp->read32_mbox = tg3_read_indirect_mbox; in tg3_get_invariants()
16644 tp->write32_mbox = tg3_write_indirect_mbox; in tg3_get_invariants()
16645 tp->write32_tx_mbox = tg3_write_indirect_mbox; in tg3_get_invariants()
16646 tp->write32_rx_mbox = tg3_write_indirect_mbox; in tg3_get_invariants()
16648 iounmap(tp->regs); in tg3_get_invariants()
16649 tp->regs = NULL; in tg3_get_invariants()
16651 pci_read_config_word(tp->pdev, PCI_COMMAND, &pci_cmd); in tg3_get_invariants()
16653 pci_write_config_word(tp->pdev, PCI_COMMAND, pci_cmd); in tg3_get_invariants()
16656 tp->read32_mbox = tg3_read32_mbox_5906; in tg3_get_invariants()
16657 tp->write32_mbox = tg3_write32_mbox_5906; in tg3_get_invariants()
16658 tp->write32_tx_mbox = tg3_write32_mbox_5906; in tg3_get_invariants()
16659 tp->write32_rx_mbox = tg3_write32_mbox_5906; in tg3_get_invariants()
16662 if (tp->write32 == tg3_write_indirect_reg32 || in tg3_get_invariants()
16676 tp->pci_fn = PCI_FUNC(tp->pdev->devfn) & 3; in tg3_get_invariants()
16680 pci_read_config_dword(tp->pdev, in tg3_get_invariants()
16681 tp->pcix_cap + PCI_X_STATUS, in tg3_get_invariants()
16683 tp->pci_fn = val & 0x7; in tg3_get_invariants()
16693 tp->pci_fn = (val & TG3_CPMU_STATUS_FMSK_5717) ? 1 : 0; in tg3_get_invariants()
16695 tp->pci_fn = (val & TG3_CPMU_STATUS_FMSK_5719) >> in tg3_get_invariants()
16700 tp->write32_tx_mbox = tg3_write_flush_reg32; in tg3_get_invariants()
16701 tp->write32_rx_mbox = tg3_write_flush_reg32; in tg3_get_invariants()
16717 tp->fw_needed = NULL; in tg3_get_invariants()
16727 pci_write_config_dword(tp->pdev, TG3PCI_PCISTATE, in tg3_get_invariants()
16731 tp->ape_hb_interval = in tg3_get_invariants()
16735 /* Set up tp->grc_local_ctrl before calling in tg3_get_invariants()
16740 tp->grc_local_ctrl = GRC_LCLCTRL_INT_ON_ATTN | GRC_LCLCTRL_AUTO_SEEPROM; in tg3_get_invariants()
16743 tp->grc_local_ctrl |= (GRC_LCLCTRL_GPIO_OE1 | in tg3_get_invariants()
16746 * are no pull-up resistors on unused GPIO pins. in tg3_get_invariants()
16749 tp->grc_local_ctrl |= GRC_LCLCTRL_GPIO_OE3; in tg3_get_invariants()
16754 tp->grc_local_ctrl |= GRC_LCLCTRL_GPIO_UART_SEL; in tg3_get_invariants()
16756 if (tp->pdev->device == PCI_DEVICE_ID_TIGON3_5761 || in tg3_get_invariants()
16757 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5761S) { in tg3_get_invariants()
16759 tp->grc_local_ctrl |= GRC_LCLCTRL_GPIO_UART_SEL; in tg3_get_invariants()
16762 tp->grc_local_ctrl |= GRC_LCLCTRL_GPIO_OE0 | in tg3_get_invariants()
16767 tp->grc_local_ctrl |= in tg3_get_invariants()
16776 if (tp->dev->mtu > ETH_DATA_LEN && !tg3_flag(tp, 5780_CLASS)) in tg3_get_invariants()
16790 tp->phy_flags |= TG3_PHYFLG_IS_FET; in tg3_get_invariants()
16797 (tp->phy_flags & TG3_PHYFLG_IS_FET) || in tg3_get_invariants()
16798 (tp->phy_flags & TG3_PHYFLG_ANY_SERDES)) in tg3_get_invariants()
16799 tp->phy_flags |= TG3_PHYFLG_NO_ETH_WIRE_SPEED; in tg3_get_invariants()
16803 tp->phy_flags |= TG3_PHYFLG_ADC_BUG; in tg3_get_invariants()
16805 tp->phy_flags |= TG3_PHYFLG_5704_A0_BUG; in tg3_get_invariants()
16808 !(tp->phy_flags & TG3_PHYFLG_IS_FET) && in tg3_get_invariants()
16816 if (tp->pdev->device != PCI_DEVICE_ID_TIGON3_5756 && in tg3_get_invariants()
16817 tp->pdev->device != PCI_DEVICE_ID_TIGON3_5722) in tg3_get_invariants()
16818 tp->phy_flags |= TG3_PHYFLG_JITTER_BUG; in tg3_get_invariants()
16819 if (tp->pdev->device == PCI_DEVICE_ID_TIGON3_5755M) in tg3_get_invariants()
16820 tp->phy_flags |= TG3_PHYFLG_ADJUST_TRIM; in tg3_get_invariants()
16822 tp->phy_flags |= TG3_PHYFLG_BER_BUG; in tg3_get_invariants()
16827 tp->phy_otp = tg3_read_otp_phycfg(tp); in tg3_get_invariants()
16828 if (tp->phy_otp == 0) in tg3_get_invariants()
16829 tp->phy_otp = TG3_OTP_DEFAULT; in tg3_get_invariants()
16833 tp->mi_mode = MAC_MI_MODE_500KHZ_CONST; in tg3_get_invariants()
16835 tp->mi_mode = MAC_MI_MODE_BASE; in tg3_get_invariants()
16837 tp->coalesce_mode = 0; in tg3_get_invariants()
16840 tp->coalesce_mode |= HOSTCC_MODE_32BYTE; in tg3_get_invariants()
16847 tp->coalesce_mode |= HOSTCC_MODE_ATTN; in tg3_get_invariants()
16848 tp->grc_mode |= GRC_MODE_IRQ_ON_FLOW_ATTN; in tg3_get_invariants()
16871 tw32(GRC_MODE, val | tp->grc_mode); in tg3_get_invariants()
16881 pci_read_config_dword(tp->pdev, TG3PCI_PCISTATE, in tg3_get_invariants()
16895 sram_base = tp->regs + NIC_SRAM_WIN_BASE + NIC_SRAM_STATS_BLK; in tg3_get_invariants()
16911 tp->fw_needed = NULL; in tg3_get_invariants()
16925 tp->coalesce_mode |= (HOSTCC_MODE_CLRTICK_RXBD | in tg3_get_invariants()
16928 tp->misc_host_ctrl |= MISC_HOST_CTRL_TAGGED_STATUS; in tg3_get_invariants()
16929 pci_write_config_dword(tp->pdev, TG3PCI_MISC_HOST_CTRL, in tg3_get_invariants()
16930 tp->misc_host_ctrl); in tg3_get_invariants()
16935 tp->mac_mode = MAC_MODE_APE_TX_EN | MAC_MODE_APE_RX_EN; in tg3_get_invariants()
16937 tp->mac_mode = 0; in tg3_get_invariants()
16940 tp->phy_flags |= TG3_PHYFLG_10_100_ONLY; in tg3_get_invariants()
16944 dev_err(&tp->pdev->dev, "phy probe failed, err %d\n", err); in tg3_get_invariants()
16952 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) { in tg3_get_invariants()
16953 tp->phy_flags &= ~TG3_PHYFLG_USE_MI_INTERRUPT; in tg3_get_invariants()
16956 tp->phy_flags |= TG3_PHYFLG_USE_MI_INTERRUPT; in tg3_get_invariants()
16958 tp->phy_flags &= ~TG3_PHYFLG_USE_MI_INTERRUPT; in tg3_get_invariants()
16974 if (tp->pdev->subsystem_vendor == PCI_VENDOR_ID_DELL && in tg3_get_invariants()
16976 !(tp->phy_flags & TG3_PHYFLG_PHY_SERDES)) { in tg3_get_invariants()
16977 tp->phy_flags |= TG3_PHYFLG_USE_MI_INTERRUPT; in tg3_get_invariants()
16982 if (tp->phy_flags & TG3_PHYFLG_PHY_SERDES) in tg3_get_invariants()
16990 tp->rx_offset = NET_SKB_PAD + NET_IP_ALIGN; in tg3_get_invariants()
16991 tp->rx_copy_thresh = TG3_RX_COPY_THRESHOLD; in tg3_get_invariants()
16994 tp->rx_offset = NET_SKB_PAD; in tg3_get_invariants()
16996 tp->rx_copy_thresh = ~(u16)0; in tg3_get_invariants()
17000 tp->rx_std_ring_mask = TG3_RX_STD_RING_SIZE(tp) - 1; in tg3_get_invariants()
17001 tp->rx_jmb_ring_mask = TG3_RX_JMB_RING_SIZE(tp) - 1; in tg3_get_invariants()
17002 tp->rx_ret_ring_mask = tg3_rx_ret_ring_size(tp) - 1; in tg3_get_invariants()
17004 tp->rx_std_max_post = tp->rx_std_ring_mask + 1; in tg3_get_invariants()
17012 tp->rx_std_max_post = 8; in tg3_get_invariants()
17015 tp->pwrmgmt_thresh = tr32(PCIE_PWR_MGMT_THRESH) & in tg3_get_invariants()
17027 if (!eth_platform_get_mac_address(&tp->pdev->dev, addr)) in tg3_get_device_address()
17031 err = ssb_gige_get_macaddr(tp->pdev, addr); in tg3_get_device_address()
17046 if (tp->pci_fn & 1) in tg3_get_device_address()
17048 if (tp->pci_fn > 1) in tg3_get_device_address()
17091 return -EINVAL; in tg3_get_device_address()
17104 pci_read_config_byte(tp->pdev, PCI_CACHE_LINE_SIZE, &byte); in tg3_calc_dma_bndry()
17137 * when a device tries to burst across a cache-line boundary. in tg3_calc_dma_bndry()
17140 * Unfortunately, for PCI-E there are only limited in tg3_calc_dma_bndry()
17141 * write-side controls for this, and thus for reads in tg3_calc_dma_bndry()
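One detail behind the boundary discussion above: the PCI_CACHE_LINE_SIZE register read earlier in tg3_calc_dma_bndry() counts 32-bit words, so converting it to a byte count before matching a DMA burst boundary to it looks like this (illustrative helper only):

static unsigned int example_cacheline_bytes(struct pci_dev *pdev)
{
        u8 cls;

        /* The register is specified in units of dwords, not bytes. */
        pci_read_config_byte(pdev, PCI_CACHE_LINE_SIZE, &cls);
        return cls * 4;
}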
17271 * Broadcom noted the GRC reset will also reset all sub-components. in tg3_do_test_dma()
17290 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, in tg3_do_test_dma()
17292 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_DATA, val); in tg3_do_test_dma()
17294 pci_write_config_dword(tp->pdev, TG3PCI_MEM_WIN_BASE_ADDR, 0); in tg3_do_test_dma()
17301 ret = -ENODEV; in tg3_do_test_dma()
17333 buf = dma_alloc_coherent(&tp->pdev->dev, TEST_BUFFER_SIZE, in tg3_test_dma()
17336 ret = -ENOMEM; in tg3_test_dma()
17340 tp->dma_rwctrl = ((0x7 << DMA_RWCTRL_PCI_WRITE_CMD_SHIFT) | in tg3_test_dma()
17343 tp->dma_rwctrl = tg3_calc_dma_bndry(tp, tp->dma_rwctrl); in tg3_test_dma()
17350 tp->dma_rwctrl |= 0x00180000; in tg3_test_dma()
17354 tp->dma_rwctrl |= 0x003f0000; in tg3_test_dma()
17356 tp->dma_rwctrl |= 0x003f000f; in tg3_test_dma()
17369 tp->dma_rwctrl |= 0x8000; in tg3_test_dma()
17371 tp->dma_rwctrl |= DMA_RWCTRL_ONE_DMA; in tg3_test_dma()
17376 tp->dma_rwctrl |= in tg3_test_dma()
17382 tp->dma_rwctrl |= 0x00144000; in tg3_test_dma()
17385 tp->dma_rwctrl |= 0x00148000; in tg3_test_dma()
17387 tp->dma_rwctrl |= 0x001b000f; in tg3_test_dma()
17391 tp->dma_rwctrl |= DMA_RWCTRL_ONE_DMA; in tg3_test_dma()
17395 tp->dma_rwctrl &= 0xfffffff0; in tg3_test_dma()
17400 tp->dma_rwctrl |= DMA_RWCTRL_USE_MEM_READ_MULT; in tg3_test_dma()
17410 * on those chips to enable a PCI-X workaround. in tg3_test_dma()
17412 tp->dma_rwctrl |= DMA_RWCTRL_ASSERT_ALL_BE; in tg3_test_dma()
17415 tw32(TG3PCI_DMA_RW_CTRL, tp->dma_rwctrl); in tg3_test_dma()
17425 saved_dma_rwctrl = tp->dma_rwctrl; in tg3_test_dma()
17426 tp->dma_rwctrl &= ~DMA_RWCTRL_WRITE_BNDRY_MASK; in tg3_test_dma()
17427 tw32(TG3PCI_DMA_RW_CTRL, tp->dma_rwctrl); in tg3_test_dma()
17438 dev_err(&tp->pdev->dev, in tg3_test_dma()
17447 dev_err(&tp->pdev->dev, "%s: Buffer read failed. " in tg3_test_dma()
17457 if ((tp->dma_rwctrl & DMA_RWCTRL_WRITE_BNDRY_MASK) != in tg3_test_dma()
17459 tp->dma_rwctrl &= ~DMA_RWCTRL_WRITE_BNDRY_MASK; in tg3_test_dma()
17460 tp->dma_rwctrl |= DMA_RWCTRL_WRITE_BNDRY_16; in tg3_test_dma()
17461 tw32(TG3PCI_DMA_RW_CTRL, tp->dma_rwctrl); in tg3_test_dma()
17464 dev_err(&tp->pdev->dev, in tg3_test_dma()
17467 ret = -ENODEV; in tg3_test_dma()
17478 if ((tp->dma_rwctrl & DMA_RWCTRL_WRITE_BNDRY_MASK) != in tg3_test_dma()
17485 tp->dma_rwctrl &= ~DMA_RWCTRL_WRITE_BNDRY_MASK; in tg3_test_dma()
17486 tp->dma_rwctrl |= DMA_RWCTRL_WRITE_BNDRY_16; in tg3_test_dma()
17489 tp->dma_rwctrl = saved_dma_rwctrl; in tg3_test_dma()
17492 tw32(TG3PCI_DMA_RW_CTRL, tp->dma_rwctrl); in tg3_test_dma()
17496 dma_free_coherent(&tp->pdev->dev, TEST_BUFFER_SIZE, buf, buf_dma); in tg3_test_dma()
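Pulling the scattered tg3_test_dma() lines above together, the coherent-buffer lifecycle they follow is roughly the sketch below; the actual engine programming is elided and the helper name is illustrative:

static int example_dma_buffer_roundtrip(struct device *dev, size_t size)
{
        dma_addr_t buf_dma;
        u32 *buf;
        int ret = 0;

        buf = dma_alloc_coherent(dev, size, &buf_dma, GFP_KERNEL);
        if (!buf)
                return -ENOMEM;

        /* ... point the DMA engine at buf_dma, then verify buf here ... */

        dma_free_coherent(dev, size, buf, buf_dma);
        return ret;
}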
17504 tp->bufmgr_config.mbuf_read_dma_low_water = in tg3_init_bufmgr_config()
17506 tp->bufmgr_config.mbuf_mac_rx_low_water = in tg3_init_bufmgr_config()
17508 tp->bufmgr_config.mbuf_high_water = in tg3_init_bufmgr_config()
17511 tp->bufmgr_config.mbuf_read_dma_low_water_jumbo = in tg3_init_bufmgr_config()
17513 tp->bufmgr_config.mbuf_mac_rx_low_water_jumbo = in tg3_init_bufmgr_config()
17515 tp->bufmgr_config.mbuf_high_water_jumbo = in tg3_init_bufmgr_config()
17518 tp->bufmgr_config.mbuf_read_dma_low_water = in tg3_init_bufmgr_config()
17520 tp->bufmgr_config.mbuf_mac_rx_low_water = in tg3_init_bufmgr_config()
17522 tp->bufmgr_config.mbuf_high_water = in tg3_init_bufmgr_config()
17525 tp->bufmgr_config.mbuf_mac_rx_low_water = in tg3_init_bufmgr_config()
17527 tp->bufmgr_config.mbuf_high_water = in tg3_init_bufmgr_config()
17531 tp->bufmgr_config.mbuf_read_dma_low_water_jumbo = in tg3_init_bufmgr_config()
17533 tp->bufmgr_config.mbuf_mac_rx_low_water_jumbo = in tg3_init_bufmgr_config()
17535 tp->bufmgr_config.mbuf_high_water_jumbo = in tg3_init_bufmgr_config()
17538 tp->bufmgr_config.mbuf_read_dma_low_water = in tg3_init_bufmgr_config()
17540 tp->bufmgr_config.mbuf_mac_rx_low_water = in tg3_init_bufmgr_config()
17542 tp->bufmgr_config.mbuf_high_water = in tg3_init_bufmgr_config()
17545 tp->bufmgr_config.mbuf_read_dma_low_water_jumbo = in tg3_init_bufmgr_config()
17547 tp->bufmgr_config.mbuf_mac_rx_low_water_jumbo = in tg3_init_bufmgr_config()
17549 tp->bufmgr_config.mbuf_high_water_jumbo = in tg3_init_bufmgr_config()
17553 tp->bufmgr_config.dma_low_water = DEFAULT_DMA_LOW_WATER; in tg3_init_bufmgr_config()
17554 tp->bufmgr_config.dma_high_water = DEFAULT_DMA_HIGH_WATER; in tg3_init_bufmgr_config()
17559 switch (tp->phy_id & TG3_PHY_ID_MASK) { in tg3_phy_string()
17619 strcat(str, ":32-bit"); in tg3_bus_string()
17621 strcat(str, ":64-bit"); in tg3_bus_string()
17627 struct ethtool_coalesce *ec = &tp->coal; in tg3_init_coal()
17630 ec->cmd = ETHTOOL_GCOALESCE; in tg3_init_coal()
17631 ec->rx_coalesce_usecs = LOW_RXCOL_TICKS; in tg3_init_coal()
17632 ec->tx_coalesce_usecs = LOW_TXCOL_TICKS; in tg3_init_coal()
17633 ec->rx_max_coalesced_frames = LOW_RXMAX_FRAMES; in tg3_init_coal()
17634 ec->tx_max_coalesced_frames = LOW_TXMAX_FRAMES; in tg3_init_coal()
17635 ec->rx_coalesce_usecs_irq = DEFAULT_RXCOAL_TICK_INT; in tg3_init_coal()
17636 ec->tx_coalesce_usecs_irq = DEFAULT_TXCOAL_TICK_INT; in tg3_init_coal()
17637 ec->rx_max_coalesced_frames_irq = DEFAULT_RXCOAL_MAXF_INT; in tg3_init_coal()
17638 ec->tx_max_coalesced_frames_irq = DEFAULT_TXCOAL_MAXF_INT; in tg3_init_coal()
17639 ec->stats_block_coalesce_usecs = DEFAULT_STAT_COAL_TICKS; in tg3_init_coal()
17641 if (tp->coalesce_mode & (HOSTCC_MODE_CLRTICK_RXBD | in tg3_init_coal()
17643 ec->rx_coalesce_usecs = LOW_RXCOL_TICKS_CLRTCKS; in tg3_init_coal()
17644 ec->rx_coalesce_usecs_irq = DEFAULT_RXCOAL_TICK_INT_CLRTCKS; in tg3_init_coal()
17645 ec->tx_coalesce_usecs = LOW_TXCOL_TICKS_CLRTCKS; in tg3_init_coal()
17646 ec->tx_coalesce_usecs_irq = DEFAULT_TXCOAL_TICK_INT_CLRTCKS; in tg3_init_coal()
17650 ec->rx_coalesce_usecs_irq = 0; in tg3_init_coal()
17651 ec->tx_coalesce_usecs_irq = 0; in tg3_init_coal()
17652 ec->stats_block_coalesce_usecs = 0; in tg3_init_coal()
17670 dev_err(&pdev->dev, "Cannot enable PCI device, aborting\n"); in tg3_init_one()
17676 dev_err(&pdev->dev, "Cannot obtain PCI resources, aborting\n"); in tg3_init_one()
17684 err = -ENOMEM; in tg3_init_one()
17688 SET_NETDEV_DEV(dev, &pdev->dev); in tg3_init_one()
17691 tp->pdev = pdev; in tg3_init_one()
17692 tp->dev = dev; in tg3_init_one()
17693 tp->rx_mode = TG3_DEF_RX_MODE; in tg3_init_one()
17694 tp->tx_mode = TG3_DEF_TX_MODE; in tg3_init_one()
17695 tp->irq_sync = 1; in tg3_init_one()
17696 tp->pcierr_recovery = false; in tg3_init_one()
17699 tp->msg_enable = tg3_debug; in tg3_init_one()
17701 tp->msg_enable = TG3_DEF_MSG_ENABLE; in tg3_init_one()
17721 tp->misc_host_ctrl = in tg3_init_one()
17727 /* The NONFRM (non-frame) byte/word swap controls take effect in tg3_init_one()
17731 * are running in big-endian mode. in tg3_init_one()
17733 tp->grc_mode = (GRC_MODE_WSWAP_DATA | GRC_MODE_BSWAP_DATA | in tg3_init_one()
17736 tp->grc_mode |= GRC_MODE_BSWAP_NONFRM_DATA; in tg3_init_one()
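The two grc_mode assignments above are separated by an endianness check that is not shown; a hedged reconstruction of the combined setup (illustrative helper, not the driver's exact code) is:

static u32 example_grc_swap_mode(void)
{
        /* Frame data swapping is always configured the same way ... */
        u32 grc_mode = GRC_MODE_WSWAP_DATA | GRC_MODE_BSWAP_DATA |
                       GRC_MODE_WSWAP_NONFRM_DATA;

#ifdef __BIG_ENDIAN
        /* ... while non-frame data also needs byte swapping when the host
         * CPU itself runs big-endian.
         */
        grc_mode |= GRC_MODE_BSWAP_NONFRM_DATA;
#endif
        return grc_mode;
}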
17738 spin_lock_init(&tp->lock); in tg3_init_one()
17739 spin_lock_init(&tp->indirect_lock); in tg3_init_one()
17740 INIT_WORK(&tp->reset_task, tg3_reset_task); in tg3_init_one()
17742 tp->regs = pci_ioremap_bar(pdev, BAR_0); in tg3_init_one()
17743 if (!tp->regs) { in tg3_init_one()
17744 dev_err(&pdev->dev, "Cannot map device registers, aborting\n"); in tg3_init_one()
17745 err = -ENOMEM; in tg3_init_one()
17749 if (tp->pdev->device == PCI_DEVICE_ID_TIGON3_5761 || in tg3_init_one()
17750 tp->pdev->device == PCI_DEVICE_ID_TIGON3_5761E || in tg3_init_one()
17751 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5761S || in tg3_init_one()
17752 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5761SE || in tg3_init_one()
17753 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717 || in tg3_init_one()
17754 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5717_C || in tg3_init_one()
17755 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5718 || in tg3_init_one()
17756 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5719 || in tg3_init_one()
17757 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5720 || in tg3_init_one()
17758 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57767 || in tg3_init_one()
17759 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57764 || in tg3_init_one()
17760 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5762 || in tg3_init_one()
17761 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5725 || in tg3_init_one()
17762 tp->pdev->device == TG3PCI_DEVICE_TIGON3_5727 || in tg3_init_one()
17763 tp->pdev->device == TG3PCI_DEVICE_TIGON3_57787) { in tg3_init_one()
17765 tp->aperegs = pci_ioremap_bar(pdev, BAR_2); in tg3_init_one()
17766 if (!tp->aperegs) { in tg3_init_one()
17767 dev_err(&pdev->dev, in tg3_init_one()
17769 err = -ENOMEM; in tg3_init_one()
17774 tp->rx_pending = TG3_DEF_RX_RING_PENDING; in tg3_init_one()
17775 tp->rx_jumbo_pending = TG3_DEF_RX_JUMBO_RING_PENDING; in tg3_init_one()
17777 dev->ethtool_ops = &tg3_ethtool_ops; in tg3_init_one()
17778 dev->watchdog_timeo = TG3_TX_TIMEOUT; in tg3_init_one()
17779 dev->netdev_ops = &tg3_netdev_ops; in tg3_init_one()
17780 dev->irq = pdev->irq; in tg3_init_one()
17784 dev_err(&pdev->dev, in tg3_init_one()
17790 * device behind the EPB cannot support DMA addresses > 40-bit. in tg3_init_one()
17791 * On 64-bit systems with IOMMU, use 40-bit dma_mask. in tg3_init_one()
17792 * On 64-bit systems without IOMMU, use 64-bit dma_mask and in tg3_init_one()
17810 err = dma_set_mask(&pdev->dev, dma_mask); in tg3_init_one()
17813 err = dma_set_coherent_mask(&pdev->dev, in tg3_init_one()
17816 dev_err(&pdev->dev, "Unable to obtain 64 bit " in tg3_init_one()
17823 err = dma_set_mask(&pdev->dev, DMA_BIT_MASK(32)); in tg3_init_one()
17825 dev_err(&pdev->dev, in tg3_init_one()
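The dma_set_mask() fragments above belong to a larger fallback chain; a compact, hedged sketch of the policy the comment describes (helper name and bridge flag are illustrative) is:

static int example_pick_dma_mask(struct pci_dev *pdev, bool behind_40bit_bridge)
{
        /* Prefer the widest mask the topology allows: 40-bit when sitting
         * behind the EPB bridge, otherwise the full 64-bit mask.
         */
        u64 mask = behind_40bit_bridge ? DMA_BIT_MASK(40) : DMA_BIT_MASK(64);

        if (!dma_set_mask_and_coherent(&pdev->dev, mask))
                return 0;

        /* Every conformant PCI device must accept the 32-bit mask. */
        return dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32));
}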
17864 dev->features |= features | NETIF_F_HW_VLAN_CTAG_TX | in tg3_init_one()
17866 dev->vlan_features |= features; in tg3_init_one()
17870 * MAC-LOOPBACK. Eventually this needs to be enhanced to allow INT-PHY in tg3_init_one()
17878 dev->hw_features |= features; in tg3_init_one()
17879 dev->priv_flags |= IFF_UNICAST_FLT; in tg3_init_one()
17881 /* MTU range: 60 - 9000 or 1500, depending on hardware */ in tg3_init_one()
17882 dev->min_mtu = TG3_MIN_MTU; in tg3_init_one()
17883 dev->max_mtu = TG3_MAX_MTU(tp); in tg3_init_one()
17889 tp->rx_pending = 63; in tg3_init_one()
17894 dev_err(&pdev->dev, in tg3_init_one()
17903 for (i = 0; i < tp->irq_max; i++) { in tg3_init_one()
17904 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_init_one()
17906 tnapi->tp = tp; in tg3_init_one()
17907 tnapi->tx_pending = TG3_DEF_TX_RING_PENDING; in tg3_init_one()
17909 tnapi->int_mbox = intmbx; in tg3_init_one()
17912 tnapi->consmbox = rcvmbx; in tg3_init_one()
17913 tnapi->prodmbox = sndmbx; in tg3_init_one()
17916 tnapi->coal_now = HOSTCC_MODE_COAL_VEC1_NOW << (i - 1); in tg3_init_one()
17918 tnapi->coal_now = HOSTCC_MODE_NOW; in tg3_init_one()
17936 sndmbx -= 0x4; in tg3_init_one()
17956 dev_err(&pdev->dev, "DMA engine test failed, aborting\n"); in tg3_init_one()
17975 dev_err(&pdev->dev, "Cannot register net device, aborting\n"); in tg3_init_one()
17981 tp->ptp_clock = ptp_clock_register(&tp->ptp_info, in tg3_init_one()
17982 &tp->pdev->dev); in tg3_init_one()
17983 if (IS_ERR(tp->ptp_clock)) in tg3_init_one()
17984 tp->ptp_clock = NULL; in tg3_init_one()
17988 tp->board_part_number, in tg3_init_one()
17991 dev->dev_addr); in tg3_init_one()
17993 if (!(tp->phy_flags & TG3_PHYFLG_IS_CONNECTED)) { in tg3_init_one()
17996 if (tp->phy_flags & TG3_PHYFLG_10_100_ONLY) in tg3_init_one()
17997 ethtype = "10/100Base-TX"; in tg3_init_one()
17998 else if (tp->phy_flags & TG3_PHYFLG_ANY_SERDES) in tg3_init_one()
17999 ethtype = "1000Base-SX"; in tg3_init_one()
18001 ethtype = "10/100/1000Base-T"; in tg3_init_one()
18006 (tp->phy_flags & TG3_PHYFLG_NO_ETH_WIRE_SPEED) == 0, in tg3_init_one()
18007 (tp->phy_flags & TG3_PHYFLG_EEE_CAP) != 0); in tg3_init_one()
18011 (dev->features & NETIF_F_RXCSUM) != 0, in tg3_init_one()
18013 (tp->phy_flags & TG3_PHYFLG_USE_MI_INTERRUPT) != 0, in tg3_init_one()
18016 netdev_info(dev, "dma_rwctrl[%08x] dma_mask[%d-bit]\n", in tg3_init_one()
18017 tp->dma_rwctrl, in tg3_init_one()
18018 pdev->dma_mask == DMA_BIT_MASK(32) ? 32 : in tg3_init_one()
18019 ((u64)pdev->dma_mask) == DMA_BIT_MASK(40) ? 40 : 64); in tg3_init_one()
18026 if (tp->aperegs) { in tg3_init_one()
18027 iounmap(tp->aperegs); in tg3_init_one()
18028 tp->aperegs = NULL; in tg3_init_one()
18032 if (tp->regs) { in tg3_init_one()
18033 iounmap(tp->regs); in tg3_init_one()
18034 tp->regs = NULL; in tg3_init_one()
18058 release_firmware(tp->fw); in tg3_remove_one()
18068 if (tp->aperegs) { in tg3_remove_one()
18069 iounmap(tp->aperegs); in tg3_remove_one()
18070 tp->aperegs = NULL; in tg3_remove_one()
18072 if (tp->regs) { in tg3_remove_one()
18073 iounmap(tp->regs); in tg3_remove_one()
18074 tp->regs = NULL; in tg3_remove_one()
18136 !(tp->phy_flags & TG3_PHYFLG_KEEP_LINK_ON_PWRDN)); in tg3_resume()
18220 pdev->current_state != PCI_D3cold && in tg3_shutdown()
18221 pdev->current_state != PCI_UNKNOWN) { in tg3_shutdown()
18238 * tg3_io_error_detected - called when PCI error is detected
18260 if (!netdev || tp->pcierr_recovery || !netif_running(netdev)) in tg3_io_error_detected()
18265 tp->pcierr_recovery = true; in tg3_io_error_detected()
18297 * tg3_io_slot_reset - called after the pci bus has been reset.
18300 * Restart the card from scratch, as if from a cold-boot.
18315 dev_err(&pdev->dev, in tg3_io_slot_reset()
18316 "Cannot re-enable PCI device after reset.\n"); in tg3_io_slot_reset()
18346 * tg3_io_resume - called when traffic can start flowing again.
18384 tp->pcierr_recovery = false; in tg3_io_resume()
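The three callbacks documented above (error_detected, slot_reset, resume) are the standard PCI error-recovery hooks; they are typically collected in a struct pci_error_handlers table referenced from the driver's struct pci_driver, roughly as sketched here (table name illustrative):

static const struct pci_error_handlers example_err_handler = {
        .error_detected = tg3_io_error_detected,        /* quiesce traffic, start recovery */
        .slot_reset     = tg3_io_slot_reset,            /* re-enable and reinitialize the device */
        .resume         = tg3_io_resume,                /* restart traffic once the link is back */
};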