Lines matching full:hw — every reference to the hw pointer (struct skge_hw) in the skge driver

96 static int xm_phy_write(struct skge_hw *hw, int port, u16 reg, u16 val);
97 static int gm_phy_write(struct skge_hw *hw, int port, u16 reg, u16 val);
100 static void yukon_init(struct skge_hw *hw, int port);
101 static void genesis_mac_init(struct skge_hw *hw, int port);
114 static inline bool is_genesis(const struct skge_hw *hw) in is_genesis() argument
117 return hw->chip_id == CHIP_ID_GENESIS; in is_genesis()
137 const void __iomem *io = skge->hw->regs; in skge_get_regs()
150 static u32 wol_supported(const struct skge_hw *hw) in wol_supported() argument
152 if (is_genesis(hw)) in wol_supported()
155 if (hw->chip_id == CHIP_ID_YUKON && hw->chip_rev == 0) in wol_supported()
163 struct skge_hw *hw = skge->hw; in skge_wol_init() local
167 skge_write16(hw, B0_CTST, CS_RST_CLR); in skge_wol_init()
168 skge_write16(hw, SK_REG(port, GMAC_LINK_CTRL), GMLC_RST_CLR); in skge_wol_init()
171 skge_write8(hw, B0_POWER_CTRL, in skge_wol_init()
175 if (hw->chip_id == CHIP_ID_YUKON_LITE && in skge_wol_init()
176 hw->chip_rev >= CHIP_REV_YU_LITE_A3) { in skge_wol_init()
177 u32 reg = skge_read32(hw, B2_GP_IO); in skge_wol_init()
180 skge_write32(hw, B2_GP_IO, reg); in skge_wol_init()
183 skge_write32(hw, SK_REG(port, GPHY_CTRL), in skge_wol_init()
188 skge_write32(hw, SK_REG(port, GPHY_CTRL), in skge_wol_init()
193 skge_write32(hw, SK_REG(port, GMAC_CTRL), GMC_RST_CLR); in skge_wol_init()
196 gm_phy_write(hw, port, PHY_MARV_AUNE_ADV, in skge_wol_init()
200 gm_phy_write(hw, port, PHY_MARV_1000T_CTRL, 0); in skge_wol_init()
201 gm_phy_write(hw, port, PHY_MARV_CTRL, in skge_wol_init()
207 gma_write16(hw, port, GM_GP_CTRL, in skge_wol_init()
212 memcpy_toio(hw->regs + WOL_REGS(port, WOL_MAC_ADDR), in skge_wol_init()
216 skge_write16(hw, WOL_REGS(port, WOL_CTRL_STAT), WOL_CTL_CLEAR_RESULT); in skge_wol_init()
229 skge_write16(hw, WOL_REGS(port, WOL_CTRL_STAT), ctrl); in skge_wol_init()
232 skge_write8(hw, SK_REG(port, RX_GMF_CTRL_T), GMF_RST_SET); in skge_wol_init()
239 wol->supported = wol_supported(skge->hw); in skge_get_wol()
246 struct skge_hw *hw = skge->hw; in skge_set_wol() local
248 if ((wol->wolopts & ~wol_supported(hw)) || in skge_set_wol()
249 !device_can_wakeup(&hw->pdev->dev)) in skge_set_wol()
254 device_set_wakeup_enable(&hw->pdev->dev, skge->wol); in skge_set_wol()
262 static u32 skge_supported_modes(const struct skge_hw *hw) in skge_supported_modes() argument
266 if (hw->copper) { in skge_supported_modes()
276 if (is_genesis(hw)) in skge_supported_modes()
282 else if (hw->chip_id == CHIP_ID_YUKON) in skge_supported_modes()
297 struct skge_hw *hw = skge->hw; in skge_get_link_ksettings() local
300 supported = skge_supported_modes(hw); in skge_get_link_ksettings()
302 if (hw->copper) { in skge_get_link_ksettings()
304 cmd->base.phy_address = hw->phy_addr; in skge_get_link_ksettings()
325 const struct skge_hw *hw = skge->hw; in skge_set_link_ksettings() local
326 u32 supported = skge_supported_modes(hw); in skge_set_link_ksettings()
400 strlcpy(info->bus_info, pci_name(skge->hw->pdev), in skge_get_drvinfo()
450 if (is_genesis(skge->hw)) in skge_get_ethtool_stats()
465 if (is_genesis(skge->hw)) in skge_get_stats()
600 static inline u32 hwkhz(const struct skge_hw *hw) in hwkhz() argument
602 return is_genesis(hw) ? 53125 : 78125; in hwkhz()
606 static inline u32 skge_clk2usec(const struct skge_hw *hw, u32 ticks) in skge_clk2usec() argument
608 return (ticks * 1000) / hwkhz(hw); in skge_clk2usec()
612 static inline u32 skge_usecs2clk(const struct skge_hw *hw, u32 usec) in skge_usecs2clk() argument
614 return hwkhz(hw) * usec / 1000; in skge_usecs2clk()
621 struct skge_hw *hw = skge->hw; in skge_get_coalesce() local
627 if (skge_read32(hw, B2_IRQM_CTRL) & TIM_START) { in skge_get_coalesce()
628 u32 delay = skge_clk2usec(hw, skge_read32(hw, B2_IRQM_INI)); in skge_get_coalesce()
629 u32 msk = skge_read32(hw, B2_IRQM_MSK); in skge_get_coalesce()
645 struct skge_hw *hw = skge->hw; in skge_set_coalesce() local
647 u32 msk = skge_read32(hw, B2_IRQM_MSK); in skge_set_coalesce()
670 skge_write32(hw, B2_IRQM_MSK, msk); in skge_set_coalesce()
672 skge_write32(hw, B2_IRQM_CTRL, TIM_STOP); in skge_set_coalesce()
674 skge_write32(hw, B2_IRQM_INI, skge_usecs2clk(hw, delay)); in skge_set_coalesce()
675 skge_write32(hw, B2_IRQM_CTRL, TIM_START); in skge_set_coalesce()
683 struct skge_hw *hw = skge->hw; in skge_led() local
686 spin_lock_bh(&hw->phy_lock); in skge_led()
687 if (is_genesis(hw)) { in skge_led()
690 if (hw->phy_type == SK_PHY_BCOM) in skge_led()
691 xm_phy_write(hw, port, PHY_BCOM_P_EXT_CTRL, PHY_B_PEC_LED_OFF); in skge_led()
693 skge_write32(hw, SK_REG(port, TX_LED_VAL), 0); in skge_led()
694 skge_write8(hw, SK_REG(port, TX_LED_CTRL), LED_T_OFF); in skge_led()
696 skge_write8(hw, SK_REG(port, LNK_LED_REG), LINKLED_OFF); in skge_led()
697 skge_write32(hw, SK_REG(port, RX_LED_VAL), 0); in skge_led()
698 skge_write8(hw, SK_REG(port, RX_LED_CTRL), LED_T_OFF); in skge_led()
702 skge_write8(hw, SK_REG(port, LNK_LED_REG), LINKLED_ON); in skge_led()
703 skge_write8(hw, SK_REG(port, LNK_LED_REG), LINKLED_LINKSYNC_ON); in skge_led()
705 skge_write8(hw, SK_REG(port, RX_LED_CTRL), LED_START); in skge_led()
706 skge_write8(hw, SK_REG(port, TX_LED_CTRL), LED_START); in skge_led()
711 skge_write8(hw, SK_REG(port, RX_LED_TST), LED_T_ON); in skge_led()
712 skge_write32(hw, SK_REG(port, RX_LED_VAL), 100); in skge_led()
713 skge_write8(hw, SK_REG(port, RX_LED_CTRL), LED_START); in skge_led()
715 if (hw->phy_type == SK_PHY_BCOM) in skge_led()
716 xm_phy_write(hw, port, PHY_BCOM_P_EXT_CTRL, PHY_B_PEC_LED_ON); in skge_led()
718 skge_write8(hw, SK_REG(port, TX_LED_TST), LED_T_ON); in skge_led()
719 skge_write32(hw, SK_REG(port, TX_LED_VAL), 100); in skge_led()
720 skge_write8(hw, SK_REG(port, TX_LED_CTRL), LED_START); in skge_led()
727 gm_phy_write(hw, port, PHY_MARV_LED_CTRL, 0); in skge_led()
728 gm_phy_write(hw, port, PHY_MARV_LED_OVER, in skge_led()
736 gm_phy_write(hw, port, PHY_MARV_LED_CTRL, in skge_led()
742 gm_phy_write(hw, port, PHY_MARV_LED_OVER, in skge_led()
748 gm_phy_write(hw, port, PHY_MARV_LED_CTRL, 0); in skge_led()
749 gm_phy_write(hw, port, PHY_MARV_LED_OVER, in skge_led()
757 spin_unlock_bh(&hw->phy_lock); in skge_led()
791 pci_read_config_dword(skge->hw->pdev, PCI_DEV_REG2, &reg2); in skge_get_eeprom_len()
824 struct pci_dev *pdev = skge->hw->pdev; in skge_get_eeprom()
850 struct pci_dev *pdev = skge->hw->pdev; in skge_set_eeprom()
942 map = dma_map_single(&skge->hw->pdev->dev, skb->data, bufsize, in skge_rx_setup()
945 if (dma_mapping_error(&skge->hw->pdev->dev, map)) in skge_rx_setup()
984 struct skge_hw *hw = skge->hw; in skge_rx_clean() local
993 dma_unmap_single(&hw->pdev->dev, in skge_rx_clean()
1052 skge_write8(skge->hw, SK_REG(skge->port, LNK_LED_REG), in skge_link_up()
1067 skge_write8(skge->hw, SK_REG(skge->port, LNK_LED_REG), LED_REG_OFF); in skge_link_down()
1074 static void xm_link_down(struct skge_hw *hw, int port) in xm_link_down() argument
1076 struct net_device *dev = hw->dev[port]; in xm_link_down()
1079 xm_write16(hw, port, XM_IMSK, XM_IMSK_DISABLE); in xm_link_down()
1085 static int __xm_phy_read(struct skge_hw *hw, int port, u16 reg, u16 *val) in __xm_phy_read() argument
1089 xm_write16(hw, port, XM_PHY_ADDR, reg | hw->phy_addr); in __xm_phy_read()
1090 *val = xm_read16(hw, port, XM_PHY_DATA); in __xm_phy_read()
1092 if (hw->phy_type == SK_PHY_XMAC) in __xm_phy_read()
1096 if (xm_read16(hw, port, XM_MMU_CMD) & XM_MMU_PHY_RDY) in __xm_phy_read()
1103 *val = xm_read16(hw, port, XM_PHY_DATA); in __xm_phy_read()
1108 static u16 xm_phy_read(struct skge_hw *hw, int port, u16 reg) in xm_phy_read() argument
1111 if (__xm_phy_read(hw, port, reg, &v)) in xm_phy_read()
1112 pr_warn("%s: phy read timed out\n", hw->dev[port]->name); in xm_phy_read()
1116 static int xm_phy_write(struct skge_hw *hw, int port, u16 reg, u16 val) in xm_phy_write() argument
1120 xm_write16(hw, port, XM_PHY_ADDR, reg | hw->phy_addr); in xm_phy_write()
1122 if (!(xm_read16(hw, port, XM_MMU_CMD) & XM_MMU_PHY_BUSY)) in xm_phy_write()
1129 xm_write16(hw, port, XM_PHY_DATA, val); in xm_phy_write()
1131 if (!(xm_read16(hw, port, XM_MMU_CMD) & XM_MMU_PHY_BUSY)) in xm_phy_write()
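Both __xm_phy_read() and xm_phy_write() above gate the transfer on the ready/busy bits of XM_MMU_CMD. A hedged sketch of that poll-with-timeout idiom, written kernel-style; wait_xm_phy_ready() is a hypothetical helper for illustration (the driver open-codes the loop), and PHY_RETRIES stands in for whatever retry bound the driver defines.

/* Illustrative sketch only, not a verbatim excerpt from skge.c. */
static int wait_xm_phy_ready(struct skge_hw *hw, int port)
{
	int i;

	for (i = 0; i < PHY_RETRIES; i++) {	/* assumed retry bound */
		if (xm_read16(hw, port, XM_MMU_CMD) & XM_MMU_PHY_RDY)
			return 0;		/* XM_PHY_DATA is now valid */
		udelay(1);
	}
	return -ETIMEDOUT;	/* callers log "phy read timed out" and carry on */
}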
1138 static void genesis_init(struct skge_hw *hw) in genesis_init() argument
1141 skge_write32(hw, B2_BSC_INI, (SK_BLK_DUR * SK_FACT_53) / 100); in genesis_init()
1142 skge_write8(hw, B2_BSC_CTRL, BSC_START); in genesis_init()
1145 skge_write16(hw, B3_MA_TO_CTRL, MA_RST_CLR); in genesis_init()
1148 skge_write8(hw, B3_MA_TOINI_RX1, SK_MAC_TO_53); in genesis_init()
1149 skge_write8(hw, B3_MA_TOINI_RX2, SK_MAC_TO_53); in genesis_init()
1150 skge_write8(hw, B3_MA_TOINI_TX1, SK_MAC_TO_53); in genesis_init()
1151 skge_write8(hw, B3_MA_TOINI_TX2, SK_MAC_TO_53); in genesis_init()
1153 skge_write8(hw, B3_MA_RCINI_RX1, 0); in genesis_init()
1154 skge_write8(hw, B3_MA_RCINI_RX2, 0); in genesis_init()
1155 skge_write8(hw, B3_MA_RCINI_TX1, 0); in genesis_init()
1156 skge_write8(hw, B3_MA_RCINI_TX2, 0); in genesis_init()
1159 skge_write16(hw, B3_PA_CTRL, PA_RST_CLR); in genesis_init()
1160 skge_write16(hw, B3_PA_TOINI_RX1, SK_PKT_TO_MAX); in genesis_init()
1161 skge_write16(hw, B3_PA_TOINI_TX1, SK_PKT_TO_MAX); in genesis_init()
1162 skge_write16(hw, B3_PA_TOINI_RX2, SK_PKT_TO_MAX); in genesis_init()
1163 skge_write16(hw, B3_PA_TOINI_TX2, SK_PKT_TO_MAX); in genesis_init()
1166 static void genesis_reset(struct skge_hw *hw, int port) in genesis_reset() argument
1171 skge_write8(hw, SK_REG(port, GMAC_IRQ_MSK), 0); in genesis_reset()
1174 xm_write32(hw, port, XM_GP_PORT, XM_GP_RES_STAT); in genesis_reset()
1175 xm_write16(hw, port, XM_IMSK, XM_IMSK_DISABLE); in genesis_reset()
1176 xm_write32(hw, port, XM_MODE, 0); /* clear Mode Reg */ in genesis_reset()
1177 xm_write16(hw, port, XM_TX_CMD, 0); /* reset TX CMD Reg */ in genesis_reset()
1178 xm_write16(hw, port, XM_RX_CMD, 0); /* reset RX CMD Reg */ in genesis_reset()
1181 if (hw->phy_type == SK_PHY_BCOM) in genesis_reset()
1182 xm_write16(hw, port, PHY_BCOM_INT_MASK, 0xffff); in genesis_reset()
1184 xm_outhash(hw, port, XM_HSM, zero); in genesis_reset()
1187 reg = xm_read32(hw, port, XM_MODE); in genesis_reset()
1188 xm_write32(hw, port, XM_MODE, reg | XM_MD_FTF); in genesis_reset()
1189 xm_write32(hw, port, XM_MODE, reg | XM_MD_FRF); in genesis_reset()
1210 static void bcom_check_link(struct skge_hw *hw, int port) in bcom_check_link() argument
1212 struct net_device *dev = hw->dev[port]; in bcom_check_link()
1217 xm_phy_read(hw, port, PHY_BCOM_STAT); in bcom_check_link()
1218 status = xm_phy_read(hw, port, PHY_BCOM_STAT); in bcom_check_link()
1221 xm_link_down(hw, port); in bcom_check_link()
1231 lpa = xm_phy_read(hw, port, PHY_XMAC_AUNE_LP); in bcom_check_link()
1237 aux = xm_phy_read(hw, port, PHY_BCOM_AUX_STAT); in bcom_check_link()
1278 struct skge_hw *hw = skge->hw; in bcom_phy_init() local
1298 id1 = xm_phy_read(hw, port, PHY_XMAC_ID1); in bcom_phy_init()
1301 r = xm_read16(hw, port, XM_MMU_CMD); in bcom_phy_init()
1303 xm_write16(hw, port, XM_MMU_CMD, r); in bcom_phy_init()
1312 xm_phy_write(hw, port, in bcom_phy_init()
1322 xm_phy_write(hw, port, in bcom_phy_init()
1331 r = xm_phy_read(hw, port, PHY_BCOM_AUX_CTRL); in bcom_phy_init()
1333 xm_phy_write(hw, port, PHY_BCOM_AUX_CTRL, r); in bcom_phy_init()
1336 xm_read16(hw, port, XM_ISRC); in bcom_phy_init()
1352 xm_phy_write(hw, port, PHY_BCOM_1000T_CTRL, adv); in bcom_phy_init()
1359 xm_phy_write(hw, port, PHY_BCOM_1000T_CTRL, PHY_B_1000C_MSE); in bcom_phy_init()
1363 xm_phy_write(hw, port, PHY_BCOM_AUNE_ADV, in bcom_phy_init()
1367 if (hw->dev[port]->mtu > ETH_DATA_LEN) { in bcom_phy_init()
1368 xm_phy_write(hw, port, PHY_BCOM_AUX_CTRL, in bcom_phy_init()
1375 xm_phy_write(hw, port, PHY_BCOM_P_EXT_CTRL, ext); in bcom_phy_init()
1376 xm_phy_write(hw, port, PHY_BCOM_CTRL, ctl); in bcom_phy_init()
1379 xm_phy_write(hw, port, PHY_BCOM_INT_MASK, PHY_B_DEF_MSK); in bcom_phy_init()
1384 struct skge_hw *hw = skge->hw; in xm_phy_init() local
1396 xm_phy_write(hw, port, PHY_XMAC_AUNE_ADV, ctrl); in xm_phy_init()
1410 xm_phy_write(hw, port, PHY_XMAC_CTRL, ctrl); in xm_phy_init()
1419 struct skge_hw *hw = skge->hw; in xm_check_link() local
1424 xm_phy_read(hw, port, PHY_XMAC_STAT); in xm_check_link()
1425 status = xm_phy_read(hw, port, PHY_XMAC_STAT); in xm_check_link()
1428 xm_link_down(hw, port); in xm_check_link()
1438 lpa = xm_phy_read(hw, port, PHY_XMAC_AUNE_LP); in xm_check_link()
1444 res = xm_phy_read(hw, port, PHY_XMAC_RES_ABI); in xm_check_link()
1493 struct skge_hw *hw = skge->hw; in xm_link_timer() local
1501 spin_lock_irqsave(&hw->phy_lock, flags); in xm_link_timer()
1508 if (xm_read16(hw, port, XM_GP_PORT) & XM_GP_INP_ASS) in xm_link_timer()
1514 u16 msk = xm_read16(hw, port, XM_IMSK); in xm_link_timer()
1516 xm_write16(hw, port, XM_IMSK, msk); in xm_link_timer()
1517 xm_read16(hw, port, XM_ISRC); in xm_link_timer()
1523 spin_unlock_irqrestore(&hw->phy_lock, flags); in xm_link_timer()
1526 static void genesis_mac_init(struct skge_hw *hw, int port) in genesis_mac_init() argument
1528 struct net_device *dev = hw->dev[port]; in genesis_mac_init()
1530 int jumbo = hw->dev[port]->mtu > ETH_DATA_LEN; in genesis_mac_init()
1536 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), in genesis_mac_init()
1538 if (skge_read16(hw, SK_REG(port, TX_MFF_CTRL1)) & MFF_SET_MAC_RST) in genesis_mac_init()
1547 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), MFF_CLR_MAC_RST); in genesis_mac_init()
1554 if (hw->phy_type != SK_PHY_XMAC) { in genesis_mac_init()
1556 r = skge_read32(hw, B2_GP_IO); in genesis_mac_init()
1562 skge_write32(hw, B2_GP_IO, r); in genesis_mac_init()
1565 xm_write16(hw, port, XM_HW_CFG, XM_HW_GMII_MD); in genesis_mac_init()
1569 switch (hw->phy_type) { in genesis_mac_init()
1575 bcom_check_link(hw, port); in genesis_mac_init()
1579 xm_outaddr(hw, port, XM_SA, dev->dev_addr); in genesis_mac_init()
1583 xm_outaddr(hw, port, XM_EXM(i), zero); in genesis_mac_init()
1586 xm_write16(hw, port, XM_STAT_CMD, in genesis_mac_init()
1589 xm_write16(hw, port, XM_STAT_CMD, in genesis_mac_init()
1593 xm_write16(hw, port, XM_RX_HI_WM, 1450); in genesis_mac_init()
1608 xm_write16(hw, port, XM_RX_CMD, r); in genesis_mac_init()
1611 xm_write16(hw, port, XM_TX_CMD, XM_TX_AUTO_PAD); in genesis_mac_init()
1614 if (hw->ports > 1 && jumbo) in genesis_mac_init()
1615 xm_write16(hw, port, XM_TX_THR, 1020); in genesis_mac_init()
1617 xm_write16(hw, port, XM_TX_THR, 512); in genesis_mac_init()
1633 xm_write32(hw, port, XM_MODE, XM_DEF_MODE); in genesis_mac_init()
1641 xm_write32(hw, port, XM_RX_EV_MSK, XMR_DEF_MSK); in genesis_mac_init()
1648 xm_write32(hw, port, XM_TX_EV_MSK, XMT_DEF_MSK); in genesis_mac_init()
1651 skge_write16(hw, B3_MA_TO_CTRL, MA_RST_CLR); in genesis_mac_init()
1654 skge_write8(hw, B3_MA_TOINI_RX1, 72); in genesis_mac_init()
1655 skge_write8(hw, B3_MA_TOINI_RX2, 72); in genesis_mac_init()
1656 skge_write8(hw, B3_MA_TOINI_TX1, 72); in genesis_mac_init()
1657 skge_write8(hw, B3_MA_TOINI_TX2, 72); in genesis_mac_init()
1659 skge_write8(hw, B3_MA_RCINI_RX1, 0); in genesis_mac_init()
1660 skge_write8(hw, B3_MA_RCINI_RX2, 0); in genesis_mac_init()
1661 skge_write8(hw, B3_MA_RCINI_TX1, 0); in genesis_mac_init()
1662 skge_write8(hw, B3_MA_RCINI_TX2, 0); in genesis_mac_init()
1665 skge_write8(hw, SK_REG(port, RX_MFF_CTRL2), MFF_RST_CLR); in genesis_mac_init()
1666 skge_write16(hw, SK_REG(port, RX_MFF_CTRL1), MFF_ENA_TIM_PAT); in genesis_mac_init()
1667 skge_write8(hw, SK_REG(port, RX_MFF_CTRL2), MFF_ENA_OP_MD); in genesis_mac_init()
1670 skge_write8(hw, SK_REG(port, TX_MFF_CTRL2), MFF_RST_CLR); in genesis_mac_init()
1671 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), MFF_TX_CTRL_DEF); in genesis_mac_init()
1672 skge_write8(hw, SK_REG(port, TX_MFF_CTRL2), MFF_ENA_OP_MD); in genesis_mac_init()
1676 skge_write16(hw, SK_REG(port, RX_MFF_CTRL1), MFF_ENA_FLUSH); in genesis_mac_init()
1679 skge_write16(hw, B3_PA_CTRL, in genesis_mac_init()
1686 struct skge_hw *hw = skge->hw; in genesis_stop() local
1692 cmd = xm_read16(hw, port, XM_MMU_CMD); in genesis_stop()
1694 xm_write16(hw, port, XM_MMU_CMD, cmd); in genesis_stop()
1696 genesis_reset(hw, port); in genesis_stop()
1699 skge_write16(hw, B3_PA_CTRL, in genesis_stop()
1703 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), MFF_CLR_MAC_RST); in genesis_stop()
1705 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), MFF_SET_MAC_RST); in genesis_stop()
1706 if (!(skge_read16(hw, SK_REG(port, TX_MFF_CTRL1)) & MFF_SET_MAC_RST)) in genesis_stop()
1711 if (hw->phy_type != SK_PHY_XMAC) { in genesis_stop()
1712 u32 reg = skge_read32(hw, B2_GP_IO); in genesis_stop()
1720 skge_write32(hw, B2_GP_IO, reg); in genesis_stop()
1721 skge_read32(hw, B2_GP_IO); in genesis_stop()
1724 xm_write16(hw, port, XM_MMU_CMD, in genesis_stop()
1725 xm_read16(hw, port, XM_MMU_CMD) in genesis_stop()
1728 xm_read16(hw, port, XM_MMU_CMD); in genesis_stop()
1734 struct skge_hw *hw = skge->hw; in genesis_get_stats() local
1739 xm_write16(hw, port, in genesis_get_stats()
1743 while (xm_read16(hw, port, XM_STAT_CMD) in genesis_get_stats()
1751 data[0] = (u64) xm_read32(hw, port, XM_TXO_OK_HI) << 32 in genesis_get_stats()
1752 | xm_read32(hw, port, XM_TXO_OK_LO); in genesis_get_stats()
1753 data[1] = (u64) xm_read32(hw, port, XM_RXO_OK_HI) << 32 in genesis_get_stats()
1754 | xm_read32(hw, port, XM_RXO_OK_LO); in genesis_get_stats()
1757 data[i] = xm_read32(hw, port, skge_stats[i].xmac_offset); in genesis_get_stats()
1760 static void genesis_mac_intr(struct skge_hw *hw, int port) in genesis_mac_intr() argument
1762 struct net_device *dev = hw->dev[port]; in genesis_mac_intr()
1764 u16 status = xm_read16(hw, port, XM_ISRC); in genesis_mac_intr()
1769 if (hw->phy_type == SK_PHY_XMAC && (status & XM_IS_INP_ASS)) { in genesis_mac_intr()
1770 xm_link_down(hw, port); in genesis_mac_intr()
1775 xm_write32(hw, port, XM_MODE, XM_MD_FTF); in genesis_mac_intr()
1782 struct skge_hw *hw = skge->hw; in genesis_link_up() local
1787 cmd = xm_read16(hw, port, XM_MMU_CMD); in genesis_link_up()
1801 xm_write16(hw, port, XM_MMU_CMD, cmd); in genesis_link_up()
1803 mode = xm_read32(hw, port, XM_MODE); in genesis_link_up()
1817 xm_write16(hw, port, XM_MAC_PTIME, 0xffff); in genesis_link_up()
1820 skge_write16(hw, SK_REG(port, RX_MFF_CTRL1), MFF_ENA_PAUSE); in genesis_link_up()
1829 skge_write16(hw, SK_REG(port, RX_MFF_CTRL1), MFF_DIS_PAUSE); in genesis_link_up()
1832 xm_write32(hw, port, XM_MODE, mode); in genesis_link_up()
1835 msk = xm_read16(hw, port, XM_IMSK); in genesis_link_up()
1837 xm_write16(hw, port, XM_IMSK, msk); in genesis_link_up()
1839 xm_read16(hw, port, XM_ISRC); in genesis_link_up()
1842 cmd = xm_read16(hw, port, XM_MMU_CMD); in genesis_link_up()
1843 if (hw->phy_type != SK_PHY_XMAC && skge->duplex == DUPLEX_FULL) in genesis_link_up()
1850 if (hw->phy_type == SK_PHY_BCOM) { in genesis_link_up()
1851 xm_phy_write(hw, port, PHY_BCOM_AUX_CTRL, in genesis_link_up()
1852 xm_phy_read(hw, port, PHY_BCOM_AUX_CTRL) in genesis_link_up()
1854 xm_phy_write(hw, port, PHY_BCOM_INT_MASK, PHY_B_DEF_MSK); in genesis_link_up()
1858 xm_write16(hw, port, XM_MMU_CMD, in genesis_link_up()
1866 struct skge_hw *hw = skge->hw; in bcom_phy_intr() local
1870 isrc = xm_phy_read(hw, port, PHY_BCOM_INT_STAT); in bcom_phy_intr()
1876 hw->dev[port]->name); in bcom_phy_intr()
1882 u16 ctrl = xm_phy_read(hw, port, PHY_BCOM_CTRL); in bcom_phy_intr()
1883 xm_phy_write(hw, port, PHY_BCOM_CTRL, in bcom_phy_intr()
1885 xm_phy_write(hw, port, PHY_BCOM_CTRL, in bcom_phy_intr()
1890 bcom_check_link(hw, port); in bcom_phy_intr()
1894 static int gm_phy_write(struct skge_hw *hw, int port, u16 reg, u16 val) in gm_phy_write() argument
1898 gma_write16(hw, port, GM_SMI_DATA, val); in gm_phy_write()
1899 gma_write16(hw, port, GM_SMI_CTRL, in gm_phy_write()
1900 GM_SMI_CT_PHY_AD(hw->phy_addr) | GM_SMI_CT_REG_AD(reg)); in gm_phy_write()
1904 if (!(gma_read16(hw, port, GM_SMI_CTRL) & GM_SMI_CT_BUSY)) in gm_phy_write()
1908 pr_warn("%s: phy write timeout\n", hw->dev[port]->name); in gm_phy_write()
1912 static int __gm_phy_read(struct skge_hw *hw, int port, u16 reg, u16 *val) in __gm_phy_read() argument
1916 gma_write16(hw, port, GM_SMI_CTRL, in __gm_phy_read()
1917 GM_SMI_CT_PHY_AD(hw->phy_addr) in __gm_phy_read()
1922 if (gma_read16(hw, port, GM_SMI_CTRL) & GM_SMI_CT_RD_VAL) in __gm_phy_read()
1928 *val = gma_read16(hw, port, GM_SMI_DATA); in __gm_phy_read()
1932 static u16 gm_phy_read(struct skge_hw *hw, int port, u16 reg) in gm_phy_read() argument
1935 if (__gm_phy_read(hw, port, reg, &v)) in gm_phy_read()
1936 pr_warn("%s: phy read timeout\n", hw->dev[port]->name); in gm_phy_read()
1941 static void yukon_init(struct skge_hw *hw, int port) in yukon_init() argument
1943 struct skge_port *skge = netdev_priv(hw->dev[port]); in yukon_init()
1947 u16 ectrl = gm_phy_read(hw, port, PHY_MARV_EXT_CTRL); in yukon_init()
1955 gm_phy_write(hw, port, PHY_MARV_EXT_CTRL, ectrl); in yukon_init()
1958 ctrl = gm_phy_read(hw, port, PHY_MARV_CTRL); in yukon_init()
1963 gm_phy_write(hw, port, PHY_MARV_CTRL, ctrl); in yukon_init()
1970 if (hw->copper) { in yukon_init()
2016 gm_phy_write(hw, port, PHY_MARV_1000T_CTRL, ct1000); in yukon_init()
2018 gm_phy_write(hw, port, PHY_MARV_AUNE_ADV, adv); in yukon_init()
2019 gm_phy_write(hw, port, PHY_MARV_CTRL, ctrl); in yukon_init()
2023 gm_phy_write(hw, port, PHY_MARV_INT_MASK, PHY_M_IS_AN_MSK); in yukon_init()
2025 gm_phy_write(hw, port, PHY_MARV_INT_MASK, PHY_M_IS_DEF_MSK); in yukon_init()
2028 static void yukon_reset(struct skge_hw *hw, int port) in yukon_reset() argument
2030 gm_phy_write(hw, port, PHY_MARV_INT_MASK, 0);/* disable PHY IRQs */ in yukon_reset()
2031 gma_write16(hw, port, GM_MC_ADDR_H1, 0); /* clear MC hash */ in yukon_reset()
2032 gma_write16(hw, port, GM_MC_ADDR_H2, 0); in yukon_reset()
2033 gma_write16(hw, port, GM_MC_ADDR_H3, 0); in yukon_reset()
2034 gma_write16(hw, port, GM_MC_ADDR_H4, 0); in yukon_reset()
2036 gma_write16(hw, port, GM_RX_CTRL, in yukon_reset()
2037 gma_read16(hw, port, GM_RX_CTRL) in yukon_reset()
2042 static int is_yukon_lite_a0(struct skge_hw *hw) in is_yukon_lite_a0() argument
2047 if (hw->chip_id != CHIP_ID_YUKON) in is_yukon_lite_a0()
2050 reg = skge_read32(hw, B2_FAR); in is_yukon_lite_a0()
2051 skge_write8(hw, B2_FAR + 3, 0xff); in is_yukon_lite_a0()
2052 ret = (skge_read8(hw, B2_FAR + 3) != 0); in is_yukon_lite_a0()
2053 skge_write32(hw, B2_FAR, reg); in is_yukon_lite_a0()
2057 static void yukon_mac_init(struct skge_hw *hw, int port) in yukon_mac_init() argument
2059 struct skge_port *skge = netdev_priv(hw->dev[port]); in yukon_mac_init()
2062 const u8 *addr = hw->dev[port]->dev_addr; in yukon_mac_init()
2065 if (hw->chip_id == CHIP_ID_YUKON_LITE && in yukon_mac_init()
2066 hw->chip_rev >= CHIP_REV_YU_LITE_A3) { in yukon_mac_init()
2067 reg = skge_read32(hw, B2_GP_IO); in yukon_mac_init()
2069 skge_write32(hw, B2_GP_IO, reg); in yukon_mac_init()
2073 skge_write32(hw, SK_REG(port, GPHY_CTRL), GPC_RST_SET); in yukon_mac_init()
2074 skge_write32(hw, SK_REG(port, GMAC_CTRL), GMC_RST_SET); in yukon_mac_init()
2077 if (hw->chip_id == CHIP_ID_YUKON_LITE && in yukon_mac_init()
2078 hw->chip_rev >= CHIP_REV_YU_LITE_A3) { in yukon_mac_init()
2079 reg = skge_read32(hw, B2_GP_IO); in yukon_mac_init()
2082 skge_write32(hw, B2_GP_IO, reg); in yukon_mac_init()
2088 reg |= hw->copper ? GPC_HWCFG_GMII_COP : GPC_HWCFG_GMII_FIB; in yukon_mac_init()
2091 skge_write32(hw, SK_REG(port, GPHY_CTRL), reg | GPC_RST_SET); in yukon_mac_init()
2092 skge_write32(hw, SK_REG(port, GPHY_CTRL), reg | GPC_RST_CLR); in yukon_mac_init()
2093 skge_write32(hw, SK_REG(port, GMAC_CTRL), GMC_PAUSE_ON | GMC_RST_CLR); in yukon_mac_init()
2097 gma_write16(hw, port, GM_GP_CTRL, in yukon_mac_init()
2098 gma_read16(hw, port, GM_GP_CTRL) | reg); in yukon_mac_init()
2121 skge_write32(hw, SK_REG(port, GMAC_CTRL), GMC_PAUSE_OFF); in yukon_mac_init()
2134 gma_write16(hw, port, GM_GP_CTRL, reg); in yukon_mac_init()
2135 skge_read16(hw, SK_REG(port, GMAC_IRQ_SRC)); in yukon_mac_init()
2137 yukon_init(hw, port); in yukon_mac_init()
2140 reg = gma_read16(hw, port, GM_PHY_ADDR); in yukon_mac_init()
2141 gma_write16(hw, port, GM_PHY_ADDR, reg | GM_PAR_MIB_CLR); in yukon_mac_init()
2144 gma_read16(hw, port, GM_MIB_CNT_BASE + 8*i); in yukon_mac_init()
2145 gma_write16(hw, port, GM_PHY_ADDR, reg); in yukon_mac_init()
2148 gma_write16(hw, port, GM_TX_CTRL, TX_COL_THR(TX_COL_DEF)); in yukon_mac_init()
2151 gma_write16(hw, port, GM_RX_CTRL, in yukon_mac_init()
2155 gma_write16(hw, port, GM_TX_FLOW_CTRL, 0xffff); in yukon_mac_init()
2158 gma_write16(hw, port, GM_TX_PARAM, in yukon_mac_init()
2168 if (hw->dev[port]->mtu > ETH_DATA_LEN) in yukon_mac_init()
2171 gma_write16(hw, port, GM_SERIAL_MODE, reg); in yukon_mac_init()
2174 gma_set_addr(hw, port, GM_SRC_ADDR_1L, addr); in yukon_mac_init()
2176 gma_set_addr(hw, port, GM_SRC_ADDR_2L, addr); in yukon_mac_init()
2179 gma_write16(hw, port, GM_TX_IRQ_MSK, 0); in yukon_mac_init()
2180 gma_write16(hw, port, GM_RX_IRQ_MSK, 0); in yukon_mac_init()
2181 gma_write16(hw, port, GM_TR_IRQ_MSK, 0); in yukon_mac_init()
2186 skge_write16(hw, SK_REG(port, RX_GMF_FL_MSK), RX_FF_FL_DEF_MSK); in yukon_mac_init()
2190 if (is_yukon_lite_a0(hw)) in yukon_mac_init()
2193 skge_write8(hw, SK_REG(port, RX_GMF_CTRL_T), GMF_RST_CLR); in yukon_mac_init()
2194 skge_write16(hw, SK_REG(port, RX_GMF_CTRL_T), reg); in yukon_mac_init()
2200 skge_write16(hw, SK_REG(port, RX_GMF_FL_THR), RX_GMF_FL_THR_DEF+1); in yukon_mac_init()
2203 skge_write8(hw, SK_REG(port, TX_GMF_CTRL_T), GMF_RST_CLR); in yukon_mac_init()
2204 skge_write16(hw, SK_REG(port, TX_GMF_CTRL_T), GMF_OPER_ON); in yukon_mac_init()
2208 static void yukon_suspend(struct skge_hw *hw, int port) in yukon_suspend() argument
2212 ctrl = gm_phy_read(hw, port, PHY_MARV_PHY_CTRL); in yukon_suspend()
2214 gm_phy_write(hw, port, PHY_MARV_PHY_CTRL, ctrl); in yukon_suspend()
2216 ctrl = gm_phy_read(hw, port, PHY_MARV_CTRL); in yukon_suspend()
2218 gm_phy_write(hw, port, PHY_MARV_CTRL, ctrl); in yukon_suspend()
2221 ctrl = gm_phy_read(hw, port, PHY_MARV_CTRL); in yukon_suspend()
2223 gm_phy_write(hw, port, PHY_MARV_CTRL, ctrl); in yukon_suspend()
2228 struct skge_hw *hw = skge->hw; in yukon_stop() local
2231 skge_write8(hw, SK_REG(port, GMAC_IRQ_MSK), 0); in yukon_stop()
2232 yukon_reset(hw, port); in yukon_stop()
2234 gma_write16(hw, port, GM_GP_CTRL, in yukon_stop()
2235 gma_read16(hw, port, GM_GP_CTRL) in yukon_stop()
2237 gma_read16(hw, port, GM_GP_CTRL); in yukon_stop()
2239 yukon_suspend(hw, port); in yukon_stop()
2242 skge_write8(hw, SK_REG(port, GPHY_CTRL), GPC_RST_SET); in yukon_stop()
2243 skge_write8(hw, SK_REG(port, GMAC_CTRL), GMC_RST_SET); in yukon_stop()
2248 struct skge_hw *hw = skge->hw; in yukon_get_stats() local
2252 data[0] = (u64) gma_read32(hw, port, GM_TXO_OK_HI) << 32 in yukon_get_stats()
2253 | gma_read32(hw, port, GM_TXO_OK_LO); in yukon_get_stats()
2254 data[1] = (u64) gma_read32(hw, port, GM_RXO_OK_HI) << 32 in yukon_get_stats()
2255 | gma_read32(hw, port, GM_RXO_OK_LO); in yukon_get_stats()
2258 data[i] = gma_read32(hw, port, in yukon_get_stats()
2262 static void yukon_mac_intr(struct skge_hw *hw, int port) in yukon_mac_intr() argument
2264 struct net_device *dev = hw->dev[port]; in yukon_mac_intr()
2266 u8 status = skge_read8(hw, SK_REG(port, GMAC_IRQ_SRC)); in yukon_mac_intr()
2273 skge_write8(hw, SK_REG(port, RX_GMF_CTRL_T), GMF_CLI_RX_FO); in yukon_mac_intr()
2278 skge_write8(hw, SK_REG(port, TX_GMF_CTRL_T), GMF_CLI_TX_FU); in yukon_mac_intr()
2283 static u16 yukon_speed(const struct skge_hw *hw, u16 aux) in yukon_speed() argument
2297 struct skge_hw *hw = skge->hw; in yukon_link_up() local
2302 skge_write8(hw, SK_REG(port, GMAC_IRQ_MSK), GMAC_DEF_MSK); in yukon_link_up()
2304 reg = gma_read16(hw, port, GM_GP_CTRL); in yukon_link_up()
2310 gma_write16(hw, port, GM_GP_CTRL, reg); in yukon_link_up()
2312 gm_phy_write(hw, port, PHY_MARV_INT_MASK, PHY_M_IS_DEF_MSK); in yukon_link_up()
2318 struct skge_hw *hw = skge->hw; in yukon_link_down() local
2322 ctrl = gma_read16(hw, port, GM_GP_CTRL); in yukon_link_down()
2324 gma_write16(hw, port, GM_GP_CTRL, ctrl); in yukon_link_down()
2327 ctrl = gm_phy_read(hw, port, PHY_MARV_AUNE_ADV); in yukon_link_down()
2330 gm_phy_write(hw, port, PHY_MARV_AUNE_ADV, ctrl); in yukon_link_down()
2335 yukon_init(hw, port); in yukon_link_down()
2340 struct skge_hw *hw = skge->hw; in yukon_phy_intr() local
2345 istatus = gm_phy_read(hw, port, PHY_MARV_INT_STAT); in yukon_phy_intr()
2346 phystat = gm_phy_read(hw, port, PHY_MARV_PHY_STAT); in yukon_phy_intr()
2352 if (gm_phy_read(hw, port, PHY_MARV_AUNE_LP) in yukon_phy_intr()
2358 if (gm_phy_read(hw, port, PHY_MARV_1000T_STAT) & PHY_B_1000S_MSF) { in yukon_phy_intr()
2370 skge->speed = yukon_speed(hw, phystat); in yukon_phy_intr()
2389 skge_write8(hw, SK_REG(port, GMAC_CTRL), GMC_PAUSE_OFF); in yukon_phy_intr()
2391 skge_write8(hw, SK_REG(port, GMAC_CTRL), GMC_PAUSE_ON); in yukon_phy_intr()
2397 skge->speed = yukon_speed(hw, phystat); in yukon_phy_intr()
2416 struct skge_hw *hw = skge->hw; in skge_phy_reset() local
2418 struct net_device *dev = hw->dev[port]; in skge_phy_reset()
2423 spin_lock_bh(&hw->phy_lock); in skge_phy_reset()
2424 if (is_genesis(hw)) { in skge_phy_reset()
2425 genesis_reset(hw, port); in skge_phy_reset()
2426 genesis_mac_init(hw, port); in skge_phy_reset()
2428 yukon_reset(hw, port); in skge_phy_reset()
2429 yukon_init(hw, port); in skge_phy_reset()
2431 spin_unlock_bh(&hw->phy_lock); in skge_phy_reset()
2441 struct skge_hw *hw = skge->hw; in skge_ioctl() local
2449 data->phy_id = hw->phy_addr; in skge_ioctl()
2454 spin_lock_bh(&hw->phy_lock); in skge_ioctl()
2456 if (is_genesis(hw)) in skge_ioctl()
2457 err = __xm_phy_read(hw, skge->port, data->reg_num & 0x1f, &val); in skge_ioctl()
2459 err = __gm_phy_read(hw, skge->port, data->reg_num & 0x1f, &val); in skge_ioctl()
2460 spin_unlock_bh(&hw->phy_lock); in skge_ioctl()
2466 spin_lock_bh(&hw->phy_lock); in skge_ioctl()
2467 if (is_genesis(hw)) in skge_ioctl()
2468 err = xm_phy_write(hw, skge->port, data->reg_num & 0x1f, in skge_ioctl()
2471 err = gm_phy_write(hw, skge->port, data->reg_num & 0x1f, in skge_ioctl()
2473 spin_unlock_bh(&hw->phy_lock); in skge_ioctl()
2479 static void skge_ramset(struct skge_hw *hw, u16 q, u32 start, size_t len) in skge_ramset() argument
2487 skge_write8(hw, RB_ADDR(q, RB_CTRL), RB_RST_CLR); in skge_ramset()
2488 skge_write32(hw, RB_ADDR(q, RB_START), start); in skge_ramset()
2489 skge_write32(hw, RB_ADDR(q, RB_WP), start); in skge_ramset()
2490 skge_write32(hw, RB_ADDR(q, RB_RP), start); in skge_ramset()
2491 skge_write32(hw, RB_ADDR(q, RB_END), end); in skge_ramset()
2495 skge_write32(hw, RB_ADDR(q, RB_RX_UTPP), in skge_ramset()
2497 skge_write32(hw, RB_ADDR(q, RB_RX_LTPP), in skge_ramset()
2503 skge_write8(hw, RB_ADDR(q, RB_CTRL), RB_ENA_STFWD); in skge_ramset()
2506 skge_write8(hw, RB_ADDR(q, RB_CTRL), RB_ENA_OP_MD); in skge_ramset()
2513 struct skge_hw *hw = skge->hw; in skge_qset() local
2518 if ((skge_read16(hw, B0_CTST) & (CS_BUS_CLOCK | CS_BUS_SLOT_SZ)) == 0) in skge_qset()
2521 skge_write32(hw, Q_ADDR(q, Q_CSR), CSR_CLR_RESET); in skge_qset()
2522 skge_write32(hw, Q_ADDR(q, Q_F), watermark); in skge_qset()
2523 skge_write32(hw, Q_ADDR(q, Q_DA_H), (u32)(base >> 32)); in skge_qset()
2524 skge_write32(hw, Q_ADDR(q, Q_DA_L), (u32)base); in skge_qset()
2530 struct skge_hw *hw = skge->hw; in skge_up() local
2550 skge->mem = dma_alloc_coherent(&hw->pdev->dev, skge->mem_size, in skge_up()
2558 dev_err(&hw->pdev->dev, "dma_alloc_coherent region crosses 4G boundary\n"); in skge_up()
2576 if (hw->ports == 1) { in skge_up()
2577 err = request_irq(hw->pdev->irq, skge_intr, IRQF_SHARED, in skge_up()
2578 dev->name, hw); in skge_up()
2581 hw->pdev->irq, err); in skge_up()
2588 spin_lock_bh(&hw->phy_lock); in skge_up()
2589 if (is_genesis(hw)) in skge_up()
2590 genesis_mac_init(hw, port); in skge_up()
2592 yukon_mac_init(hw, port); in skge_up()
2593 spin_unlock_bh(&hw->phy_lock); in skge_up()
2596 chunk = (hw->ram_size - hw->ram_offset) / (hw->ports * 2); in skge_up()
2597 ram_addr = hw->ram_offset + 2 * chunk * port; in skge_up()
2599 skge_ramset(hw, rxqaddr[port], ram_addr, chunk); in skge_up()
2603 skge_ramset(hw, txqaddr[port], ram_addr+chunk, chunk); in skge_up()
2608 skge_write8(hw, Q_ADDR(rxqaddr[port], Q_CSR), CSR_START | CSR_IRQ_CL_F); in skge_up()
2611 spin_lock_irq(&hw->hw_lock); in skge_up()
2612 hw->intr_mask |= portmask[port]; in skge_up()
2613 skge_write32(hw, B0_IMSK, hw->intr_mask); in skge_up()
2614 skge_read32(hw, B0_IMSK); in skge_up()
2615 spin_unlock_irq(&hw->hw_lock); in skge_up()
2629 dma_free_coherent(&hw->pdev->dev, skge->mem_size, skge->mem, in skge_up()
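skge_up() above carves the usable on-chip packet RAM into equal chunks, two per port: chunk = (ram_size - ram_offset) / (ports * 2), with port N's receive buffer at ram_offset + 2 * chunk * N and its transmit buffer one chunk higher. A worked sketch of that layout, using the dual-port Genesis values that appear later in this listing under skge_reset() (ram_size 0x100000, ram_offset 0x80000); illustrative only, with addresses in whatever unit the chip expresses its RAM size.

#include <stdio.h>

int main(void)
{
	/* values from the Genesis special case in skge_reset() */
	unsigned int ram_size = 0x100000, ram_offset = 0x80000, ports = 2;
	unsigned int chunk = (ram_size - ram_offset) / (ports * 2);	/* 0x20000 */
	unsigned int port;

	for (port = 0; port < ports; port++) {
		unsigned int rx = ram_offset + 2 * chunk * port;

		/* port 0: RX 0x80000, TX 0xa0000; port 1: RX 0xc0000, TX 0xe0000 */
		printf("port %u: RX buffer 0x%x, TX buffer 0x%x\n",
		       port, rx, rx + chunk);
	}
	return 0;
}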
2637 static void skge_rx_stop(struct skge_hw *hw, int port) in skge_rx_stop() argument
2639 skge_write8(hw, Q_ADDR(rxqaddr[port], Q_CSR), CSR_STOP); in skge_rx_stop()
2640 skge_write32(hw, RB_ADDR(port ? Q_R2 : Q_R1, RB_CTRL), in skge_rx_stop()
2642 skge_write32(hw, Q_ADDR(rxqaddr[port], Q_CSR), CSR_SET_RESET); in skge_rx_stop()
2648 struct skge_hw *hw = skge->hw; in skge_down() local
2658 if (is_genesis(hw) && hw->phy_type == SK_PHY_XMAC) in skge_down()
2664 spin_lock_irq(&hw->hw_lock); in skge_down()
2665 hw->intr_mask &= ~portmask[port]; in skge_down()
2666 skge_write32(hw, B0_IMSK, (hw->ports == 1) ? 0 : hw->intr_mask); in skge_down()
2667 skge_read32(hw, B0_IMSK); in skge_down()
2668 spin_unlock_irq(&hw->hw_lock); in skge_down()
2670 if (hw->ports == 1) in skge_down()
2671 free_irq(hw->pdev->irq, hw); in skge_down()
2673 skge_write8(skge->hw, SK_REG(skge->port, LNK_LED_REG), LED_REG_OFF); in skge_down()
2674 if (is_genesis(hw)) in skge_down()
2680 skge_write8(hw, Q_ADDR(txqaddr[port], Q_CSR), CSR_STOP); in skge_down()
2681 skge_write32(hw, RB_ADDR(txqaddr[port], RB_CTRL), in skge_down()
2686 skge_write8(hw, SK_REG(port, TXA_CTRL), in skge_down()
2690 skge_write32(hw, SK_REG(port, TXA_ITI_INI), 0L); in skge_down()
2691 skge_write32(hw, SK_REG(port, TXA_LIM_INI), 0L); in skge_down()
2694 skge_write32(hw, Q_ADDR(txqaddr[port], Q_CSR), CSR_SET_RESET); in skge_down()
2695 skge_write32(hw, RB_ADDR(txqaddr[port], RB_CTRL), RB_RST_SET); in skge_down()
2698 skge_write8(hw, RB_ADDR(port == 0 ? Q_XA1 : Q_XA2, RB_CTRL), RB_RST_SET); in skge_down()
2700 skge_rx_stop(hw, port); in skge_down()
2702 if (is_genesis(hw)) { in skge_down()
2703 skge_write8(hw, SK_REG(port, TX_MFF_CTRL2), MFF_RST_SET); in skge_down()
2704 skge_write8(hw, SK_REG(port, RX_MFF_CTRL2), MFF_RST_SET); in skge_down()
2706 skge_write8(hw, SK_REG(port, RX_GMF_CTRL_T), GMF_RST_SET); in skge_down()
2707 skge_write8(hw, SK_REG(port, TX_GMF_CTRL_T), GMF_RST_SET); in skge_down()
2720 dma_free_coherent(&hw->pdev->dev, skge->mem_size, skge->mem, in skge_down()
2737 struct skge_hw *hw = skge->hw; in skge_xmit_frame() local
2755 map = dma_map_single(&hw->pdev->dev, skb->data, len, DMA_TO_DEVICE); in skge_xmit_frame()
2756 if (dma_mapping_error(&hw->pdev->dev, map)) in skge_xmit_frame()
2772 hw->chip_rev == 0 && hw->chip_id == CHIP_ID_YUKON) in skge_xmit_frame()
2792 map = skb_frag_dma_map(&hw->pdev->dev, frag, 0, in skge_xmit_frame()
2794 if (dma_mapping_error(&hw->pdev->dev, map)) in skge_xmit_frame()
2818 skge_write8(hw, Q_ADDR(txqaddr[skge->port], Q_CSR), CSR_START); in skge_xmit_frame()
2836 dma_unmap_single(&hw->pdev->dev, dma_unmap_addr(e, mapaddr), in skge_xmit_frame()
2840 dma_unmap_page(&hw->pdev->dev, dma_unmap_addr(e, mapaddr), in skge_xmit_frame()
2846 dev_warn(&hw->pdev->dev, "%s: tx mapping error\n", dev->name); in skge_xmit_frame()
2874 skge_tx_unmap(skge->hw->pdev, e, td->control); in skge_tx_clean()
2891 skge_write8(skge->hw, Q_ADDR(txqaddr[skge->port], Q_CSR), CSR_STOP); in skge_tx_timeout()
2930 struct skge_hw *hw = skge->hw; in genesis_set_multicast() local
2936 mode = xm_read32(hw, port, XM_MODE); in genesis_set_multicast()
2956 xm_write32(hw, port, XM_MODE, mode); in genesis_set_multicast()
2957 xm_outhash(hw, port, XM_HSM, filter); in genesis_set_multicast()
2969 struct skge_hw *hw = skge->hw; in yukon_set_multicast() local
2979 reg = gma_read16(hw, port, GM_RX_CTRL); in yukon_set_multicast()
2999 gma_write16(hw, port, GM_MC_ADDR_H1, in yukon_set_multicast()
3001 gma_write16(hw, port, GM_MC_ADDR_H2, in yukon_set_multicast()
3003 gma_write16(hw, port, GM_MC_ADDR_H3, in yukon_set_multicast()
3005 gma_write16(hw, port, GM_MC_ADDR_H4, in yukon_set_multicast()
3008 gma_write16(hw, port, GM_RX_CTRL, reg); in yukon_set_multicast()
3011 static inline u16 phy_length(const struct skge_hw *hw, u32 status) in phy_length() argument
3013 if (is_genesis(hw)) in phy_length()
3019 static inline int bad_phy_status(const struct skge_hw *hw, u32 status) in bad_phy_status() argument
3021 if (is_genesis(hw)) in bad_phy_status()
3032 if (is_genesis(skge->hw)) in skge_set_multicast()
3061 if (bad_phy_status(skge->hw, status)) in skge_rx_get()
3064 if (phy_length(skge->hw, status) != len) in skge_rx_get()
3072 dma_sync_single_for_cpu(&skge->hw->pdev->dev, in skge_rx_get()
3077 dma_sync_single_for_device(&skge->hw->pdev->dev, in skge_rx_get()
3100 dma_unmap_single(&skge->hw->pdev->dev, in skge_rx_get()
3121 if (is_genesis(skge->hw)) { in skge_rx_get()
3150 skge_write8(skge->hw, Q_ADDR(txqaddr[skge->port], Q_CSR), CSR_IRQ_CL_F); in skge_tx_done()
3158 skge_tx_unmap(skge->hw->pdev, e, control); in skge_tx_done()
3193 struct skge_hw *hw = skge->hw; in skge_poll() local
3200 skge_write8(hw, Q_ADDR(rxqaddr[skge->port], Q_CSR), CSR_IRQ_CL_F); in skge_poll()
3222 skge_write8(hw, Q_ADDR(rxqaddr[skge->port], Q_CSR), CSR_START); in skge_poll()
3227 spin_lock_irqsave(&hw->hw_lock, flags); in skge_poll()
3228 hw->intr_mask |= napimask[skge->port]; in skge_poll()
3229 skge_write32(hw, B0_IMSK, hw->intr_mask); in skge_poll()
3230 skge_read32(hw, B0_IMSK); in skge_poll()
3231 spin_unlock_irqrestore(&hw->hw_lock, flags); in skge_poll()
3240 static void skge_mac_parity(struct skge_hw *hw, int port) in skge_mac_parity() argument
3242 struct net_device *dev = hw->dev[port]; in skge_mac_parity()
3246 if (is_genesis(hw)) in skge_mac_parity()
3247 skge_write16(hw, SK_REG(port, TX_MFF_CTRL1), in skge_mac_parity()
3250 /* HW-Bug #8: cleared by GMF_CLI_TX_FC instead of GMF_CLI_TX_PE */ in skge_mac_parity()
3251 skge_write8(hw, SK_REG(port, TX_GMF_CTRL_T), in skge_mac_parity()
3252 (hw->chip_id == CHIP_ID_YUKON && hw->chip_rev == 0) in skge_mac_parity()
3256 static void skge_mac_intr(struct skge_hw *hw, int port) in skge_mac_intr() argument
3258 if (is_genesis(hw)) in skge_mac_intr()
3259 genesis_mac_intr(hw, port); in skge_mac_intr()
3261 yukon_mac_intr(hw, port); in skge_mac_intr()
3265 static void skge_error_irq(struct skge_hw *hw) in skge_error_irq() argument
3267 struct pci_dev *pdev = hw->pdev; in skge_error_irq()
3268 u32 hwstatus = skge_read32(hw, B0_HWE_ISRC); in skge_error_irq()
3270 if (is_genesis(hw)) { in skge_error_irq()
3273 skge_write16(hw, RX_MFF_CTRL1, MFF_CLR_INSTAT); in skge_error_irq()
3275 skge_write16(hw, RX_MFF_CTRL2, MFF_CLR_INSTAT); in skge_error_irq()
3279 skge_write8(hw, GMAC_TI_ST_CTRL, GMT_ST_CLR_IRQ); in skge_error_irq()
3284 skge_write16(hw, B3_RI_CTRL, RI_CLR_RD_PERR); in skge_error_irq()
3289 skge_write16(hw, B3_RI_CTRL, RI_CLR_WR_PERR); in skge_error_irq()
3293 skge_mac_parity(hw, 0); in skge_error_irq()
3296 skge_mac_parity(hw, 1); in skge_error_irq()
3300 hw->dev[0]->name); in skge_error_irq()
3301 skge_write32(hw, B0_R1_CSR, CSR_IRQ_CL_P); in skge_error_irq()
3306 hw->dev[1]->name); in skge_error_irq()
3307 skge_write32(hw, B0_R2_CSR, CSR_IRQ_CL_P); in skge_error_irq()
3321 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_ON); in skge_error_irq()
3325 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_OFF); in skge_error_irq()
3328 hwstatus = skge_read32(hw, B0_HWE_ISRC); in skge_error_irq()
3330 dev_warn(&hw->pdev->dev, "unable to clear error (so ignoring them)\n"); in skge_error_irq()
3331 hw->intr_mask &= ~IS_HW_ERR; in skge_error_irq()
3343 struct skge_hw *hw = from_tasklet(hw, t, phy_task); in skge_extirq() local
3346 for (port = 0; port < hw->ports; port++) { in skge_extirq()
3347 struct net_device *dev = hw->dev[port]; in skge_extirq()
3352 spin_lock(&hw->phy_lock); in skge_extirq()
3353 if (!is_genesis(hw)) in skge_extirq()
3355 else if (hw->phy_type == SK_PHY_BCOM) in skge_extirq()
3357 spin_unlock(&hw->phy_lock); in skge_extirq()
3361 spin_lock_irq(&hw->hw_lock); in skge_extirq()
3362 hw->intr_mask |= IS_EXT_REG; in skge_extirq()
3363 skge_write32(hw, B0_IMSK, hw->intr_mask); in skge_extirq()
3364 skge_read32(hw, B0_IMSK); in skge_extirq()
3365 spin_unlock_irq(&hw->hw_lock); in skge_extirq()
3370 struct skge_hw *hw = dev_id; in skge_intr() local
3374 spin_lock(&hw->hw_lock); in skge_intr()
3376 status = skge_read32(hw, B0_SP_ISRC); in skge_intr()
3381 status &= hw->intr_mask; in skge_intr()
3383 hw->intr_mask &= ~IS_EXT_REG; in skge_intr()
3384 tasklet_schedule(&hw->phy_task); in skge_intr()
3388 struct skge_port *skge = netdev_priv(hw->dev[0]); in skge_intr()
3389 hw->intr_mask &= ~(IS_XA1_F|IS_R1_F); in skge_intr()
3394 skge_write16(hw, B3_PA_CTRL, PA_CLR_TO_TX1); in skge_intr()
3397 ++hw->dev[0]->stats.rx_over_errors; in skge_intr()
3398 skge_write16(hw, B3_PA_CTRL, PA_CLR_TO_RX1); in skge_intr()
3403 skge_mac_intr(hw, 0); in skge_intr()
3405 if (hw->dev[1]) { in skge_intr()
3406 struct skge_port *skge = netdev_priv(hw->dev[1]); in skge_intr()
3409 hw->intr_mask &= ~(IS_XA2_F|IS_R2_F); in skge_intr()
3414 ++hw->dev[1]->stats.rx_over_errors; in skge_intr()
3415 skge_write16(hw, B3_PA_CTRL, PA_CLR_TO_RX2); in skge_intr()
3419 skge_write16(hw, B3_PA_CTRL, PA_CLR_TO_TX2); in skge_intr()
3422 skge_mac_intr(hw, 1); in skge_intr()
3426 skge_error_irq(hw); in skge_intr()
3428 skge_write32(hw, B0_IMSK, hw->intr_mask); in skge_intr()
3429 skge_read32(hw, B0_IMSK); in skge_intr()
3430 spin_unlock(&hw->hw_lock); in skge_intr()
3441 skge_intr(dev->irq, skge->hw); in skge_netpoll()
3449 struct skge_hw *hw = skge->hw; in skge_set_mac_address() local
3460 memcpy_toio(hw->regs + B2_MAC_1 + port*8, dev->dev_addr, ETH_ALEN); in skge_set_mac_address()
3461 memcpy_toio(hw->regs + B2_MAC_2 + port*8, dev->dev_addr, ETH_ALEN); in skge_set_mac_address()
3464 spin_lock_bh(&hw->phy_lock); in skge_set_mac_address()
3465 ctrl = gma_read16(hw, port, GM_GP_CTRL); in skge_set_mac_address()
3466 gma_write16(hw, port, GM_GP_CTRL, ctrl & ~GM_GPCR_RX_ENA); in skge_set_mac_address()
3468 memcpy_toio(hw->regs + B2_MAC_1 + port*8, dev->dev_addr, ETH_ALEN); in skge_set_mac_address()
3469 memcpy_toio(hw->regs + B2_MAC_2 + port*8, dev->dev_addr, ETH_ALEN); in skge_set_mac_address()
3471 if (is_genesis(hw)) in skge_set_mac_address()
3472 xm_outaddr(hw, port, XM_SA, dev->dev_addr); in skge_set_mac_address()
3474 gma_set_addr(hw, port, GM_SRC_ADDR_1L, dev->dev_addr); in skge_set_mac_address()
3475 gma_set_addr(hw, port, GM_SRC_ADDR_2L, dev->dev_addr); in skge_set_mac_address()
3478 gma_write16(hw, port, GM_GP_CTRL, ctrl); in skge_set_mac_address()
3479 spin_unlock_bh(&hw->phy_lock); in skge_set_mac_address()
3495 static const char *skge_board_name(const struct skge_hw *hw) in skge_board_name() argument
3501 if (skge_chips[i].id == hw->chip_id) in skge_board_name()
3504 snprintf(buf, sizeof(buf), "chipid 0x%x", hw->chip_id); in skge_board_name()
3513 static int skge_reset(struct skge_hw *hw) in skge_reset() argument
3520 ctst = skge_read16(hw, B0_CTST); in skge_reset()
3523 skge_write8(hw, B0_CTST, CS_RST_SET); in skge_reset()
3524 skge_write8(hw, B0_CTST, CS_RST_CLR); in skge_reset()
3527 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_ON); in skge_reset()
3528 skge_write8(hw, B2_TST_CTRL2, 0); in skge_reset()
3530 pci_read_config_word(hw->pdev, PCI_STATUS, &pci_status); in skge_reset()
3531 pci_write_config_word(hw->pdev, PCI_STATUS, in skge_reset()
3533 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_OFF); in skge_reset()
3534 skge_write8(hw, B0_CTST, CS_MRST_CLR); in skge_reset()
3537 skge_write16(hw, B0_CTST, in skge_reset()
3540 hw->chip_id = skge_read8(hw, B2_CHIP_ID); in skge_reset()
3541 hw->phy_type = skge_read8(hw, B2_E_1) & 0xf; in skge_reset()
3542 pmd_type = skge_read8(hw, B2_PMD_TYP); in skge_reset()
3543 hw->copper = (pmd_type == 'T' || pmd_type == '1'); in skge_reset()
3545 switch (hw->chip_id) { in skge_reset()
3548 switch (hw->phy_type) { in skge_reset()
3550 hw->phy_addr = PHY_ADDR_XMAC; in skge_reset()
3553 hw->phy_addr = PHY_ADDR_BCOM; in skge_reset()
3556 dev_err(&hw->pdev->dev, "unsupported phy type 0x%x\n", in skge_reset()
3557 hw->phy_type); in skge_reset()
3562 dev_err(&hw->pdev->dev, "Genesis chip detected but not configured\n"); in skge_reset()
3569 if (hw->phy_type < SK_PHY_MARV_COPPER && pmd_type != 'S') in skge_reset()
3570 hw->copper = 1; in skge_reset()
3572 hw->phy_addr = PHY_ADDR_MARV; in skge_reset()
3576 dev_err(&hw->pdev->dev, "unsupported chip type 0x%x\n", in skge_reset()
3577 hw->chip_id); in skge_reset()
3581 mac_cfg = skge_read8(hw, B2_MAC_CFG); in skge_reset()
3582 hw->ports = (mac_cfg & CFG_SNG_MAC) ? 1 : 2; in skge_reset()
3583 hw->chip_rev = (mac_cfg & CFG_CHIP_R_MSK) >> 4; in skge_reset()
3586 t8 = skge_read8(hw, B2_E_0); in skge_reset()
3587 if (is_genesis(hw)) { in skge_reset()
3590 hw->ram_size = 0x100000; in skge_reset()
3591 hw->ram_offset = 0x80000; in skge_reset()
3593 hw->ram_size = t8 * 512; in skge_reset()
3595 hw->ram_size = 0x20000; in skge_reset()
3597 hw->ram_size = t8 * 4096; in skge_reset()
3599 hw->intr_mask = IS_HW_ERR; in skge_reset()
3602 if (!(is_genesis(hw) && hw->phy_type == SK_PHY_XMAC)) in skge_reset()
3603 hw->intr_mask |= IS_EXT_REG; in skge_reset()
3605 if (is_genesis(hw)) in skge_reset()
3606 genesis_init(hw); in skge_reset()
3609 skge_write8(hw, B0_POWER_CTRL, in skge_reset()
3613 if ((skge_read32(hw, B0_ISRC) & IS_HW_ERR) && in skge_reset()
3614 (skge_read32(hw, B0_HWE_ISRC) & IS_IRQ_SENSOR)) { in skge_reset()
3615 dev_warn(&hw->pdev->dev, "stuck hardware sensor bit\n"); in skge_reset()
3616 hw->intr_mask &= ~IS_HW_ERR; in skge_reset()
3620 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_ON); in skge_reset()
3621 pci_read_config_dword(hw->pdev, PCI_DEV_REG1, &reg); in skge_reset()
3623 pci_write_config_dword(hw->pdev, PCI_DEV_REG1, reg); in skge_reset()
3624 skge_write8(hw, B2_TST_CTRL1, TST_CFG_WRITE_OFF); in skge_reset()
3627 for (i = 0; i < hw->ports; i++) { in skge_reset()
3628 skge_write16(hw, SK_REG(i, GMAC_LINK_CTRL), GMLC_RST_SET); in skge_reset()
3629 skge_write16(hw, SK_REG(i, GMAC_LINK_CTRL), GMLC_RST_CLR); in skge_reset()
3634 skge_write8(hw, B2_TI_CTRL, TIM_STOP); in skge_reset()
3635 skge_write8(hw, B2_TI_CTRL, TIM_CLR_IRQ); in skge_reset()
3636 skge_write8(hw, B0_LED, LED_STAT_ON); in skge_reset()
3639 for (i = 0; i < hw->ports; i++) in skge_reset()
3640 skge_write8(hw, SK_REG(i, TXA_CTRL), TXA_ENA_ARB); in skge_reset()
3643 skge_write16(hw, B3_RI_CTRL, RI_RST_CLR); in skge_reset()
3645 skge_write8(hw, B3_RI_WTO_R1, SK_RI_TO_53); in skge_reset()
3646 skge_write8(hw, B3_RI_WTO_XA1, SK_RI_TO_53); in skge_reset()
3647 skge_write8(hw, B3_RI_WTO_XS1, SK_RI_TO_53); in skge_reset()
3648 skge_write8(hw, B3_RI_RTO_R1, SK_RI_TO_53); in skge_reset()
3649 skge_write8(hw, B3_RI_RTO_XA1, SK_RI_TO_53); in skge_reset()
3650 skge_write8(hw, B3_RI_RTO_XS1, SK_RI_TO_53); in skge_reset()
3651 skge_write8(hw, B3_RI_WTO_R2, SK_RI_TO_53); in skge_reset()
3652 skge_write8(hw, B3_RI_WTO_XA2, SK_RI_TO_53); in skge_reset()
3653 skge_write8(hw, B3_RI_WTO_XS2, SK_RI_TO_53); in skge_reset()
3654 skge_write8(hw, B3_RI_RTO_R2, SK_RI_TO_53); in skge_reset()
3655 skge_write8(hw, B3_RI_RTO_XA2, SK_RI_TO_53); in skge_reset()
3656 skge_write8(hw, B3_RI_RTO_XS2, SK_RI_TO_53); in skge_reset()
3658 skge_write32(hw, B0_HWE_IMSK, IS_ERR_MSK); in skge_reset()
3663 skge_write32(hw, B2_IRQM_MSK, IS_XA1_F|IS_XA2_F); in skge_reset()
3664 skge_write32(hw, B2_IRQM_INI, skge_usecs2clk(hw, 100)); in skge_reset()
3665 skge_write32(hw, B2_IRQM_CTRL, TIM_START); in skge_reset()
3668 skge_write32(hw, B0_IMSK, 0); in skge_reset()
3670 for (i = 0; i < hw->ports; i++) { in skge_reset()
3671 if (is_genesis(hw)) in skge_reset()
3672 genesis_reset(hw, i); in skge_reset()
3674 yukon_reset(hw, i); in skge_reset()
3689 const struct skge_hw *hw = skge->hw; in skge_debug_show() local
3695 seq_printf(seq, "IRQ src=%x mask=%x\n", skge_read32(hw, B0_ISRC), in skge_debug_show()
3696 skge_read32(hw, B0_IMSK)); in skge_debug_show()
3803 static struct net_device *skge_devinit(struct skge_hw *hw, int port, in skge_devinit() argument
3812 SET_NETDEV_DEV(dev, &hw->pdev->dev); in skge_devinit()
3816 dev->irq = hw->pdev->irq; in skge_devinit()
3828 skge->hw = hw; in skge_devinit()
3839 skge->advertising = skge_supported_modes(hw); in skge_devinit()
3841 if (device_can_wakeup(&hw->pdev->dev)) { in skge_devinit()
3842 skge->wol = wol_supported(hw) & WAKE_MAGIC; in skge_devinit()
3843 device_set_wakeup_enable(&hw->pdev->dev, skge->wol); in skge_devinit()
3846 hw->dev[port] = dev; in skge_devinit()
3851 if (is_genesis(hw)) in skge_devinit()
3860 memcpy_fromio(dev->dev_addr, hw->regs + B2_MAC_1 + port*8, ETH_ALEN); in skge_devinit()
3877 struct skge_hw *hw; in skge_probe() local
3920 hw = kzalloc(sizeof(*hw) + strlen(DRV_NAME "@pci:") in skge_probe()
3922 if (!hw) in skge_probe()
3925 sprintf(hw->irq_name, DRV_NAME "@pci:%s", pci_name(pdev)); in skge_probe()
3927 hw->pdev = pdev; in skge_probe()
3928 spin_lock_init(&hw->hw_lock); in skge_probe()
3929 spin_lock_init(&hw->phy_lock); in skge_probe()
3930 tasklet_setup(&hw->phy_task, skge_extirq); in skge_probe()
3932 hw->regs = ioremap(pci_resource_start(pdev, 0), 0x4000); in skge_probe()
3933 if (!hw->regs) { in skge_probe()
3938 err = skge_reset(hw); in skge_probe()
3945 skge_board_name(hw), hw->chip_rev); in skge_probe()
3947 dev = skge_devinit(hw, 0, using_dac); in skge_probe()
3965 if (hw->ports > 1) { in skge_probe()
3966 dev1 = skge_devinit(hw, 1, using_dac); in skge_probe()
3979 hw->irq_name, hw); in skge_probe()
3988 pci_set_drvdata(pdev, hw); in skge_probe()
4001 skge_write16(hw, B0_LED, LED_STAT_OFF); in skge_probe()
4003 iounmap(hw->regs); in skge_probe()
4005 kfree(hw); in skge_probe()
4016 struct skge_hw *hw = pci_get_drvdata(pdev); in skge_remove() local
4019 if (!hw) in skge_remove()
4022 dev1 = hw->dev[1]; in skge_remove()
4025 dev0 = hw->dev[0]; in skge_remove()
4028 tasklet_kill(&hw->phy_task); in skge_remove()
4030 spin_lock_irq(&hw->hw_lock); in skge_remove()
4031 hw->intr_mask = 0; in skge_remove()
4033 if (hw->ports > 1) { in skge_remove()
4034 skge_write32(hw, B0_IMSK, 0); in skge_remove()
4035 skge_read32(hw, B0_IMSK); in skge_remove()
4037 spin_unlock_irq(&hw->hw_lock); in skge_remove()
4039 skge_write16(hw, B0_LED, LED_STAT_OFF); in skge_remove()
4040 skge_write8(hw, B0_CTST, CS_RST_SET); in skge_remove()
4042 if (hw->ports > 1) in skge_remove()
4043 free_irq(pdev->irq, hw); in skge_remove()
4050 iounmap(hw->regs); in skge_remove()
4051 kfree(hw); in skge_remove()
4057 struct skge_hw *hw = dev_get_drvdata(dev); in skge_suspend() local
4060 if (!hw) in skge_suspend()
4063 for (i = 0; i < hw->ports; i++) { in skge_suspend()
4064 struct net_device *dev = hw->dev[i]; in skge_suspend()
4074 skge_write32(hw, B0_IMSK, 0); in skge_suspend()
4081 struct skge_hw *hw = dev_get_drvdata(dev); in skge_resume() local
4084 if (!hw) in skge_resume()
4087 err = skge_reset(hw); in skge_resume()
4091 for (i = 0; i < hw->ports; i++) { in skge_resume()
4092 struct net_device *dev = hw->dev[i]; in skge_resume()
4118 struct skge_hw *hw = pci_get_drvdata(pdev); in skge_shutdown() local
4121 if (!hw) in skge_shutdown()
4124 for (i = 0; i < hw->ports; i++) { in skge_shutdown()
4125 struct net_device *dev = hw->dev[i]; in skge_shutdown()