Lines matching refs:lp (all references to the identifier lp)
263 static void count_rx_errors(int status, struct net_local *lp);
269 static void release_dma_buff(struct net_local *lp);
513 struct net_local *lp = netdev_priv(dev); in cs89x0_probe1() local
523 memset(lp, 0, sizeof(*lp)); in cs89x0_probe1()
524 spin_lock_init(&lp->lock); in cs89x0_probe1()
528 lp->use_dma = 1; in cs89x0_probe1()
529 lp->dma = g_cs89x0_dma; in cs89x0_probe1()
530 lp->dmasize = 16; /* Could make this an option... */ in cs89x0_probe1()
533 lp->force = g_cs89x0_media__force; in cs89x0_probe1()
611 lp->chip_type = rev_type &~ REVISON_BITS; in cs89x0_probe1()
612 lp->chip_revision = ((rev_type & REVISON_BITS) >> 8) + 'A'; in cs89x0_probe1()
616 lp->send_cmd = TX_AFTER_381; in cs89x0_probe1()
617 if (lp->chip_type == CS8900 && lp->chip_revision >= 'F') in cs89x0_probe1()
618 lp->send_cmd = TX_NOW; in cs89x0_probe1()
619 if (lp->chip_type != CS8900 && lp->chip_revision >= 'C') in cs89x0_probe1()
620 lp->send_cmd = TX_NOW; in cs89x0_probe1()
627 lp->chip_type==CS8900?'0':'2', in cs89x0_probe1()
628 lp->chip_type==CS8920M?"M":"", in cs89x0_probe1()
629 lp->chip_revision, in cs89x0_probe1()
693 lp->adapter_cnf = 0; in cs89x0_probe1()
697 lp->adapter_cnf |= A_CNF_DC_DC_POLARITY; in cs89x0_probe1()
700 lp->adapter_cnf |= A_CNF_EXTND_10B_2 | A_CNF_LOW_RX_SQUELCH; in cs89x0_probe1()
703 lp->adapter_cnf |= A_CNF_10B_T | A_CNF_MEDIA_10B_T; in cs89x0_probe1()
706 lp->adapter_cnf |= A_CNF_AUI | A_CNF_MEDIA_AUI; in cs89x0_probe1()
709 lp->adapter_cnf |= A_CNF_AUI | A_CNF_10B_T | in cs89x0_probe1()
714 dev->name, i, lp->adapter_cnf); in cs89x0_probe1()
717 if (lp->chip_type == CS8900) in cs89x0_probe1()
718 lp->isa_config = readreg(dev, PP_CS8900_ISAINT) & INT_NO_MASK; in cs89x0_probe1()
747 if (!lp->auto_neg_cnf) lp->auto_neg_cnf = eeprom_buff[AUTO_NEG_CNF_OFFSET/2]; in cs89x0_probe1()
749 if (!lp->adapter_cnf) lp->adapter_cnf = eeprom_buff[ADAPTER_CNF_OFFSET/2]; in cs89x0_probe1()
751 lp->isa_config = eeprom_buff[ISA_CNF_OFFSET/2]; in cs89x0_probe1()
762 dev->name, lp->adapter_cnf); in cs89x0_probe1()
768 if (lp->force & FORCE_RJ45) {lp->adapter_cnf |= A_CNF_10B_T; count++; } in cs89x0_probe1()
769 if (lp->force & FORCE_AUI) {lp->adapter_cnf |= A_CNF_AUI; count++; } in cs89x0_probe1()
770 if (lp->force & FORCE_BNC) {lp->adapter_cnf |= A_CNF_10B_2; count++; } in cs89x0_probe1()
771 if (count > 1) {lp->adapter_cnf |= A_CNF_MEDIA_AUTO; } in cs89x0_probe1()
772 else if (lp->force & FORCE_RJ45){lp->adapter_cnf |= A_CNF_MEDIA_10B_T; } in cs89x0_probe1()
773 else if (lp->force & FORCE_AUI) {lp->adapter_cnf |= A_CNF_MEDIA_AUI; } in cs89x0_probe1()
774 else if (lp->force & FORCE_BNC) {lp->adapter_cnf |= A_CNF_MEDIA_10B_2; } in cs89x0_probe1()
779 dev->name, lp->force, lp->adapter_cnf); in cs89x0_probe1()
789 (lp->adapter_cnf & A_CNF_10B_T)?"RJ-45,":"", in cs89x0_probe1()
790 (lp->adapter_cnf & A_CNF_AUI)?"AUI,":"", in cs89x0_probe1()
791 (lp->adapter_cnf & A_CNF_10B_2)?"BNC,":""); in cs89x0_probe1()
793 lp->irq_map = 0xffff; in cs89x0_probe1()
796 if (lp->chip_type != CS8900 && in cs89x0_probe1()
803 i = lp->isa_config & INT_NO_MASK; in cs89x0_probe1()
804 if (lp->chip_type == CS8900) { in cs89x0_probe1()
814 lp->irq_map = CS8900_IRQ_MAP; /* fixed IRQ map for CS8900 */ in cs89x0_probe1()
822 lp->irq_map = (irq_map_buff[0]>>8) | (irq_map_buff[1] << 8); in cs89x0_probe1()
833 if (lp->use_dma) { in cs89x0_probe1()
886 struct net_local *lp = netdev_priv(dev); in get_dma_channel() local
888 if (lp->dma) { in get_dma_channel()
889 dev->dma = lp->dma; in get_dma_channel()
890 lp->isa_config |= ISA_RxDMA; in get_dma_channel()
892 if ((lp->isa_config & ANY_ISA_DMA) == 0) in get_dma_channel()
894 dev->dma = lp->isa_config & DMA_NO_MASK; in get_dma_channel()
895 if (lp->chip_type == CS8900) in get_dma_channel()
898 lp->isa_config &= ~ANY_ISA_DMA; in get_dma_channel()
908 struct net_local *lp = netdev_priv(dev); in write_dma() local
909 if ((lp->isa_config & ANY_ISA_DMA) == 0) in write_dma()
921 struct net_local *lp = netdev_priv(dev); in set_dma_cfg() local
923 if (lp->use_dma) { in set_dma_cfg()
924 if ((lp->isa_config & ANY_ISA_DMA) == 0) { in set_dma_cfg()
929 if (lp->isa_config & ISA_RxDMA) { in set_dma_cfg()
930 lp->curr_rx_cfg |= RX_DMA_ONLY; in set_dma_cfg()
934 lp->curr_rx_cfg |= AUTO_RX_DMA; /* not that we support it... */ in set_dma_cfg()
944 struct net_local *lp = netdev_priv(dev); in dma_bufcfg() local
945 if (lp->use_dma) in dma_bufcfg()
946 return (lp->isa_config & ANY_ISA_DMA)? RX_DMA_ENBL : 0; in dma_bufcfg()
955 struct net_local *lp = netdev_priv(dev); in dma_busctl() local
956 if (lp->use_dma) { in dma_busctl()
957 if (lp->isa_config & ANY_ISA_DMA) in dma_busctl()
959 if (lp->isa_config & DMA_BURST) in dma_busctl()
961 if (lp->dmasize == 64) in dma_busctl()
971 struct net_local *lp = netdev_priv(dev); in dma_rx() local
974 unsigned char *bp = lp->rx_dma_ptr; in dma_rx()
984 count_rx_errors(status, lp); in dma_rx()
993 lp->stats.rx_dropped++; in dma_rx()
998 if (bp >= lp->end_dma_buff) bp -= lp->dmasize*1024; in dma_rx()
999 lp->rx_dma_ptr = bp; in dma_rx()
1004 if (bp + length > lp->end_dma_buff) { in dma_rx()
1005 int semi_cnt = lp->end_dma_buff - bp; in dma_rx()
1007 memcpy(skb_put(skb,length - semi_cnt), lp->dma_buff, in dma_rx()
1013 if (bp >= lp->end_dma_buff) bp -= lp->dmasize*1024; in dma_rx()
1014 lp->rx_dma_ptr = bp; in dma_rx()
1023 lp->stats.rx_packets++; in dma_rx()
1024 lp->stats.rx_bytes += length; in dma_rx()
1033 struct net_local *lp = netdev_priv(dev); in reset_chip() local
1044 if (lp->chip_type != CS8900) { in reset_chip()
1067 struct net_local *lp = netdev_priv(dev); in control_dc_dc() local
1075 if (((lp->adapter_cnf & A_CNF_DC_DC_POLARITY) != 0) ^ on_not_off) in control_dc_dc()
1095 struct net_local *lp = netdev_priv(dev); in detect_tp() local
1106 writereg(dev, PP_LineCTL, lp->linectl &~ AUI_ONLY); in detect_tp()
1115 if (lp->chip_type == CS8900) { in detect_tp()
1116 switch (lp->force & 0xf0) { in detect_tp()
1124 lp->force &= ~FORCE_AUTO; in detect_tp()
1125 lp->force |= FORCE_HALF; in detect_tp()
1135 switch (lp->force & 0xf0) { in detect_tp()
1137 lp->auto_neg_cnf = AUTO_NEG_ENABLE; in detect_tp()
1140 lp->auto_neg_cnf = 0; in detect_tp()
1143 lp->auto_neg_cnf = RE_NEG_NOW | ALLOW_FDX; in detect_tp()
1147 writereg(dev, PP_AutoNegCTL, lp->auto_neg_cnf & AUTO_NEG_MASK); in detect_tp()
1149 if ((lp->auto_neg_cnf & AUTO_NEG_BITS) == AUTO_NEG_ENABLE) { in detect_tp()
1210 struct net_local *lp = netdev_priv(dev); in detect_aui() local
1215 writereg(dev, PP_LineCTL, (lp->linectl &~ AUTO_AUI_10BASET) | AUI_ONLY); in detect_aui()
1226 struct net_local *lp = netdev_priv(dev); in detect_bnc() local
1231 writereg(dev, PP_LineCTL, (lp->linectl &~ AUTO_AUI_10BASET) | AUI_ONLY); in detect_bnc()
1272 struct net_local *lp = netdev_priv(dev); in net_open() local
1288 if ((1 << i) & lp->irq_map) { in net_open()
1291 write_irq(dev, lp->chip_type, i); in net_open()
1309 if (((1 << dev->irq) & lp->irq_map) == 0) { in net_open()
1311 dev->name, dev->irq, lp->irq_map); in net_open()
1322 write_irq(dev, lp->chip_type, dev->irq); in net_open()
1332 if (lp->use_dma) { in net_open()
1333 if (lp->isa_config & ANY_ISA_DMA) { in net_open()
1335 lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL, in net_open()
1336 get_order(lp->dmasize * 1024)); in net_open()
1338 if (!lp->dma_buff) { in net_open()
1339 printk(KERN_ERR "%s: cannot get %dK memory for DMA\n", dev->name, lp->dmasize); in net_open()
1345 (unsigned long)lp->dma_buff, in net_open()
1346 (unsigned long)isa_virt_to_bus(lp->dma_buff)); in net_open()
1348 if ((unsigned long) lp->dma_buff >= MAX_DMA_ADDRESS || in net_open()
1349 !dma_page_eq(lp->dma_buff, lp->dma_buff+lp->dmasize*1024-1)) { in net_open()
1353 memset(lp->dma_buff, 0, lp->dmasize * 1024); /* Why? */ in net_open()
1358 write_dma(dev, lp->chip_type, dev->dma); in net_open()
1359 lp->rx_dma_ptr = lp->dma_buff; in net_open()
1360 lp->end_dma_buff = lp->dma_buff + lp->dmasize*1024; in net_open()
1361 spin_lock_irqsave(&lp->lock, flags); in net_open()
1365 set_dma_addr(dev->dma, isa_virt_to_bus(lp->dma_buff)); in net_open()
1366 set_dma_count(dev->dma, lp->dmasize*1024); in net_open()
1368 spin_unlock_irqrestore(&lp->lock, flags); in net_open()
1381 if ((lp->adapter_cnf & A_CNF_EXTND_10B_2) && (lp->adapter_cnf & A_CNF_LOW_RX_SQUELCH)) in net_open()
1382 lp->linectl = LOW_RX_SQUELCH; in net_open()
1384 lp->linectl = 0; in net_open()
1387 switch(lp->adapter_cnf & A_CNF_MEDIA_TYPE) { in net_open()
1388 case A_CNF_MEDIA_10B_T: result = lp->adapter_cnf & A_CNF_10B_T; break; in net_open()
1389 case A_CNF_MEDIA_AUI: result = lp->adapter_cnf & A_CNF_AUI; break; in net_open()
1390 case A_CNF_MEDIA_10B_2: result = lp->adapter_cnf & A_CNF_10B_2; break; in net_open()
1391 default: result = lp->adapter_cnf & (A_CNF_10B_T | A_CNF_AUI | A_CNF_10B_2); in net_open()
1402 release_dma_buff(lp); in net_open()
1411 switch(lp->adapter_cnf & A_CNF_MEDIA_TYPE) { in net_open()
1416 if (lp->auto_neg_cnf & IMM_BIT) /* check "ignore missing media" bit */ in net_open()
1424 if (lp->auto_neg_cnf & IMM_BIT) /* check "ignore missing media" bit */ in net_open()
1432 if (lp->auto_neg_cnf & IMM_BIT) /* check "ignore missing media" bit */ in net_open()
1437 writereg(dev, PP_LineCTL, lp->linectl | AUTO_AUI_10BASET); in net_open()
1438 if (lp->adapter_cnf & A_CNF_10B_T) in net_open()
1441 if (lp->adapter_cnf & A_CNF_AUI) in net_open()
1444 if (lp->adapter_cnf & A_CNF_10B_2) in net_open()
1472 lp->rx_mode = 0; in net_open()
1475 lp->curr_rx_cfg = RX_OK_ENBL | RX_CRC_ERROR_ENBL; in net_open()
1477 if (lp->isa_config & STREAM_TRANSFER) in net_open()
1478 lp->curr_rx_cfg |= RX_STREAM_ENBL; in net_open()
1482 writereg(dev, PP_RxCFG, lp->curr_rx_cfg); in net_open()
1520 struct net_local *lp = netdev_priv(dev); in net_send_packet() local
1532 spin_lock_irq(&lp->lock); in net_send_packet()
1536 writeword(dev->base_addr, TX_CMD_PORT, lp->send_cmd); in net_send_packet()
1546 spin_unlock_irq(&lp->lock); in net_send_packet()
1552 spin_unlock_irq(&lp->lock); in net_send_packet()
1553 lp->stats.tx_bytes += skb->len; in net_send_packet()
1577 struct net_local *lp; in net_interrupt() local
1582 lp = netdev_priv(dev); in net_interrupt()
1600 lp->stats.tx_packets++; in net_interrupt()
1607 if ((status & TX_OK) == 0) lp->stats.tx_errors++; in net_interrupt()
1608 if (status & TX_LOST_CRS) lp->stats.tx_carrier_errors++; in net_interrupt()
1609 if (status & TX_SQE_ERROR) lp->stats.tx_heartbeat_errors++; in net_interrupt()
1610 if (status & TX_LATE_COL) lp->stats.tx_window_errors++; in net_interrupt()
1611 if (status & TX_16_COL) lp->stats.tx_aborted_errors++; in net_interrupt()
1625 lp->send_underrun++; in net_interrupt()
1626 if (lp->send_underrun == 3) lp->send_cmd = TX_AFTER_381; in net_interrupt()
1627 else if (lp->send_underrun == 6) lp->send_cmd = TX_AFTER_ALL; in net_interrupt()
1636 if (lp->use_dma && (status & RX_DMA)) { in net_interrupt()
1653 lp->stats.rx_missed_errors += (status >>6); in net_interrupt()
1656 lp->stats.collisions += (status >>6); in net_interrupt()
1664 count_rx_errors(int status, struct net_local *lp) in count_rx_errors() argument
1666 lp->stats.rx_errors++; in count_rx_errors()
1667 if (status & RX_RUNT) lp->stats.rx_length_errors++; in count_rx_errors()
1668 if (status & RX_EXTRA_DATA) lp->stats.rx_length_errors++; in count_rx_errors()
1671 lp->stats.rx_crc_errors++; in count_rx_errors()
1672 if (status & RX_DRIBBLE) lp->stats.rx_frame_errors++; in count_rx_errors()
1680 struct net_local *lp = netdev_priv(dev); in net_rx() local
1689 count_rx_errors(status, lp); in net_rx()
1699 lp->stats.rx_dropped++; in net_rx()
1716 lp->stats.rx_packets++; in net_rx()
1717 lp->stats.rx_bytes += length; in net_rx()
1721 static void release_dma_buff(struct net_local *lp) in release_dma_buff() argument
1723 if (lp->dma_buff) { in release_dma_buff()
1724 free_pages((unsigned long)(lp->dma_buff), get_order(lp->dmasize * 1024)); in release_dma_buff()
1725 lp->dma_buff = NULL; in release_dma_buff()
1735 struct net_local *lp = netdev_priv(dev); in net_close() local
1748 if (lp->use_dma && lp->dma) { in net_close()
1750 release_dma_buff(lp); in net_close()
1763 struct net_local *lp = netdev_priv(dev); in net_get_stats() local
1766 spin_lock_irqsave(&lp->lock, flags); in net_get_stats()
1768 lp->stats.rx_missed_errors += (readreg(dev, PP_RxMiss) >> 6); in net_get_stats()
1769 lp->stats.collisions += (readreg(dev, PP_TxCol) >> 6); in net_get_stats()
1770 spin_unlock_irqrestore(&lp->lock, flags); in net_get_stats()
1772 return &lp->stats; in net_get_stats()
1777 struct net_local *lp = netdev_priv(dev); in set_multicast_list() local
1780 spin_lock_irqsave(&lp->lock, flags); in set_multicast_list()
1783 lp->rx_mode = RX_ALL_ACCEPT; in set_multicast_list()
1789 lp->rx_mode = RX_MULTCAST_ACCEPT; in set_multicast_list()
1792 lp->rx_mode = 0; in set_multicast_list()
1794 writereg(dev, PP_RxCTL, DEF_RX_ACCEPT | lp->rx_mode); in set_multicast_list()
1797 writereg(dev, PP_RxCFG, lp->curr_rx_cfg | in set_multicast_list()
1798 (lp->rx_mode == RX_ALL_ACCEPT? (RX_CRC_ERROR_ENBL|RX_RUNT_ENBL|RX_EXTRA_DATA_ENBL) : 0)); in set_multicast_list()
1799 spin_unlock_irqrestore(&lp->lock, flags); in set_multicast_list()
1904 struct net_local *lp; in init_module() local
1917 lp = netdev_priv(dev); in init_module()
1921 lp->use_dma = use_dma; in init_module()
1922 lp->dma = dma; in init_module()
1923 lp->dmasize = dmasize; in init_module()
1927 spin_lock_init(&lp->lock); in init_module()
1931 lp->adapter_cnf = A_CNF_MEDIA_10B_T | A_CNF_10B_T; in init_module()
1933 lp->adapter_cnf = A_CNF_MEDIA_AUI | A_CNF_AUI; in init_module()
1935 lp->adapter_cnf = A_CNF_MEDIA_10B_2 | A_CNF_10B_2; in init_module()
1937 lp->adapter_cnf = A_CNF_MEDIA_10B_T | A_CNF_10B_T; in init_module()
1940 lp->auto_neg_cnf = AUTO_NEG_ENABLE; in init_module()
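Note: every reference above goes through the driver's per-device private area: lp is obtained with netdev_priv(dev), and shared state (chip registers, stats, DMA pointers) is guarded by lp->lock. Below is a minimal illustrative sketch of that pattern, not the actual cs89x0 code; the trimmed struct fields and the example_update_stats() helper are hypothetical stand-ins for the much larger struct net_local used by the driver.

#include <linux/netdevice.h>
#include <linux/spinlock.h>

/* Hypothetical, trimmed-down private state; the real struct net_local
 * in cs89x0 also carries DMA buffers, adapter_cnf, irq_map, etc. */
struct net_local {
	spinlock_t lock;                /* protects registers and counters */
	struct net_device_stats stats;  /* per-device statistics */
	int rx_mode;                    /* current receive mode */
};

/* The accessor pattern seen throughout the listing: fetch the private
 * area embedded in the net_device, then take the lock with interrupts
 * disabled before touching state also used by the interrupt handler. */
static void example_update_stats(struct net_device *dev, unsigned int bytes)
{
	struct net_local *lp = netdev_priv(dev);
	unsigned long flags;

	spin_lock_irqsave(&lp->lock, flags);
	lp->stats.rx_packets++;
	lp->stats.rx_bytes += bytes;
	spin_unlock_irqrestore(&lp->lock, flags);
}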