Lines matching refs: bp (Broadcom b44 Ethernet driver, b44.c)

164 static inline unsigned long br32(const struct b44 *bp, unsigned long reg)  in br32()  argument
166 return ssb_read32(bp->sdev, reg); in br32()
169 static inline void bw32(const struct b44 *bp, in bw32() argument
172 ssb_write32(bp->sdev, reg, val); in bw32()
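The two matches above are the entire bodies of the register accessors. A minimal sketch reconstructed from these fragments (the bw32() parameter list is truncated in the listing, so its exact prototype here is an assumption):

/* Sketch: MMIO accessors that forward to the SSB bus helpers for the
 * core backing this Ethernet device. */
static inline unsigned long br32(const struct b44 *bp, unsigned long reg)
{
        return ssb_read32(bp->sdev, reg);
}

static inline void bw32(const struct b44 *bp,
                        unsigned long reg, unsigned long val)
{
        ssb_write32(bp->sdev, reg, val);
}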
175 static int b44_wait_bit(struct b44 *bp, unsigned long reg, in b44_wait_bit() argument
181 u32 val = br32(bp, reg); in b44_wait_bit()
191 netdev_err(bp->dev, "BUG! Timeout waiting for bit %08x of register %lx to %s\n", in b44_wait_bit()
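b44_wait_bit() polls a register until the requested bit sets or clears and logs the timeout message seen at source line 191. A sketch of the loop; the 10 microsecond poll interval and the -ENODEV return are assumptions not visible in the listing:

static int b44_wait_bit(struct b44 *bp, unsigned long reg,
                        u32 bit, unsigned long timeout, const int clear)
{
        unsigned long i;

        for (i = 0; i < timeout; i++) {
                u32 val = br32(bp, reg);

                /* Stop as soon as the bit reaches the requested state. */
                if (clear && !(val & bit))
                        break;
                if (!clear && (val & bit))
                        break;
                udelay(10);             /* assumed poll interval */
        }
        if (i == timeout) {
                netdev_err(bp->dev, "BUG! Timeout waiting for bit %08x of register %lx to %s\n",
                           bit, reg, clear ? "clear" : "set");
                return -ENODEV;         /* assumed error code */
        }
        return 0;
}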
199 static inline void __b44_cam_read(struct b44 *bp, unsigned char *data, int index) in __b44_cam_read() argument
203 bw32(bp, B44_CAM_CTRL, (CAM_CTRL_READ | in __b44_cam_read()
206 b44_wait_bit(bp, B44_CAM_CTRL, CAM_CTRL_BUSY, 100, 1); in __b44_cam_read()
208 val = br32(bp, B44_CAM_DATA_LO); in __b44_cam_read()
215 val = br32(bp, B44_CAM_DATA_HI); in __b44_cam_read()
221 static inline void __b44_cam_write(struct b44 *bp, unsigned char *data, int index) in __b44_cam_write() argument
229 bw32(bp, B44_CAM_DATA_LO, val); in __b44_cam_write()
233 bw32(bp, B44_CAM_DATA_HI, val); in __b44_cam_write()
234 bw32(bp, B44_CAM_CTRL, (CAM_CTRL_WRITE | in __b44_cam_write()
236 b44_wait_bit(bp, B44_CAM_CTRL, CAM_CTRL_BUSY, 100, 1); in __b44_cam_write()
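The CAM helpers move a six-byte MAC address through the CAM_DATA_LO/HI registers and then kick CAM_CTRL, waiting on CAM_CTRL_BUSY as shown above. A hedged sketch of the write path; the byte packing, CAM_DATA_HI_VALID and CAM_CTRL_INDEX_SHIFT are assumptions based on the register names in the fragments:

static inline void __b44_cam_write(struct b44 *bp, unsigned char *data, int index)
{
        u32 val;

        /* Low word carries the last four address bytes. */
        val  = ((u32) data[2]) << 24;
        val |= ((u32) data[3]) << 16;
        val |= ((u32) data[4]) <<  8;
        val |= ((u32) data[5]) <<  0;
        bw32(bp, B44_CAM_DATA_LO, val);

        /* High word carries the first two bytes plus a valid flag (assumed). */
        val = (CAM_DATA_HI_VALID |
               (((u32) data[0]) << 8) |
               (((u32) data[1]) << 0));
        bw32(bp, B44_CAM_DATA_HI, val);

        bw32(bp, B44_CAM_CTRL, (CAM_CTRL_WRITE |
                                (index << CAM_CTRL_INDEX_SHIFT)));
        b44_wait_bit(bp, B44_CAM_CTRL, CAM_CTRL_BUSY, 100, 1);
}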
239 static inline void __b44_disable_ints(struct b44 *bp) in __b44_disable_ints() argument
241 bw32(bp, B44_IMASK, 0); in __b44_disable_ints()
244 static void b44_disable_ints(struct b44 *bp) in b44_disable_ints() argument
246 __b44_disable_ints(bp); in b44_disable_ints()
249 br32(bp, B44_IMASK); in b44_disable_ints()
252 static void b44_enable_ints(struct b44 *bp) in b44_enable_ints() argument
254 bw32(bp, B44_IMASK, bp->imask); in b44_enable_ints()
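The interrupt-mask helpers are tiny; the read-back of B44_IMASK at source line 249 flushes the posted write so the mask really takes effect before the caller proceeds. Sketch, assuming the fragments above are the complete bodies:

static inline void __b44_disable_ints(struct b44 *bp)
{
        bw32(bp, B44_IMASK, 0);
}

static void b44_disable_ints(struct b44 *bp)
{
        __b44_disable_ints(bp);

        /* Flush the posted write so interrupts are off immediately. */
        br32(bp, B44_IMASK);
}

static void b44_enable_ints(struct b44 *bp)
{
        bw32(bp, B44_IMASK, bp->imask);
}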
257 static int __b44_readphy(struct b44 *bp, int phy_addr, int reg, u32 *val) in __b44_readphy() argument
261 bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII); in __b44_readphy()
262 bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START | in __b44_readphy()
267 err = b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0); in __b44_readphy()
268 *val = br32(bp, B44_MDIO_DATA) & MDIO_DATA_DATA; in __b44_readphy()
273 static int __b44_writephy(struct b44 *bp, int phy_addr, int reg, u32 val) in __b44_writephy() argument
275 bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII); in __b44_writephy()
276 bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START | in __b44_writephy()
282 return b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0); in __b44_writephy()
285 static inline int b44_readphy(struct b44 *bp, int reg, u32 *val) in b44_readphy() argument
287 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_readphy()
290 return __b44_readphy(bp, bp->phy_addr, reg, val); in b44_readphy()
293 static inline int b44_writephy(struct b44 *bp, int reg, u32 val) in b44_writephy() argument
295 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_writephy()
298 return __b44_writephy(bp, bp->phy_addr, reg, val); in b44_writephy()
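The MDIO accessors acknowledge EMAC_INT_MII, compose a frame in B44_MDIO_DATA, wait for completion with b44_wait_bit(), and read the result back. A sketch of the read path and its wrapper; the MDIO_* shift macros and the early return value for external PHYs (which phylib manages instead) are assumptions:

static int __b44_readphy(struct b44 *bp, int phy_addr, int reg, u32 *val)
{
        int err;

        bw32(bp, B44_EMAC_ISTAT, EMAC_INT_MII);         /* ack previous MII irq */
        bw32(bp, B44_MDIO_DATA, (MDIO_DATA_SB_START |
                                 (MDIO_OP_READ << MDIO_DATA_OP_SHIFT) |
                                 (phy_addr << MDIO_DATA_PMD_SHIFT) |
                                 (reg << MDIO_DATA_RA_SHIFT) |
                                 (MDIO_TA_VALID << MDIO_DATA_TA_SHIFT)));
        err = b44_wait_bit(bp, B44_EMAC_ISTAT, EMAC_INT_MII, 100, 0);
        *val = br32(bp, B44_MDIO_DATA) & MDIO_DATA_DATA;

        return err;
}

static inline int b44_readphy(struct b44 *bp, int reg, u32 *val)
{
        if (bp->flags & B44_FLAG_EXTERNAL_PHY)
                return 0;               /* assumed: phylib owns external PHYs */

        return __b44_readphy(bp, bp->phy_addr, reg, val);
}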
305 struct b44 *bp = netdev_priv(dev); in b44_mdio_read_mii() local
306 int rc = __b44_readphy(bp, phy_id, location, &val); in b44_mdio_read_mii()
315 struct b44 *bp = netdev_priv(dev); in b44_mdio_write_mii() local
316 __b44_writephy(bp, phy_id, location, val); in b44_mdio_write_mii()
322 struct b44 *bp = bus->priv; in b44_mdio_read_phylib() local
323 int rc = __b44_readphy(bp, phy_id, location, &val); in b44_mdio_read_phylib()
332 struct b44 *bp = bus->priv; in b44_mdio_write_phylib() local
333 return __b44_writephy(bp, phy_id, location, val); in b44_mdio_write_phylib()
336 static int b44_phy_reset(struct b44 *bp) in b44_phy_reset() argument
341 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_phy_reset()
343 err = b44_writephy(bp, MII_BMCR, BMCR_RESET); in b44_phy_reset()
347 err = b44_readphy(bp, MII_BMCR, &val); in b44_phy_reset()
350 netdev_err(bp->dev, "PHY Reset would not complete\n"); in b44_phy_reset()
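b44_phy_reset() only touches the internal PHY: it writes BMCR_RESET and then checks that the bit self-cleared, emitting the error seen at source line 350 otherwise. Sketch; the settle delay and the -ENODEV return are assumptions:

static int b44_phy_reset(struct b44 *bp)
{
        u32 val;
        int err;

        if (bp->flags & B44_FLAG_EXTERNAL_PHY)
                return 0;

        err = b44_writephy(bp, MII_BMCR, BMCR_RESET);
        if (err)
                return err;

        udelay(100);                    /* assumed settle time */

        err = b44_readphy(bp, MII_BMCR, &val);
        if (!err && (val & BMCR_RESET)) {
                netdev_err(bp->dev, "PHY Reset would not complete\n");
                err = -ENODEV;          /* assumed error code */
        }

        return err;
}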
358 static void __b44_set_flow_ctrl(struct b44 *bp, u32 pause_flags) in __b44_set_flow_ctrl() argument
362 bp->flags &= ~(B44_FLAG_TX_PAUSE | B44_FLAG_RX_PAUSE); in __b44_set_flow_ctrl()
363 bp->flags |= pause_flags; in __b44_set_flow_ctrl()
365 val = br32(bp, B44_RXCONFIG); in __b44_set_flow_ctrl()
370 bw32(bp, B44_RXCONFIG, val); in __b44_set_flow_ctrl()
372 val = br32(bp, B44_MAC_FLOW); in __b44_set_flow_ctrl()
378 bw32(bp, B44_MAC_FLOW, val); in __b44_set_flow_ctrl()
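__b44_set_flow_ctrl() records the new pause flags and mirrors them into the RX config and MAC flow registers read and written above. Sketch; RXCONFIG_FLOW, MAC_FLOW_PAUSE_ENAB and the RX high-water value are assumed names/values:

static void __b44_set_flow_ctrl(struct b44 *bp, u32 pause_flags)
{
        u32 val;

        bp->flags &= ~(B44_FLAG_TX_PAUSE | B44_FLAG_RX_PAUSE);
        bp->flags |= pause_flags;

        val = br32(bp, B44_RXCONFIG);
        if (pause_flags & B44_FLAG_RX_PAUSE)
                val |= RXCONFIG_FLOW;
        else
                val &= ~RXCONFIG_FLOW;
        bw32(bp, B44_RXCONFIG, val);

        val = br32(bp, B44_MAC_FLOW);
        if (pause_flags & B44_FLAG_TX_PAUSE)
                val |= (MAC_FLOW_PAUSE_ENAB |
                        (0xc0 & MAC_FLOW_RX_HI_WATER));
        else
                val &= ~MAC_FLOW_PAUSE_ENAB;
        bw32(bp, B44_MAC_FLOW, val);
}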
381 static void b44_set_flow_ctrl(struct b44 *bp, u32 local, u32 remote) in b44_set_flow_ctrl() argument
397 __b44_set_flow_ctrl(bp, pause_enab); in b44_set_flow_ctrl()
402 static void b44_wap54g10_workaround(struct b44 *bp) in b44_wap54g10_workaround() argument
416 err = __b44_readphy(bp, 0, MII_BMCR, &val); in b44_wap54g10_workaround()
422 err = __b44_writephy(bp, 0, MII_BMCR, val); in b44_wap54g10_workaround()
431 static inline void b44_wap54g10_workaround(struct b44 *bp) in b44_wap54g10_workaround() argument
436 static int b44_setup_phy(struct b44 *bp) in b44_setup_phy() argument
441 b44_wap54g10_workaround(bp); in b44_setup_phy()
443 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_setup_phy()
445 if ((err = b44_readphy(bp, B44_MII_ALEDCTRL, &val)) != 0) in b44_setup_phy()
447 if ((err = b44_writephy(bp, B44_MII_ALEDCTRL, in b44_setup_phy()
450 if ((err = b44_readphy(bp, B44_MII_TLEDCTRL, &val)) != 0) in b44_setup_phy()
452 if ((err = b44_writephy(bp, B44_MII_TLEDCTRL, in b44_setup_phy()
456 if (!(bp->flags & B44_FLAG_FORCE_LINK)) { in b44_setup_phy()
459 if (bp->flags & B44_FLAG_ADV_10HALF) in b44_setup_phy()
461 if (bp->flags & B44_FLAG_ADV_10FULL) in b44_setup_phy()
463 if (bp->flags & B44_FLAG_ADV_100HALF) in b44_setup_phy()
465 if (bp->flags & B44_FLAG_ADV_100FULL) in b44_setup_phy()
468 if (bp->flags & B44_FLAG_PAUSE_AUTO) in b44_setup_phy()
471 if ((err = b44_writephy(bp, MII_ADVERTISE, adv)) != 0) in b44_setup_phy()
473 if ((err = b44_writephy(bp, MII_BMCR, (BMCR_ANENABLE | in b44_setup_phy()
479 if ((err = b44_readphy(bp, MII_BMCR, &bmcr)) != 0) in b44_setup_phy()
482 if (bp->flags & B44_FLAG_100_BASE_T) in b44_setup_phy()
484 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_setup_phy()
486 if ((err = b44_writephy(bp, MII_BMCR, bmcr)) != 0) in b44_setup_phy()
493 b44_set_flow_ctrl(bp, 0, 0); in b44_setup_phy()
500 static void b44_stats_update(struct b44 *bp) in b44_stats_update() argument
505 val = &bp->hw_stats.tx_good_octets; in b44_stats_update()
506 u64_stats_update_begin(&bp->hw_stats.syncp); in b44_stats_update()
509 *val++ += br32(bp, reg); in b44_stats_update()
513 *val++ += br32(bp, reg); in b44_stats_update()
516 u64_stats_update_end(&bp->hw_stats.syncp); in b44_stats_update()
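b44_stats_update() walks the hardware MIB counters in register order and accumulates them into bp->hw_stats under the u64_stats sequence shown above. Sketch; the register range names are assumptions:

static void b44_stats_update(struct b44 *bp)
{
        unsigned long reg;
        u64 *val;

        val = &bp->hw_stats.tx_good_octets;
        u64_stats_update_begin(&bp->hw_stats.syncp);

        /* TX then RX MIB counters are laid out contiguously (assumed ranges). */
        for (reg = B44_TX_GOOD_O; reg <= B44_TX_PAUSE; reg += 4UL)
                *val++ += br32(bp, reg);
        for (reg = B44_RX_GOOD_O; reg <= B44_RX_NPAUSE; reg += 4UL)
                *val++ += br32(bp, reg);

        u64_stats_update_end(&bp->hw_stats.syncp);
}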
519 static void b44_link_report(struct b44 *bp) in b44_link_report() argument
521 if (!netif_carrier_ok(bp->dev)) { in b44_link_report()
522 netdev_info(bp->dev, "Link is down\n"); in b44_link_report()
524 netdev_info(bp->dev, "Link is up at %d Mbps, %s duplex\n", in b44_link_report()
525 (bp->flags & B44_FLAG_100_BASE_T) ? 100 : 10, in b44_link_report()
526 (bp->flags & B44_FLAG_FULL_DUPLEX) ? "full" : "half"); in b44_link_report()
528 netdev_info(bp->dev, "Flow control is %s for TX and %s for RX\n", in b44_link_report()
529 (bp->flags & B44_FLAG_TX_PAUSE) ? "on" : "off", in b44_link_report()
530 (bp->flags & B44_FLAG_RX_PAUSE) ? "on" : "off"); in b44_link_report()
534 static void b44_check_phy(struct b44 *bp) in b44_check_phy() argument
538 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_check_phy()
539 bp->flags |= B44_FLAG_100_BASE_T; in b44_check_phy()
540 if (!netif_carrier_ok(bp->dev)) { in b44_check_phy()
541 u32 val = br32(bp, B44_TX_CTRL); in b44_check_phy()
542 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_check_phy()
546 bw32(bp, B44_TX_CTRL, val); in b44_check_phy()
547 netif_carrier_on(bp->dev); in b44_check_phy()
548 b44_link_report(bp); in b44_check_phy()
553 if (!b44_readphy(bp, MII_BMSR, &bmsr) && in b44_check_phy()
554 !b44_readphy(bp, B44_MII_AUXCTRL, &aux) && in b44_check_phy()
557 bp->flags |= B44_FLAG_100_BASE_T; in b44_check_phy()
559 bp->flags &= ~B44_FLAG_100_BASE_T; in b44_check_phy()
561 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_check_phy()
563 bp->flags &= ~B44_FLAG_FULL_DUPLEX; in b44_check_phy()
565 if (!netif_carrier_ok(bp->dev) && in b44_check_phy()
567 u32 val = br32(bp, B44_TX_CTRL); in b44_check_phy()
570 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_check_phy()
574 bw32(bp, B44_TX_CTRL, val); in b44_check_phy()
576 if (!(bp->flags & B44_FLAG_FORCE_LINK) && in b44_check_phy()
577 !b44_readphy(bp, MII_ADVERTISE, &local_adv) && in b44_check_phy()
578 !b44_readphy(bp, MII_LPA, &remote_adv)) in b44_check_phy()
579 b44_set_flow_ctrl(bp, local_adv, remote_adv); in b44_check_phy()
582 netif_carrier_on(bp->dev); in b44_check_phy()
583 b44_link_report(bp); in b44_check_phy()
584 } else if (netif_carrier_ok(bp->dev) && !(bmsr & BMSR_LSTATUS)) { in b44_check_phy()
586 netif_carrier_off(bp->dev); in b44_check_phy()
587 b44_link_report(bp); in b44_check_phy()
591 netdev_warn(bp->dev, "Remote fault detected in PHY\n"); in b44_check_phy()
593 netdev_warn(bp->dev, "Jabber detected in PHY\n"); in b44_check_phy()
599 struct b44 *bp = from_timer(bp, t, timer); in b44_timer() local
601 spin_lock_irq(&bp->lock); in b44_timer()
603 b44_check_phy(bp); in b44_timer()
605 b44_stats_update(bp); in b44_timer()
607 spin_unlock_irq(&bp->lock); in b44_timer()
609 mod_timer(&bp->timer, round_jiffies(jiffies + HZ)); in b44_timer()
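The periodic timer is almost entirely visible in the matches above: it takes the driver lock, re-checks the PHY, refreshes the MIB stats and re-arms itself one second out. Sketch:

static void b44_timer(struct timer_list *t)
{
        struct b44 *bp = from_timer(bp, t, timer);

        spin_lock_irq(&bp->lock);

        b44_check_phy(bp);
        b44_stats_update(bp);

        spin_unlock_irq(&bp->lock);

        mod_timer(&bp->timer, round_jiffies(jiffies + HZ));
}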
612 static void b44_tx(struct b44 *bp) in b44_tx() argument
617 cur = br32(bp, B44_DMATX_STAT) & DMATX_STAT_CDMASK; in b44_tx()
621 for (cons = bp->tx_cons; cons != cur; cons = NEXT_TX(cons)) { in b44_tx()
622 struct ring_info *rp = &bp->tx_buffers[cons]; in b44_tx()
627 dma_unmap_single(bp->sdev->dma_dev, in b44_tx()
639 netdev_completed_queue(bp->dev, pkts_compl, bytes_compl); in b44_tx()
640 bp->tx_cons = cons; in b44_tx()
641 if (netif_queue_stopped(bp->dev) && in b44_tx()
642 TX_BUFFS_AVAIL(bp) > B44_TX_WAKEUP_THRESH) in b44_tx()
643 netif_wake_queue(bp->dev); in b44_tx()
645 bw32(bp, B44_GPTIMER, 0); in b44_tx()
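b44_tx() reclaims completed descriptors between tx_cons and the hardware's current index, unmapping and freeing each skb, then wakes the queue once enough slots are free. A hedged sketch; the descriptor-index conversion and the skb-free call are assumptions:

static void b44_tx(struct b44 *bp)
{
        u32 cur, cons;
        unsigned int pkts_compl = 0, bytes_compl = 0;

        cur  = br32(bp, B44_DMATX_STAT) & DMATX_STAT_CDMASK;
        cur /= sizeof(struct dma_desc);         /* assumed: STAT holds a byte offset */

        for (cons = bp->tx_cons; cons != cur; cons = NEXT_TX(cons)) {
                struct ring_info *rp = &bp->tx_buffers[cons];
                struct sk_buff *skb = rp->skb;

                dma_unmap_single(bp->sdev->dma_dev, rp->mapping,
                                 skb->len, DMA_TO_DEVICE);
                rp->skb = NULL;

                bytes_compl += skb->len;
                pkts_compl++;
                dev_consume_skb_irq(skb);       /* assumed free helper */
        }

        netdev_completed_queue(bp->dev, pkts_compl, bytes_compl);
        bp->tx_cons = cons;
        if (netif_queue_stopped(bp->dev) &&
            TX_BUFFS_AVAIL(bp) > B44_TX_WAKEUP_THRESH)
                netif_wake_queue(bp->dev);

        bw32(bp, B44_GPTIMER, 0);
}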
653 static int b44_alloc_rx_skb(struct b44 *bp, int src_idx, u32 dest_idx_unmasked) in b44_alloc_rx_skb() argument
665 src_map = &bp->rx_buffers[src_idx]; in b44_alloc_rx_skb()
667 map = &bp->rx_buffers[dest_idx]; in b44_alloc_rx_skb()
668 skb = netdev_alloc_skb(bp->dev, RX_PKT_BUF_SZ); in b44_alloc_rx_skb()
672 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
678 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
681 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
682 dma_unmap_single(bp->sdev->dma_dev, mapping, in b44_alloc_rx_skb()
688 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
691 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
693 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
694 dma_unmap_single(bp->sdev->dma_dev, mapping, RX_PKT_BUF_SZ, DMA_FROM_DEVICE); in b44_alloc_rx_skb()
698 bp->force_copybreak = 1; in b44_alloc_rx_skb()
716 dp = &bp->rx_ring[dest_idx]; in b44_alloc_rx_skb()
718 dp->addr = cpu_to_le32((u32) mapping + bp->dma_offset); in b44_alloc_rx_skb()
720 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_alloc_rx_skb()
721 b44_sync_dma_desc_for_device(bp->sdev, bp->rx_ring_dma, in b44_alloc_rx_skb()
728 static void b44_recycle_rx(struct b44 *bp, int src_idx, u32 dest_idx_unmasked) in b44_recycle_rx() argument
737 dest_desc = &bp->rx_ring[dest_idx]; in b44_recycle_rx()
738 dest_map = &bp->rx_buffers[dest_idx]; in b44_recycle_rx()
739 src_desc = &bp->rx_ring[src_idx]; in b44_recycle_rx()
740 src_map = &bp->rx_buffers[src_idx]; in b44_recycle_rx()
748 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_recycle_rx()
749 b44_sync_dma_desc_for_cpu(bp->sdev, bp->rx_ring_dma, in b44_recycle_rx()
764 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_recycle_rx()
765 b44_sync_dma_desc_for_device(bp->sdev, bp->rx_ring_dma, in b44_recycle_rx()
769 dma_sync_single_for_device(bp->sdev->dma_dev, dest_map->mapping, in b44_recycle_rx()
774 static int b44_rx(struct b44 *bp, int budget) in b44_rx() argument
780 prod = br32(bp, B44_DMARX_STAT) & DMARX_STAT_CDMASK; in b44_rx()
782 cons = bp->rx_cons; in b44_rx()
785 struct ring_info *rp = &bp->rx_buffers[cons]; in b44_rx()
791 dma_sync_single_for_cpu(bp->sdev->dma_dev, map, in b44_rx()
799 b44_recycle_rx(bp, cons, bp->rx_prod); in b44_rx()
801 bp->dev->stats.rx_dropped++; in b44_rx()
820 if (!bp->force_copybreak && len > RX_COPY_THRESHOLD) { in b44_rx()
822 skb_size = b44_alloc_rx_skb(bp, cons, bp->rx_prod); in b44_rx()
825 dma_unmap_single(bp->sdev->dma_dev, map, in b44_rx()
833 b44_recycle_rx(bp, cons, bp->rx_prod); in b44_rx()
834 copy_skb = napi_alloc_skb(&bp->napi, len); in b44_rx()
845 skb->protocol = eth_type_trans(skb, bp->dev); in b44_rx()
850 bp->rx_prod = (bp->rx_prod + 1) & in b44_rx()
855 bp->rx_cons = cons; in b44_rx()
856 bw32(bp, B44_DMARX_PTR, cons * sizeof(struct dma_desc)); in b44_rx()
863 struct b44 *bp = container_of(napi, struct b44, napi); in b44_poll() local
867 spin_lock_irqsave(&bp->lock, flags); in b44_poll()
869 if (bp->istat & (ISTAT_TX | ISTAT_TO)) { in b44_poll()
871 b44_tx(bp); in b44_poll()
874 if (bp->istat & ISTAT_RFO) { /* fast recovery, in ~20msec */ in b44_poll()
875 bp->istat &= ~ISTAT_RFO; in b44_poll()
876 b44_disable_ints(bp); in b44_poll()
877 ssb_device_enable(bp->sdev, 0); /* resets ISTAT_RFO */ in b44_poll()
878 b44_init_rings(bp); in b44_poll()
879 b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY); in b44_poll()
880 netif_wake_queue(bp->dev); in b44_poll()
883 spin_unlock_irqrestore(&bp->lock, flags); in b44_poll()
886 if (bp->istat & ISTAT_RX) in b44_poll()
887 work_done += b44_rx(bp, budget); in b44_poll()
889 if (bp->istat & ISTAT_ERRORS) { in b44_poll()
890 spin_lock_irqsave(&bp->lock, flags); in b44_poll()
891 b44_halt(bp); in b44_poll()
892 b44_init_rings(bp); in b44_poll()
893 b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY); in b44_poll()
894 netif_wake_queue(bp->dev); in b44_poll()
895 spin_unlock_irqrestore(&bp->lock, flags); in b44_poll()
901 b44_enable_ints(bp); in b44_poll()
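The NAPI poll handler processes whatever bp->istat recorded at interrupt time: TX completions, the RFO fast-recovery path, RX work against the budget, and a full halt/re-init on error bits, then re-enables interrupts when the budget was not exhausted. A condensed sketch; the NAPI completion call is an assumption:

static int b44_poll(struct napi_struct *napi, int budget)
{
        struct b44 *bp = container_of(napi, struct b44, napi);
        int work_done = 0;
        unsigned long flags;

        spin_lock_irqsave(&bp->lock, flags);
        if (bp->istat & (ISTAT_TX | ISTAT_TO))
                b44_tx(bp);
        if (bp->istat & ISTAT_RFO) {            /* fast recovery, in ~20msec */
                bp->istat &= ~ISTAT_RFO;
                b44_disable_ints(bp);
                ssb_device_enable(bp->sdev, 0); /* resets ISTAT_RFO */
                b44_init_rings(bp);
                b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY);
                netif_wake_queue(bp->dev);
        }
        spin_unlock_irqrestore(&bp->lock, flags);

        if (bp->istat & ISTAT_RX)
                work_done += b44_rx(bp, budget);

        if (bp->istat & ISTAT_ERRORS) {
                spin_lock_irqsave(&bp->lock, flags);
                b44_halt(bp);
                b44_init_rings(bp);
                b44_init_hw(bp, B44_FULL_RESET_SKIP_PHY);
                netif_wake_queue(bp->dev);
                spin_unlock_irqrestore(&bp->lock, flags);
                work_done = 0;
        }

        if (work_done < budget) {
                napi_complete_done(napi, work_done);    /* assumed */
                b44_enable_ints(bp);
        }

        return work_done;
}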
910 struct b44 *bp = netdev_priv(dev); in b44_interrupt() local
914 spin_lock(&bp->lock); in b44_interrupt()
916 istat = br32(bp, B44_ISTAT); in b44_interrupt()
917 imask = br32(bp, B44_IMASK); in b44_interrupt()
932 if (napi_schedule_prep(&bp->napi)) { in b44_interrupt()
936 bp->istat = istat; in b44_interrupt()
937 __b44_disable_ints(bp); in b44_interrupt()
938 __napi_schedule(&bp->napi); in b44_interrupt()
942 bw32(bp, B44_ISTAT, istat); in b44_interrupt()
943 br32(bp, B44_ISTAT); in b44_interrupt()
945 spin_unlock(&bp->lock); in b44_interrupt()
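The hard interrupt handler reads ISTAT, masks it with IMASK (the mask gates the CPU interrupt but does not clear status bits), stashes the result in bp->istat, disables further interrupts and hands off to NAPI, then acks ISTAT with a write plus read-back. Sketch; the handled/IRQ_RETVAL bookkeeping is an assumption:

static irqreturn_t b44_interrupt(int irq, void *dev_id)
{
        struct net_device *dev = dev_id;
        struct b44 *bp = netdev_priv(dev);
        u32 istat, imask;
        int handled = 0;

        spin_lock(&bp->lock);

        istat = br32(bp, B44_ISTAT);
        imask = br32(bp, B44_IMASK);

        istat &= imask;
        if (istat) {
                handled = 1;

                if (napi_schedule_prep(&bp->napi)) {
                        /* These writes are posted by the ISTAT read-back below. */
                        bp->istat = istat;
                        __b44_disable_ints(bp);
                        __napi_schedule(&bp->napi);
                }

                bw32(bp, B44_ISTAT, istat);     /* ack */
                br32(bp, B44_ISTAT);            /* flush */
        }

        spin_unlock(&bp->lock);
        return IRQ_RETVAL(handled);
}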
951 struct b44 *bp = netdev_priv(dev); in b44_tx_timeout() local
955 spin_lock_irq(&bp->lock); in b44_tx_timeout()
957 b44_halt(bp); in b44_tx_timeout()
958 b44_init_rings(bp); in b44_tx_timeout()
959 b44_init_hw(bp, B44_FULL_RESET); in b44_tx_timeout()
961 spin_unlock_irq(&bp->lock); in b44_tx_timeout()
963 b44_enable_ints(bp); in b44_tx_timeout()
970 struct b44 *bp = netdev_priv(dev); in b44_start_xmit() local
977 spin_lock_irqsave(&bp->lock, flags); in b44_start_xmit()
980 if (unlikely(TX_BUFFS_AVAIL(bp) < 1)) { in b44_start_xmit()
986 mapping = dma_map_single(bp->sdev->dma_dev, skb->data, len, DMA_TO_DEVICE); in b44_start_xmit()
987 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || mapping + len > DMA_BIT_MASK(30)) { in b44_start_xmit()
991 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_start_xmit()
992 dma_unmap_single(bp->sdev->dma_dev, mapping, len, in b44_start_xmit()
999 mapping = dma_map_single(bp->sdev->dma_dev, bounce_skb->data, in b44_start_xmit()
1001 if (dma_mapping_error(bp->sdev->dma_dev, mapping) || mapping + len > DMA_BIT_MASK(30)) { in b44_start_xmit()
1002 if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_start_xmit()
1003 dma_unmap_single(bp->sdev->dma_dev, mapping, in b44_start_xmit()
1014 entry = bp->tx_prod; in b44_start_xmit()
1015 bp->tx_buffers[entry].skb = skb; in b44_start_xmit()
1016 bp->tx_buffers[entry].mapping = mapping; in b44_start_xmit()
1023 bp->tx_ring[entry].ctrl = cpu_to_le32(ctrl); in b44_start_xmit()
1024 bp->tx_ring[entry].addr = cpu_to_le32((u32) mapping + bp->dma_offset); in b44_start_xmit()
1026 if (bp->flags & B44_FLAG_TX_RING_HACK) in b44_start_xmit()
1027 b44_sync_dma_desc_for_device(bp->sdev, bp->tx_ring_dma, in b44_start_xmit()
1028 entry * sizeof(bp->tx_ring[0]), in b44_start_xmit()
1033 bp->tx_prod = entry; in b44_start_xmit()
1037 bw32(bp, B44_DMATX_PTR, entry * sizeof(struct dma_desc)); in b44_start_xmit()
1038 if (bp->flags & B44_FLAG_BUGGY_TXPTR) in b44_start_xmit()
1039 bw32(bp, B44_DMATX_PTR, entry * sizeof(struct dma_desc)); in b44_start_xmit()
1040 if (bp->flags & B44_FLAG_REORDER_BUG) in b44_start_xmit()
1041 br32(bp, B44_DMATX_PTR); in b44_start_xmit()
1045 if (TX_BUFFS_AVAIL(bp) < 1) in b44_start_xmit()
1049 spin_unlock_irqrestore(&bp->lock, flags); in b44_start_xmit()
1060 struct b44 *bp = netdev_priv(dev); in b44_change_mtu() local
1070 spin_lock_irq(&bp->lock); in b44_change_mtu()
1071 b44_halt(bp); in b44_change_mtu()
1073 b44_init_rings(bp); in b44_change_mtu()
1074 b44_init_hw(bp, B44_FULL_RESET); in b44_change_mtu()
1075 spin_unlock_irq(&bp->lock); in b44_change_mtu()
1077 b44_enable_ints(bp); in b44_change_mtu()
1089 static void b44_free_rings(struct b44 *bp) in b44_free_rings() argument
1095 rp = &bp->rx_buffers[i]; in b44_free_rings()
1099 dma_unmap_single(bp->sdev->dma_dev, rp->mapping, RX_PKT_BUF_SZ, in b44_free_rings()
1107 rp = &bp->tx_buffers[i]; in b44_free_rings()
1111 dma_unmap_single(bp->sdev->dma_dev, rp->mapping, rp->skb->len, in b44_free_rings()
1124 static void b44_init_rings(struct b44 *bp) in b44_init_rings() argument
1128 b44_free_rings(bp); in b44_init_rings()
1130 memset(bp->rx_ring, 0, B44_RX_RING_BYTES); in b44_init_rings()
1131 memset(bp->tx_ring, 0, B44_TX_RING_BYTES); in b44_init_rings()
1133 if (bp->flags & B44_FLAG_RX_RING_HACK) in b44_init_rings()
1134 dma_sync_single_for_device(bp->sdev->dma_dev, bp->rx_ring_dma, in b44_init_rings()
1137 if (bp->flags & B44_FLAG_TX_RING_HACK) in b44_init_rings()
1138 dma_sync_single_for_device(bp->sdev->dma_dev, bp->tx_ring_dma, in b44_init_rings()
1141 for (i = 0; i < bp->rx_pending; i++) { in b44_init_rings()
1142 if (b44_alloc_rx_skb(bp, -1, i) < 0) in b44_init_rings()
1151 static void b44_free_consistent(struct b44 *bp) in b44_free_consistent() argument
1153 kfree(bp->rx_buffers); in b44_free_consistent()
1154 bp->rx_buffers = NULL; in b44_free_consistent()
1155 kfree(bp->tx_buffers); in b44_free_consistent()
1156 bp->tx_buffers = NULL; in b44_free_consistent()
1157 if (bp->rx_ring) { in b44_free_consistent()
1158 if (bp->flags & B44_FLAG_RX_RING_HACK) { in b44_free_consistent()
1159 dma_unmap_single(bp->sdev->dma_dev, bp->rx_ring_dma, in b44_free_consistent()
1161 kfree(bp->rx_ring); in b44_free_consistent()
1163 dma_free_coherent(bp->sdev->dma_dev, DMA_TABLE_BYTES, in b44_free_consistent()
1164 bp->rx_ring, bp->rx_ring_dma); in b44_free_consistent()
1165 bp->rx_ring = NULL; in b44_free_consistent()
1166 bp->flags &= ~B44_FLAG_RX_RING_HACK; in b44_free_consistent()
1168 if (bp->tx_ring) { in b44_free_consistent()
1169 if (bp->flags & B44_FLAG_TX_RING_HACK) { in b44_free_consistent()
1170 dma_unmap_single(bp->sdev->dma_dev, bp->tx_ring_dma, in b44_free_consistent()
1172 kfree(bp->tx_ring); in b44_free_consistent()
1174 dma_free_coherent(bp->sdev->dma_dev, DMA_TABLE_BYTES, in b44_free_consistent()
1175 bp->tx_ring, bp->tx_ring_dma); in b44_free_consistent()
1176 bp->tx_ring = NULL; in b44_free_consistent()
1177 bp->flags &= ~B44_FLAG_TX_RING_HACK; in b44_free_consistent()
1185 static int b44_alloc_consistent(struct b44 *bp, gfp_t gfp) in b44_alloc_consistent() argument
1190 bp->rx_buffers = kzalloc(size, gfp); in b44_alloc_consistent()
1191 if (!bp->rx_buffers) in b44_alloc_consistent()
1195 bp->tx_buffers = kzalloc(size, gfp); in b44_alloc_consistent()
1196 if (!bp->tx_buffers) in b44_alloc_consistent()
1200 bp->rx_ring = dma_alloc_coherent(bp->sdev->dma_dev, size, in b44_alloc_consistent()
1201 &bp->rx_ring_dma, gfp); in b44_alloc_consistent()
1202 if (!bp->rx_ring) { in b44_alloc_consistent()
1213 rx_ring_dma = dma_map_single(bp->sdev->dma_dev, rx_ring, in b44_alloc_consistent()
1217 if (dma_mapping_error(bp->sdev->dma_dev, rx_ring_dma) || in b44_alloc_consistent()
1223 bp->rx_ring = rx_ring; in b44_alloc_consistent()
1224 bp->rx_ring_dma = rx_ring_dma; in b44_alloc_consistent()
1225 bp->flags |= B44_FLAG_RX_RING_HACK; in b44_alloc_consistent()
1228 bp->tx_ring = dma_alloc_coherent(bp->sdev->dma_dev, size, in b44_alloc_consistent()
1229 &bp->tx_ring_dma, gfp); in b44_alloc_consistent()
1230 if (!bp->tx_ring) { in b44_alloc_consistent()
1241 tx_ring_dma = dma_map_single(bp->sdev->dma_dev, tx_ring, in b44_alloc_consistent()
1245 if (dma_mapping_error(bp->sdev->dma_dev, tx_ring_dma) || in b44_alloc_consistent()
1251 bp->tx_ring = tx_ring; in b44_alloc_consistent()
1252 bp->tx_ring_dma = tx_ring_dma; in b44_alloc_consistent()
1253 bp->flags |= B44_FLAG_TX_RING_HACK; in b44_alloc_consistent()
1259 b44_free_consistent(bp); in b44_alloc_consistent()
1264 static void b44_clear_stats(struct b44 *bp) in b44_clear_stats() argument
1268 bw32(bp, B44_MIB_CTRL, MIB_CTRL_CLR_ON_READ); in b44_clear_stats()
1270 br32(bp, reg); in b44_clear_stats()
1272 br32(bp, reg); in b44_clear_stats()
1276 static void b44_chip_reset(struct b44 *bp, int reset_kind) in b44_chip_reset() argument
1278 struct ssb_device *sdev = bp->sdev; in b44_chip_reset()
1281 was_enabled = ssb_device_is_enabled(bp->sdev); in b44_chip_reset()
1283 ssb_device_enable(bp->sdev, 0); in b44_chip_reset()
1287 bw32(bp, B44_RCV_LAZY, 0); in b44_chip_reset()
1288 bw32(bp, B44_ENET_CTRL, ENET_CTRL_DISABLE); in b44_chip_reset()
1289 b44_wait_bit(bp, B44_ENET_CTRL, ENET_CTRL_DISABLE, 200, 1); in b44_chip_reset()
1290 bw32(bp, B44_DMATX_CTRL, 0); in b44_chip_reset()
1291 bp->tx_prod = bp->tx_cons = 0; in b44_chip_reset()
1292 if (br32(bp, B44_DMARX_STAT) & DMARX_STAT_EMASK) { in b44_chip_reset()
1293 b44_wait_bit(bp, B44_DMARX_STAT, DMARX_STAT_SIDLE, in b44_chip_reset()
1296 bw32(bp, B44_DMARX_CTRL, 0); in b44_chip_reset()
1297 bp->rx_prod = bp->rx_cons = 0; in b44_chip_reset()
1300 b44_clear_stats(bp); in b44_chip_reset()
1311 bw32(bp, B44_MDIO_CTRL, (MDIO_CTRL_PREAMBLE | in b44_chip_reset()
1317 bw32(bp, B44_MDIO_CTRL, (MDIO_CTRL_PREAMBLE | in b44_chip_reset()
1326 br32(bp, B44_MDIO_CTRL); in b44_chip_reset()
1328 if (!(br32(bp, B44_DEVCTRL) & DEVCTRL_IPP)) { in b44_chip_reset()
1329 bw32(bp, B44_ENET_CTRL, ENET_CTRL_EPSEL); in b44_chip_reset()
1330 br32(bp, B44_ENET_CTRL); in b44_chip_reset()
1331 bp->flags |= B44_FLAG_EXTERNAL_PHY; in b44_chip_reset()
1333 u32 val = br32(bp, B44_DEVCTRL); in b44_chip_reset()
1336 bw32(bp, B44_DEVCTRL, (val & ~DEVCTRL_EPR)); in b44_chip_reset()
1337 br32(bp, B44_DEVCTRL); in b44_chip_reset()
1340 bp->flags &= ~B44_FLAG_EXTERNAL_PHY; in b44_chip_reset()
1345 static void b44_halt(struct b44 *bp) in b44_halt() argument
1347 b44_disable_ints(bp); in b44_halt()
1349 b44_phy_reset(bp); in b44_halt()
1351 netdev_info(bp->dev, "powering down PHY\n"); in b44_halt()
1352 bw32(bp, B44_MAC_CTRL, MAC_CTRL_PHY_PDOWN); in b44_halt()
1355 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_halt()
1356 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_halt()
1358 b44_chip_reset(bp, B44_CHIP_RESET_PARTIAL); in b44_halt()
1362 static void __b44_set_mac_addr(struct b44 *bp) in __b44_set_mac_addr() argument
1364 bw32(bp, B44_CAM_CTRL, 0); in __b44_set_mac_addr()
1365 if (!(bp->dev->flags & IFF_PROMISC)) { in __b44_set_mac_addr()
1368 __b44_cam_write(bp, bp->dev->dev_addr, 0); in __b44_set_mac_addr()
1369 val = br32(bp, B44_CAM_CTRL); in __b44_set_mac_addr()
1370 bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE); in __b44_set_mac_addr()
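__b44_set_mac_addr() is fully covered by the matches above: disable the CAM, program slot 0 with the device address unless the interface is promiscuous, then re-enable the CAM. Sketch:

static void __b44_set_mac_addr(struct b44 *bp)
{
        u32 val;

        bw32(bp, B44_CAM_CTRL, 0);
        if (!(bp->dev->flags & IFF_PROMISC)) {
                __b44_cam_write(bp, bp->dev->dev_addr, 0);
                val = br32(bp, B44_CAM_CTRL);
                bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE);
        }
}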
1376 struct b44 *bp = netdev_priv(dev); in b44_set_mac_addr() local
1388 spin_lock_irq(&bp->lock); in b44_set_mac_addr()
1390 val = br32(bp, B44_RXCONFIG); in b44_set_mac_addr()
1392 __b44_set_mac_addr(bp); in b44_set_mac_addr()
1394 spin_unlock_irq(&bp->lock); in b44_set_mac_addr()
1403 static void b44_init_hw(struct b44 *bp, int reset_kind) in b44_init_hw() argument
1407 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_init_hw()
1409 b44_phy_reset(bp); in b44_init_hw()
1410 b44_setup_phy(bp); in b44_init_hw()
1414 bw32(bp, B44_MAC_CTRL, MAC_CTRL_CRC32_ENAB | MAC_CTRL_PHY_LEDCTRL); in b44_init_hw()
1415 bw32(bp, B44_RCV_LAZY, (1 << RCV_LAZY_FC_SHIFT)); in b44_init_hw()
1418 __b44_set_rx_mode(bp->dev); in b44_init_hw()
1421 bw32(bp, B44_RXMAXLEN, bp->dev->mtu + ETH_HLEN + 8 + RX_HEADER_LEN); in b44_init_hw()
1422 bw32(bp, B44_TXMAXLEN, bp->dev->mtu + ETH_HLEN + 8 + RX_HEADER_LEN); in b44_init_hw()
1424 bw32(bp, B44_TX_WMARK, 56); /* XXX magic */ in b44_init_hw()
1426 bw32(bp, B44_DMARX_CTRL, (DMARX_CTRL_ENABLE | in b44_init_hw()
1429 bw32(bp, B44_DMATX_CTRL, DMATX_CTRL_ENABLE); in b44_init_hw()
1430 bw32(bp, B44_DMATX_ADDR, bp->tx_ring_dma + bp->dma_offset); in b44_init_hw()
1431 bw32(bp, B44_DMARX_CTRL, (DMARX_CTRL_ENABLE | in b44_init_hw()
1433 bw32(bp, B44_DMARX_ADDR, bp->rx_ring_dma + bp->dma_offset); in b44_init_hw()
1435 bw32(bp, B44_DMARX_PTR, bp->rx_pending); in b44_init_hw()
1436 bp->rx_prod = bp->rx_pending; in b44_init_hw()
1438 bw32(bp, B44_MIB_CTRL, MIB_CTRL_CLR_ON_READ); in b44_init_hw()
1441 val = br32(bp, B44_ENET_CTRL); in b44_init_hw()
1442 bw32(bp, B44_ENET_CTRL, (val | ENET_CTRL_ENABLE)); in b44_init_hw()
1444 netdev_reset_queue(bp->dev); in b44_init_hw()
1449 struct b44 *bp = netdev_priv(dev); in b44_open() local
1452 err = b44_alloc_consistent(bp, GFP_KERNEL); in b44_open()
1456 napi_enable(&bp->napi); in b44_open()
1458 b44_init_rings(bp); in b44_open()
1459 b44_init_hw(bp, B44_FULL_RESET); in b44_open()
1461 b44_check_phy(bp); in b44_open()
1465 napi_disable(&bp->napi); in b44_open()
1466 b44_chip_reset(bp, B44_CHIP_RESET_PARTIAL); in b44_open()
1467 b44_free_rings(bp); in b44_open()
1468 b44_free_consistent(bp); in b44_open()
1472 timer_setup(&bp->timer, b44_timer, 0); in b44_open()
1473 bp->timer.expires = jiffies + HZ; in b44_open()
1474 add_timer(&bp->timer); in b44_open()
1476 b44_enable_ints(bp); in b44_open()
1478 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_open()
1499 static void bwfilter_table(struct b44 *bp, u8 *pp, u32 bytes, u32 table_offset) in bwfilter_table() argument
1505 bw32(bp, B44_FILT_ADDR, table_offset + i); in bwfilter_table()
1506 bw32(bp, B44_FILT_DATA, pattern[i / sizeof(u32)]); in bwfilter_table()
1542 static void b44_setup_pseudo_magicp(struct b44 *bp) in b44_setup_pseudo_magicp() argument
1556 plen0 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1559 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, B44_PATTERN_BASE); in b44_setup_pseudo_magicp()
1560 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, B44_PMASK_BASE); in b44_setup_pseudo_magicp()
1565 plen1 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1568 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, in b44_setup_pseudo_magicp()
1570 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, in b44_setup_pseudo_magicp()
1576 plen2 = b44_magic_pattern(bp->dev->dev_addr, pwol_pattern, pwol_mask, in b44_setup_pseudo_magicp()
1579 bwfilter_table(bp, pwol_pattern, B44_PATTERN_SIZE, in b44_setup_pseudo_magicp()
1581 bwfilter_table(bp, pwol_mask, B44_PMASK_SIZE, in b44_setup_pseudo_magicp()
1588 bw32(bp, B44_WKUP_LEN, val); in b44_setup_pseudo_magicp()
1591 val = br32(bp, B44_DEVCTRL); in b44_setup_pseudo_magicp()
1592 bw32(bp, B44_DEVCTRL, val | DEVCTRL_PFE); in b44_setup_pseudo_magicp()
1597 static void b44_setup_wol_pci(struct b44 *bp) in b44_setup_wol_pci() argument
1601 if (bp->sdev->bus->bustype != SSB_BUSTYPE_SSB) { in b44_setup_wol_pci()
1602 bw32(bp, SSB_TMSLOW, br32(bp, SSB_TMSLOW) | SSB_TMSLOW_PE); in b44_setup_wol_pci()
1603 pci_read_config_word(bp->sdev->bus->host_pci, SSB_PMCSR, &val); in b44_setup_wol_pci()
1604 pci_write_config_word(bp->sdev->bus->host_pci, SSB_PMCSR, val | SSB_PE); in b44_setup_wol_pci()
1608 static inline void b44_setup_wol_pci(struct b44 *bp) { } in b44_setup_wol_pci() argument
1611 static void b44_setup_wol(struct b44 *bp) in b44_setup_wol() argument
1615 bw32(bp, B44_RXCONFIG, RXCONFIG_ALLMULTI); in b44_setup_wol()
1617 if (bp->flags & B44_FLAG_B0_ANDLATER) { in b44_setup_wol()
1619 bw32(bp, B44_WKUP_LEN, WKUP_LEN_DISABLE); in b44_setup_wol()
1621 val = bp->dev->dev_addr[2] << 24 | in b44_setup_wol()
1622 bp->dev->dev_addr[3] << 16 | in b44_setup_wol()
1623 bp->dev->dev_addr[4] << 8 | in b44_setup_wol()
1624 bp->dev->dev_addr[5]; in b44_setup_wol()
1625 bw32(bp, B44_ADDR_LO, val); in b44_setup_wol()
1627 val = bp->dev->dev_addr[0] << 8 | in b44_setup_wol()
1628 bp->dev->dev_addr[1]; in b44_setup_wol()
1629 bw32(bp, B44_ADDR_HI, val); in b44_setup_wol()
1631 val = br32(bp, B44_DEVCTRL); in b44_setup_wol()
1632 bw32(bp, B44_DEVCTRL, val | DEVCTRL_MPM | DEVCTRL_PFE); in b44_setup_wol()
1635 b44_setup_pseudo_magicp(bp); in b44_setup_wol()
1637 b44_setup_wol_pci(bp); in b44_setup_wol()
1642 struct b44 *bp = netdev_priv(dev); in b44_close() local
1646 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_close()
1649 napi_disable(&bp->napi); in b44_close()
1651 del_timer_sync(&bp->timer); in b44_close()
1653 spin_lock_irq(&bp->lock); in b44_close()
1655 b44_halt(bp); in b44_close()
1656 b44_free_rings(bp); in b44_close()
1659 spin_unlock_irq(&bp->lock); in b44_close()
1663 if (bp->flags & B44_FLAG_WOL_ENABLE) { in b44_close()
1664 b44_init_hw(bp, B44_PARTIAL_RESET); in b44_close()
1665 b44_setup_wol(bp); in b44_close()
1668 b44_free_consistent(bp); in b44_close()
1676 struct b44 *bp = netdev_priv(dev); in b44_get_stats64() local
1677 struct b44_hw_stats *hwstat = &bp->hw_stats; in b44_get_stats64()
1719 static int __b44_load_mcast(struct b44 *bp, struct net_device *dev) in __b44_load_mcast() argument
1729 __b44_cam_write(bp, ha->addr, i++ + 1); in __b44_load_mcast()
1736 struct b44 *bp = netdev_priv(dev); in __b44_set_rx_mode() local
1739 val = br32(bp, B44_RXCONFIG); in __b44_set_rx_mode()
1743 bw32(bp, B44_RXCONFIG, val); in __b44_set_rx_mode()
1748 __b44_set_mac_addr(bp); in __b44_set_rx_mode()
1754 i = __b44_load_mcast(bp, dev); in __b44_set_rx_mode()
1757 __b44_cam_write(bp, zero, i); in __b44_set_rx_mode()
1759 bw32(bp, B44_RXCONFIG, val); in __b44_set_rx_mode()
1760 val = br32(bp, B44_CAM_CTRL); in __b44_set_rx_mode()
1761 bw32(bp, B44_CAM_CTRL, val | CAM_CTRL_ENABLE); in __b44_set_rx_mode()
1767 struct b44 *bp = netdev_priv(dev); in b44_set_rx_mode() local
1769 spin_lock_irq(&bp->lock); in b44_set_rx_mode()
1771 spin_unlock_irq(&bp->lock); in b44_set_rx_mode()
1776 struct b44 *bp = netdev_priv(dev); in b44_get_msglevel() local
1777 return bp->msg_enable; in b44_get_msglevel()
1782 struct b44 *bp = netdev_priv(dev); in b44_set_msglevel() local
1783 bp->msg_enable = value; in b44_set_msglevel()
1788 struct b44 *bp = netdev_priv(dev); in b44_get_drvinfo() local
1789 struct ssb_bus *bus = bp->sdev->bus; in b44_get_drvinfo()
1808 struct b44 *bp = netdev_priv(dev); in b44_nway_reset() local
1812 spin_lock_irq(&bp->lock); in b44_nway_reset()
1813 b44_readphy(bp, MII_BMCR, &bmcr); in b44_nway_reset()
1814 b44_readphy(bp, MII_BMCR, &bmcr); in b44_nway_reset()
1817 b44_writephy(bp, MII_BMCR, in b44_nway_reset()
1821 spin_unlock_irq(&bp->lock); in b44_nway_reset()
1829 struct b44 *bp = netdev_priv(dev); in b44_get_link_ksettings() local
1832 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_get_link_ksettings()
1847 if (bp->flags & B44_FLAG_ADV_10HALF) in b44_get_link_ksettings()
1849 if (bp->flags & B44_FLAG_ADV_10FULL) in b44_get_link_ksettings()
1851 if (bp->flags & B44_FLAG_ADV_100HALF) in b44_get_link_ksettings()
1853 if (bp->flags & B44_FLAG_ADV_100FULL) in b44_get_link_ksettings()
1856 cmd->base.speed = (bp->flags & B44_FLAG_100_BASE_T) ? in b44_get_link_ksettings()
1858 cmd->base.duplex = (bp->flags & B44_FLAG_FULL_DUPLEX) ? in b44_get_link_ksettings()
1861 cmd->base.phy_address = bp->phy_addr; in b44_get_link_ksettings()
1862 cmd->base.autoneg = (bp->flags & B44_FLAG_FORCE_LINK) ? in b44_get_link_ksettings()
1883 struct b44 *bp = netdev_priv(dev); in b44_set_link_ksettings() local
1888 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_set_link_ksettings()
1890 spin_lock_irq(&bp->lock); in b44_set_link_ksettings()
1892 b44_setup_phy(bp); in b44_set_link_ksettings()
1896 spin_unlock_irq(&bp->lock); in b44_set_link_ksettings()
1919 spin_lock_irq(&bp->lock); in b44_set_link_ksettings()
1922 bp->flags &= ~(B44_FLAG_FORCE_LINK | in b44_set_link_ksettings()
1930 bp->flags |= (B44_FLAG_ADV_10HALF | in b44_set_link_ksettings()
1936 bp->flags |= B44_FLAG_ADV_10HALF; in b44_set_link_ksettings()
1938 bp->flags |= B44_FLAG_ADV_10FULL; in b44_set_link_ksettings()
1940 bp->flags |= B44_FLAG_ADV_100HALF; in b44_set_link_ksettings()
1942 bp->flags |= B44_FLAG_ADV_100FULL; in b44_set_link_ksettings()
1945 bp->flags |= B44_FLAG_FORCE_LINK; in b44_set_link_ksettings()
1946 bp->flags &= ~(B44_FLAG_100_BASE_T | B44_FLAG_FULL_DUPLEX); in b44_set_link_ksettings()
1948 bp->flags |= B44_FLAG_100_BASE_T; in b44_set_link_ksettings()
1950 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_set_link_ksettings()
1954 b44_setup_phy(bp); in b44_set_link_ksettings()
1956 spin_unlock_irq(&bp->lock); in b44_set_link_ksettings()
1964 struct b44 *bp = netdev_priv(dev); in b44_get_ringparam() local
1967 ering->rx_pending = bp->rx_pending; in b44_get_ringparam()
1975 struct b44 *bp = netdev_priv(dev); in b44_set_ringparam() local
1983 spin_lock_irq(&bp->lock); in b44_set_ringparam()
1985 bp->rx_pending = ering->rx_pending; in b44_set_ringparam()
1986 bp->tx_pending = ering->tx_pending; in b44_set_ringparam()
1988 b44_halt(bp); in b44_set_ringparam()
1989 b44_init_rings(bp); in b44_set_ringparam()
1990 b44_init_hw(bp, B44_FULL_RESET); in b44_set_ringparam()
1991 netif_wake_queue(bp->dev); in b44_set_ringparam()
1992 spin_unlock_irq(&bp->lock); in b44_set_ringparam()
1994 b44_enable_ints(bp); in b44_set_ringparam()
2002 struct b44 *bp = netdev_priv(dev); in b44_get_pauseparam() local
2005 (bp->flags & B44_FLAG_PAUSE_AUTO) != 0; in b44_get_pauseparam()
2007 (bp->flags & B44_FLAG_RX_PAUSE) != 0; in b44_get_pauseparam()
2009 (bp->flags & B44_FLAG_TX_PAUSE) != 0; in b44_get_pauseparam()
2015 struct b44 *bp = netdev_priv(dev); in b44_set_pauseparam() local
2017 spin_lock_irq(&bp->lock); in b44_set_pauseparam()
2019 bp->flags |= B44_FLAG_PAUSE_AUTO; in b44_set_pauseparam()
2021 bp->flags &= ~B44_FLAG_PAUSE_AUTO; in b44_set_pauseparam()
2023 bp->flags |= B44_FLAG_RX_PAUSE; in b44_set_pauseparam()
2025 bp->flags &= ~B44_FLAG_RX_PAUSE; in b44_set_pauseparam()
2027 bp->flags |= B44_FLAG_TX_PAUSE; in b44_set_pauseparam()
2029 bp->flags &= ~B44_FLAG_TX_PAUSE; in b44_set_pauseparam()
2030 if (bp->flags & B44_FLAG_PAUSE_AUTO) { in b44_set_pauseparam()
2031 b44_halt(bp); in b44_set_pauseparam()
2032 b44_init_rings(bp); in b44_set_pauseparam()
2033 b44_init_hw(bp, B44_FULL_RESET); in b44_set_pauseparam()
2035 __b44_set_flow_ctrl(bp, bp->flags); in b44_set_pauseparam()
2037 spin_unlock_irq(&bp->lock); in b44_set_pauseparam()
2039 b44_enable_ints(bp); in b44_set_pauseparam()
2066 struct b44 *bp = netdev_priv(dev); in b44_get_ethtool_stats() local
2067 struct b44_hw_stats *hwstat = &bp->hw_stats; in b44_get_ethtool_stats()
2072 spin_lock_irq(&bp->lock); in b44_get_ethtool_stats()
2073 b44_stats_update(bp); in b44_get_ethtool_stats()
2074 spin_unlock_irq(&bp->lock); in b44_get_ethtool_stats()
2089 struct b44 *bp = netdev_priv(dev); in b44_get_wol() local
2092 if (bp->flags & B44_FLAG_WOL_ENABLE) in b44_get_wol()
2101 struct b44 *bp = netdev_priv(dev); in b44_set_wol() local
2103 spin_lock_irq(&bp->lock); in b44_set_wol()
2105 bp->flags |= B44_FLAG_WOL_ENABLE; in b44_set_wol()
2107 bp->flags &= ~B44_FLAG_WOL_ENABLE; in b44_set_wol()
2108 spin_unlock_irq(&bp->lock); in b44_set_wol()
2110 device_set_wakeup_enable(bp->sdev->dev, wol->wolopts & WAKE_MAGIC); in b44_set_wol()
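b44_set_wol() simply toggles B44_FLAG_WOL_ENABLE under the lock and then propagates the choice to the device wakeup state, as the matches show. Sketch; the WAKE_MAGIC test and the return value are assumptions:

static int b44_set_wol(struct net_device *dev, struct ethtool_wolinfo *wol)
{
        struct b44 *bp = netdev_priv(dev);

        spin_lock_irq(&bp->lock);
        if (wol->wolopts & WAKE_MAGIC)
                bp->flags |= B44_FLAG_WOL_ENABLE;
        else
                bp->flags &= ~B44_FLAG_WOL_ENABLE;
        spin_unlock_irq(&bp->lock);

        device_set_wakeup_enable(bp->sdev->dev, wol->wolopts & WAKE_MAGIC);
        return 0;
}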
2135 struct b44 *bp = netdev_priv(dev); in b44_ioctl() local
2141 spin_lock_irq(&bp->lock); in b44_ioctl()
2142 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_ioctl()
2146 err = generic_mii_ioctl(&bp->mii_if, if_mii(ifr), cmd, NULL); in b44_ioctl()
2148 spin_unlock_irq(&bp->lock); in b44_ioctl()
2153 static int b44_get_invariants(struct b44 *bp) in b44_get_invariants() argument
2155 struct ssb_device *sdev = bp->sdev; in b44_get_invariants()
2159 bp->dma_offset = ssb_dma_translation(sdev); in b44_get_invariants()
2164 bp->phy_addr = sdev->bus->sprom.et1phyaddr; in b44_get_invariants()
2167 bp->phy_addr = sdev->bus->sprom.et0phyaddr; in b44_get_invariants()
2172 bp->phy_addr &= 0x1F; in b44_get_invariants()
2174 memcpy(bp->dev->dev_addr, addr, ETH_ALEN); in b44_get_invariants()
2176 if (!is_valid_ether_addr(&bp->dev->dev_addr[0])){ in b44_get_invariants()
2181 bp->imask = IMASK_DEF; in b44_get_invariants()
2187 if (bp->sdev->id.revision >= 7) in b44_get_invariants()
2188 bp->flags |= B44_FLAG_B0_ANDLATER; in b44_get_invariants()
2211 struct b44 *bp = netdev_priv(dev); in b44_adjust_link() local
2217 if (bp->old_link != phydev->link) { in b44_adjust_link()
2219 bp->old_link = phydev->link; in b44_adjust_link()
2225 (bp->flags & B44_FLAG_FULL_DUPLEX)) { in b44_adjust_link()
2227 bp->flags &= ~B44_FLAG_FULL_DUPLEX; in b44_adjust_link()
2229 !(bp->flags & B44_FLAG_FULL_DUPLEX)) { in b44_adjust_link()
2231 bp->flags |= B44_FLAG_FULL_DUPLEX; in b44_adjust_link()
2236 u32 val = br32(bp, B44_TX_CTRL); in b44_adjust_link()
2237 if (bp->flags & B44_FLAG_FULL_DUPLEX) in b44_adjust_link()
2241 bw32(bp, B44_TX_CTRL, val); in b44_adjust_link()
2246 static int b44_register_phy_one(struct b44 *bp) in b44_register_phy_one() argument
2250 struct ssb_device *sdev = bp->sdev; in b44_register_phy_one()
2263 mii_bus->priv = bp; in b44_register_phy_one()
2268 mii_bus->phy_mask = ~(1 << bp->phy_addr); in b44_register_phy_one()
2271 bp->mii_bus = mii_bus; in b44_register_phy_one()
2279 if (!mdiobus_is_registered_device(bp->mii_bus, bp->phy_addr) && in b44_register_phy_one()
2284 bp->phy_addr); in b44_register_phy_one()
2286 bp->phy_addr = 0; in b44_register_phy_one()
2288 bp->phy_addr); in b44_register_phy_one()
2291 bp->phy_addr); in b44_register_phy_one()
2294 phydev = phy_connect(bp->dev, bus_id, &b44_adjust_link, in b44_register_phy_one()
2298 bp->phy_addr); in b44_register_phy_one()
2311 bp->old_link = 0; in b44_register_phy_one()
2312 bp->phy_addr = phydev->mdio.addr; in b44_register_phy_one()
2328 static void b44_unregister_phy_one(struct b44 *bp) in b44_unregister_phy_one() argument
2330 struct net_device *dev = bp->dev; in b44_unregister_phy_one()
2331 struct mii_bus *mii_bus = bp->mii_bus; in b44_unregister_phy_one()
2342 struct b44 *bp; in b44_init_one() local
2347 dev = alloc_etherdev(sizeof(*bp)); in b44_init_one()
2358 bp = netdev_priv(dev); in b44_init_one()
2359 bp->sdev = sdev; in b44_init_one()
2360 bp->dev = dev; in b44_init_one()
2361 bp->force_copybreak = 0; in b44_init_one()
2363 bp->msg_enable = netif_msg_init(b44_debug, B44_DEF_MSG_ENABLE); in b44_init_one()
2365 spin_lock_init(&bp->lock); in b44_init_one()
2366 u64_stats_init(&bp->hw_stats.syncp); in b44_init_one()
2368 bp->rx_pending = B44_DEF_RX_RING_PENDING; in b44_init_one()
2369 bp->tx_pending = B44_DEF_TX_RING_PENDING; in b44_init_one()
2372 netif_napi_add(dev, &bp->napi, b44_poll, 64); in b44_init_one()
2393 err = b44_get_invariants(bp); in b44_init_one()
2400 if (bp->phy_addr == B44_PHY_ADDR_NO_PHY) { in b44_init_one()
2406 bp->mii_if.dev = dev; in b44_init_one()
2407 bp->mii_if.mdio_read = b44_mdio_read_mii; in b44_init_one()
2408 bp->mii_if.mdio_write = b44_mdio_write_mii; in b44_init_one()
2409 bp->mii_if.phy_id = bp->phy_addr; in b44_init_one()
2410 bp->mii_if.phy_id_mask = 0x1f; in b44_init_one()
2411 bp->mii_if.reg_num_mask = 0x1f; in b44_init_one()
2414 bp->flags |= (B44_FLAG_ADV_10HALF | B44_FLAG_ADV_10FULL | in b44_init_one()
2418 bp->flags |= B44_FLAG_PAUSE_AUTO; in b44_init_one()
2433 b44_chip_reset(bp, B44_CHIP_RESET_FULL); in b44_init_one()
2436 err = b44_phy_reset(bp); in b44_init_one()
2442 if (bp->flags & B44_FLAG_EXTERNAL_PHY) { in b44_init_one()
2443 err = b44_register_phy_one(bp); in b44_init_one()
2461 netif_napi_del(&bp->napi); in b44_init_one()
2471 struct b44 *bp = netdev_priv(dev); in b44_remove_one() local
2474 if (bp->flags & B44_FLAG_EXTERNAL_PHY) in b44_remove_one()
2475 b44_unregister_phy_one(bp); in b44_remove_one()
2478 netif_napi_del(&bp->napi); in b44_remove_one()
2487 struct b44 *bp = netdev_priv(dev); in b44_suspend() local
2492 del_timer_sync(&bp->timer); in b44_suspend()
2494 spin_lock_irq(&bp->lock); in b44_suspend()
2496 b44_halt(bp); in b44_suspend()
2497 netif_carrier_off(bp->dev); in b44_suspend()
2498 netif_device_detach(bp->dev); in b44_suspend()
2499 b44_free_rings(bp); in b44_suspend()
2501 spin_unlock_irq(&bp->lock); in b44_suspend()
2504 if (bp->flags & B44_FLAG_WOL_ENABLE) { in b44_suspend()
2505 b44_init_hw(bp, B44_PARTIAL_RESET); in b44_suspend()
2506 b44_setup_wol(bp); in b44_suspend()
2516 struct b44 *bp = netdev_priv(dev); in b44_resume() local
2529 spin_lock_irq(&bp->lock); in b44_resume()
2530 b44_init_rings(bp); in b44_resume()
2531 b44_init_hw(bp, B44_FULL_RESET); in b44_resume()
2532 spin_unlock_irq(&bp->lock); in b44_resume()
2542 spin_lock_irq(&bp->lock); in b44_resume()
2543 b44_halt(bp); in b44_resume()
2544 b44_free_rings(bp); in b44_resume()
2545 spin_unlock_irq(&bp->lock); in b44_resume()
2549 netif_device_attach(bp->dev); in b44_resume()
2551 b44_enable_ints(bp); in b44_resume()
2554 mod_timer(&bp->timer, jiffies + 1); in b44_resume()