Lines matching refs: lp
131 static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg) in axienet_dma_in32() argument
133 return ioread32(lp->dma_regs + reg); in axienet_dma_in32()
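The listing only matches the read accessor; its write-side counterpart, axienet_dma_out32(), is used by the control-register writes further down. A minimal sketch of that helper, assuming it simply mirrors the read path over the same ioremapped lp->dma_regs window:

	/* Write-side counterpart to axienet_dma_in32() above: post a 32-bit
	 * value to a DMA engine register through the ioremapped lp->dma_regs
	 * window.  (Sketch; the body is assumed to mirror the read accessor.)
	 */
	static void axienet_dma_out32(struct axienet_local *lp, off_t reg, u32 value)
	{
		iowrite32(value, lp->dma_regs + reg);
	}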
136 static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr, in desc_set_phys_addr() argument
140 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_set_phys_addr()
144 static dma_addr_t desc_get_phys_addr(struct axienet_local *lp, in desc_get_phys_addr() argument
149 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_get_phys_addr()
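Only the feature checks of the descriptor-address helpers match lp; the lines that actually split and recombine the address are omitted. A sketch of what they plausibly look like, assuming the phys/phys_msb field pair referenced elsewhere in the listing:

	/* Sketch: split a DMA address across the two 32-bit descriptor fields
	 * when the engine was synthesized for 64-bit addressing, and rebuild
	 * it on the way back.  Field names follow the listing; exact bodies
	 * are an assumption.
	 */
	static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr,
				       struct axidma_bd *desc)
	{
		desc->phys = lower_32_bits(addr);
		if (lp->features & XAE_FEATURE_DMA_64BIT)
			desc->phys_msb = upper_32_bits(addr);
	}

	static dma_addr_t desc_get_phys_addr(struct axienet_local *lp,
					     struct axidma_bd *desc)
	{
		dma_addr_t addr = desc->phys;

		if (lp->features & XAE_FEATURE_DMA_64BIT)
			addr |= (u64)desc->phys_msb << 32;

		return addr;
	}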
166 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_release() local
169 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
170 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_release()
171 lp->tx_bd_v, in axienet_dma_bd_release()
172 lp->tx_bd_p); in axienet_dma_bd_release()
174 if (!lp->rx_bd_v) in axienet_dma_bd_release()
177 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_release()
183 if (!lp->rx_bd_v[i].skb) in axienet_dma_bd_release()
186 dev_kfree_skb(lp->rx_bd_v[i].skb); in axienet_dma_bd_release()
192 if (lp->rx_bd_v[i].cntrl) { in axienet_dma_bd_release()
193 phys = desc_get_phys_addr(lp, &lp->rx_bd_v[i]); in axienet_dma_bd_release()
194 dma_unmap_single(lp->dev, phys, in axienet_dma_bd_release()
195 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_release()
199 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
200 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_release()
201 lp->rx_bd_v, in axienet_dma_bd_release()
202 lp->rx_bd_p); in axienet_dma_bd_release()
210 static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec) in axienet_usec_to_timer() argument
215 if (lp->axi_clk) in axienet_usec_to_timer()
216 clk_rate = clk_get_rate(lp->axi_clk); in axienet_usec_to_timer()
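Only the clock lookup in axienet_usec_to_timer() matches lp; the conversion itself is omitted. A sketch of the microsecond-to-timer-ticks math, assuming one delay-timer tick per 125 AXI clock cycles and an 8-bit timer field (both taken from the AXI DMA interrupt-delay timer description), with a placeholder fallback rate for designs that provide no clock:

	/* Sketch of converting a coalesce delay in microseconds into the
	 * 8-bit interrupt-delay timer value programmed into the DMA control
	 * registers.  The 125-cycles-per-tick unit, the 255 cap and the
	 * fallback clock rate are assumptions.
	 */
	static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec)
	{
		u64 clk_rate = 125000000;	/* assumed fallback rate */
		u32 result;

		if (lp->axi_clk)
			clk_rate = clk_get_rate(lp->axi_clk);

		/* ticks = usec * clk_rate / (125 cycles-per-tick * usec-per-sec) */
		result = DIV64_U64_ROUND_CLOSEST((u64)coalesce_usec * clk_rate,
						 (u64)125 * USEC_PER_SEC);
		if (result > 255)
			result = 255;

		return result;
	}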
231 static void axienet_dma_start(struct axienet_local *lp) in axienet_dma_start() argument
234 lp->rx_dma_cr = (lp->coalesce_count_rx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
239 if (lp->coalesce_count_rx > 1) in axienet_dma_start()
240 lp->rx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_rx) in axienet_dma_start()
243 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
246 lp->tx_dma_cr = (lp->coalesce_count_tx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
251 if (lp->coalesce_count_tx > 1) in axienet_dma_start()
252 lp->tx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_tx) in axienet_dma_start()
255 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
260 axienet_dma_out_addr(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_start()
261 lp->rx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
262 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
263 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_start()
264 (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1))); in axienet_dma_start()
270 axienet_dma_out_addr(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_start()
271 lp->tx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
272 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
289 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_init() local
292 lp->tx_bd_ci = 0; in axienet_dma_bd_init()
293 lp->tx_bd_tail = 0; in axienet_dma_bd_init()
294 lp->rx_bd_ci = 0; in axienet_dma_bd_init()
297 lp->tx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
298 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_init()
299 &lp->tx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
300 if (!lp->tx_bd_v) in axienet_dma_bd_init()
303 lp->rx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
304 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_init()
305 &lp->rx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
306 if (!lp->rx_bd_v) in axienet_dma_bd_init()
309 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_bd_init()
310 dma_addr_t addr = lp->tx_bd_p + in axienet_dma_bd_init()
311 sizeof(*lp->tx_bd_v) * in axienet_dma_bd_init()
312 ((i + 1) % lp->tx_bd_num); in axienet_dma_bd_init()
314 lp->tx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
315 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
316 lp->tx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
319 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_init()
322 addr = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * in axienet_dma_bd_init()
323 ((i + 1) % lp->rx_bd_num); in axienet_dma_bd_init()
324 lp->rx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
325 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
326 lp->rx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
328 skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_dma_bd_init()
332 lp->rx_bd_v[i].skb = skb; in axienet_dma_bd_init()
333 addr = dma_map_single(lp->dev, skb->data, in axienet_dma_bd_init()
334 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_init()
335 if (dma_mapping_error(lp->dev, addr)) { in axienet_dma_bd_init()
339 desc_set_phys_addr(lp, addr, &lp->rx_bd_v[i]); in axienet_dma_bd_init()
341 lp->rx_bd_v[i].cntrl = lp->max_frm_size; in axienet_dma_bd_init()
344 axienet_dma_start(lp); in axienet_dma_bd_init()
363 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_mac_address() local
371 axienet_iow(lp, XAE_UAW0_OFFSET, in axienet_set_mac_address()
376 axienet_iow(lp, XAE_UAW1_OFFSET, in axienet_set_mac_address()
377 (((axienet_ior(lp, XAE_UAW1_OFFSET)) & in axienet_set_mac_address()
416 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_multicast_list() local
425 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
427 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
445 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
448 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
449 axienet_iow(lp, XAE_AF0_OFFSET, af0reg); in axienet_set_multicast_list()
450 axienet_iow(lp, XAE_AF1_OFFSET, af1reg); in axienet_set_multicast_list()
454 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
457 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
460 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
463 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
464 axienet_iow(lp, XAE_AF0_OFFSET, 0); in axienet_set_multicast_list()
465 axienet_iow(lp, XAE_AF1_OFFSET, 0); in axienet_set_multicast_list()
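The loop that packs each multicast address into the values written to XAE_AF0_OFFSET/XAE_AF1_OFFSET above does not reference lp on every line, so most of it is not shown. A sketch of that packing, assuming the first four octets land in AF0, the last two in AF1, and the CAM entry is selected through the low byte of the FMI register (table size name taken from the driver header):

	/* Sketch of the per-address packing behind the AF0/AF1 writes above. */
	struct netdev_hw_addr *ha;
	int i = 0;

	netdev_for_each_mc_addr(ha, ndev) {
		u32 reg, af0reg, af1reg;

		if (i >= XAE_MULTICAST_CAM_TABLE_NUM)
			break;

		af0reg = ha->addr[0] | (ha->addr[1] << 8) |
			 (ha->addr[2] << 16) | (ha->addr[3] << 24);
		af1reg = ha->addr[4] | (ha->addr[5] << 8);

		reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00;
		reg |= i;	/* select CAM entry i */

		axienet_iow(lp, XAE_FMI_OFFSET, reg);
		axienet_iow(lp, XAE_AF0_OFFSET, af0reg);
		axienet_iow(lp, XAE_AF1_OFFSET, af1reg);
		i++;
	}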
486 struct axienet_local *lp = netdev_priv(ndev); in axienet_setoptions() local
490 reg = ((axienet_ior(lp, tp->reg)) & ~(tp->m_or)); in axienet_setoptions()
493 axienet_iow(lp, tp->reg, reg); in axienet_setoptions()
497 lp->options |= options; in axienet_setoptions()
500 static int __axienet_device_reset(struct axienet_local *lp) in __axienet_device_reset() argument
512 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, XAXIDMA_CR_RESET_MASK); in __axienet_device_reset()
515 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
518 dev_err(lp->dev, "%s: DMA reset timeout!\n", __func__); in __axienet_device_reset()
525 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
528 dev_err(lp->dev, "%s: timeout waiting for PhyRstCmplt\n", __func__); in __axienet_device_reset()
539 static void axienet_dma_stop(struct axienet_local *lp) in axienet_dma_stop() argument
544 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_stop()
546 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_stop()
547 synchronize_irq(lp->rx_irq); in axienet_dma_stop()
549 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_stop()
551 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_stop()
552 synchronize_irq(lp->tx_irq); in axienet_dma_stop()
555 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
558 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
561 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
564 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
568 axienet_lock_mii(lp); in axienet_dma_stop()
569 __axienet_device_reset(lp); in axienet_dma_stop()
570 axienet_unlock_mii(lp); in axienet_dma_stop()
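The repeated status reads in axienet_dma_stop() are part of a short wait for each channel to report halted before the final reset; the polling itself is omitted because not every line mentions lp. A sketch of that graceful-halt wait, with the halted-bit mask name and retry count as assumptions:

	/* Sketch: give the RX channel a few 20 ms periods to set its halted
	 * bit before falling through to the hard reset; the TX channel is
	 * assumed to be handled the same way.
	 */
	int count;
	u32 sr;

	sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
	for (count = 0; !(sr & XAXIDMA_SR_HALT_MASK) && count < 5; ++count) {
		msleep(20);
		sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
	}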
588 struct axienet_local *lp = netdev_priv(ndev); in axienet_device_reset() local
591 ret = __axienet_device_reset(lp); in axienet_device_reset()
595 lp->max_frm_size = XAE_MAX_VLAN_FRAME_SIZE; in axienet_device_reset()
596 lp->options |= XAE_OPTION_VLAN; in axienet_device_reset()
597 lp->options &= (~XAE_OPTION_JUMBO); in axienet_device_reset()
601 lp->max_frm_size = ndev->mtu + VLAN_ETH_HLEN + in axienet_device_reset()
604 if (lp->max_frm_size <= lp->rxmem) in axienet_device_reset()
605 lp->options |= XAE_OPTION_JUMBO; in axienet_device_reset()
615 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_device_reset()
617 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_device_reset()
619 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_device_reset()
621 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_device_reset()
622 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_device_reset()
625 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_device_reset()
630 axienet_setoptions(ndev, lp->options & in axienet_device_reset()
634 axienet_setoptions(ndev, lp->options); in axienet_device_reset()
655 static int axienet_free_tx_chain(struct axienet_local *lp, u32 first_bd, in axienet_free_tx_chain() argument
664 cur_p = &lp->tx_bd_v[(first_bd + i) % lp->tx_bd_num]; in axienet_free_tx_chain()
675 phys = desc_get_phys_addr(lp, cur_p); in axienet_free_tx_chain()
676 dma_unmap_single(lp->dev, phys, in axienet_free_tx_chain()
713 static inline int axienet_check_tx_bd_space(struct axienet_local *lp, in axienet_check_tx_bd_space() argument
720 cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) % in axienet_check_tx_bd_space()
721 lp->tx_bd_num]; in axienet_check_tx_bd_space()
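Only the descriptor lookup of axienet_check_tx_bd_space() matches lp; the actual fullness test is omitted. A sketch of the complete helper, assuming a descriptor is considered in flight while its cntrl word is non-zero:

	/* Sketch of the ring-space check: a descriptor still owned by a
	 * pending transmit keeps a non-zero cntrl word, so finding one
	 * num_frag slots ahead of the tail means the ring cannot take
	 * another frame.
	 */
	static inline int axienet_check_tx_bd_space(struct axienet_local *lp,
						    int num_frag)
	{
		struct axidma_bd *cur_p;

		/* See the latest descriptor updates before testing. */
		rmb();
		cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) %
				     lp->tx_bd_num];
		if (cur_p->cntrl)
			return NETDEV_TX_BUSY;
		return 0;
	}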
743 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_tx); in axienet_tx_poll() local
744 struct net_device *ndev = lp->ndev; in axienet_tx_poll()
748 packets = axienet_free_tx_chain(lp, lp->tx_bd_ci, budget, false, &size, budget); in axienet_tx_poll()
751 lp->tx_bd_ci += packets; in axienet_tx_poll()
752 if (lp->tx_bd_ci >= lp->tx_bd_num) in axienet_tx_poll()
753 lp->tx_bd_ci %= lp->tx_bd_num; in axienet_tx_poll()
755 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_tx_poll()
756 u64_stats_add(&lp->tx_packets, packets); in axienet_tx_poll()
757 u64_stats_add(&lp->tx_bytes, size); in axienet_tx_poll()
758 u64_stats_update_end(&lp->tx_stat_sync); in axienet_tx_poll()
763 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_tx_poll()
772 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_tx_poll()
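The final control-register write in axienet_tx_poll() only takes effect once the poll is done; the completion test around it is omitted. A sketch of the usual NAPI completion pattern this write sits in, assuming napi_complete_done() guards the interrupt re-enable:

	/* Sketch: only re-enable TX completion interrupts once the poll
	 * consumed less than its budget and NAPI is marked complete.
	 */
	if (packets < budget && napi_complete_done(napi, packets)) {
		/* Writing the saved control value re-arms the IOC/delay
		 * interrupts; a pending completion fires immediately.
		 */
		axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr);
	}
	return packets;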
800 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit() local
803 orig_tail_ptr = lp->tx_bd_tail; in axienet_start_xmit()
807 cur_p = &lp->tx_bd_v[orig_tail_ptr]; in axienet_start_xmit()
809 if (axienet_check_tx_bd_space(lp, num_frag + 1)) { in axienet_start_xmit()
821 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit()
824 } else if (lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) { in axienet_start_xmit()
835 phys = dma_map_single(lp->dev, skb->data, in axienet_start_xmit()
837 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
843 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
847 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
849 cur_p = &lp->tx_bd_v[new_tail_ptr]; in axienet_start_xmit()
851 phys = dma_map_single(lp->dev, in axienet_start_xmit()
855 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
859 axienet_free_tx_chain(lp, orig_tail_ptr, ii + 1, in axienet_start_xmit()
863 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
870 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * new_tail_ptr; in axienet_start_xmit()
871 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
873 WRITE_ONCE(lp->tx_bd_tail, new_tail_ptr); in axienet_start_xmit()
876 axienet_dma_out_addr(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p); in axienet_start_xmit()
879 if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) { in axienet_start_xmit()
886 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_start_xmit()
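The two ring-space checks at the end of axienet_start_xmit() bracket a stop-queue/re-check sequence whose middle lines are omitted. A sketch of that pattern, which pairs a memory barrier with the one in the TX completion path so a concurrent completion cannot leave the queue stopped forever:

	/* Sketch of the stop/recheck sequence implied by the two space
	 * checks above: stop the queue when the ring looks full, then
	 * re-test after a barrier in case descriptors were just freed.
	 */
	if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) {
		netif_stop_queue(ndev);

		/* Pairs with the barrier in the TX completion path. */
		smp_mb();

		/* Space may have been freed since the first check. */
		if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1))
			netif_wake_queue(ndev);
	}

	return NETDEV_TX_OK;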
909 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_rx); in axienet_rx_poll() local
911 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
930 phys = desc_get_phys_addr(lp, cur_p); in axienet_rx_poll()
931 dma_unmap_single(lp->dev, phys, lp->max_frm_size, in axienet_rx_poll()
935 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_rx_poll()
940 if (lp->features & XAE_FEATURE_FULL_RX_CSUM) { in axienet_rx_poll()
947 } else if ((lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) != 0 && in axienet_rx_poll()
960 new_skb = napi_alloc_skb(napi, lp->max_frm_size); in axienet_rx_poll()
964 phys = dma_map_single(lp->dev, new_skb->data, in axienet_rx_poll()
965 lp->max_frm_size, in axienet_rx_poll()
967 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_rx_poll()
969 netdev_err(lp->ndev, "RX DMA mapping error\n"); in axienet_rx_poll()
973 desc_set_phys_addr(lp, phys, cur_p); in axienet_rx_poll()
975 cur_p->cntrl = lp->max_frm_size; in axienet_rx_poll()
982 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci; in axienet_rx_poll()
984 if (++lp->rx_bd_ci >= lp->rx_bd_num) in axienet_rx_poll()
985 lp->rx_bd_ci = 0; in axienet_rx_poll()
986 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
989 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_rx_poll()
990 u64_stats_add(&lp->rx_packets, packets); in axienet_rx_poll()
991 u64_stats_add(&lp->rx_bytes, size); in axienet_rx_poll()
992 u64_stats_update_end(&lp->rx_stat_sync); in axienet_rx_poll()
995 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, tail_p); in axienet_rx_poll()
1002 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_rx_poll()
1021 struct axienet_local *lp = netdev_priv(ndev); in axienet_tx_irq() local
1023 status = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_tx_irq()
1028 axienet_dma_out32(lp, XAXIDMA_TX_SR_OFFSET, status); in axienet_tx_irq()
1033 (lp->tx_bd_v[lp->tx_bd_ci]).phys_msb, in axienet_tx_irq()
1034 (lp->tx_bd_v[lp->tx_bd_ci]).phys); in axienet_tx_irq()
1035 schedule_work(&lp->dma_err_task); in axienet_tx_irq()
1040 u32 cr = lp->tx_dma_cr; in axienet_tx_irq()
1043 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_tx_irq()
1045 napi_schedule(&lp->napi_tx); in axienet_tx_irq()
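Between taking a snapshot of the control register and scheduling NAPI, the TX interrupt handler masks further completion interrupts; that masking line is omitted because it does not mention lp. A sketch of the full branch, with the IOC and delay mask names assumed from the driver header:

	/* Sketch of the interrupt path feeding NAPI: mask further IOC/delay
	 * interrupts on this channel, write the masked value back, and let
	 * the poll routine re-enable them when it completes.
	 */
	u32 cr = lp->tx_dma_cr;

	cr &= ~(XAXIDMA_IRQ_IOC_MASK | XAXIDMA_IRQ_DELAY_MASK);
	axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr);

	napi_schedule(&lp->napi_tx);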
1065 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_irq() local
1067 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_rx_irq()
1072 axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status); in axienet_rx_irq()
1077 (lp->rx_bd_v[lp->rx_bd_ci]).phys_msb, in axienet_rx_irq()
1078 (lp->rx_bd_v[lp->rx_bd_ci]).phys); in axienet_rx_irq()
1079 schedule_work(&lp->dma_err_task); in axienet_rx_irq()
1084 u32 cr = lp->rx_dma_cr; in axienet_rx_irq()
1087 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_rx_irq()
1089 napi_schedule(&lp->napi_rx); in axienet_rx_irq()
1107 struct axienet_local *lp = netdev_priv(ndev); in axienet_eth_irq() local
1110 pending = axienet_ior(lp, XAE_IP_OFFSET); in axienet_eth_irq()
1120 axienet_iow(lp, XAE_IS_OFFSET, pending); in axienet_eth_irq()
1142 struct axienet_local *lp = netdev_priv(ndev); in axienet_open() local
1150 axienet_lock_mii(lp); in axienet_open()
1152 axienet_unlock_mii(lp); in axienet_open()
1154 ret = phylink_of_phy_connect(lp->phylink, lp->dev->of_node, 0); in axienet_open()
1156 dev_err(lp->dev, "phylink_of_phy_connect() failed: %d\n", ret); in axienet_open()
1160 phylink_start(lp->phylink); in axienet_open()
1163 INIT_WORK(&lp->dma_err_task, axienet_dma_err_handler); in axienet_open()
1165 napi_enable(&lp->napi_rx); in axienet_open()
1166 napi_enable(&lp->napi_tx); in axienet_open()
1169 ret = request_irq(lp->tx_irq, axienet_tx_irq, IRQF_SHARED, in axienet_open()
1174 ret = request_irq(lp->rx_irq, axienet_rx_irq, IRQF_SHARED, in axienet_open()
1179 if (lp->eth_irq > 0) { in axienet_open()
1180 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_open()
1189 free_irq(lp->rx_irq, ndev); in axienet_open()
1191 free_irq(lp->tx_irq, ndev); in axienet_open()
1193 napi_disable(&lp->napi_tx); in axienet_open()
1194 napi_disable(&lp->napi_rx); in axienet_open()
1195 phylink_stop(lp->phylink); in axienet_open()
1196 phylink_disconnect_phy(lp->phylink); in axienet_open()
1197 cancel_work_sync(&lp->dma_err_task); in axienet_open()
1198 dev_err(lp->dev, "request_irq() failed\n"); in axienet_open()
1214 struct axienet_local *lp = netdev_priv(ndev); in axienet_stop() local
1218 napi_disable(&lp->napi_tx); in axienet_stop()
1219 napi_disable(&lp->napi_rx); in axienet_stop()
1221 phylink_stop(lp->phylink); in axienet_stop()
1222 phylink_disconnect_phy(lp->phylink); in axienet_stop()
1224 axienet_setoptions(ndev, lp->options & in axienet_stop()
1227 axienet_dma_stop(lp); in axienet_stop()
1229 axienet_iow(lp, XAE_IE_OFFSET, 0); in axienet_stop()
1231 cancel_work_sync(&lp->dma_err_task); in axienet_stop()
1233 if (lp->eth_irq > 0) in axienet_stop()
1234 free_irq(lp->eth_irq, ndev); in axienet_stop()
1235 free_irq(lp->tx_irq, ndev); in axienet_stop()
1236 free_irq(lp->rx_irq, ndev); in axienet_stop()
1255 struct axienet_local *lp = netdev_priv(ndev); in axienet_change_mtu() local
1261 XAE_TRL_SIZE) > lp->rxmem) in axienet_change_mtu()
1279 struct axienet_local *lp = netdev_priv(ndev); in axienet_poll_controller() local
1280 disable_irq(lp->tx_irq); in axienet_poll_controller()
1281 disable_irq(lp->rx_irq); in axienet_poll_controller()
1282 axienet_rx_irq(lp->tx_irq, ndev); in axienet_poll_controller()
1283 axienet_tx_irq(lp->rx_irq, ndev); in axienet_poll_controller()
1284 enable_irq(lp->tx_irq); in axienet_poll_controller()
1285 enable_irq(lp->rx_irq); in axienet_poll_controller()
1291 struct axienet_local *lp = netdev_priv(dev); in axienet_ioctl() local
1296 return phylink_mii_ioctl(lp->phylink, rq, cmd); in axienet_ioctl()
1302 struct axienet_local *lp = netdev_priv(dev); in axienet_get_stats64() local
1308 start = u64_stats_fetch_begin_irq(&lp->rx_stat_sync); in axienet_get_stats64()
1309 stats->rx_packets = u64_stats_read(&lp->rx_packets); in axienet_get_stats64()
1310 stats->rx_bytes = u64_stats_read(&lp->rx_bytes); in axienet_get_stats64()
1311 } while (u64_stats_fetch_retry_irq(&lp->rx_stat_sync, start)); in axienet_get_stats64()
1314 start = u64_stats_fetch_begin_irq(&lp->tx_stat_sync); in axienet_get_stats64()
1315 stats->tx_packets = u64_stats_read(&lp->tx_packets); in axienet_get_stats64()
1316 stats->tx_bytes = u64_stats_read(&lp->tx_bytes); in axienet_get_stats64()
1317 } while (u64_stats_fetch_retry_irq(&lp->tx_stat_sync, start)); in axienet_get_stats64()
1380 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_regs() local
1386 data[0] = axienet_ior(lp, XAE_RAF_OFFSET); in axienet_ethtools_get_regs()
1387 data[1] = axienet_ior(lp, XAE_TPF_OFFSET); in axienet_ethtools_get_regs()
1388 data[2] = axienet_ior(lp, XAE_IFGP_OFFSET); in axienet_ethtools_get_regs()
1389 data[3] = axienet_ior(lp, XAE_IS_OFFSET); in axienet_ethtools_get_regs()
1390 data[4] = axienet_ior(lp, XAE_IP_OFFSET); in axienet_ethtools_get_regs()
1391 data[5] = axienet_ior(lp, XAE_IE_OFFSET); in axienet_ethtools_get_regs()
1392 data[6] = axienet_ior(lp, XAE_TTAG_OFFSET); in axienet_ethtools_get_regs()
1393 data[7] = axienet_ior(lp, XAE_RTAG_OFFSET); in axienet_ethtools_get_regs()
1394 data[8] = axienet_ior(lp, XAE_UAWL_OFFSET); in axienet_ethtools_get_regs()
1395 data[9] = axienet_ior(lp, XAE_UAWU_OFFSET); in axienet_ethtools_get_regs()
1396 data[10] = axienet_ior(lp, XAE_TPID0_OFFSET); in axienet_ethtools_get_regs()
1397 data[11] = axienet_ior(lp, XAE_TPID1_OFFSET); in axienet_ethtools_get_regs()
1398 data[12] = axienet_ior(lp, XAE_PPST_OFFSET); in axienet_ethtools_get_regs()
1399 data[13] = axienet_ior(lp, XAE_RCW0_OFFSET); in axienet_ethtools_get_regs()
1400 data[14] = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_ethtools_get_regs()
1401 data[15] = axienet_ior(lp, XAE_TC_OFFSET); in axienet_ethtools_get_regs()
1402 data[16] = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_regs()
1403 data[17] = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_ethtools_get_regs()
1404 data[18] = axienet_ior(lp, XAE_PHYC_OFFSET); in axienet_ethtools_get_regs()
1405 data[19] = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_ethtools_get_regs()
1406 data[20] = axienet_ior(lp, XAE_MDIO_MCR_OFFSET); in axienet_ethtools_get_regs()
1407 data[21] = axienet_ior(lp, XAE_MDIO_MWD_OFFSET); in axienet_ethtools_get_regs()
1408 data[22] = axienet_ior(lp, XAE_MDIO_MRD_OFFSET); in axienet_ethtools_get_regs()
1409 data[27] = axienet_ior(lp, XAE_UAW0_OFFSET); in axienet_ethtools_get_regs()
1410 data[28] = axienet_ior(lp, XAE_UAW1_OFFSET); in axienet_ethtools_get_regs()
1411 data[29] = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_ethtools_get_regs()
1412 data[30] = axienet_ior(lp, XAE_AF0_OFFSET); in axienet_ethtools_get_regs()
1413 data[31] = axienet_ior(lp, XAE_AF1_OFFSET); in axienet_ethtools_get_regs()
1414 data[32] = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_ethtools_get_regs()
1415 data[33] = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_ethtools_get_regs()
1416 data[34] = axienet_dma_in32(lp, XAXIDMA_TX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1417 data[35] = axienet_dma_in32(lp, XAXIDMA_TX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1418 data[36] = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_ethtools_get_regs()
1419 data[37] = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_ethtools_get_regs()
1420 data[38] = axienet_dma_in32(lp, XAXIDMA_RX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1421 data[39] = axienet_dma_in32(lp, XAXIDMA_RX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1430 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_ringparam() local
1436 ering->rx_pending = lp->rx_bd_num; in axienet_ethtools_get_ringparam()
1439 ering->tx_pending = lp->tx_bd_num; in axienet_ethtools_get_ringparam()
1448 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_ringparam() local
1460 lp->rx_bd_num = ering->rx_pending; in axienet_ethtools_set_ringparam()
1461 lp->tx_bd_num = ering->tx_pending; in axienet_ethtools_set_ringparam()
1478 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_pauseparam() local
1480 phylink_ethtool_get_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_get_pauseparam()
1499 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_pauseparam() local
1501 return phylink_ethtool_set_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_set_pauseparam()
1523 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_coalesce() local
1525 ecoalesce->rx_max_coalesced_frames = lp->coalesce_count_rx; in axienet_ethtools_get_coalesce()
1526 ecoalesce->rx_coalesce_usecs = lp->coalesce_usec_rx; in axienet_ethtools_get_coalesce()
1527 ecoalesce->tx_max_coalesced_frames = lp->coalesce_count_tx; in axienet_ethtools_get_coalesce()
1528 ecoalesce->tx_coalesce_usecs = lp->coalesce_usec_tx; in axienet_ethtools_get_coalesce()
1551 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_coalesce() local
1560 lp->coalesce_count_rx = ecoalesce->rx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1562 lp->coalesce_usec_rx = ecoalesce->rx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1564 lp->coalesce_count_tx = ecoalesce->tx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1566 lp->coalesce_usec_tx = ecoalesce->tx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1575 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_link_ksettings() local
1577 return phylink_ethtool_ksettings_get(lp->phylink, cmd); in axienet_ethtools_get_link_ksettings()
1584 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_link_ksettings() local
1586 return phylink_ethtool_ksettings_set(lp->phylink, cmd); in axienet_ethtools_set_link_ksettings()
1591 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_nway_reset() local
1593 return phylink_ethtool_nway_reset(lp->phylink); in axienet_ethtools_nway_reset()
1641 struct axienet_local *lp = netdev_priv(ndev); in axienet_pcs_config() local
1644 if (lp->switch_x_sgmii) { in axienet_pcs_config()
1673 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_select_pcs() local
1677 return &lp->pcs; in axienet_mac_select_pcs()
1702 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_link_up() local
1705 emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_mac_link_up()
1724 axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg); in axienet_mac_link_up()
1726 fcc_reg = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_mac_link_up()
1735 axienet_iow(lp, XAE_FCC_OFFSET, fcc_reg); in axienet_mac_link_up()
1758 struct axienet_local *lp = container_of(work, struct axienet_local, in axienet_dma_err_handler() local
1760 struct net_device *ndev = lp->ndev; in axienet_dma_err_handler()
1762 napi_disable(&lp->napi_tx); in axienet_dma_err_handler()
1763 napi_disable(&lp->napi_rx); in axienet_dma_err_handler()
1765 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1768 axienet_dma_stop(lp); in axienet_dma_err_handler()
1770 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_err_handler()
1771 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
1773 dma_addr_t addr = desc_get_phys_addr(lp, cur_p); in axienet_dma_err_handler()
1775 dma_unmap_single(lp->dev, addr, in axienet_dma_err_handler()
1794 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_err_handler()
1795 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
1804 lp->tx_bd_ci = 0; in axienet_dma_err_handler()
1805 lp->tx_bd_tail = 0; in axienet_dma_err_handler()
1806 lp->rx_bd_ci = 0; in axienet_dma_err_handler()
1808 axienet_dma_start(lp); in axienet_dma_err_handler()
1810 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_dma_err_handler()
1812 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_dma_err_handler()
1814 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_dma_err_handler()
1816 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_dma_err_handler()
1817 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_dma_err_handler()
1819 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_dma_err_handler()
1824 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1828 axienet_setoptions(ndev, lp->options); in axienet_dma_err_handler()
1829 napi_enable(&lp->napi_rx); in axienet_dma_err_handler()
1830 napi_enable(&lp->napi_tx); in axienet_dma_err_handler()
1849 struct axienet_local *lp; in axienet_probe() local
1856 ndev = alloc_etherdev(sizeof(*lp)); in axienet_probe()
1872 lp = netdev_priv(ndev); in axienet_probe()
1873 lp->ndev = ndev; in axienet_probe()
1874 lp->dev = &pdev->dev; in axienet_probe()
1875 lp->options = XAE_OPTION_DEFAULTS; in axienet_probe()
1876 lp->rx_bd_num = RX_BD_NUM_DEFAULT; in axienet_probe()
1877 lp->tx_bd_num = TX_BD_NUM_DEFAULT; in axienet_probe()
1879 u64_stats_init(&lp->rx_stat_sync); in axienet_probe()
1880 u64_stats_init(&lp->tx_stat_sync); in axienet_probe()
1882 netif_napi_add(ndev, &lp->napi_rx, axienet_rx_poll); in axienet_probe()
1883 netif_napi_add(ndev, &lp->napi_tx, axienet_tx_poll); in axienet_probe()
1885 lp->axi_clk = devm_clk_get_optional(&pdev->dev, "s_axi_lite_clk"); in axienet_probe()
1886 if (!lp->axi_clk) { in axienet_probe()
1890 lp->axi_clk = devm_clk_get_optional(&pdev->dev, NULL); in axienet_probe()
1892 if (IS_ERR(lp->axi_clk)) { in axienet_probe()
1893 ret = PTR_ERR(lp->axi_clk); in axienet_probe()
1896 ret = clk_prepare_enable(lp->axi_clk); in axienet_probe()
1902 lp->misc_clks[0].id = "axis_clk"; in axienet_probe()
1903 lp->misc_clks[1].id = "ref_clk"; in axienet_probe()
1904 lp->misc_clks[2].id = "mgt_clk"; in axienet_probe()
1906 ret = devm_clk_bulk_get_optional(&pdev->dev, XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
1910 ret = clk_bulk_prepare_enable(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
1915 lp->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &ethres); in axienet_probe()
1916 if (IS_ERR(lp->regs)) { in axienet_probe()
1917 ret = PTR_ERR(lp->regs); in axienet_probe()
1920 lp->regs_start = ethres->start; in axienet_probe()
1923 lp->features = 0; in axienet_probe()
1929 lp->csum_offload_on_tx_path = in axienet_probe()
1931 lp->features |= XAE_FEATURE_PARTIAL_TX_CSUM; in axienet_probe()
1936 lp->csum_offload_on_tx_path = in axienet_probe()
1938 lp->features |= XAE_FEATURE_FULL_TX_CSUM; in axienet_probe()
1943 lp->csum_offload_on_tx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
1950 lp->csum_offload_on_rx_path = in axienet_probe()
1952 lp->features |= XAE_FEATURE_PARTIAL_RX_CSUM; in axienet_probe()
1955 lp->csum_offload_on_rx_path = in axienet_probe()
1957 lp->features |= XAE_FEATURE_FULL_RX_CSUM; in axienet_probe()
1960 lp->csum_offload_on_rx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
1969 of_property_read_u32(pdev->dev.of_node, "xlnx,rxmem", &lp->rxmem); in axienet_probe()
1971 lp->switch_x_sgmii = of_property_read_bool(pdev->dev.of_node, in axienet_probe()
1980 lp->phy_mode = PHY_INTERFACE_MODE_MII; in axienet_probe()
1983 lp->phy_mode = PHY_INTERFACE_MODE_GMII; in axienet_probe()
1986 lp->phy_mode = PHY_INTERFACE_MODE_RGMII_ID; in axienet_probe()
1989 lp->phy_mode = PHY_INTERFACE_MODE_SGMII; in axienet_probe()
1992 lp->phy_mode = PHY_INTERFACE_MODE_1000BASEX; in axienet_probe()
1999 ret = of_get_phy_mode(pdev->dev.of_node, &lp->phy_mode); in axienet_probe()
2003 if (lp->switch_x_sgmii && lp->phy_mode != PHY_INTERFACE_MODE_SGMII && in axienet_probe()
2004 lp->phy_mode != PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2022 lp->dma_regs = devm_ioremap_resource(&pdev->dev, in axienet_probe()
2024 lp->rx_irq = irq_of_parse_and_map(np, 1); in axienet_probe()
2025 lp->tx_irq = irq_of_parse_and_map(np, 0); in axienet_probe()
2027 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2030 lp->dma_regs = devm_platform_get_and_ioremap_resource(pdev, 1, NULL); in axienet_probe()
2031 lp->rx_irq = platform_get_irq(pdev, 1); in axienet_probe()
2032 lp->tx_irq = platform_get_irq(pdev, 0); in axienet_probe()
2033 lp->eth_irq = platform_get_irq_optional(pdev, 2); in axienet_probe()
2035 if (IS_ERR(lp->dma_regs)) { in axienet_probe()
2037 ret = PTR_ERR(lp->dma_regs); in axienet_probe()
2040 if ((lp->rx_irq <= 0) || (lp->tx_irq <= 0)) { in axienet_probe()
2054 if ((axienet_ior(lp, XAE_ID_OFFSET) >> 24) >= 0x9) { in axienet_probe()
2055 void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4; in axienet_probe()
2061 lp->features |= XAE_FEATURE_DMA_64BIT; in axienet_probe()
2069 if (!IS_ENABLED(CONFIG_64BIT) && lp->features & XAE_FEATURE_DMA_64BIT) { in axienet_probe()
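The probe-time 64-bit DMA autodetection shows only its outer condition and the feature flag here; the write/read-back probe of the upper half of the current-descriptor register is omitted. A sketch of that detection and of the DMA mask update that would follow, with the read-back sequence and dev_info message as assumptions:

	/* Sketch: poke the upper half of the TX current-descriptor pointer
	 * and see whether any bits stick; if they do, the engine was built
	 * with a 64-bit address width.
	 */
	int addr_width = 32;
	void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4;

	iowrite32(0x0, desc);
	if (ioread32(desc) == 0) {	/* register exists and reads back */
		iowrite32(0xffffffff, desc);
		if (ioread32(desc) > 0) {
			lp->features |= XAE_FEATURE_DMA_64BIT;
			addr_width = 64;
			dev_info(&pdev->dev, "autodetected 64-bit DMA range\n");
		}
		iowrite32(0x0, desc);
	}

	ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(addr_width));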
2082 if (lp->eth_irq <= 0) in axienet_probe()
2095 lp->coalesce_count_rx = XAXIDMA_DFT_RX_THRESHOLD; in axienet_probe()
2096 lp->coalesce_usec_rx = XAXIDMA_DFT_RX_USEC; in axienet_probe()
2097 lp->coalesce_count_tx = XAXIDMA_DFT_TX_THRESHOLD; in axienet_probe()
2098 lp->coalesce_usec_tx = XAXIDMA_DFT_TX_USEC; in axienet_probe()
2101 ret = __axienet_device_reset(lp); in axienet_probe()
2105 ret = axienet_mdio_setup(lp); in axienet_probe()
2110 if (lp->phy_mode == PHY_INTERFACE_MODE_SGMII || in axienet_probe()
2111 lp->phy_mode == PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2125 lp->pcs_phy = of_mdio_find_device(np); in axienet_probe()
2126 if (!lp->pcs_phy) { in axienet_probe()
2132 lp->pcs.ops = &axienet_pcs_ops; in axienet_probe()
2133 lp->pcs.poll = true; in axienet_probe()
2136 lp->phylink_config.dev = &ndev->dev; in axienet_probe()
2137 lp->phylink_config.type = PHYLINK_NETDEV; in axienet_probe()
2138 lp->phylink_config.mac_capabilities = MAC_SYM_PAUSE | MAC_ASYM_PAUSE | in axienet_probe()
2141 __set_bit(lp->phy_mode, lp->phylink_config.supported_interfaces); in axienet_probe()
2142 if (lp->switch_x_sgmii) { in axienet_probe()
2144 lp->phylink_config.supported_interfaces); in axienet_probe()
2146 lp->phylink_config.supported_interfaces); in axienet_probe()
2149 lp->phylink = phylink_create(&lp->phylink_config, pdev->dev.fwnode, in axienet_probe()
2150 lp->phy_mode, in axienet_probe()
2152 if (IS_ERR(lp->phylink)) { in axienet_probe()
2153 ret = PTR_ERR(lp->phylink); in axienet_probe()
2158 ret = register_netdev(lp->ndev); in axienet_probe()
2160 dev_err(lp->dev, "register_netdev() error (%i)\n", ret); in axienet_probe()
2167 phylink_destroy(lp->phylink); in axienet_probe()
2170 if (lp->pcs_phy) in axienet_probe()
2171 put_device(&lp->pcs_phy->dev); in axienet_probe()
2172 if (lp->mii_bus) in axienet_probe()
2173 axienet_mdio_teardown(lp); in axienet_probe()
2175 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2176 clk_disable_unprepare(lp->axi_clk); in axienet_probe()
2187 struct axienet_local *lp = netdev_priv(ndev); in axienet_remove() local
2191 if (lp->phylink) in axienet_remove()
2192 phylink_destroy(lp->phylink); in axienet_remove()
2194 if (lp->pcs_phy) in axienet_remove()
2195 put_device(&lp->pcs_phy->dev); in axienet_remove()
2197 axienet_mdio_teardown(lp); in axienet_remove()
2199 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_remove()
2200 clk_disable_unprepare(lp->axi_clk); in axienet_remove()