Lines Matching refs:tpd_ring
1043 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_setup_ring_resources() local
1051 size = sizeof(struct atl1_buffer) * (tpd_ring->count + rfd_ring->count); in atl1_setup_ring_resources()
1052 tpd_ring->buffer_info = kzalloc(size, GFP_KERNEL); in atl1_setup_ring_resources()
1053 if (unlikely(!tpd_ring->buffer_info)) { in atl1_setup_ring_resources()
1060 (struct atl1_buffer *)(tpd_ring->buffer_info + tpd_ring->count); in atl1_setup_ring_resources()
1068 sizeof(struct tx_packet_desc) * tpd_ring->count in atl1_setup_ring_resources()
1086 tpd_ring->dma = ring_header->dma; in atl1_setup_ring_resources()
1087 offset = (tpd_ring->dma & 0x7) ? (8 - (ring_header->dma & 0x7)) : 0; in atl1_setup_ring_resources()
1088 tpd_ring->dma += offset; in atl1_setup_ring_resources()
1089 tpd_ring->desc = (u8 *) ring_header->desc + offset; in atl1_setup_ring_resources()
1090 tpd_ring->size = sizeof(struct tx_packet_desc) * tpd_ring->count; in atl1_setup_ring_resources()
1093 rfd_ring->dma = tpd_ring->dma + tpd_ring->size; in atl1_setup_ring_resources()
1096 rfd_ring->desc = (u8 *) tpd_ring->desc + (tpd_ring->size + offset); in atl1_setup_ring_resources()
1126 kfree(tpd_ring->buffer_info); in atl1_setup_ring_resources()
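The atl1_setup_ring_resources() references above (lines 1043-1126) show a single kzalloc() covering the buffer_info arrays of both rings, followed by an 8-byte alignment fixup applied to ring_header->dma before the TPD descriptors are laid down (lines 1086-1090). Below is a minimal userspace sketch of that alignment arithmetic, assuming a plain uint64_t stands in for dma_addr_t; align8_offset() is an illustrative helper, not a driver function.

#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

/* Illustrative helper: distance from a bus address to the next 8-byte boundary. */
static uint64_t align8_offset(uint64_t dma)
{
        /* Mirrors line 1087: (dma & 0x7) ? (8 - (dma & 0x7)) : 0 */
        return (dma & 0x7) ? (8 - (dma & 0x7)) : 0;
}

int main(void)
{
        uint64_t samples[] = { 0x1000, 0x1003, 0x1007, 0x1008 };

        for (size_t i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
                uint64_t dma = samples[i];
                uint64_t off = align8_offset(dma);

                /* tpd_ring->dma = ring_header->dma + offset, as at line 1088 */
                printf("dma 0x%llx -> offset %llu -> aligned dma 0x%llx\n",
                       (unsigned long long)dma,
                       (unsigned long long)off,
                       (unsigned long long)(dma + off));
        }
        return 0;
}

Line 1093 then places the RFD descriptors immediately after the aligned TPD block, so both descriptor rings live inside the same ring_header allocation.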
1132 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_init_ring_ptrs() local
1136 atomic_set(&tpd_ring->next_to_use, 0); in atl1_init_ring_ptrs()
1137 atomic_set(&tpd_ring->next_to_clean, 0); in atl1_init_ring_ptrs()
1192 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_clean_tx_ring() local
1199 for (i = 0; i < tpd_ring->count; i++) { in atl1_clean_tx_ring()
1200 buffer_info = &tpd_ring->buffer_info[i]; in atl1_clean_tx_ring()
1208 for (i = 0; i < tpd_ring->count; i++) { in atl1_clean_tx_ring()
1209 buffer_info = &tpd_ring->buffer_info[i]; in atl1_clean_tx_ring()
1216 size = sizeof(struct atl1_buffer) * tpd_ring->count; in atl1_clean_tx_ring()
1217 memset(tpd_ring->buffer_info, 0, size); in atl1_clean_tx_ring()
1220 memset(tpd_ring->desc, 0, tpd_ring->size); in atl1_clean_tx_ring()
1222 atomic_set(&tpd_ring->next_to_use, 0); in atl1_clean_tx_ring()
1223 atomic_set(&tpd_ring->next_to_clean, 0); in atl1_clean_tx_ring()
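The atl1_clean_tx_ring() lines (1192-1223) walk the ring freeing any outstanding buffers, then wipe the software bookkeeping and the descriptor memory and reset both ring indices. A small sketch of that final reset step, assuming simplified stand-in structs in place of the driver's atl1_buffer/atl1_tpd_ring and plain ints in place of atomic_t:

#include <stdio.h>
#include <string.h>

/* Simplified stand-ins for struct atl1_buffer / struct atl1_tpd_ring. */
struct buffer { void *skb; unsigned long dma; unsigned int length; };

struct tx_ring {
        struct buffer *buffer_info;   /* per-descriptor bookkeeping */
        void *desc;                   /* descriptor memory */
        unsigned int size;            /* bytes of descriptor memory */
        unsigned int count;           /* number of descriptors */
        int next_to_use;              /* producer index */
        int next_to_clean;            /* consumer index */
};

/* Mirrors the tail of atl1_clean_tx_ring() (lines 1216-1223 above). */
static void tx_ring_reset(struct tx_ring *ring)
{
        memset(ring->buffer_info, 0, sizeof(*ring->buffer_info) * ring->count);
        memset(ring->desc, 0, ring->size);
        ring->next_to_use = 0;
        ring->next_to_clean = 0;
}

int main(void)
{
        struct buffer bufs[4];
        unsigned char descs[64];
        struct tx_ring ring = {
                .buffer_info = bufs, .desc = descs, .size = sizeof(descs),
                .count = 4, .next_to_use = 3, .next_to_clean = 1,
        };

        tx_ring_reset(&ring);
        printf("next_to_use=%d next_to_clean=%d\n",
               ring.next_to_use, ring.next_to_clean);
        return 0;
}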
1235 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_free_ring_resources() local
1243 kfree(tpd_ring->buffer_info); in atl1_free_ring_resources()
1247 tpd_ring->buffer_info = NULL; in atl1_free_ring_resources()
1248 tpd_ring->desc = NULL; in atl1_free_ring_resources()
1249 tpd_ring->dma = 0; in atl1_free_ring_resources()
1496 iowrite32((u32) ((adapter->tpd_ring.dma & 0xffffffff00000000ULL) >> 32), in atl1_configure()
1503 iowrite32((u32) (adapter->tpd_ring.dma & 0x00000000ffffffffULL), in atl1_configure()
1515 iowrite32(adapter->tpd_ring.count, hw->hw_addr + in atl1_configure()
1522 value = ((atomic_read(&adapter->tpd_ring.next_to_use) in atl1_configure()
1622 value = (hw->cmb_tpd > adapter->tpd_ring.count) ? in atl1_configure()
1623 hw->cmb_tpd : adapter->tpd_ring.count; in atl1_configure()
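Lines 1496 and 1503 of atl1_configure() program the descriptor base address by writing the upper and then the lower 32 bits of the 64-bit DMA address into two 32-bit registers. A sketch of that split, where reg_write() is a stand-in for iowrite32() and the register names are illustrative rather than the driver's REG_* constants:

#include <stdio.h>
#include <stdint.h>

static void reg_write(const char *reg, uint32_t val)
{
        printf("%-12s <- 0x%08x\n", reg, (unsigned)val);
}

int main(void)
{
        uint64_t tpd_dma = 0x000000012345a000ULL;  /* example bus address */

        /* Upper 32 bits first, then the lower 32 bits, as in the listing. */
        reg_write("DESC_BASE_HI",
                  (uint32_t)((tpd_dma & 0xffffffff00000000ULL) >> 32));
        reg_write("DESC_BASE_LO",
                  (uint32_t)(tpd_dma & 0x00000000ffffffffULL));

        return 0;
}

Line 1515 programs the TPD ring size the same way, writing tpd_ring.count into its own register.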
1761 tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use); in atl1_update_mailbox()
2050 tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use); in atl1_intr_rx()
2068 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_intr_tx() local
2073 sw_tpd_next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1_intr_tx()
2079 tpd = ATL1_TPD_DESC(tpd_ring, sw_tpd_next_to_clean); in atl1_intr_tx()
2080 buffer_info = &tpd_ring->buffer_info[sw_tpd_next_to_clean]; in atl1_intr_tx()
2092 if (++sw_tpd_next_to_clean == tpd_ring->count) in atl1_intr_tx()
2095 atomic_set(&tpd_ring->next_to_clean, sw_tpd_next_to_clean); in atl1_intr_tx()
2102 static u16 atl1_tpd_avail(struct atl1_tpd_ring *tpd_ring) in atl1_tpd_avail() argument
2104 u16 next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1_tpd_avail()
2105 u16 next_to_use = atomic_read(&tpd_ring->next_to_use); in atl1_tpd_avail()
2108 tpd_ring->count + next_to_clean - next_to_use - 1; in atl1_tpd_avail()
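atl1_tpd_avail() (lines 2104-2108) computes how many TX descriptors are free from the producer and consumer indices. Only the wrapped branch of the ternary appears in the listing (line 2108); the unwrapped branch in the sketch below is the standard complement and is an assumption here. Plain 16-bit integers stand in for the driver's atomic_t indices.

#include <stdio.h>
#include <stdint.h>

static uint16_t tpd_avail(uint16_t count, uint16_t next_to_clean,
                          uint16_t next_to_use)
{
        /* One slot is always left unused so a full ring never looks empty. */
        return (next_to_clean > next_to_use) ?
                next_to_clean - next_to_use - 1 :
                count + next_to_clean - next_to_use - 1;
}

int main(void)
{
        /* Not wrapped: consumer is ahead of the producer in array order. */
        printf("avail = %u\n", (unsigned)tpd_avail(256, 200, 100));  /* 99 */

        /* Wrapped: the producer has passed the end and started over. */
        printf("avail = %u\n", (unsigned)tpd_avail(256, 10, 250));   /* 15 */

        return 0;
}

atl1_xmit_frame() (line 2401) compares this value against the number of descriptors the outgoing frame needs and backs off when too few are free, rather than overwriting descriptors that are still in flight.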
2200 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_tx_map() local
2214 next_to_use = atomic_read(&tpd_ring->next_to_use); in atl1_tx_map()
2215 buffer_info = &tpd_ring->buffer_info[next_to_use]; in atl1_tx_map()
2231 if (++next_to_use == tpd_ring->count) in atl1_tx_map()
2242 &tpd_ring->buffer_info[next_to_use]; in atl1_tx_map()
2256 if (++next_to_use == tpd_ring->count) in atl1_tx_map()
2267 if (++next_to_use == tpd_ring->count) in atl1_tx_map()
2281 buffer_info = &tpd_ring->buffer_info[next_to_use]; in atl1_tx_map()
2293 if (++next_to_use == tpd_ring->count) in atl1_tx_map()
2305 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_tx_queue() local
2310 u16 next_to_use = (u16) atomic_read(&tpd_ring->next_to_use); in atl1_tx_queue()
2313 buffer_info = &tpd_ring->buffer_info[next_to_use]; in atl1_tx_queue()
2314 tpd = ATL1_TPD_DESC(&adapter->tpd_ring, next_to_use); in atl1_tx_queue()
2338 if (++next_to_use == tpd_ring->count) in atl1_tx_queue()
2349 atomic_set(&tpd_ring->next_to_use, next_to_use); in atl1_tx_queue()
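The increment-and-wrap pattern recurs on both sides of the ring: the consumer in atl1_intr_tx() (line 2092) and the producer in atl1_tx_map()/atl1_tx_queue() (lines 2231-2293 and 2338). The reset to zero inside the "if" is not shown in the listing (it is the following source line) and is assumed in the sketch below; ring_advance() is an illustrative helper, not a driver function.

#include <stdio.h>
#include <stdint.h>

#define RING_COUNT 4  /* tiny ring so the wrap is easy to see */

static uint16_t ring_advance(uint16_t index)
{
        /* Equivalent of: if (++index == tpd_ring->count) index = 0; */
        if (++index == RING_COUNT)
                index = 0;
        return index;
}

int main(void)
{
        uint16_t next_to_use = 0;

        /* Producer side: post six descriptors and watch the index wrap. */
        for (int i = 0; i < 6; i++) {
                printf("descriptor posted at slot %u\n", (unsigned)next_to_use);
                next_to_use = ring_advance(next_to_use);
        }

        /*
         * In the driver the final index is published with
         * atomic_set(&tpd_ring->next_to_use, next_to_use) (line 2349),
         * which is what the consumer in atl1_intr_tx() reads back.
         */
        printf("next_to_use is now %u\n", (unsigned)next_to_use);
        return 0;
}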
2356 struct atl1_tpd_ring *tpd_ring = &adapter->tpd_ring; in atl1_xmit_frame() local
2401 if (atl1_tpd_avail(&adapter->tpd_ring) < count) { in atl1_xmit_frame()
2410 ptpd = ATL1_TPD_DESC(tpd_ring, in atl1_xmit_frame()
2411 (u16) atomic_read(&tpd_ring->next_to_use)); in atl1_xmit_frame()
2966 adapter->tpd_ring.count = ATL1_DEFAULT_TPD; in atl1_probe()
3467 struct atl1_tpd_ring *txdr = &adapter->tpd_ring; in atl1_get_ringparam()
3484 struct atl1_tpd_ring *tpdr = &adapter->tpd_ring; in atl1_set_ringparam()
3496 tpd_old = adapter->tpd_ring; in atl1_set_ringparam()
3528 tpd_new = adapter->tpd_ring; in atl1_set_ringparam()
3532 adapter->tpd_ring = tpd_old; in atl1_set_ringparam()
3543 adapter->tpd_ring = tpd_new; in atl1_set_ringparam()
3557 adapter->tpd_ring = tpd_old; in atl1_set_ringparam()
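The atl1_set_ringparam() lines (3484-3558) show a save/try/restore pattern: the current ring struct is copied aside by value, the new configuration is attempted, and the saved copy is assigned back if setup fails. The structure-assignment copies below mirror lines 3496, 3532, and 3543; everything else (ring_config, setup_rings, the failure condition) is an illustrative stand-in, not the driver's code.

#include <stdio.h>

struct ring_config {
        unsigned int count;     /* number of descriptors */
};

/* Pretend setup step; fails when asked for an unreasonably large ring. */
static int setup_rings(const struct ring_config *cfg)
{
        return (cfg->count > 4096) ? -1 : 0;
}

int main(void)
{
        struct ring_config active = { .count = 256 };

        struct ring_config tpd_old = active;                 /* save current */
        struct ring_config tpd_new = { .count = 1 << 20 };   /* requested    */

        active = tpd_new;                         /* try the new config   */
        if (setup_rings(&active) != 0) {
                active = tpd_old;                 /* roll back on failure */
                (void)setup_rings(&active);       /* restore the old ring */
        }

        printf("ring count in use: %u\n", active.count);
        return 0;
}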