/linux-2.6.39/drivers/gpu/drm/via/ |
D | via_dmablit.c |
      66  int num_desc = vsg->num_desc;  in via_unmap_blit_from_device()  [local]
      67  unsigned cur_descriptor_page = num_desc / vsg->descriptors_per_page;  in via_unmap_blit_from_device()
      68  unsigned descriptor_this_page = num_desc % vsg->descriptors_per_page;  in via_unmap_blit_from_device()
      73  while (num_desc--) {  in via_unmap_blit_from_device()
     109  int num_desc = 0;  in via_map_blit_for_device()  [local]
     148  num_desc++;  in via_map_blit_for_device()
     161  vsg->num_desc = num_desc;  in via_map_blit_for_device()
     272  vsg->num_desc_pages = (vsg->num_desc + vsg->descriptors_per_page - 1) /  in via_alloc_desc_pages()
     285  vsg->num_desc);  in via_alloc_desc_pages()
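The via_alloc_desc_pages() hit at line 272 rounds num_desc up to whole descriptor pages. A minimal userspace sketch of that ceiling division; the field names follow the snippets above, everything else is illustrative:

#include <stdio.h>

/* Field names mirror the snippets above; the struct itself is illustrative. */
struct vsg_sketch {
	int num_desc;              /* descriptors counted while mapping */
	int descriptors_per_page;  /* descriptors that fit in one page */
	int num_desc_pages;        /* pages needed to hold them all */
};

static void alloc_desc_pages_sketch(struct vsg_sketch *vsg)
{
	/* ceiling division: a partly filled last page still needs a page */
	vsg->num_desc_pages = (vsg->num_desc + vsg->descriptors_per_page - 1) /
			      vsg->descriptors_per_page;
}

int main(void)
{
	struct vsg_sketch vsg = { .num_desc = 161, .descriptors_per_page = 64 };

	alloc_desc_pages_sketch(&vsg);
	printf("%d descriptors -> %d pages\n", vsg.num_desc, vsg.num_desc_pages);
	return 0;
}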
|
D | via_dmablit.h |
      45  int num_desc;  [member]
|
/linux-2.6.39/drivers/net/netxen/ |
D | netxen_nic_init.c |
     117  for (i = 0; i < rds_ring->num_desc; ++i) {  in netxen_release_rx_buffers()
     139  for (i = 0; i < tx_ring->num_desc; i++) {  in netxen_release_tx_buffers()
     214  tx_ring->num_desc = adapter->num_txd;  in netxen_alloc_sw_resources()
     240  rds_ring->num_desc = adapter->num_rxd;  in netxen_alloc_sw_resources()
     259  rds_ring->num_desc = adapter->num_jumbo_rxd;  in netxen_alloc_sw_resources()
     275  rds_ring->num_desc = adapter->num_lro_rxd;  in netxen_alloc_sw_resources()
     295  for (i = 0; i < rds_ring->num_desc; i++) {  in netxen_alloc_sw_resources()
     309  sds_ring->num_desc = adapter->num_rxd;  in netxen_alloc_sw_resources()
    1423  index = get_next_index(index, sds_ring->num_desc);  in netxen_handle_fw_message()
    1518  if (unlikely(index >= rds_ring->num_desc))  in netxen_process_rcv()
    [all …]
|
D | netxen_nic_ethtool.c |
     423  u32 num_desc;  in netxen_validate_ringparam()  [local]
     424  num_desc = max(val, min);  in netxen_validate_ringparam()
     425  num_desc = min(num_desc, max);  in netxen_validate_ringparam()
     426  num_desc = roundup_pow_of_two(num_desc);  in netxen_validate_ringparam()
     428  if (val != num_desc) {  in netxen_validate_ringparam()
     430  netxen_nic_driver_name, r_name, num_desc, val);  in netxen_validate_ringparam()
     433  return num_desc;  in netxen_validate_ringparam()
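The netxen_validate_ringparam() hits show the ring-size policy: clamp the requested descriptor count to [min, max], then round it up to a power of two so the ring can be indexed with a mask. A hedged userspace sketch of that flow; roundup_pow_of_two() is a kernel helper, so a hand-rolled stand-in is used here, and the function names and message text are illustrative:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's roundup_pow_of_two(); illustrative only. */
static uint32_t roundup_pow_of_two_sketch(uint32_t v)
{
	uint32_t p = 1;

	while (p < v)
		p <<= 1;
	return p;
}

static uint32_t validate_ringparam_sketch(uint32_t val, uint32_t min_val,
					  uint32_t max_val, const char *r_name)
{
	uint32_t num_desc;

	num_desc = val < min_val ? min_val : val;            /* max(val, min) */
	num_desc = num_desc > max_val ? max_val : num_desc;  /* min(num_desc, max) */
	num_desc = roundup_pow_of_two_sketch(num_desc);

	if (val != num_desc)
		printf("%s ring: requested %u, using %u\n", r_name, val, num_desc);

	return num_desc;
}

int main(void)
{
	printf("-> %u\n", validate_ringparam_sketch(1000, 64, 8192, "rx"));
	return 0;
}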
|
D | netxen_nic_ctx.c |
     203  prq_rds[i].ring_size = cpu_to_le32(rds_ring->num_desc);  in nx_fw_cmd_create_rx_ctx()
     216  prq_sds[i].ring_size = cpu_to_le32(sds_ring->num_desc);  in nx_fw_cmd_create_rx_ctx()
     345  prq_cds->ring_size = cpu_to_le32(tx_ring->num_desc);  in nx_fw_cmd_create_tx_ctx()
     569  hwctx->cmd_ring_size = cpu_to_le32(tx_ring->num_desc);  in netxen_init_old_ctx()
     578  cpu_to_le32(rds_ring->num_desc);  in netxen_init_old_ctx()
     586  hwctx->sts_ring_size = cpu_to_le32(sds_ring->num_desc);  in netxen_init_old_ctx()
     589  hwctx->sts_rings[ring].size = cpu_to_le32(sds_ring->num_desc);  in netxen_init_old_ctx()
|
D | netxen_nic.h |
      78  (sizeof(struct rcv_desc) * (rds_ring)->num_desc)
      80  (sizeof(struct netxen_rx_buffer) * rds_ring->num_desc)
      82  (sizeof(struct status_desc) * (sds_ring)->num_desc)
      84  (sizeof(struct netxen_cmd_buffer) * tx_ring->num_desc)
      86  (sizeof(struct cmd_desc_type0) * tx_ring->num_desc)
     627  u32 num_desc;  [member]
     641  u32 num_desc;  [member]
     662  u32 num_desc;  [member]
    1422  tx_ring->sw_consumer, tx_ring->num_desc);  in netxen_tx_avail()
|
D | netxen_nic_main.c |
    1758  producer = get_next_index(producer, tx_ring->num_desc);  in netxen_tso_check()
    1775  producer = get_next_index(producer, tx_ring->num_desc);  in netxen_tso_check()
    1852  u32 num_txd = tx_ring->num_desc;  in netxen_nic_xmit_frame()
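The producer index is advanced with get_next_index(producer, tx_ring->num_desc) at each step. Assuming num_desc is a power of two (which the ethtool validation above enforces), the wraparound can be a simple mask; the sketch below shows that idea and is not claimed to be the driver's exact macro:

#include <stdint.h>
#include <stdio.h>

/* Sketch only: assumes ring_size is a power of two. */
static inline uint32_t get_next_index_sketch(uint32_t index, uint32_t ring_size)
{
	return (index + 1) & (ring_size - 1);
}

int main(void)
{
	uint32_t producer = 254;
	const uint32_t num_desc = 256;
	int i;

	for (i = 0; i < 4; i++) {
		printf("producer = %u\n", producer);	/* 254, 255, 0, 1 */
		producer = get_next_index_sketch(producer, num_desc);
	}
	return 0;
}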
|
D | netxen_nic_hw.c |
     623  producer = get_next_index(producer, tx_ring->num_desc);  in netxen_send_cmd_descs()
|
/linux-2.6.39/drivers/net/qlcnic/ |
D | qlcnic_init.c |
     100  for (i = 0; i < rds_ring->num_desc; ++i) {  in qlcnic_release_rx_buffers()
     129  for (i = 0; i < rds_ring->num_desc; i++) {  in qlcnic_reset_rx_buffers_list()
     145  for (i = 0; i < tx_ring->num_desc; i++) {  in qlcnic_release_tx_buffers()
     219  tx_ring->num_desc = adapter->num_txd;  in qlcnic_alloc_sw_resources()
     243  rds_ring->num_desc = adapter->num_rxd;  in qlcnic_alloc_sw_resources()
     249  rds_ring->num_desc = adapter->num_jumbo_rxd;  in qlcnic_alloc_sw_resources()
     272  for (i = 0; i < rds_ring->num_desc; i++) {  in qlcnic_alloc_sw_resources()
     285  sds_ring->num_desc = adapter->num_rxd;  in qlcnic_alloc_sw_resources()
    1318  index = get_next_index(index, sds_ring->num_desc);  in qlcnic_handle_fw_message()
    1441  if (unlikely(index >= rds_ring->num_desc))  in qlcnic_process_rcv()
    [all …]
|
D | qlcnic.h |
      63  (sizeof(struct rcv_desc) * (rds_ring)->num_desc)
      65  (sizeof(struct qlcnic_rx_buffer) * rds_ring->num_desc)
      67  (sizeof(struct status_desc) * (sds_ring)->num_desc)
      69  (sizeof(struct qlcnic_cmd_buffer) * tx_ring->num_desc)
      71  (sizeof(struct cmd_desc_type0) * tx_ring->num_desc)
     447  u32 num_desc;  [member]
     461  u32 num_desc;  [member]
     481  u32 num_desc;  [member]
    1386  return tx_ring->sw_consumer + tx_ring->num_desc -  in qlcnic_tx_avail()
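The qlcnic_tx_avail() hit at line 1386 is truncated mid-expression; it computes the number of free descriptors as the distance from producer to sw_consumer around a ring of num_desc entries. A self-contained sketch of that computation, with the struct name and branch layout being illustrative rather than the driver's exact code:

#include <stdint.h>
#include <stdio.h>

/* Field names mirror the snippets above; the struct is illustrative. */
struct tx_ring_sketch {
	uint32_t producer;     /* next slot the transmit path will fill */
	uint32_t sw_consumer;  /* next slot the completion path will reap */
	uint32_t num_desc;     /* total descriptors in the ring */
};

static uint32_t tx_avail_sketch(const struct tx_ring_sketch *tx_ring)
{
	if (tx_ring->producer < tx_ring->sw_consumer)
		return tx_ring->sw_consumer - tx_ring->producer;

	/* producer has wrapped past (or equals) the consumer */
	return tx_ring->sw_consumer + tx_ring->num_desc - tx_ring->producer;
}

int main(void)
{
	struct tx_ring_sketch ring = {
		.producer = 250, .sw_consumer = 10, .num_desc = 256
	};

	printf("free descriptors: %u\n", tx_avail_sketch(&ring));	/* 16 */
	return 0;
}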
|
D | qlcnic_ethtool.c |
     433  u32 num_desc;  in qlcnic_validate_ringparam()  [local]
     434  num_desc = max(val, min);  in qlcnic_validate_ringparam()
     435  num_desc = min(num_desc, max);  in qlcnic_validate_ringparam()
     436  num_desc = roundup_pow_of_two(num_desc);  in qlcnic_validate_ringparam()
     438  if (val != num_desc) {  in qlcnic_validate_ringparam()
     440  qlcnic_driver_name, r_name, num_desc, val);  in qlcnic_validate_ringparam()
     443  return num_desc;  in qlcnic_validate_ringparam()
|
D | qlcnic_ctx.c |
     169  prq_rds[i].ring_size = cpu_to_le32(rds_ring->num_desc);  in qlcnic_fw_cmd_create_rx_ctx()
     184  prq_sds[i].ring_size = cpu_to_le32(sds_ring->num_desc);  in qlcnic_fw_cmd_create_rx_ctx()
     312  prq_cds->ring_size = cpu_to_le32(tx_ring->num_desc);  in qlcnic_fw_cmd_create_tx_ctx()
|
D | qlcnic_main.c |
    1821  tx_ring->producer = get_next_index(producer, tx_ring->num_desc);  in qlcnic_change_filter()
    1976  producer = get_next_index(producer, tx_ring->num_desc);  in qlcnic_tso_check()
    1993  producer = get_next_index(producer, tx_ring->num_desc);  in qlcnic_tso_check()
    2107  u32 num_txd = tx_ring->num_desc;  in qlcnic_xmit_frame()
    2422  sw_consumer = get_next_index(sw_consumer, tx_ring->num_desc);  in qlcnic_process_cmd_ring()
|
D | qlcnic_hw.c |
     345  producer = get_next_index(producer, tx_ring->num_desc);  in qlcnic_send_cmd_descs()
|
/linux-2.6.39/drivers/net/ |
D | davinci_cpdma.c |
      83  int num_desc, used_desc;  [member]
     156  pool->num_desc = size / pool->desc_size;  in cpdma_desc_pool_create()
     158  bitmap_size = (pool->num_desc / BITS_PER_LONG) * sizeof(long);  in cpdma_desc_pool_create()
     219  cpdma_desc_alloc(struct cpdma_desc_pool *pool, int num_desc)  in cpdma_desc_alloc()  [argument]
     227  index = bitmap_find_next_zero_area(pool->bitmap, pool->num_desc, 0,  in cpdma_desc_alloc()
     228  num_desc, 0);  in cpdma_desc_alloc()
     229  if (index < pool->num_desc) {  in cpdma_desc_alloc()
     230  bitmap_set(pool->bitmap, index, num_desc);  in cpdma_desc_alloc()
     240  struct cpdma_desc __iomem *desc, int num_desc)  in cpdma_desc_free()  [argument]
     247  bitmap_clear(pool->bitmap, index, num_desc);  in cpdma_desc_free()
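The cpdma_desc_alloc()/cpdma_desc_free() hits show a bitmap-backed descriptor pool: allocation finds a run of num_desc free slots and marks it, freeing clears it again. The kernel code uses bitmap_find_next_zero_area(), bitmap_set() and bitmap_clear(); the userspace sketch below spells the scan out by hand, and all names are illustrative:

#include <stdbool.h>
#include <stdio.h>

#define POOL_SLOTS 32

struct desc_pool_sketch {
	bool used[POOL_SLOTS];	/* stand-in for the allocation bitmap */
	int num_desc;		/* total descriptors in the pool */
	int used_desc;		/* descriptors currently allocated */
};

/* Find a run of num_desc free slots, mark it used, return its index or -1. */
static int desc_alloc_sketch(struct desc_pool_sketch *pool, int num_desc)
{
	int start, i;

	for (start = 0; start + num_desc <= pool->num_desc; start++) {
		for (i = 0; i < num_desc && !pool->used[start + i]; i++)
			;
		if (i == num_desc) {
			for (i = 0; i < num_desc; i++)
				pool->used[start + i] = true;
			pool->used_desc += num_desc;
			return start;
		}
	}
	return -1;
}

static void desc_free_sketch(struct desc_pool_sketch *pool, int index, int num_desc)
{
	int i;

	for (i = 0; i < num_desc; i++)
		pool->used[index + i] = false;
	pool->used_desc -= num_desc;
}

int main(void)
{
	struct desc_pool_sketch pool = { .num_desc = POOL_SLOTS };
	int a = desc_alloc_sketch(&pool, 4);
	int b = desc_alloc_sketch(&pool, 2);

	printf("a=%d b=%d used=%d\n", a, b, pool.used_desc);	/* a=0 b=4 used=6 */
	desc_free_sketch(&pool, a, 4);
	printf("used after free=%d\n", pool.used_desc);		/* used after free=2 */
	return 0;
}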
|
D | spider_net.c |
     325  dma_free_coherent(&card->pdev->dev, chain->num_desc,  in spider_net_free_chain()
     349  alloc_size = chain->num_desc * sizeof(struct spider_net_hw_descr);  in spider_net_init_chain()
     357  memset(chain->ring, 0, chain->num_desc * sizeof(struct spider_net_descr));  in spider_net_init_chain()
     363  for (i=0; i < chain->num_desc; i++, descr++, hwdescr++) {  in spider_net_init_chain()
     761  if (cnt < card->tx_chain.num_desc/4)  in spider_net_set_low_watermark()
    1037  chain->num_desc);  in show_rx_chain()
    1067  int from = (chain->num_desc + off - cnt) % chain->num_desc;  in show_rx_chain()
    1068  int to = (chain->num_desc + off - 1) % chain->num_desc;  in show_rx_chain()
    1136  for (i=0; i<chain->num_desc; i++) {  in spider_net_resync_head_ptr()
    1156  for (i=0; i<chain->num_desc; i++) {  in spider_net_resync_tail_ptr()
    [all …]
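The show_rx_chain() hits at lines 1067 and 1068 step backwards through a ring whose size need not be a power of two, so they add num_desc before taking the modulo to keep the intermediate value non-negative. A small sketch of that index arithmetic, assuming cnt does not exceed num_desc; the helper name is illustrative:

#include <stdio.h>

/* Step cnt descriptors backwards from off in a ring of num_desc entries. */
static int ring_index_back(int off, int cnt, int num_desc)
{
	return (num_desc + off - cnt) % num_desc;
}

int main(void)
{
	const int num_desc = 256;

	/* three descriptors before offset 1 wraps to the tail of the ring */
	printf("from = %d\n", ring_index_back(1, 3, num_desc));	/* from = 254 */
	printf("to   = %d\n", ring_index_back(1, 1, num_desc));	/* to   = 0   */
	return 0;
}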
|
D | spider_net_ethtool.c |
     143  ering->tx_pending = card->tx_chain.num_desc;  in spider_net_ethtool_get_ringparam()
     145  ering->rx_pending = card->rx_chain.num_desc;  in spider_net_ethtool_get_ringparam()
|
D | spider_net.h |
     410  int num_desc;  [member]
|
/linux-2.6.39/drivers/atm/ |
D | iphase.c |
     614  int num_desc;  in ia_que_tx()  [local]
     617  num_desc = ia_avail_descs(iadev);  in ia_que_tx()
     619  while (num_desc && (skb = skb_dequeue(&iadev->tx_backlog))) {  in ia_que_tx()
     634  num_desc--;  in ia_que_tx()
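ia_que_tx() drains the TX backlog only while free descriptors remain, consuming one per dequeued packet. A toy sketch of that budget-limited drain, with a plain array standing in for the kernel's skb queue and all names illustrative:

#include <stdio.h>

int main(void)
{
	const char *tx_backlog[] = { "pkt0", "pkt1", "pkt2", "pkt3" };
	int backlog_len = 4, head = 0;
	int num_desc = 2;	/* free descriptors reported by the hardware */

	/* drain the backlog only while descriptors remain */
	while (num_desc && head < backlog_len) {
		printf("sending %s\n", tx_backlog[head++]);
		num_desc--;	/* one descriptor per queued packet */
	}
	printf("%d packet(s) still backlogged\n", backlog_len - head);
	return 0;
}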
|