/linux-6.6.21/drivers/char/agp/ |
D | compat_ioctl.c |
     72  if ((unsigned) ureserve.seg_count >= ~0U/sizeof(struct agp_segment32))  in compat_agpioc_reserve_wrap()
     76  kreserve.seg_count = ureserve.seg_count;  in compat_agpioc_reserve_wrap()
     80  if (kreserve.seg_count == 0) {  in compat_agpioc_reserve_wrap()
     98  if (ureserve.seg_count >= 16384)  in compat_agpioc_reserve_wrap()
    101  usegment = kmalloc_array(ureserve.seg_count,  in compat_agpioc_reserve_wrap()
    107  ksegment = kmalloc_array(kreserve.seg_count,  in compat_agpioc_reserve_wrap()
    116  sizeof(*usegment) * ureserve.seg_count)) {  in compat_agpioc_reserve_wrap()
    122  for (seg = 0; seg < ureserve.seg_count; seg++) {  in compat_agpioc_reserve_wrap()
|
D | frontend.c |
    170  seg = kzalloc((sizeof(struct agp_segment_priv) * region->seg_count), GFP_KERNEL);  in agp_create_segment()
    178  for (i = 0; i < region->seg_count; i++) {  in agp_create_segment()
    192  agp_add_seg_to_client(client, ret_seg, region->seg_count);  in agp_create_segment()
    809  if ((unsigned) reserve.seg_count >= ~0U/sizeof(struct agp_segment))  in agpioc_reserve_wrap()
    814  if (reserve.seg_count == 0) {  in agpioc_reserve_wrap()
    830  if (reserve.seg_count >= 16384)  in agpioc_reserve_wrap()
    833  segment = kmalloc((sizeof(struct agp_segment) * reserve.seg_count),  in agpioc_reserve_wrap()
    840  sizeof(struct agp_segment) * reserve.seg_count)) {  in agpioc_reserve_wrap()
|
D | compat_ioctl.h | 66 compat_size_t seg_count; /* number of segments */ member
|
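Note on the AGP entries above: agpioc_reserve_wrap() and its compat wrapper bound the user-supplied seg_count twice, first against ~0U/sizeof(segment) so the size multiplication cannot overflow, then against a hard cap of 16384 segments, before allocating and copying the segment array. Below is a minimal sketch of that validation sequence; struct demo_segment and validate_and_copy_segments() are illustrative names, not the actual AGP code, and the real ioctl additionally treats seg_count == 0 as a request to release existing segments, which the sketch simply rejects.

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

/* Illustrative segment descriptor; the real ones are struct agp_segment
 * and struct agp_segment32. */
struct demo_segment {
	unsigned long pg_start;
	unsigned long pg_count;
	int prot;
};

static struct demo_segment *validate_and_copy_segments(void __user *uptr,
							size_t seg_count)
{
	struct demo_segment *segs;

	/* Reject counts whose size computation would overflow ... */
	if ((unsigned int)seg_count >= ~0U / sizeof(struct demo_segment))
		return ERR_PTR(-EFAULT);

	/* ... and apply the same hard upper bound the AGP code uses. */
	if (seg_count == 0 || seg_count >= 16384)
		return ERR_PTR(-EINVAL);

	/* kmalloc_array() re-checks the n * size multiplication as well. */
	segs = kmalloc_array(seg_count, sizeof(*segs), GFP_KERNEL);
	if (!segs)
		return ERR_PTR(-ENOMEM);

	if (copy_from_user(segs, uptr, seg_count * sizeof(*segs))) {
		kfree(segs);
		return ERR_PTR(-EFAULT);
	}
	return segs;
}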
/linux-6.6.21/drivers/gpu/drm/ |
D | drm_dma.c |
     96  if (dma->bufs[i].seg_count) {  in drm_legacy_dma_takedown()
    101  dma->bufs[i].seg_count);  in drm_legacy_dma_takedown()
    102  for (j = 0; j < dma->bufs[i].seg_count; j++) {  in drm_legacy_dma_takedown()
|
D | drm_bufs.c |
    676  if (entry->seg_count) {  in drm_cleanup_buf_error()
    677  for (i = 0; i < entry->seg_count; i++) {  in drm_cleanup_buf_error()
    689  entry->seg_count = 0;  in drm_cleanup_buf_error()
    858  dma->seg_count += entry->seg_count;  in drm_legacy_addbufs_agp()
    987  entry->seg_count = count;  in drm_legacy_addbufs_pci()
   1005  entry->seg_count = count;  in drm_legacy_addbufs_pci()
   1012  entry->seglist[entry->seg_count++] = dmah;  in drm_legacy_addbufs_pci()
   1042  entry->seg_count = count;  in drm_legacy_addbufs_pci()
   1082  dma->seg_count += entry->seg_count;  in drm_legacy_addbufs_pci()
   1083  dma->page_count += entry->seg_count << page_order;  in drm_legacy_addbufs_pci()
    [all …]
|
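The drm_bufs.c hits show the bookkeeping convention of the legacy buffer code: entry->seg_count is incremented only after a DMA segment has actually been stored in entry->seglist, so drm_cleanup_buf_error() and drm_legacy_dma_takedown() can free exactly seg_count entries. A hedged sketch of that pairing with simplified types follows; the demo_* names are not the DRM structures.

#include <linux/errno.h>
#include <linux/slab.h>

struct demo_buf_entry {
	void **seglist;    /* one handle per allocated segment   */
	int seg_count;     /* number of valid entries in seglist */
};

static int demo_alloc_segments(struct demo_buf_entry *entry, int count,
			       size_t seg_size)
{
	int i;

	entry->seglist = kcalloc(count, sizeof(*entry->seglist), GFP_KERNEL);
	if (!entry->seglist)
		return -ENOMEM;
	entry->seg_count = 0;

	for (i = 0; i < count; i++) {
		void *seg = kzalloc(seg_size, GFP_KERNEL);

		if (!seg)
			return -ENOMEM;   /* caller invokes the cleanup below */
		/* bump the count only once the slot really holds a segment */
		entry->seglist[entry->seg_count++] = seg;
	}
	return 0;
}

static void demo_cleanup_segments(struct demo_buf_entry *entry)
{
	int i;

	/* mirrors the "for (i = 0; i < entry->seg_count; i++)" teardown loops */
	for (i = 0; i < entry->seg_count; i++)
		kfree(entry->seglist[i]);
	kfree(entry->seglist);
	entry->seglist = NULL;
	entry->seg_count = 0;
}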
/linux-6.6.21/drivers/infiniband/core/ |
D | mad_rmpp.c |
    579  paylen = (mad_send_wr->send_buf.seg_count *  in send_next_seg()
    584  if (mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) {  in send_next_seg()
    610  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||  in abort_send()
    679  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) &&  in process_rmpp_ack()
    686  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||  in process_rmpp_ack()
    690  if (seg_num > mad_send_wr->send_buf.seg_count ||  in process_rmpp_ack()
    706  if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {  in process_rmpp_ack()
    728  mad_send_wr->seg_num < mad_send_wr->send_buf.seg_count) {  in process_rmpp_ack()
    921  if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {  in ib_process_rmpp_send_wc()
    928  mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count)  in ib_process_rmpp_send_wc()
    [all …]
|
D | mad.c |
    808  seg->num = ++send_buf->seg_count;  in alloc_send_rmpp_list()
    969  if (mad_send_wr->send_buf.seg_count)  in ib_get_payload()
|
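For context on the mad.c and mad_rmpp.c hits: alloc_send_rmpp_list() builds one segment per RMPP data chunk and counts them into send_buf.seg_count, and the RMPP state machine then compares seg_num (segments posted) and last_ack (segments acknowledged) against that total. Below is a small sketch of that accounting with illustrative names, not the ib_mad structures or API.

#include <linux/kernel.h>
#include <linux/types.h>

struct demo_rmpp_send_state {
	int seg_count; /* total segments in the message      */
	int seg_num;   /* segments handed to the wire so far */
	int last_ack;  /* last segment acked by the receiver */
};

static void demo_rmpp_init(struct demo_rmpp_send_state *st,
			   size_t payload_len, size_t seg_payload)
{
	st->seg_count = DIV_ROUND_UP(payload_len, seg_payload);
	st->seg_num = 0;
	st->last_ack = 0;
}

static bool demo_rmpp_all_sent(const struct demo_rmpp_send_state *st)
{
	/* send_next_seg() stops queuing once seg_num == send_buf.seg_count */
	return st->seg_num == st->seg_count;
}

static bool demo_rmpp_complete(const struct demo_rmpp_send_state *st)
{
	/* ...and last_ack == seg_count marks a fully acknowledged send */
	return st->last_ack == st->seg_count;
}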
/linux-6.6.21/include/drm/ |
D | drm_legacy.h |
    110  int seg_count;  member
    126  int seg_count;  member
|
/linux-6.6.21/include/linux/ |
D | agpgart.h | 65 size_t seg_count; /* number of segments */ member
|
/linux-6.6.21/include/uapi/linux/ |
D | agpgart.h | 87 __kernel_size_t seg_count; /* number of segments */ member
|
/linux-6.6.21/drivers/net/ethernet/intel/ice/ |
D | ice_ddp.c |
     35  u32 seg_count;  in ice_verify_pkg()  local
     48  seg_count = le32_to_cpu(pkg->seg_count);  in ice_verify_pkg()
     49  if (seg_count < 1)  in ice_verify_pkg()
     53  if (len < struct_size(pkg, seg_offset, seg_count))  in ice_verify_pkg()
     57  for (i = 0; i < seg_count; i++) {  in ice_verify_pkg()
   1486  for (i = 0; i < le32_to_cpu(pkg_hdr->seg_count); i++) {  in ice_find_seg_in_pkg()
|
D | ice_ddp.h | 97 __le32 seg_count; member
|
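The ice_ddp.c hits are the DDP package sanity check: seg_count is read as a little-endian field from the firmware file, must be at least 1, and the segment-offset table it implies must fit inside the buffer length, tested with struct_size(). A hedged sketch of that check with a simplified header layout follows; struct demo_pkg_hdr is not the real ice_pkg_hdr.

#include <linux/errno.h>
#include <linux/overflow.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct demo_pkg_hdr {
	__le32 format_version;
	__le32 seg_count;
	__le32 seg_offset[];   /* one entry per segment */
};

static int demo_verify_pkg_hdr(const struct demo_pkg_hdr *pkg, size_t len)
{
	u32 seg_count;

	if (len < sizeof(*pkg))
		return -EINVAL;

	seg_count = le32_to_cpu(pkg->seg_count);
	if (seg_count < 1)
		return -EINVAL;

	/* struct_size() computes sizeof(*pkg) plus seg_count offsets in an
	 * overflow-safe way (it saturates, so the length check still fails),
	 * the same guard ice_verify_pkg() applies at line 53 above. */
	if (len < struct_size(pkg, seg_offset, seg_count))
		return -EINVAL;

	return 0;
}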
/linux-6.6.21/drivers/memstick/core/ |
D | mspro_block.c |
    168  unsigned int seg_count;  member
    540  if (msb->current_seg == msb->seg_count) {  in h_mspro_block_transfer_data()
    630  msb->seg_count = blk_rq_map_sg(msb->block_req->q,  in mspro_block_issue_req()
    634  if (!msb->seg_count) {  in mspro_block_issue_req()
    916  msb->seg_count = 1;  in mspro_block_read_attributes()
   1017  msb->seg_count = 1;  in mspro_block_read_attributes()
|
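In mspro_block, seg_count is simply the return value of blk_rq_map_sg(), which flattens the request's bios into a scatterlist and reports how many entries it used; the transfer handler then walks current_seg from 0 up to that count. A hedged sketch of the idea, with illustrative names and a simplified failure path rather than the driver's actual request completion:

#include <linux/blk-mq.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

#define DEMO_MAX_SEGS 32

struct demo_block_dev {
	struct scatterlist sg[DEMO_MAX_SEGS];
	unsigned int seg_count;   /* entries filled by blk_rq_map_sg() */
	unsigned int current_seg; /* next entry to transfer            */
};

static int demo_issue_req(struct demo_block_dev *dev, struct request *req)
{
	dev->seg_count = blk_rq_map_sg(req->q, req, dev->sg);
	if (!dev->seg_count)
		return -ENOMEM;   /* nothing mappable; fail the request */

	dev->current_seg = 0;
	return 0;
}

static bool demo_transfer_done(const struct demo_block_dev *dev)
{
	/* h_mspro_block_transfer_data() finishes once current_seg == seg_count */
	return dev->current_seg == dev->seg_count;
}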
/linux-6.6.21/drivers/net/ethernet/qlogic/ |
D | qla3xxx.c |
   1938  if (tx_cb->seg_count == 0) {  in ql_process_mac_tx_intr()
   1949  tx_cb->seg_count--;  in ql_process_mac_tx_intr()
   1950  if (tx_cb->seg_count) {  in ql_process_mac_tx_intr()
   1951  for (i = 1; i < tx_cb->seg_count; i++) {  in ql_process_mac_tx_intr()
   2314  seg_cnt = tx_cb->seg_count;  in ql_send_map()
   2470  tx_cb->seg_count = ql_get_seg_count(qdev,  in ql3xxx_send()
   2472  if (tx_cb->seg_count == -1) {  in ql3xxx_send()
   3637  for (j = 1; j < tx_cb->seg_count; j++) {  in ql_reset_work()
|
D | qla3xxx.h | 1038 int seg_count; member
|
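The qla3xxx hits show the same idea at the NIC level: ql3xxx_send() records how many DMA mappings the skb needed in tx_cb->seg_count (and the listing shows ql_get_seg_count() can return -1 on error), and the completion and reset paths then unmap segment 0 plus fragments 1..seg_count-1. A hedged sketch with simplified bookkeeping follows; the demo_* names, fixed-size arrays, and fragment counting are illustrative, not the driver's structures.

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

#define DEMO_MAX_SEGS 18

struct demo_tx_cb {
	struct sk_buff *skb;
	int seg_count;                        /* head + fragments mapped */
	dma_addr_t map_addr[DEMO_MAX_SEGS];
	unsigned int map_len[DEMO_MAX_SEGS];
};

static int demo_get_seg_count(const struct sk_buff *skb)
{
	/* one segment for the linear head, one per page fragment */
	return 1 + skb_shinfo(skb)->nr_frags;
}

static void demo_tx_complete(struct device *dev, struct demo_tx_cb *tx_cb)
{
	int i;

	if (tx_cb->seg_count == 0)
		return;                       /* nothing was mapped */

	/* segment 0 is the linear part ... */
	dma_unmap_single(dev, tx_cb->map_addr[0], tx_cb->map_len[0],
			 DMA_TO_DEVICE);

	/* ... segments 1..seg_count-1 are the page fragments, exactly the
	 * range ql_process_mac_tx_intr() and ql_reset_work() iterate */
	for (i = 1; i < tx_cb->seg_count; i++)
		dma_unmap_page(dev, tx_cb->map_addr[i], tx_cb->map_len[i],
			       DMA_TO_DEVICE);

	dev_kfree_skb_any(tx_cb->skb);
	tx_cb->skb = NULL;
	tx_cb->seg_count = 0;
}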
/linux-6.6.21/include/rdma/ |
D | ib_mad.h | 471 int seg_count; member
|