
Searched refs:seg_count (Results 1 – 16 of 16) sorted by relevance

/linux-6.6.21/drivers/char/agp/
compat_ioctl.c:72 if ((unsigned) ureserve.seg_count >= ~0U/sizeof(struct agp_segment32)) in compat_agpioc_reserve_wrap()
76 kreserve.seg_count = ureserve.seg_count; in compat_agpioc_reserve_wrap()
80 if (kreserve.seg_count == 0) { in compat_agpioc_reserve_wrap()
98 if (ureserve.seg_count >= 16384) in compat_agpioc_reserve_wrap()
101 usegment = kmalloc_array(ureserve.seg_count, in compat_agpioc_reserve_wrap()
107 ksegment = kmalloc_array(kreserve.seg_count, in compat_agpioc_reserve_wrap()
116 sizeof(*usegment) * ureserve.seg_count)) { in compat_agpioc_reserve_wrap()
122 for (seg = 0; seg < ureserve.seg_count; seg++) { in compat_agpioc_reserve_wrap()
frontend.c:170 seg = kzalloc((sizeof(struct agp_segment_priv) * region->seg_count), GFP_KERNEL); in agp_create_segment()
178 for (i = 0; i < region->seg_count; i++) { in agp_create_segment()
192 agp_add_seg_to_client(client, ret_seg, region->seg_count); in agp_create_segment()
809 if ((unsigned) reserve.seg_count >= ~0U/sizeof(struct agp_segment)) in agpioc_reserve_wrap()
814 if (reserve.seg_count == 0) { in agpioc_reserve_wrap()
830 if (reserve.seg_count >= 16384) in agpioc_reserve_wrap()
833 segment = kmalloc((sizeof(struct agp_segment) * reserve.seg_count), in agpioc_reserve_wrap()
840 sizeof(struct agp_segment) * reserve.seg_count)) { in agpioc_reserve_wrap()
compat_ioctl.h:66 compat_size_t seg_count; /* number of segments */ member
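
The AGP hits show the standard guard on a user-supplied count before allocation: agpioc_reserve_wrap() and its compat wrapper reject a seg_count that would overflow the size computation (>= ~0U/sizeof(element)), treat zero as a special case, cap the count at 16384, and only then allocate the array (kmalloc_array itself re-checks the multiply). A minimal userspace sketch of the same three checks; struct segment, MAX_SEGMENTS and reserve_segments() are illustrative stand-ins, not the kernel's names:

    #include <stdint.h>
    #include <stdlib.h>

    struct segment { uint32_t pg_start; uint32_t pg_count; };  /* stand-in for struct agp_segment */

    #define MAX_SEGMENTS 16384  /* mirrors the 16384 cap seen above */

    /* Allocate room for a caller-supplied number of segments, refusing counts
     * that are zero, absurdly large, or that would overflow the size math. */
    static struct segment *reserve_segments(size_t seg_count)
    {
        if (seg_count == 0)
            return NULL;
        if (seg_count >= MAX_SEGMENTS)
            return NULL;
        if (seg_count > SIZE_MAX / sizeof(struct segment))
            return NULL;  /* seg_count * sizeof() would wrap around */
        return calloc(seg_count, sizeof(struct segment));
    }

    int main(void)
    {
        struct segment *segs = reserve_segments(8);
        free(segs);
        return segs ? 0 : 1;
    }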
/linux-6.6.21/drivers/gpu/drm/
drm_dma.c:96 if (dma->bufs[i].seg_count) { in drm_legacy_dma_takedown()
101 dma->bufs[i].seg_count); in drm_legacy_dma_takedown()
102 for (j = 0; j < dma->bufs[i].seg_count; j++) { in drm_legacy_dma_takedown()
drm_bufs.c:676 if (entry->seg_count) { in drm_cleanup_buf_error()
677 for (i = 0; i < entry->seg_count; i++) { in drm_cleanup_buf_error()
689 entry->seg_count = 0; in drm_cleanup_buf_error()
858 dma->seg_count += entry->seg_count; in drm_legacy_addbufs_agp()
987 entry->seg_count = count; in drm_legacy_addbufs_pci()
1005 entry->seg_count = count; in drm_legacy_addbufs_pci()
1012 entry->seglist[entry->seg_count++] = dmah; in drm_legacy_addbufs_pci()
1042 entry->seg_count = count; in drm_legacy_addbufs_pci()
1082 dma->seg_count += entry->seg_count; in drm_legacy_addbufs_pci()
1083 dma->page_count += entry->seg_count << page_order; in drm_legacy_addbufs_pci()
[all …]
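
In the drm_legacy paths, each buffer entry remembers how many DMA segments it owns (entry->seg_count), drm_cleanup_buf_error() walks and resets that count on failure, and drm_legacy_addbufs_pci() rolls the totals up into dma->seg_count and dma->page_count, deriving pages with a shift because a segment of order N spans 2^N pages. A small sketch of that accounting, assuming simplified entry/totals structs rather than the real drm_buf_entry/drm_device_dma layouts:

    #include <stdio.h>

    struct buf_entry  { int seg_count; int page_order; };
    struct dma_totals { int seg_count; int page_count; };

    /* Fold one entry into the device-wide totals; each segment of order N
     * covers 1 << N pages, hence page_count += seg_count << page_order. */
    static void account_entry(struct dma_totals *dma, const struct buf_entry *entry)
    {
        dma->seg_count  += entry->seg_count;
        dma->page_count += entry->seg_count << entry->page_order;
    }

    int main(void)
    {
        struct dma_totals dma = { 0, 0 };
        struct buf_entry entry = { .seg_count = 8, .page_order = 2 };  /* 8 segments, 4 pages each */

        account_entry(&dma, &entry);
        printf("segments=%d pages=%d\n", dma.seg_count, dma.page_count);  /* segments=8 pages=32 */
        return 0;
    }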
/linux-6.6.21/drivers/infiniband/core/
mad_rmpp.c:579 paylen = (mad_send_wr->send_buf.seg_count * in send_next_seg()
584 if (mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) { in send_next_seg()
610 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) || in abort_send()
679 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) && in process_rmpp_ack()
686 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) || in process_rmpp_ack()
690 if (seg_num > mad_send_wr->send_buf.seg_count || in process_rmpp_ack()
706 if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) { in process_rmpp_ack()
728 mad_send_wr->seg_num < mad_send_wr->send_buf.seg_count) { in process_rmpp_ack()
921 if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) { in ib_process_rmpp_send_wc()
928 mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) in ib_process_rmpp_send_wc()
[all …]
mad.c:808 seg->num = ++send_buf->seg_count; in alloc_send_rmpp_list()
969 if (mad_send_wr->send_buf.seg_count) in ib_get_payload()
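
The RMPP hits describe a segmented MAD send: alloc_send_rmpp_list() numbers each segment as it grows send_buf.seg_count, send_next_seg() stops when seg_num reaches seg_count, and the ACK/completion paths treat last_ack == seg_count as the finished condition. A simplified sketch of that bookkeeping; rmpp_state and rmpp_ack() are invented names, not the ib_mad_send_wr_private internals:

    #include <stdbool.h>
    #include <stdio.h>

    struct rmpp_state {
        int seg_count;  /* total segments in the payload */
        int seg_num;    /* highest segment handed to the hardware */
        int last_ack;   /* highest segment acknowledged by the peer */
    };

    /* Record an ACK for segment 'seg'; returns true once every segment is acked. */
    static bool rmpp_ack(struct rmpp_state *s, int seg)
    {
        if (seg > s->seg_count || seg < s->last_ack)
            return false;                    /* out of range or stale, ignore */
        s->last_ack = seg;
        return s->last_ack == s->seg_count;  /* done when the final segment is acked */
    }

    int main(void)
    {
        struct rmpp_state s = { .seg_count = 3, .seg_num = 3, .last_ack = 2 };
        printf("done=%d\n", rmpp_ack(&s, 3));  /* done=1 */
        return 0;
    }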
/linux-6.6.21/include/drm/
drm_legacy.h:110 int seg_count; member
126 int seg_count; member
/linux-6.6.21/include/linux/
agpgart.h:65 size_t seg_count; /* number of segments */ member
/linux-6.6.21/include/uapi/linux/
agpgart.h:87 __kernel_size_t seg_count; /* number of segments */ member
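
Note the three declarations of the same field: the uapi header exposes seg_count as __kernel_size_t, the in-kernel header uses size_t, and compat_ioctl.h redeclares it as compat_size_t because a 32-bit caller passes 4-byte counts and pointers where the native struct holds 8-byte ones. That layout mismatch is why compat_agpioc_reserve_wrap() converts field by field (kreserve.seg_count = ureserve.seg_count) rather than copying the struct wholesale. A rough illustration with plain fixed-width types; reserve32/reserve64 are not the exact agp_region layouts:

    #include <stdint.h>
    #include <stdio.h>

    /* Roughly the 32-bit caller's view: 4-byte count, 4-byte user pointer. */
    struct reserve32 { uint32_t seg_count; uint32_t seg_list; };

    /* Roughly the native 64-bit view: 8-byte count, 8-byte pointer. */
    struct reserve64 { uint64_t seg_count; uint64_t seg_list; };

    int main(void)
    {
        struct reserve32 u = { .seg_count = 4, .seg_list = 0 };
        struct reserve64 k;

        /* Convert field by field, as the compat wrapper does; copying raw
         * bytes would misinterpret the smaller 32-bit layout. */
        k.seg_count = u.seg_count;
        k.seg_list  = u.seg_list;

        printf("32-bit struct: %zu bytes, native struct: %zu bytes\n",
               sizeof u, sizeof k);
        return 0;
    }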
/linux-6.6.21/drivers/net/ethernet/intel/ice/
ice_ddp.c:35 u32 seg_count; in ice_verify_pkg() local
48 seg_count = le32_to_cpu(pkg->seg_count); in ice_verify_pkg()
49 if (seg_count < 1) in ice_verify_pkg()
53 if (len < struct_size(pkg, seg_offset, seg_count)) in ice_verify_pkg()
57 for (i = 0; i < seg_count; i++) { in ice_verify_pkg()
1486 for (i = 0; i < le32_to_cpu(pkg_hdr->seg_count); i++) { in ice_find_seg_in_pkg()
ice_ddp.h:97 __le32 seg_count; member
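
ice_verify_pkg() pulls a little-endian seg_count out of the package header, insists on at least one segment, and uses struct_size() to confirm the buffer can hold the header plus seg_count 32-bit segment offsets without the multiply overflowing. A standalone approximation of that length check; the pkg_hdr layout here is hypothetical and an explicit overflow test stands in for struct_size():

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct pkg_hdr {
        uint32_t format_version;
        uint32_t seg_count;     /* number of entries in seg_offset[] */
        uint32_t seg_offset[];  /* flexible array of segment offsets */
    };

    /* Can 'len' bytes hold the header plus seg_count offsets? */
    static bool pkg_len_ok(size_t len, uint32_t seg_count)
    {
        const size_t hdr  = sizeof(struct pkg_hdr);
        const size_t elem = sizeof(uint32_t);

        if (seg_count < 1)
            return false;                        /* a package needs at least one segment */
        if (seg_count > (SIZE_MAX - hdr) / elem)
            return false;                        /* header + offsets would overflow size_t */
        return len >= hdr + (size_t)seg_count * elem;
    }

    int main(void)
    {
        printf("%d %d\n", pkg_len_ok(64, 2), pkg_len_ok(8, 2));  /* 1 0 */
        return 0;
    }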
/linux-6.6.21/drivers/memstick/core/
mspro_block.c:168 unsigned int seg_count; member
540 if (msb->current_seg == msb->seg_count) { in h_mspro_block_transfer_data()
630 msb->seg_count = blk_rq_map_sg(msb->block_req->q, in mspro_block_issue_req()
634 if (!msb->seg_count) { in mspro_block_issue_req()
916 msb->seg_count = 1; in mspro_block_read_attributes()
1017 msb->seg_count = 1; in mspro_block_read_attributes()
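
mspro_block fills its scatter list with blk_rq_map_sg(), keeps the returned entry count in msb->seg_count (zero means the mapping failed), and the transfer handler advances msb->current_seg until it equals seg_count; the attribute-read path simply pins seg_count to 1. A small sketch of that walk, with an invented segment descriptor in place of struct scatterlist:

    #include <stdio.h>

    struct seg { unsigned long addr; unsigned int len; };

    struct block_state {
        struct seg   segs[8];
        unsigned int seg_count;    /* entries filled by the mapping step */
        unsigned int current_seg;  /* next entry to hand to the controller */
    };

    /* Issue segments one at a time; returns 1 while work remains. */
    static int transfer_next(struct block_state *msb)
    {
        if (msb->current_seg == msb->seg_count)
            return 0;  /* request fully issued */
        /* ... program msb->segs[msb->current_seg] into the controller ... */
        msb->current_seg++;
        return 1;
    }

    int main(void)
    {
        struct block_state msb = { .seg_count = 3, .current_seg = 0 };
        while (transfer_next(&msb))
            ;
        printf("issued %u segments\n", msb.current_seg);  /* issued 3 segments */
        return 0;
    }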
/linux-6.6.21/drivers/net/ethernet/qlogic/
qla3xxx.c:1938 if (tx_cb->seg_count == 0) { in ql_process_mac_tx_intr()
1949 tx_cb->seg_count--; in ql_process_mac_tx_intr()
1950 if (tx_cb->seg_count) { in ql_process_mac_tx_intr()
1951 for (i = 1; i < tx_cb->seg_count; i++) { in ql_process_mac_tx_intr()
2314 seg_cnt = tx_cb->seg_count; in ql_send_map()
2470 tx_cb->seg_count = ql_get_seg_count(qdev, in ql3xxx_send()
2472 if (tx_cb->seg_count == -1) { in ql3xxx_send()
3637 for (j = 1; j < tx_cb->seg_count; j++) { in ql_reset_work()
qla3xxx.h:1038 int seg_count; member
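
qla3xxx sizes each transmit control block up front: ql_get_seg_count() reports how many DMA descriptors the frame needs, a return of -1 means the fragment count exceeds what one control block can describe (hence the signed int seg_count in qla3xxx.h), and the completion handler counts the segments back down as they are unmapped. A rough sketch of that sizing step; MAX_TX_SEGS is a made-up limit and the real helper's mapping is more involved:

    #include <stdio.h>

    #define MAX_TX_SEGS 31  /* hypothetical per-frame descriptor limit */

    /* One descriptor for the linear buffer plus one per page fragment,
     * or -1 if the frame cannot fit in a single control block. */
    static int get_seg_count(int nr_frags)
    {
        int segs = 1 + nr_frags;

        return (segs > MAX_TX_SEGS) ? -1 : segs;
    }

    int main(void)
    {
        printf("%d %d\n", get_seg_count(2), get_seg_count(40));  /* 3 -1 */
        return 0;
    }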
/linux-6.6.21/include/rdma/
ib_mad.h:471 int seg_count; member