
Searched refs:sg_count (Results 1 – 25 of 47) sorted by relevance


/linux-2.6.39/drivers/message/i2o/
memory.c
33 u16 sg_count = in i2o_sg_tablesize() local
42 sg_count -= 2; in i2o_sg_tablesize()
43 sg_count /= 3; in i2o_sg_tablesize()
45 sg_count /= 2; in i2o_sg_tablesize()
47 if (c->short_req && (sg_count > 8)) in i2o_sg_tablesize()
48 sg_count = 8; in i2o_sg_tablesize()
50 return sg_count; in i2o_sg_tablesize()
128 int sg_count, enum dma_data_direction direction, u32 ** sg_ptr) in i2o_dma_map_sg() argument
144 sg_count = dma_map_sg(&c->pdev->dev, sg, sg_count, direction); in i2o_dma_map_sg()
145 if (!sg_count) in i2o_dma_map_sg()
[all …]
i2o_config.c
531 u32 sg_count = 0; in i2o_cfg_passthru32() local
607 sg_count = in i2o_cfg_passthru32()
609 if (sg_count > SG_TABLESIZE) { in i2o_cfg_passthru32()
611 c->name, sg_count); in i2o_cfg_passthru32()
616 for (i = 0; i < sg_count; i++) { in i2o_cfg_passthru32()
634 c->name, sg_size, i, sg_count); in i2o_cfg_passthru32()
688 sg_count = in i2o_cfg_passthru32()
693 for (j = 0; j < sg_count; j++) { in i2o_cfg_passthru32()
777 u32 sg_count = 0; in i2o_cfg_passthru() local
845 sg_count = in i2o_cfg_passthru()
[all …]
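
The i2o hits above show the common kernel pattern: the driver sizes its SG table from the message frame, then lets dma_map_sg() decide how many entries actually need programming. A minimal sketch of that mapping step follows; the helper name and device pointer are illustrative and not part of the i2o code.

#include <linux/kernel.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/*
 * Illustrative helper, not taken from drivers/message/i2o: map a
 * scatterlist for DMA and walk only the entries dma_map_sg() reports
 * back.  An IOMMU may coalesce segments, so the returned count can be
 * smaller than the nents passed in and is the only count a hardware
 * SG list may be built from.
 */
static int map_request_sg(struct device *dev, struct scatterlist *sgl,
			  int nents, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int sg_count, i;

	sg_count = dma_map_sg(dev, sgl, nents, dir);
	if (!sg_count)
		return -ENOMEM;			/* mapping failed */

	for_each_sg(sgl, sg, sg_count, i) {
		/* one hardware SG element per mapped entry */
		pr_debug("sg %d: dma 0x%llx len %u\n", i,
			 (unsigned long long)sg_dma_address(sg),
			 sg_dma_len(sg));
	}

	return sg_count;
}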
/linux-2.6.39/drivers/staging/hv/
storvsc_drv.c
76 unsigned int sg_count,
79 unsigned int sg_count);
80 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count);
387 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) in do_bounce_buffer() argument
392 if (sg_count < 2) in do_bounce_buffer()
396 for (i = 0; i < sg_count; i++) { in do_bounce_buffer()
401 } else if (i == sg_count - 1) { in do_bounce_buffer()
415 unsigned int sg_count, in create_bounce_buffer() argument
444 unsigned int sg_count) in destroy_bounce_buffer() argument
449 for (i = 0; i < sg_count; i++) { in destroy_bounce_buffer()
[all …]
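
The storvsc fragments above only show the loop skeleton of do_bounce_buffer(); judging from that structure, it decides whether the scatterlist has "holes" that force the data through a bounce buffer. Below is a sketch of that style of check, under the assumption that the host interface needs a hole-free, page-granular list; the helper name is invented and a non-chained list is assumed.

#include <linux/mm.h>
#include <linux/scatterlist.h>

/*
 * Assumption-labelled sketch, not the driver's code: return nonzero if
 * the list cannot be handed over as-is.  The first entry may start
 * mid-page but must run to the end of its page, the last may end early
 * but must start at offset 0, and every middle entry must cover a full
 * page.
 */
static int needs_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count)
{
	unsigned int i;

	if (sg_count < 2)		/* a single entry never has a hole */
		return 0;

	for (i = 0; i < sg_count; i++) {
		if (i == 0) {
			if (sgl[i].offset + sgl[i].length != PAGE_SIZE)
				return 1;
		} else if (i == sg_count - 1) {
			if (sgl[i].offset != 0)
				return 1;
		} else {
			if (sgl[i].offset != 0 ||
			    sgl[i].length != PAGE_SIZE)
				return 1;
		}
	}

	return 0;
}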
/linux-2.6.39/drivers/crypto/
talitos.c
974 static int sg_to_link_tbl(struct scatterlist *sg, int sg_count, in sg_to_link_tbl() argument
977 int n_sg = sg_count; in sg_to_link_tbl()
994 sg_count--; in sg_to_link_tbl()
1003 return sg_count; in sg_to_link_tbl()
1022 int sg_count, ret; in ipsec_esp() local
1049 sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, in ipsec_esp()
1054 if (sg_count == 1) { in ipsec_esp()
1062 sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len, in ipsec_esp()
1064 if (sg_count > 1) { in ipsec_esp()
1082 sg_count = talitos_map_sg(dev, areq->dst, in ipsec_esp()
[all …]
picoxcell_crypto.c
261 static int sg_count(struct scatterlist *sg_list, int nbytes) in sg_count() function
297 nents = sg_count(payload, nbytes); in spacc_sg_to_ddt()
325 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_make_ddts()
345 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); in spacc_aead_make_ddts()
409 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_free_ddts()
414 sg_count(areq->dst, areq->cryptlen), in spacc_aead_free_ddts()
420 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); in spacc_aead_free_ddts()
432 unsigned nents = sg_count(payload, nbytes); in spacc_free_ddt()
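
picoxcell_crypto.c carries its own sg_count() helper (line 261 above) and calls it wherever it needs to know how many list entries cover a given payload before building DDTs. The usual shape of such a helper is sketched here under a different name so it does not collide with the driver's.

#include <linux/scatterlist.h>

/*
 * Illustrative counterpart to the driver-local sg_count() above: walk
 * the list with sg_next() and count how many entries are needed to
 * cover nbytes of payload.
 */
static int count_payload_sg(struct scatterlist *sg_list, int nbytes)
{
	struct scatterlist *sg = sg_list;
	int nents = 0;

	while (nbytes > 0 && sg) {
		nents++;
		nbytes -= sg->length;
		sg = sg_next(sg);
	}

	return nents;
}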
/linux-2.6.39/drivers/scsi/arm/
scsi.h
101 unsigned i, sg_count = scsi_sg_count(SCpnt); in init_SCp() local
103 scsi_for_each_sg(SCpnt, sg, sg_count, i) in init_SCp()
/linux-2.6.39/drivers/scsi/fnic/
fnic_scsi.c
263 int sg_count) in fnic_queue_wq_copy_desc() argument
277 if (sg_count) { in fnic_queue_wq_copy_desc()
280 for_each_sg(scsi_sglist(sc), sg, sg_count, i) { in fnic_queue_wq_copy_desc()
290 sizeof(io_req->sgl_list[0]) * sg_count, in fnic_queue_wq_copy_desc()
360 int sg_count; in fnic_queuecommand_lck() local
394 sg_count = scsi_dma_map(sc); in fnic_queuecommand_lck()
395 if (sg_count < 0) { in fnic_queuecommand_lck()
401 io_req->sgl_cnt = sg_count; in fnic_queuecommand_lck()
403 if (sg_count > FNIC_DFLT_SG_DESC_CNT) in fnic_queuecommand_lck()
406 if (sg_count) { in fnic_queuecommand_lck()
[all …]
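
fnic_queuecommand_lck() follows the standard SCSI midlayer pattern visible above: scsi_dma_map() returns the number of mapped entries (zero for a dataless command, negative on failure), and scsi_for_each_sg() then walks exactly that many entries to build the hardware SG list. A minimal sketch, with an invented descriptor struct standing in for the adapter's real SG element format:

#include <linux/types.h>
#include <linux/scatterlist.h>
#include <scsi/scsi_cmnd.h>

struct hw_sg_desc {			/* stand-in for the adapter's SG element */
	u64 addr;
	u32 len;
};

static int build_hw_sgl(struct scsi_cmnd *sc, struct hw_sg_desc *desc)
{
	struct scatterlist *sg;
	int sg_count, i;

	sg_count = scsi_dma_map(sc);	/* maps scsi_sglist(sc) for DMA */
	if (sg_count < 0)
		return sg_count;	/* mapping failed, let the caller retry */

	scsi_for_each_sg(sc, sg, sg_count, i) {
		desc[i].addr = sg_dma_address(sg);
		desc[i].len  = sg_dma_len(sg);
	}

	return sg_count;		/* 0 is legal: no data to transfer */
}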
/linux-2.6.39/include/linux/mmc/
sdhci.h
129 int sg_count; /* Mapped sg entries */ member
/linux-2.6.39/drivers/scsi/
qlogicpti.c
896 int sg_count; in load_cmd() local
899 sg_count = dma_map_sg(&qpti->op->dev, sg, in load_cmd()
904 cmd->segment_cnt = sg_count; in load_cmd()
907 n = sg_count; in load_cmd()
914 sg_count -= 4; in load_cmd()
916 while (sg_count > 0) { in load_cmd()
931 n = sg_count; in load_cmd()
938 sg_count -= n; in load_cmd()
stex.c
183 __le16 sg_count; member
286 int sg_count; member
425 ccb->sg_count = nseg; in stex_map_sg()
426 dst->sg_count = cpu_to_le16((u16)nseg); in stex_map_sg()
457 ccb->sg_count = nseg; in stex_ss_map_sg()
458 dst->sg_count = cpu_to_le16((u16)nseg); in stex_ss_map_sg()
534 addr += (hba->ccb[tag].sg_count+4)/11; in stex_ss_send_cmd()
693 hba->ccb[tag].sg_count = 0; in stex_queuecommand_lck()
1754 hba->ccb[tag].sg_count = 0; in stex_hba_stop()
ips.h
431 uint8_t sg_count; member
640 uint8_t sg_count; member
655 uint16_t sg_count; member
1114 int sg_count; member
dc395x.c
236 u8 sg_count; /* No of HW sg entries for this request */ member
990 srb->sg_count = 0; in build_srb()
1019 srb->sg_count = nseg; in build_srb()
1024 srb->sg_count); in build_srb()
1026 scsi_for_each_sg(cmd, sg, srb->sg_count, i) { in build_srb()
1033 sgp += srb->sg_count - 1; in build_srb()
1212 srb->segment_x, srb->sg_count, srb->sg_index, in dump_register_info()
1940 for (; idx < srb->sg_count; psge++, idx++) in sg_verify_length()
1970 for (idx = srb->sg_index; idx < srb->sg_count; idx++) { in sg_update_list()
2285 srb->sg_count, &offset, &len); in data_in_phase0()
[all …]
hptiop.c
760 int sg_count = 0; in hptiop_queuecommand_lck() local
796 sg_count = hptiop_buildsgl(scp, req->sg_list); in hptiop_queuecommand_lck()
797 if (!sg_count) in hptiop_queuecommand_lck()
810 + sg_count * sizeof(struct hpt_iopsg)); in hptiop_queuecommand_lck()
aha1542.c
666 int i, sg_count = scsi_sg_count(SCpnt); in aha1542_queuecommand_lck() local
668 SCpnt->host_scribble = kmalloc(sizeof(*cptr)*sg_count, in aha1542_queuecommand_lck()
676 scsi_for_each_sg(SCpnt, sg, sg_count, i) { in aha1542_queuecommand_lck()
680 any2scsi(ccb[mbo].datalen, sg_count * sizeof(struct chain)); in aha1542_queuecommand_lck()
dpt_i2o.c
1806 u32 sg_count = 0; in adpt_i2o_passthru() local
1852 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); in adpt_i2o_passthru()
1853 if (sg_count > pHba->sg_tablesize){ in adpt_i2o_passthru()
1854 printk(KERN_DEBUG"%s:IOCTL SG List too large (%u)\n", pHba->name,sg_count); in adpt_i2o_passthru()
1859 for(i = 0; i < sg_count; i++) { in adpt_i2o_passthru()
1872 pHba->name,sg_size,i,sg_count); in adpt_i2o_passthru()
1937 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); in adpt_i2o_passthru()
1941 for (j = 0; j < sg_count; j++) { in adpt_i2o_passthru()
sr.c
405 int i, size = 0, sg_count = scsi_sg_count(SCpnt); in sr_prep_fn() local
407 scsi_for_each_sg(SCpnt, sg, sg_count, i) in sr_prep_fn()
scsi_lib.c
2524 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, in scsi_kmap_atomic_sg() argument
2534 for_each_sg(sgl, sg, sg_count, i) { in scsi_kmap_atomic_sg()
2541 if (unlikely(i == sg_count)) { in scsi_kmap_atomic_sg()
2544 __func__, sg_len, *offset, sg_count); in scsi_kmap_atomic_sg()
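
scsi_kmap_atomic_sg(), defined in scsi_lib.c above and declared in scsi_cmnd.h further down, maps the portion of a command's scatterlist covering a requested byte range into an atomic kmap. A small usage sketch follows; the helper name is invented, and note that offset/len are in/out parameters (bytes into the list on entry, position and length within the mapping on return).

#include <scsi/scsi_cmnd.h>

/*
 * Illustrative only: peek at the first byte of a command's data buffer.
 * scsi_kmap_atomic_sg() returns the start of the mapped page; on return
 * *offset is the offset of the requested data within that mapping.
 */
static u8 peek_first_byte(struct scsi_cmnd *sc)
{
	size_t offset = 0, len = 1;	/* map one byte at the start */
	char *buf;
	u8 first;

	buf = scsi_kmap_atomic_sg(scsi_sglist(sc), scsi_sg_count(sc),
				  &offset, &len);
	first = buf[offset];
	scsi_kunmap_atomic_sg(buf);

	return first;
}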
3w-sas.c
331 int i, sg_count; in twl_scsiop_execute_scsi() local
373 sg_count = twl_map_scsi_sg_data(tw_dev, request_id); in twl_scsiop_execute_scsi()
374 if (sg_count == 0) in twl_scsiop_execute_scsi()
377 scsi_for_each_sg(srb, sg, sg_count, i) { in twl_scsiop_execute_scsi()
/linux-2.6.39/include/scsi/
libiscsi_tcp.h
115 struct scatterlist *sg_list, unsigned int sg_count,
scsi_cmnd.h
141 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
/linux-2.6.39/drivers/memstick/host/
r592.c
278 int len, sg_count; in r592_transfer_fifo_dma() local
296 sg_count = dma_map_sg(&dev->pci_dev->dev, &dev->req->sg, 1, is_write ? in r592_transfer_fifo_dma()
299 if (sg_count != 1 || in r592_transfer_fifo_dma()
/linux-2.6.39/drivers/infiniband/hw/ipath/
ipath_sdma.c
693 if (tx->txreq.sg_count > ipath_sdma_descq_freecnt(dd)) { in ipath_sdma_verbs_send()
798 dd->ipath_sdma_descq_added += tx->txreq.sg_count; in ipath_sdma_verbs_send()
/linux-2.6.39/drivers/infiniband/hw/qib/
qib_sdma.c
547 if (tx->txreq.sg_count > qib_sdma_descq_freecnt(ppd)) { in qib_sdma_verbs_send()
643 ppd->sdma_descq_added += tx->txreq.sg_count; in qib_sdma_verbs_send()
/linux-2.6.39/drivers/mmc/host/
sdhci.c
448 host->sg_count = dma_map_sg(mmc_dev(host->mmc), in sdhci_adma_table_pre()
450 if (host->sg_count == 0) in sdhci_adma_table_pre()
458 for_each_sg(data->sg, sg, host->sg_count, i) { in sdhci_adma_table_pre()
576 for_each_sg(data->sg, sg, host->sg_count, i) { in sdhci_adma_table_post()
/linux-2.6.39/drivers/target/
target_core_transport.c
4578 u32 sg_count = 1, cmd_size = cmd->data_length; in transport_map_sg_to_mem() local
4603 sg_count++; in transport_map_sg_to_mem()
4619 " struct se_mem\n", sg_count, *se_mem_cnt); in transport_map_sg_to_mem()
4621 if (sg_count != *se_mem_cnt) in transport_map_sg_to_mem()
4746 u32 task_sg_num = 0, sg_count = 0; in transport_do_task_sg_chain() local
4779 sg_count += (task->task_sg_num + 1); in transport_do_task_sg_chain()
4781 sg_count += task->task_sg_num; in transport_do_task_sg_chain()
4801 sg_count += (task->task_sg_num + 1); in transport_do_task_sg_chain()
4803 sg_count += task->task_sg_num; in transport_do_task_sg_chain()
4810 T_TASK(cmd)->t_tasks_sg_chained_no = sg_count; in transport_do_task_sg_chain()
