
Searched refs: max_hw_sectors (Results 1 – 19 of 19) sorted by relevance

/linux-6.6.21/block/
blk-settings.c
43 lim->max_sectors = lim->max_hw_sectors = BLK_SAFE_MAX_SECTORS; in blk_set_default_limits()
79 lim->max_hw_sectors = UINT_MAX; in blk_set_stacking_limits()
123 void blk_queue_max_hw_sectors(struct request_queue *q, unsigned int max_hw_sectors) in blk_queue_max_hw_sectors() argument
128 if ((max_hw_sectors << 9) < PAGE_SIZE) { in blk_queue_max_hw_sectors()
129 max_hw_sectors = 1 << (PAGE_SHIFT - 9); in blk_queue_max_hw_sectors()
131 __func__, max_hw_sectors); in blk_queue_max_hw_sectors()
134 max_hw_sectors = round_down(max_hw_sectors, in blk_queue_max_hw_sectors()
136 limits->max_hw_sectors = max_hw_sectors; in blk_queue_max_hw_sectors()
138 max_sectors = min_not_zero(max_hw_sectors, limits->max_dev_sectors); in blk_queue_max_hw_sectors()
224 max_sectors = min(q->limits.max_hw_sectors, max_zone_append_sectors); in blk_queue_max_zone_append_sectors()
[all …]
blk-merge.c
597 return q->limits.max_hw_sectors; in blk_rq_get_max_sectors()
/linux-6.6.21/drivers/nvme/target/
passthru.c
86 unsigned int max_hw_sectors; in nvmet_passthru_override_id_ctrl() local
105 max_hw_sectors = min_not_zero(pctrl->max_segments << PAGE_SECTORS_SHIFT, in nvmet_passthru_override_id_ctrl()
106 pctrl->max_hw_sectors); in nvmet_passthru_override_id_ctrl()
112 max_hw_sectors = min_not_zero(BIO_MAX_VECS << PAGE_SECTORS_SHIFT, in nvmet_passthru_override_id_ctrl()
113 max_hw_sectors); in nvmet_passthru_override_id_ctrl()
117 id->mdts = ilog2(max_hw_sectors) + 9 - page_shift; in nvmet_passthru_override_id_ctrl()
loop.c
375 ctrl->ctrl.max_hw_sectors = in nvme_loop_configure_admin_queue()
/linux-6.6.21/drivers/block/rnbd/
rnbd-proto.h
147 __le32 max_hw_sectors; member
README
73 information: side, max_hw_sectors, etc.
rnbd-srv.c
545 rsp->max_hw_sectors = in rnbd_srv_fill_msg_open_rsp()
/linux-6.6.21/drivers/nvme/host/
core.c
1876 if (ctrl->max_hw_sectors) { in nvme_set_queue_limits()
1878 (ctrl->max_hw_sectors / (NVME_CTRL_PAGE_SIZE >> 9)) + 1; in nvme_set_queue_limits()
1881 blk_queue_max_hw_sectors(q, ctrl->max_hw_sectors); in nvme_set_queue_limits()
1977 is_power_of_2(ctrl->max_hw_sectors)) in nvme_set_chunk_sectors()
1978 iob = ctrl->max_hw_sectors; in nvme_set_chunk_sectors()
2931 ctrl->max_zeroes_sectors = ctrl->max_hw_sectors; in nvme_init_non_mdts_limits()
3024 u32 max_hw_sectors; in nvme_init_identify() local
3083 max_hw_sectors = nvme_mps_to_sectors(ctrl, id->mdts); in nvme_init_identify()
3085 max_hw_sectors = UINT_MAX; in nvme_init_identify()
3086 ctrl->max_hw_sectors = in nvme_init_identify()
[all …]
zns.c
43 ctrl->max_zone_append = ctrl->max_hw_sectors; in nvme_set_max_append()
nvme.h
302 u32 max_hw_sectors; member
multipath.c
910 size_t max_transfer_size = ctrl->max_hw_sectors << SECTOR_SHIFT; in nvme_mpath_init_identify()
apple.c
1052 anv->ctrl.max_hw_sectors = min_t(u32, NVME_MAX_KB_SZ << 1, in apple_nvme_reset_work()
rdma.c
830 ctrl->ctrl.max_hw_sectors = ctrl->max_fr_pages << (ilog2(SZ_4K) - 9); in nvme_rdma_configure_admin_queue()
fc.c
3115 ctrl->ctrl.max_hw_sectors = ctrl->ctrl.max_segments << in nvme_fc_create_association()
pci.c
2975 dev->ctrl.max_hw_sectors = min_t(u32, in nvme_pci_alloc_dev()
/linux-6.6.21/include/linux/
blkdev.h
292 unsigned int max_hw_sectors; member
1107 return q->limits.max_hw_sectors; in queue_max_hw_sectors()
/linux-6.6.21/drivers/block/drbd/
drbd_nl.c
1275 unsigned int max_hw_sectors = max_bio_size >> 9; in drbd_setup_queue_param() local
1283 max_hw_sectors = min(queue_max_hw_sectors(b), max_bio_size >> 9); in drbd_setup_queue_param()
1292 blk_queue_max_hw_sectors(q, max_hw_sectors); in drbd_setup_queue_param()
/linux-6.6.21/drivers/md/bcache/
super.c
952 q->limits.max_hw_sectors = UINT_MAX; in bcache_device_init()
/linux-6.6.21/drivers/scsi/
sd.c
3542 q->limits.max_sectors > q->limits.max_hw_sectors) in sd_revalidate_disk()
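
Taken together, the hits above cover the setter (blk_queue_max_hw_sectors() in block/blk-settings.c), the getter (queue_max_hw_sectors() in include/linux/blkdev.h), and the drivers that feed them. A minimal sketch of that driver-side pattern follows; the driver name mydrv and the 128 KiB limit are hypothetical and do not come from any file indexed here.

#include <linux/blkdev.h>
#include <linux/printk.h>

#define MYDRV_MAX_KB 128	/* hypothetical controller limit: 128 KiB per request */

static void mydrv_apply_limits(struct request_queue *q)
{
	/* 512-byte sectors, so KiB << 1 gives the sector count */
	blk_queue_max_hw_sectors(q, MYDRV_MAX_KB << 1);

	/*
	 * Read the value back: blk_queue_max_hw_sectors() may have
	 * rounded it down to a multiple of the logical block size,
	 * and it also derives max_sectors from max_hw_sectors
	 * (blk-settings.c lines 134-138 above).
	 */
	pr_info("mydrv: max_hw_sectors=%u\n", queue_max_hw_sectors(q));
}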