Lines matching refs: NVME_CTRL_PAGE_SIZE in drivers/nvme/host/pci.c
38 #define SGES_PER_PAGE (NVME_CTRL_PAGE_SIZE / sizeof(struct nvme_sgl_desc))
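
SGES_PER_PAGE sizes the SGL pool: it is the number of 16-byte SGL descriptors that fit in one controller page. A minimal userspace sketch of the arithmetic, assuming a 4 KiB NVME_CTRL_PAGE_SIZE; the struct below only mirrors the 16-byte descriptor layout from the NVMe specification:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumption: 4 KiB controller page. */
    #define NVME_CTRL_PAGE_SIZE 4096

    /* Mirrors the 16-byte SGL descriptor defined by the NVMe spec. */
    struct nvme_sgl_desc {
        uint64_t addr;
        uint32_t length;
        uint8_t  rsvd[3];
        uint8_t  type;
    };

    #define SGES_PER_PAGE (NVME_CTRL_PAGE_SIZE / sizeof(struct nvme_sgl_desc))

    int main(void)
    {
        /* 4096 / 16 = 256 descriptors per pool page */
        printf("SGES_PER_PAGE = %zu\n", SGES_PER_PAGE);
        return 0;
    }
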
390 unsigned max_bytes = (NVME_MAX_KB_SZ * 1024) + NVME_CTRL_PAGE_SIZE; in nvme_pci_npages_prp()
391 unsigned nprps = DIV_ROUND_UP(max_bytes, NVME_CTRL_PAGE_SIZE); in nvme_pci_npages_prp()
392 return DIV_ROUND_UP(8 * nprps, NVME_CTRL_PAGE_SIZE - 8); in nvme_pci_npages_prp()
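
nvme_pci_npages_prp() sizes the worst-case per-command PRP list allocation: one extra controller page of data accounts for an unaligned start, each PRP entry is 8 bytes, and the last slot of every list page is a chain pointer, leaving NVME_CTRL_PAGE_SIZE - 8 usable bytes per page. A standalone sketch of the same arithmetic, assuming a 4 KiB controller page and an NVME_MAX_KB_SZ of 8192 (both values are assumptions, not taken from this listing):

    #include <stdio.h>

    #define NVME_CTRL_PAGE_SIZE 4096        /* assumed 4 KiB controller page */
    #define NVME_MAX_KB_SZ      8192        /* assumed driver transfer cap   */
    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

    /* Worst case: the transfer starts mid-page, so budget one extra page of
     * PRPs; each PRP entry is 8 bytes and the last slot of every list page
     * chains to the next one, leaving NVME_CTRL_PAGE_SIZE - 8 usable bytes. */
    static unsigned nvme_pci_npages_prp(void)
    {
        unsigned max_bytes = (NVME_MAX_KB_SZ * 1024) + NVME_CTRL_PAGE_SIZE;
        unsigned nprps = DIV_ROUND_UP(max_bytes, NVME_CTRL_PAGE_SIZE);

        return DIV_ROUND_UP(8 * nprps, NVME_CTRL_PAGE_SIZE - 8);
    }

    int main(void)
    {
        /* With the assumed values this prints 5 list pages per command. */
        printf("%u\n", nvme_pci_npages_prp());
        return 0;
    }
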
526 const int last_prp = NVME_CTRL_PAGE_SIZE / sizeof(__le64) - 1; in nvme_free_prps()
588 int offset = dma_addr & (NVME_CTRL_PAGE_SIZE - 1); in nvme_pci_setup_prps()
593 length -= (NVME_CTRL_PAGE_SIZE - offset); in nvme_pci_setup_prps()
599 dma_len -= (NVME_CTRL_PAGE_SIZE - offset); in nvme_pci_setup_prps()
601 dma_addr += (NVME_CTRL_PAGE_SIZE - offset); in nvme_pci_setup_prps()
608 if (length <= NVME_CTRL_PAGE_SIZE) { in nvme_pci_setup_prps()
613 nprps = DIV_ROUND_UP(length, NVME_CTRL_PAGE_SIZE); in nvme_pci_setup_prps()
631 if (i == NVME_CTRL_PAGE_SIZE >> 3) { in nvme_pci_setup_prps()
642 dma_len -= NVME_CTRL_PAGE_SIZE; in nvme_pci_setup_prps()
643 dma_addr += NVME_CTRL_PAGE_SIZE; in nvme_pci_setup_prps()
644 length -= NVME_CTRL_PAGE_SIZE; in nvme_pci_setup_prps()
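
nvme_pci_setup_prps() carves a mapped request into PRPs: prp1 covers the possibly unaligned head (lines 588-601 above), a remainder of at most one page goes straight into prp2 (line 608), and anything larger gets a PRP list whose pages chain through their last slot (line 631; that slot index is the last_prp constant freed in nvme_free_prps() at line 526). A rough userspace model of that walk, assuming a 4 KiB controller page and using fake addresses with no real DMA mapping:

    #include <stdint.h>
    #include <stdio.h>

    #define NVME_CTRL_PAGE_SIZE 4096                                /* assumed */
    #define PRPS_PER_LIST (NVME_CTRL_PAGE_SIZE / sizeof(uint64_t))  /* 512 slots */

    /* Rough model only: counts PRP entries and list pages instead of
     * allocating them, and prints what prp1/prp2 would point at.      */
    static void build_prps(uint64_t dma_addr, size_t length)
    {
        uint64_t prp1 = dma_addr;
        size_t offset = dma_addr & (NVME_CTRL_PAGE_SIZE - 1);

        if (length <= NVME_CTRL_PAGE_SIZE - offset) {
            printf("prp1=%#jx, prp2 unused\n", (uintmax_t)prp1);
            return;
        }
        dma_addr += NVME_CTRL_PAGE_SIZE - offset;
        length   -= NVME_CTRL_PAGE_SIZE - offset;

        if (length <= NVME_CTRL_PAGE_SIZE) {
            printf("prp1=%#jx, prp2=%#jx, no list needed\n",
                   (uintmax_t)prp1, (uintmax_t)dma_addr);
            return;
        }

        size_t nprps = (length + NVME_CTRL_PAGE_SIZE - 1) / NVME_CTRL_PAGE_SIZE;
        size_t list_pages = 1, slot = 0;

        for (size_t n = 0; n < nprps; n++) {
            if (slot == PRPS_PER_LIST) {  /* list full: chain to a new page  */
                list_pages++;
                slot = 1;                 /* displaced entry moves to slot 0 */
            }
            slot++;
        }
        printf("prp1=%#jx, prp2 -> PRP list: %zu entries across %zu page(s)\n",
               (uintmax_t)prp1, nprps, list_pages);
    }

    int main(void)
    {
        build_prps(0x10000200, 512 * 1024);   /* 512 KiB starting mid-page */
        return 0;
    }
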
735 unsigned int offset = bv->bv_offset & (NVME_CTRL_PAGE_SIZE - 1); in nvme_setup_prp_simple()
736 unsigned int first_prp_len = NVME_CTRL_PAGE_SIZE - offset; in nvme_setup_prp_simple()
781 if (bv.bv_offset + bv.bv_len <= NVME_CTRL_PAGE_SIZE * 2) in nvme_map_data()
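
nvme_map_data() takes a fast path when a request has a single bio_vec that ends within two controller pages (line 781): nvme_setup_prp_simple() then fills prp1 directly and only sets prp2 when the data crosses a page boundary, skipping the scatterlist and PRP-list pools entirely. A small userspace sketch of that decision and the prp1/prp2 math, assuming a 4 KiB controller page and illustrative addresses:

    #include <stdint.h>
    #include <stdio.h>

    #define NVME_CTRL_PAGE_SIZE 4096   /* assumed 4 KiB controller page */

    /* dma_addr stands in for the mapped bvec address; because of
     * dma_set_min_align_mask() it keeps bv_offset's low bits, so the
     * offset math below can use bv_offset directly.                   */
    static void prp_simple(uint64_t dma_addr, unsigned bv_offset, unsigned bv_len)
    {
        if (bv_offset + bv_len > NVME_CTRL_PAGE_SIZE * 2) {
            printf("too large: full PRP/SGL setup instead\n");
            return;
        }

        unsigned offset = bv_offset & (NVME_CTRL_PAGE_SIZE - 1);
        unsigned first_prp_len = NVME_CTRL_PAGE_SIZE - offset;

        printf("prp1=%#jx\n", (uintmax_t)dma_addr);
        if (bv_len > first_prp_len)
            printf("prp2=%#jx\n", (uintmax_t)(dma_addr + first_prp_len));
        else
            printf("prp2 unused\n");
    }

    int main(void)
    {
        prp_simple(0xabcd0200, 0x200, 4096);   /* crosses one page boundary */
        return 0;
    }
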
1473 NVME_CTRL_PAGE_SIZE); in nvme_cmb_qdepth()
1478 mem_per_q = round_down(mem_per_q, NVME_CTRL_PAGE_SIZE); in nvme_cmb_qdepth()
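
nvme_cmb_qdepth() shrinks the submission queue depth when queues are placed in the controller memory buffer: each queue is rounded up to a controller-page multiple, and if the total does not fit in the CMB, the buffer is split evenly per queue, rounded down to a page multiple, and divided by the entry size. A standalone model of that calculation; the 256 KiB CMB, 8 queues, 64-byte entries, and depth 1024 below are made-up example values:

    #include <stdint.h>
    #include <stdio.h>

    #define NVME_CTRL_PAGE_SIZE 4096   /* assumed 4 KiB controller page */

    static unsigned cmb_qdepth(uint64_t cmb_size, unsigned nr_io_queues,
                               unsigned entry_size, unsigned q_depth)
    {
        /* Round each queue's footprint up to a controller-page multiple. */
        uint64_t q_size_aligned =
            (q_depth * (uint64_t)entry_size + NVME_CTRL_PAGE_SIZE - 1) &
            ~(uint64_t)(NVME_CTRL_PAGE_SIZE - 1);

        if (q_size_aligned * nr_io_queues > cmb_size) {
            /* Split the CMB evenly, round down to a page multiple, and
             * derive the largest depth that still fits.                 */
            uint64_t mem_per_q = cmb_size / nr_io_queues;

            mem_per_q &= ~(uint64_t)(NVME_CTRL_PAGE_SIZE - 1);
            q_depth = mem_per_q / entry_size;
        }
        return q_depth;
    }

    int main(void)
    {
        /* 256 KiB CMB, 8 queues, 64-byte SQ entries: depth drops to 512. */
        printf("depth = %u\n", cmb_qdepth(256 * 1024, 8, 64, 1024));
        return 0;
    }
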
1915 size_t size = le32_to_cpu(desc->size) * NVME_CTRL_PAGE_SIZE; in nvme_free_host_mem()
1967 descs[i].size = cpu_to_le32(len / NVME_CTRL_PAGE_SIZE); in __nvme_alloc_host_mem()
1983 size_t size = le32_to_cpu(descs[i].size) * NVME_CTRL_PAGE_SIZE; in __nvme_alloc_host_mem()
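
The host memory buffer descriptors express their size in controller pages rather than bytes: __nvme_alloc_host_mem() divides each chunk length by NVME_CTRL_PAGE_SIZE when filling a descriptor (line 1967), and nvme_free_host_mem() multiplies it back to recover the byte count to free (lines 1915 and 1983). A small sketch of that round trip; the struct mirrors the 16-byte descriptor layout from the NVMe spec, and the 2 MiB chunk size is just an example:

    #include <stdint.h>
    #include <stdio.h>

    #define NVME_CTRL_PAGE_SIZE 4096   /* assumed 4 KiB controller page */

    /* Mirrors the HMB descriptor from the NVMe spec: size counts
     * controller pages, not bytes.                                  */
    struct nvme_host_mem_buf_desc {
        uint64_t addr;
        uint32_t size;
        uint32_t rsvd;
    };

    int main(void)
    {
        struct nvme_host_mem_buf_desc desc = { .addr = 0x10000000 };
        size_t chunk_len = 2 * 1024 * 1024;            /* one 2 MiB chunk  */

        desc.size = chunk_len / NVME_CTRL_PAGE_SIZE;   /* store: 512 pages */
        size_t bytes = (size_t)desc.size * NVME_CTRL_PAGE_SIZE;  /* free path */

        printf("descriptor covers %u pages = %zu bytes\n",
               (unsigned)desc.size, bytes);
        return 0;
    }
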
2635 NVME_CTRL_PAGE_SIZE, in nvme_setup_prp_pools()
2636 NVME_CTRL_PAGE_SIZE, 0); in nvme_setup_prp_pools()
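
nvme_setup_prp_pools() creates the PRP-list DMA pool with both the element size and the alignment set to NVME_CTRL_PAGE_SIZE, so an allocated PRP list page can never straddle a controller page boundary. A kernel-context sketch of that call (not runnable standalone; NVME_CTRL_PAGE_SIZE is normally supplied by the driver's nvme.h and is assumed to be 4 KiB here):

    #include <linux/dmapool.h>

    #ifndef NVME_CTRL_PAGE_SIZE
    #define NVME_CTRL_PAGE_SIZE 4096   /* assumed; the driver defines this */
    #endif

    /* Element size == alignment == controller page size, boundary 0:
     * every allocation is one naturally aligned controller page.      */
    static struct dma_pool *example_prp_page_pool(struct device *dev)
    {
        return dma_pool_create("prp list page", dev,
                               NVME_CTRL_PAGE_SIZE, NVME_CTRL_PAGE_SIZE, 0);
    }
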
2968 dma_set_min_align_mask(&pdev->dev, NVME_CTRL_PAGE_SIZE - 1); in nvme_pci_alloc_dev()
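
nvme_pci_alloc_dev() sets the device's minimum DMA alignment mask to NVME_CTRL_PAGE_SIZE - 1 so that bounce-buffered (swiotlb) mappings preserve the data's offset within a controller page; the simple PRP path above computes offsets from bv_offset and depends on that guarantee. A kernel-context sketch of the call (not runnable standalone; NVME_CTRL_PAGE_SIZE assumed 4 KiB):

    #include <linux/dma-mapping.h>

    #ifndef NVME_CTRL_PAGE_SIZE
    #define NVME_CTRL_PAGE_SIZE 4096   /* assumed; the driver defines this */
    #endif

    /* Mappings for this device keep the low 12 bits of the original address,
     * so the offset within a controller page survives swiotlb bouncing.     */
    static void example_set_nvme_dma_alignment(struct device *dev)
    {
        dma_set_min_align_mask(dev, NVME_CTRL_PAGE_SIZE - 1);
    }
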