
Searched refs:pg_idx (Results 1 – 10 of 10) sorted by relevance

/linux-5.19.10/drivers/pci/controller/mobiveil/
pcie-mobiveil.c
28 static void mobiveil_pcie_sel_page(struct mobiveil_pcie *pcie, u8 pg_idx) in mobiveil_pcie_sel_page() argument
34 val |= (pg_idx & PAGE_SEL_MASK) << PAGE_SEL_SHIFT; in mobiveil_pcie_sel_page()
/linux-5.19.10/drivers/hwtracing/coresight/
coresight-catu.c
119 unsigned int table_nr, pg_idx, pg_offset; in catu_get_table() local
132 pg_idx = table_nr / CATU_PAGES_PER_SYSPAGE; in catu_get_table()
135 *daddrp = table_pages->daddrs[pg_idx] + pg_offset; in catu_get_table()
136 ptr = page_address(table_pages->pages[pg_idx]); in catu_get_table()
coresight-tmc-etr.c
392 int pg_idx = offset >> PAGE_SHIFT; in tmc_sg_table_get_data() local
405 *bufpp = page_address(data_pages->pages[pg_idx]) + pg_offset; in tmc_sg_table_get_data()
1462 long pg_idx, pg_offset; in tmc_etr_sync_perf_buffer() local
1467 pg_idx = head >> PAGE_SHIFT; in tmc_etr_sync_perf_buffer()
1488 memcpy(dst_pages[pg_idx] + pg_offset, src_buf, bytes); in tmc_etr_sync_perf_buffer()
1496 if (++pg_idx == etr_perf->nr_pages) in tmc_etr_sync_perf_buffer()
1497 pg_idx = 0; in tmc_etr_sync_perf_buffer()
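
The coresight-tmc-etr.c hits share one pattern: a linear byte offset is split into a page index with >> PAGE_SHIFT plus an in-page offset, and in tmc_etr_sync_perf_buffer() the index wraps around the ring of perf pages. A minimal userspace C sketch of that arithmetic (assumed 4 KiB page size and invented values, not the driver code itself):

#include <stdio.h>

#define PAGE_SHIFT 12                     /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
	unsigned long head = 3 * PAGE_SIZE + 100;  /* arbitrary buffer offset */
	long nr_pages = 4;                         /* pages in the ring */
	long pg_idx = head >> PAGE_SHIFT;          /* which page */
	long pg_offset = head & (PAGE_SIZE - 1);   /* offset inside that page */

	printf("pg_idx=%ld pg_offset=%ld\n", pg_idx, pg_offset);

	if (++pg_idx == nr_pages)                  /* wrap like the ETR perf ring */
		pg_idx = 0;
	printf("next pg_idx=%ld\n", pg_idx);
	return 0;
}
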
/linux-5.19.10/drivers/infiniband/hw/bnxt_re/
qplib_res.h
315 u32 pg_num, pg_idx; in bnxt_qplib_get_qe() local
318 pg_idx = (indx % hwq->qe_ppg); in bnxt_qplib_get_qe()
321 return (void *)(hwq->pbl_ptr[pg_num] + hwq->element_size * pg_idx); in bnxt_qplib_get_qe()
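
The qplib_res.h hit resolves a queue element through a page list: the element index is divided into a page number and a within-page slot, and the address comes from that page's base pointer. A hypothetical userspace sketch of the same lookup (element size, page size, and the pbl_ptr stand-in are assumptions, not the bnxt_re layout):

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	unsigned int element_size = 64;             /* bytes per queue element (assumed) */
	unsigned int qe_ppg = 4096 / element_size;  /* queue elements per page */
	unsigned int indx = 100;                    /* element we want */
	char *pbl_ptr[2] = { malloc(4096), malloc(4096) };  /* stand-in page list */
	unsigned int pg_num = indx / qe_ppg;        /* which page holds the element */
	unsigned int pg_idx = indx % qe_ppg;        /* slot within that page */
	void *qe = pbl_ptr[pg_num] + element_size * pg_idx;

	printf("element %u: page %u, slot %u, addr %p\n", indx, pg_num, pg_idx, qe);
	free(pbl_ptr[0]);
	free(pbl_ptr[1]);
	return 0;
}
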
/linux-5.19.10/drivers/net/ethernet/chelsio/libcxgb/
libcxgb_ppm.h
113 unsigned char pg_idx:2; member
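
In libcxgb_ppm.h, pg_idx is a 2-bit struct member, so it can only hold the values 0-3, which matches the pg_idx < DDP_PGIDX_MAX bounds checks in the cxgb3i/cxgb4i results below. A tiny sketch with an invented struct (not the real cxgb tag layout):

#include <stdio.h>

struct tag_bits {                  /* invented name, for illustration only */
	unsigned char pg_idx:2;    /* 2 bits: page-size index 0..3 */
	unsigned char rsvd:6;      /* rest of the byte */
};

int main(void)
{
	struct tag_bits t = { .pg_idx = 3, .rsvd = 0 };

	printf("pg_idx=%u, struct size=%zu byte(s)\n", t.pg_idx, sizeof(t));
	return 0;
}
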
/linux-5.19.10/drivers/scsi/cxgbi/cxgb3i/
cxgb3i.c
1154 unsigned int tid, int pg_idx) in ddp_setup_conn_pgidx() argument
1159 u64 val = pg_idx < DDP_PGIDX_MAX ? pg_idx : 0; in ddp_setup_conn_pgidx()
1162 "csk 0x%p, tid %u, pg_idx %d.\n", csk, tid, pg_idx); in ddp_setup_conn_pgidx()
/linux-5.19.10/drivers/scsi/cxgbi/cxgb4i/
cxgb4i.c
2078 int pg_idx) in ddp_setup_conn_pgidx() argument
2083 if (!pg_idx || pg_idx >= DDP_PGIDX_MAX) in ddp_setup_conn_pgidx()
2097 req->val = cpu_to_be64(pg_idx << 8); in ddp_setup_conn_pgidx()
2101 "csk 0x%p, tid 0x%x, pg_idx %u.\n", csk, csk->tid, pg_idx); in ddp_setup_conn_pgidx()
/linux-5.19.10/drivers/infiniband/hw/irdma/
verbs.c
2265 u32 pg_idx; in irdma_check_mem_contiguous() local
2267 for (pg_idx = 0; pg_idx < npages; pg_idx++) { in irdma_check_mem_contiguous()
2268 if ((*arr + (pg_size * pg_idx)) != arr[pg_idx]) in irdma_check_mem_contiguous()
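
The irdma hit checks whether an array of page addresses describes one contiguous region: every entry must equal the first address plus pg_idx * pg_size. A self-contained sketch of that test with made-up addresses:

#include <stdbool.h>
#include <stdio.h>

static bool check_contiguous(const unsigned long *arr, unsigned int npages,
			     unsigned long pg_size)
{
	unsigned int pg_idx;

	for (pg_idx = 0; pg_idx < npages; pg_idx++)
		if (arr[0] + pg_size * pg_idx != arr[pg_idx])
			return false;
	return true;
}

int main(void)
{
	unsigned long pages[] = { 0x10000, 0x11000, 0x12000, 0x14000 };

	printf("%d\n", check_contiguous(pages, 4, 0x1000));  /* 0: gap before last page */
	printf("%d\n", check_contiguous(pages, 3, 0x1000));  /* 1: first three are contiguous */
	return 0;
}
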
/linux-5.19.10/drivers/target/iscsi/cxgbit/
cxgbit_cm.c
1545 int cxgbit_setup_conn_pgidx(struct cxgbit_sock *csk, u32 pg_idx) in cxgbit_setup_conn_pgidx() argument
1563 req->val = cpu_to_be64(pg_idx << 8); in cxgbit_setup_conn_pgidx()
/linux-5.19.10/net/core/
skbuff.c
3716 unsigned int pg_idx, pg_off, pg_sz; in skb_seq_read() local
3720 pg_idx = 0; in skb_seq_read()
3725 pg_idx = (pg_off + st->frag_off) >> PAGE_SHIFT; in skb_seq_read()
3734 st->frag_data = kmap_atomic(skb_frag_page(frag) + pg_idx); in skb_seq_read()