Searched refs:page_base (Results 1 – 25 of 31) sorted by relevance

/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/
amdgpu_gart.c  159 u64 page_base; in amdgpu_gart_unbind() local
173 page_base = adev->dummy_page_addr; in amdgpu_gart_unbind()
179 t, page_base, flags); in amdgpu_gart_unbind()
180 page_base += AMDGPU_GPU_PAGE_SIZE; in amdgpu_gart_unbind()
208 uint64_t page_base; in amdgpu_gart_map() local
218 page_base = dma_addr[i]; in amdgpu_gart_map()
220 amdgpu_gmc_set_pte_pde(adev, dst, t, page_base, flags); in amdgpu_gart_map()
221 page_base += AMDGPU_GPU_PAGE_SIZE; in amdgpu_gart_map()
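
The amdgpu_gart.c hits above show the GART idiom this search keeps turning up: one DMA-mapped system page backs several GPU pages, so page_base starts at the DMA address (or the dummy page on unbind) and is stepped by AMDGPU_GPU_PAGE_SIZE for each page-table entry written. A standalone sketch of that stepping, with the page sizes and the write_gart_entry() helper chosen purely for illustration (they are not the amdgpu API):

#include <stdint.h>
#include <stdio.h>

#define SYSTEM_PAGE_SIZE 16384u   /* illustrative CPU page size */
#define GPU_PAGE_SIZE     4096u   /* stand-in for AMDGPU_GPU_PAGE_SIZE */

/* hypothetical stand-in for amdgpu_gmc_set_pte_pde() */
static void write_gart_entry(unsigned int slot, uint64_t page_base)
{
        printf("GART[%u] = 0x%llx\n", slot, (unsigned long long)page_base);
}

int main(void)
{
        uint64_t dma_addr = 0x100000;   /* one DMA-mapped system page */
        uint64_t page_base = dma_addr;
        unsigned int t;

        /* one GART entry per GPU page backing the system page */
        for (t = 0; t < SYSTEM_PAGE_SIZE / GPU_PAGE_SIZE; t++) {
                write_gart_entry(t, page_base);
                page_base += GPU_PAGE_SIZE;
        }
        return 0;
}
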
/linux-6.1.9/net/sunrpc/xprtrdma/
rpc_rdma.c  138 offset = offset_in_page(xdr->page_base); in rpcrdma_args_inline()
190 ppages = buf->pages + (buf->page_base >> PAGE_SHIFT); in rpcrdma_alloc_sparse_pages()
232 unsigned long page_base; in rpcrdma_convert_iovs() local
241 ppages = xdrbuf->pages + (xdrbuf->page_base >> PAGE_SHIFT); in rpcrdma_convert_iovs()
242 page_base = offset_in_page(xdrbuf->page_base); in rpcrdma_convert_iovs()
245 seg->mr_offset = page_base; in rpcrdma_convert_iovs()
246 seg->mr_len = min_t(u32, PAGE_SIZE - page_base, len); in rpcrdma_convert_iovs()
251 page_base = 0; in rpcrdma_convert_iovs()
601 unsigned int page_base, len, remaining; in rpcrdma_prepare_pagelist() local
605 ppages = xdr->pages + (xdr->page_base >> PAGE_SHIFT); in rpcrdma_prepare_pagelist()
[all …]
svc_rdma_sendto.c  587 ppages = xdr->pages + (xdr->page_base >> PAGE_SHIFT); in svc_rdma_xb_dma_map()
588 pageoff = offset_in_page(xdr->page_base); in svc_rdma_xb_dma_map()
632 offset = offset_in_page(xdr->page_base); in svc_rdma_xb_count_sges()
701 ppages = xdr->pages + (xdr->page_base >> PAGE_SHIFT); in svc_rdma_xb_linearize()
702 pageoff = offset_in_page(xdr->page_base); in svc_rdma_xb_linearize()
svc_rdma_recvfrom.c  397 arg->page_base = 0; in svc_rdma_build_arg_xdr()
svc_rdma_rw.c  439 page_off = info->wi_next_off + xdr->page_base; in svc_rdma_pagelist_to_sg()
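
Every xprtrdma hit above repeats one arithmetic pattern: xdr->page_base is a byte offset into the pages[] array, so the page to use is pages[page_base >> PAGE_SHIFT] and the starting byte within it is offset_in_page(page_base). A minimal userspace illustration of that split, with PAGE_SHIFT and offset_in_page() redefined locally under the assumption of 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12                     /* assumed: 4 KiB pages */
#define PAGE_SIZE  (1u << PAGE_SHIFT)
#define offset_in_page(p) ((unsigned int)(p) & (PAGE_SIZE - 1))

int main(void)
{
        unsigned int page_base = 9000;    /* byte offset into the page array */

        unsigned int page_index = page_base >> PAGE_SHIFT;    /* pages[2] */
        unsigned int page_off   = offset_in_page(page_base);  /* byte 808 */

        printf("pages[%u] + %u\n", page_index, page_off);
        return 0;
}
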
/linux-6.1.9/drivers/gpu/drm/r128/
ati_pcigart.c  125 u32 *pci_gart = NULL, page_base, gart_idx; in drm_ati_pcigart_init() local
187 page_base = (u32) entry->busaddr[i]; in drm_ati_pcigart_init()
195 val = page_base | 0xc; in drm_ati_pcigart_init()
198 val = (page_base >> 8) | 0xc; in drm_ati_pcigart_init()
202 val = page_base; in drm_ati_pcigart_init()
213 page_base += ATI_PCIGART_PAGE_SIZE; in drm_ati_pcigart_init()
/linux-6.1.9/arch/ia64/mm/
ioremap.c  41 unsigned long page_base; in ioremap() local
69 page_base = phys_addr & PAGE_MASK; in ioremap()
70 size = PAGE_ALIGN(phys_addr + size) - page_base; in ioremap()
71 if (efi_mem_attribute(page_base, size) & EFI_MEMORY_WB) { in ioremap()
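
The ia64 ioremap() hits round an arbitrary physical range out to page boundaries before querying EFI: the start is masked down with PAGE_MASK and the end is rounded up with PAGE_ALIGN, so the queried span covers every page the range touches. A standalone sketch of the same arithmetic, with the macros redefined locally and 4 KiB pages assumed:

#include <stdio.h>

#define PAGE_SIZE     4096ul                          /* assumed page size */
#define PAGE_MASK     (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & PAGE_MASK)

int main(void)
{
        unsigned long phys_addr = 0x10000f00;         /* unaligned start  */
        unsigned long size      = 0x300;              /* unaligned length */

        unsigned long page_base = phys_addr & PAGE_MASK;               /* 0x10000000 */
        unsigned long span = PAGE_ALIGN(phys_addr + size) - page_base; /* 0x2000     */

        printf("page_base=0x%lx span=0x%lx\n", page_base, span);
        return 0;
}
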
/linux-6.1.9/arch/x86/mm/
kmmio.c  235 unsigned long page_base = addr; in kmmio_handler() local
240 page_base &= page_level_mask(l); in kmmio_handler()
253 faultpage = get_kmmio_fault_page(page_base); in kmmio_handler()
265 if (page_base == ctx->addr) { in kmmio_handler()
293 ctx->probe = get_kmmio_probe(page_base); in kmmio_handler()
295 ctx->addr = page_base; in kmmio_handler()
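
In the x86 kmmio handler, the faulting address is masked down to the base of whichever page maps it, which on x86 can be a 4 KiB, 2 MiB, or 1 GiB page depending on the page-table level; that page_base then keys the lookups for the tracked fault page and the probe. A simplified sketch of the masking step, with the level-to-shift mapping hardcoded here as an assumption instead of using page_level_mask():

#include <stdio.h>

/* assumed shifts for x86 4K / 2M / 1G pages; the kernel derives the
 * mask from the real page-table level via page_level_mask() */
static const unsigned int level_shift[] = { 12, 21, 30 };

static unsigned long page_base_of(unsigned long addr, unsigned int level)
{
        unsigned long mask = ~((1ul << level_shift[level]) - 1);
        return addr & mask;
}

int main(void)
{
        unsigned long fault_addr = 0xc0123456ul;

        printf("4K base: 0x%lx\n", page_base_of(fault_addr, 0));  /* 0xc0123000 */
        printf("2M base: 0x%lx\n", page_base_of(fault_addr, 1));  /* 0xc0000000 */
        return 0;
}
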
/linux-6.1.9/net/sunrpc/
xdr.c  131 kaddr[buf->page_base + len] = '\0'; in xdr_terminate_string()
140 return (buf->page_base + buf->page_len + PAGE_SIZE - 1) >> PAGE_SHIFT; in xdr_buf_pagecount()
189 xdr->page_base = base; in xdr_inline_pages()
460 pgbase += buf->page_base; in xdr_buf_pages_zero()
493 npages = (pagelen + buf->page_base + PAGE_SIZE - 1) >> PAGE_SHIFT; in xdr_buf_pages_fill_sparse()
502 if (pagelen > buf->page_base) in xdr_buf_pages_fill_sparse()
503 buflen += pagelen - buf->page_base; in xdr_buf_pages_fill_sparse()
577 buf->page_base + base + pglen, talen); in xdr_buf_pages_copy_right()
578 _shift_data_right_pages(buf->pages, buf->page_base + to, in xdr_buf_pages_copy_right()
579 buf->page_base + base, pglen); in xdr_buf_pages_copy_right()
[all …]
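
xdr_buf_pagecount() above (line 140) uses the usual round-up-to-whole-pages count: the page data starts page_base bytes into the first page and runs for page_len bytes, so the buffer touches (page_base + page_len + PAGE_SIZE - 1) >> PAGE_SHIFT pages. A standalone illustration of the formula, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12                  /* assumed: 4 KiB pages */
#define PAGE_SIZE  (1u << PAGE_SHIFT)

int main(void)
{
        unsigned int page_base = 100;  /* data starts 100 bytes into page 0 */
        unsigned int page_len  = 8000; /* data ends at byte 8100: pages 0 and 1 */

        unsigned int npages =
                (page_base + page_len + PAGE_SIZE - 1) >> PAGE_SHIFT;

        printf("npages=%u\n", npages); /* prints 2 */
        return 0;
}
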
socklib.c  114 if (base || xdr->page_base) { in xdr_partial_copy_from_skb()
116 base += xdr->page_base; in xdr_partial_copy_from_skb()
225 xdr->page_len + xdr->page_base); in xprt_send_pagedata()
226 return xprt_sendmsg(sock, msg, base + xdr->page_base); in xprt_send_pagedata()
svcsock.c  496 rqstp->rq_arg.page_base = 0; in svc_udp_recvfrom()
1001 rqstp->rq_arg.page_base = 0; in svc_tcp_recvfrom()
1113 bvec = xdr->bvec + (xdr->page_base >> PAGE_SHIFT); in svc_tcp_sendmsg()
1114 offset = offset_in_page(xdr->page_base); in svc_tcp_sendmsg()
xprtsock.c  340 n = (buf->page_base + want + PAGE_SIZE - 1) >> PAGE_SHIFT; in xs_alloc_sparse_pages()
347 return i > buf->page_base ? i - buf->page_base : 0; in xs_alloc_sparse_pages()
437 want + buf->page_base, in xs_read_xdr_buf()
438 seek + buf->page_base); in xs_read_xdr_buf()
441 xs_flush_bvec(buf->bvec, ret, seek + buf->page_base); in xs_read_xdr_buf()
442 ret -= buf->page_base; in xs_read_xdr_buf()
svc_xprt.c  549 rqstp->rq_res.page_base = 0; in svc_xprt_release()
704 arg->page_base = 0; in svc_alloc_arg()
/linux-6.1.9/drivers/gpu/drm/radeon/
radeon_gart.c  293 uint64_t page_base, page_entry; in radeon_gart_bind() local
306 page_base = dma_addr[i]; in radeon_gart_bind()
308 page_entry = radeon_gart_get_page_entry(page_base, flags); in radeon_gart_bind()
313 page_base += RADEON_GPU_PAGE_SIZE; in radeon_gart_bind()
/linux-6.1.9/drivers/gpu/drm/qxl/
qxl_image.c  166 unsigned int page_base, page_offset, out_offset; in qxl_image_init_helper() local
174 page_base = out_offset & PAGE_MASK; in qxl_image_init_helper()
178 ptr = qxl_bo_kmap_atomic_page(qdev, chunk_bo, page_base); in qxl_image_init_helper()
/linux-6.1.9/drivers/gpu/drm/i915/
i915_gem.c  412 u32 page_base = node.start; in i915_gem_gtt_pread() local
421 page_base += offset & PAGE_MASK; in i915_gem_gtt_pread()
424 if (gtt_user_read(&ggtt->iomap, page_base, page_offset, in i915_gem_gtt_pread()
584 u32 page_base = node.start; in i915_gem_gtt_pwrite_fast() local
596 page_base += offset & PAGE_MASK; in i915_gem_gtt_pwrite_fast()
604 if (ggtt_write(&ggtt->iomap, page_base, page_offset, in i915_gem_gtt_pwrite_fast()
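
The i915 GTT read and write paths split the object offset the same way on every iteration: the page-aligned part (offset & PAGE_MASK) is added to the GTT node's start to form page_base, and the remainder (offset_in_page(offset)) becomes page_offset inside the mapped page. A minimal sketch of that split, with the macros redefined locally, 4 KiB pages assumed, and node_start an illustrative value rather than anything from the i915 API:

#include <stdio.h>

#define PAGE_SIZE  4096u                      /* assumed page size */
#define PAGE_MASK  (~(PAGE_SIZE - 1))
#define offset_in_page(o) ((o) & (PAGE_SIZE - 1))

int main(void)
{
        unsigned int node_start = 0x40000;    /* where the object is bound in the GTT */
        unsigned int offset     = 0x1234;     /* byte offset into the object */

        unsigned int page_base   = node_start + (offset & PAGE_MASK); /* 0x41000 */
        unsigned int page_offset = offset_in_page(offset);            /* 0x234   */

        printf("page_base=0x%x page_offset=0x%x\n", page_base, page_offset);
        return 0;
}
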
/linux-6.1.9/net/sunrpc/auth_gss/
gss_rpc_xdr.h  125 unsigned int page_base; /* Start of page data */ member
gss_krb5_wrap.c  81 unsigned int last = (buf->page_base + len - 1) in gss_krb5_remove_padding()
83 unsigned int offset = (buf->page_base + len - 1) in gss_krb5_remove_padding()
auth_gss.c  1792 first = snd_buf->page_base >> PAGE_SHIFT; in alloc_enc_pages()
1793 last = (snd_buf->page_base + snd_buf->page_len - 1) >> PAGE_SHIFT; in alloc_enc_pages()
1841 first = snd_buf->page_base >> PAGE_SHIFT; in gss_wrap_req_priv()
1844 snd_buf->page_base -= first << PAGE_SHIFT; in gss_wrap_req_priv()
gss_rpc_xdr.c  56 xdr_write_pages(xdr, in->pages, in->page_base, in->page_len); in gssx_enc_in_token()
gss_krb5_crypto.c  349 int i = (page_pos + outbuf->page_base) >> PAGE_SHIFT; in encryptor()
/linux-6.1.9/include/trace/events/
sunrpc.h  67 __field(unsigned int, page_base)
80 __entry->page_base = xdr->page_base;
89 __entry->page_len, __entry->page_base,
1558 __field(unsigned int, page_base)
1569 __entry->page_base = xdr->page_base;
1577 __entry->page_len, __entry->page_base,
/linux-6.1.9/fs/nfsd/
nfscache.c  334 idx = buf->page_base / PAGE_SIZE; in nfsd_cache_csum()
335 base = buf->page_base & ~PAGE_MASK; in nfsd_cache_csum()
nfsxdr.c  494 xdr_write_pages(xdr, resp->pages, rqstp->rq_res.page_base, in nfssvc_encode_readres()
/linux-6.1.9/include/linux/sunrpc/
xdr.h  62 unsigned int page_base, /* Start of page data */ member
