/linux-6.6.21/fs/nilfs2/ |
D | cpfile.c |
     71  void *kaddr,  in nilfs_cpfile_block_add_valid_checkpoints() argument
     74  struct nilfs_checkpoint *cp = kaddr + bh_offset(bh);  in nilfs_cpfile_block_add_valid_checkpoints()
     85  void *kaddr,  in nilfs_cpfile_block_sub_valid_checkpoints() argument
     88  struct nilfs_checkpoint *cp = kaddr + bh_offset(bh);  in nilfs_cpfile_block_sub_valid_checkpoints()
    100  void *kaddr)  in nilfs_cpfile_block_get_header() argument
    102  return kaddr + bh_offset(bh);  in nilfs_cpfile_block_get_header()
    108  void *kaddr)  in nilfs_cpfile_block_get_checkpoint() argument
    110  return kaddr + bh_offset(bh) + nilfs_cpfile_get_offset(cpfile, cno) *  in nilfs_cpfile_block_get_checkpoint()
    116  void *kaddr)  in nilfs_cpfile_block_init() argument
    118  struct nilfs_checkpoint *cp = kaddr + bh_offset(bh);  in nilfs_cpfile_block_init()
    [all …]
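Every cpfile.c hit above reduces to the same address arithmetic: the caller already has the block's page mapped, and the helper returns kaddr + bh_offset(bh), optionally plus an index times the on-disk record size. A minimal sketch of that lookup, with an invented demo_record standing in for struct nilfs_checkpoint:

#include <linux/buffer_head.h>
#include <linux/types.h>

/* Invented fixed-size on-disk record; the real one is struct nilfs_checkpoint. */
struct demo_record {
	__le64 dr_cno;
	__le64 dr_create;
};

/*
 * kaddr is the mapped page, bh the block inside that page: record 'idx'
 * lives at the block's offset within the page plus idx record sizes.
 */
static struct demo_record *demo_block_get_record(struct buffer_head *bh,
						 void *kaddr,
						 unsigned long idx)
{
	return kaddr + bh_offset(bh) + idx * sizeof(struct demo_record);
}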
|
D | sufile.c |
     75  struct buffer_head *bh, void *kaddr)  in nilfs_sufile_block_get_segment_usage() argument
     77  return kaddr + bh_offset(bh) +  in nilfs_sufile_block_get_segment_usage()
    108  void *kaddr;  in nilfs_sufile_mod_counter() local
    110  kaddr = kmap_atomic(header_bh->b_page);  in nilfs_sufile_mod_counter()
    111  header = kaddr + bh_offset(header_bh);  in nilfs_sufile_mod_counter()
    114  kunmap_atomic(kaddr);  in nilfs_sufile_mod_counter()
    309  void *kaddr;  in nilfs_sufile_alloc() local
    318  kaddr = kmap_atomic(header_bh->b_page);  in nilfs_sufile_alloc()
    319  header = kaddr + bh_offset(header_bh);  in nilfs_sufile_alloc()
    321  kunmap_atomic(kaddr);  in nilfs_sufile_alloc()
    [all …]
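sufile.c (and dat.c below) wrap that lookup in the classic kmap_atomic() bracket: map the buffer's page, locate the record at kaddr + bh_offset(bh), update it, unmap, and mark the buffer dirty. A minimal sketch of the bracket, assuming a hypothetical header record with a single little-endian counter (the struct and field names are illustrative, not the real nilfs2 definitions):

#include <linux/buffer_head.h>
#include <linux/highmem.h>

/* Illustrative on-disk header with one little-endian counter. */
struct demo_header {
	__le64 dh_ncleansegs;
};

static void demo_mod_counter(struct buffer_head *header_bh, u64 add)
{
	struct demo_header *header;
	void *kaddr;

	kaddr = kmap_atomic(header_bh->b_page);		/* short, non-sleeping mapping */
	header = kaddr + bh_offset(header_bh);		/* record sits at the bh's offset in the page */
	le64_add_cpu(&header->dh_ncleansegs, add);	/* update the LE field in place */
	kunmap_atomic(kaddr);				/* drop the mapping before anything that may sleep */

	mark_buffer_dirty(header_bh);			/* queue the block for writeback */
}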
|
D | dat.c |
     92  void *kaddr;  in nilfs_dat_commit_alloc() local
     94  kaddr = kmap_atomic(req->pr_entry_bh->b_page);  in nilfs_dat_commit_alloc()
     96  req->pr_entry_bh, kaddr);  in nilfs_dat_commit_alloc()
    100  kunmap_atomic(kaddr);  in nilfs_dat_commit_alloc()
    116  void *kaddr;  in nilfs_dat_commit_free() local
    118  kaddr = kmap_atomic(req->pr_entry_bh->b_page);  in nilfs_dat_commit_free()
    120  req->pr_entry_bh, kaddr);  in nilfs_dat_commit_free()
    124  kunmap_atomic(kaddr);  in nilfs_dat_commit_free()
    146  void *kaddr;  in nilfs_dat_commit_start() local
    148  kaddr = kmap_atomic(req->pr_entry_bh->b_page);  in nilfs_dat_commit_start()
    [all …]
|
D | dir.c |
    121  char *kaddr = page_address(page);  in nilfs_check_page() local
    135  p = (struct nilfs_dir_entry *)(kaddr + offs);  in nilfs_check_page()
    179  p = (struct nilfs_dir_entry *)(kaddr + offs);  in nilfs_check_page()
    276  char *kaddr, *limit;  in nilfs_readdir() local
    285  kaddr = page_address(page);  in nilfs_readdir()
    286  de = (struct nilfs_dir_entry *)(kaddr + offset);  in nilfs_readdir()
    287  limit = kaddr + nilfs_last_byte(inode, n) -  in nilfs_readdir()
    348  char *kaddr;  in nilfs_find_entry() local
    352  kaddr = page_address(page);  in nilfs_find_entry()
    353  de = (struct nilfs_dir_entry *)kaddr;  in nilfs_find_entry()
    [all …]
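The nilfs2 directory helpers start from page_address(page) and walk variable-length records up to a limit derived from the last valid byte of the page. A sketch of that walk under invented names; demo_dirent and demo_walk_page() are stand-ins, not the real nilfs_dir_entry layout:

#include <linux/mm.h>
#include <linux/printk.h>
#include <linux/types.h>

/* Invented variable-length directory record, not the real nilfs_dir_entry. */
struct demo_dirent {
	__le64	inode;
	__le16	rec_len;	/* bytes to the next record */
	__u8	name_len;
	char	name[];
};

static struct demo_dirent *demo_next_entry(struct demo_dirent *de)
{
	return (struct demo_dirent *)((char *)de + le16_to_cpu(de->rec_len));
}

/* Walk every record in one mapped directory page; last_byte bounds the valid data. */
static void demo_walk_page(struct page *page, unsigned int last_byte)
{
	char *kaddr = page_address(page);
	char *limit = kaddr + last_byte - sizeof(struct demo_dirent);
	struct demo_dirent *de;

	/* rec_len > 0 in the loop condition guards against looping forever on a bad page */
	for (de = (struct demo_dirent *)kaddr;
	     (char *)de <= limit && le16_to_cpu(de->rec_len);
	     de = demo_next_entry(de)) {
		if (de->inode)
			pr_debug("entry %.*s\n", de->name_len, de->name);
	}
}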
|
/linux-6.6.21/arch/arm64/kernel/ |
D | paravirt.c |
     38  struct pvclock_vcpu_stolen_time __rcu *kaddr;  member
     55  struct pvclock_vcpu_stolen_time *kaddr = NULL;  in para_steal_clock() local
     67  kaddr = rcu_dereference(reg->kaddr);  in para_steal_clock()
     68  if (!kaddr) {  in para_steal_clock()
     73  ret = le64_to_cpu(READ_ONCE(kaddr->stolen_time));  in para_steal_clock()
     80  struct pvclock_vcpu_stolen_time *kaddr = NULL;  in stolen_time_cpu_down_prepare() local
     84  if (!reg->kaddr)  in stolen_time_cpu_down_prepare()
     87  kaddr = rcu_replace_pointer(reg->kaddr, NULL, true);  in stolen_time_cpu_down_prepare()
     89  memunmap(kaddr);  in stolen_time_cpu_down_prepare()
     96  struct pvclock_vcpu_stolen_time *kaddr = NULL;  in stolen_time_cpu_online() local
    [all …]
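The arm64 paravirt hits show an RCU-protected kernel mapping: readers rcu_dereference() the kaddr pointer under rcu_read_lock() and read a little-endian field through it, while the CPU-down path swaps the pointer to NULL with rcu_replace_pointer(), waits out readers, and only then memunmap()s. A condensed sketch of both sides; struct demo_reg and the function names are invented:

#include <linux/io.h>
#include <linux/rcupdate.h>
#include <linux/types.h>

struct demo_stolen_time {
	__le64 stolen_time;
};

struct demo_reg {
	struct demo_stolen_time __rcu *kaddr;	/* memremap()ed shared page */
};

/* Read side: safe against a concurrent unmap on the CPU-down path. */
static u64 demo_read_stolen(struct demo_reg *reg)
{
	struct demo_stolen_time *kaddr;
	u64 ret = 0;

	rcu_read_lock();
	kaddr = rcu_dereference(reg->kaddr);
	if (kaddr)
		ret = le64_to_cpu(READ_ONCE(kaddr->stolen_time));
	rcu_read_unlock();
	return ret;
}

/* Teardown: publish NULL, wait for existing readers, then drop the mapping. */
static void demo_teardown(struct demo_reg *reg)
{
	struct demo_stolen_time *kaddr;

	kaddr = rcu_replace_pointer(reg->kaddr, NULL, true);
	if (!kaddr)
		return;
	synchronize_rcu();
	memunmap(kaddr);
}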
|
/linux-6.6.21/fs/btrfs/ |
D | accessors.c |
     30  token->kaddr = page_address(eb->pages[0]);  in btrfs_init_map_token()
     71  ASSERT(token->kaddr); \
     75  return get_unaligned_le##bits(token->kaddr + oip); \
     77  token->kaddr = page_address(token->eb->pages[idx]); \
     80  return get_unaligned_le##bits(token->kaddr + oip); \
     82  memcpy(lebytes, token->kaddr + oip, part); \
     83  token->kaddr = page_address(token->eb->pages[idx + 1]); \
     85  memcpy(lebytes + part, token->kaddr, size - part); \
     94  char *kaddr = page_address(eb->pages[idx]); \
    101  return get_unaligned_le##bits(kaddr + oip); \
    [all …]
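The accessors.c macro bodies read a little-endian value that may straddle two pages of an extent buffer: if it fits inside the current page it is read with get_unaligned_le##bits(), otherwise the two halves are memcpy'd into a small stack buffer and decoded from there. A non-macro sketch of the 32-bit case over a plain page array (a stand-in for eb->pages[]):

#include <linux/mm.h>
#include <linux/string.h>
#include <asm/unaligned.h>

/*
 * Read a little-endian u32 at byte offset 'off' from an area backed by an
 * array of pages (a stand-in for btrfs's eb->pages[]).
 */
static u32 demo_get_le32(struct page **pages, unsigned long off)
{
	const unsigned long idx = off >> PAGE_SHIFT;
	const unsigned long oip = offset_in_page(off);
	const int part = PAGE_SIZE - oip;
	u8 lebytes[sizeof(u32)];
	char *kaddr = page_address(pages[idx]);

	/* Fast path: the whole value sits inside one page. */
	if (oip + sizeof(u32) <= PAGE_SIZE)
		return get_unaligned_le32(kaddr + oip);

	/* Slow path: stitch the two halves together across the page boundary. */
	memcpy(lebytes, kaddr + oip, part);
	kaddr = page_address(pages[idx + 1]);
	memcpy(lebytes + part, kaddr, sizeof(u32) - part);
	return get_unaligned_le32(lebytes);
}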
|
D | lzo.c |
    141  char *kaddr;  in copy_compressed_data_to_page() local
    161  kaddr = kmap_local_page(cur_page);  in copy_compressed_data_to_page()
    162  write_compress_length(kaddr + offset_in_page(*cur_out),  in copy_compressed_data_to_page()
    173  kunmap_local(kaddr);  in copy_compressed_data_to_page()
    186  kaddr = kmap_local_page(cur_page);  in copy_compressed_data_to_page()
    188  memcpy(kaddr + offset_in_page(*cur_out),  in copy_compressed_data_to_page()
    203  memset(kaddr + offset_in_page(*cur_out), 0,  in copy_compressed_data_to_page()
    208  kunmap_local(kaddr);  in copy_compressed_data_to_page()
    335  char *kaddr;  in lzo_decompress_bio() local
    344  kaddr = kmap_local_page(cb->compressed_pages[0]);  in lzo_decompress_bio()
    [all …]
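lzo.c uses the newer kmap_local_page()/kunmap_local() pair: map the current output page, copy only what fits before the page boundary, unmap, and advance. A reduced sketch of copying a buffer into a page array that way; the page array and the helper name are made up for illustration:

#include <linux/highmem.h>
#include <linux/minmax.h>
#include <linux/string.h>

/* Copy 'len' bytes from 'src' into 'pages', starting at byte offset '*cur_out'. */
static void demo_copy_to_pages(struct page **pages, u32 *cur_out,
			       const char *src, u32 len)
{
	u32 copied = 0;

	while (copied < len) {
		struct page *cur_page = pages[*cur_out >> PAGE_SHIFT];
		u32 copy_len = min_t(u32, len - copied,
				     PAGE_SIZE - offset_in_page(*cur_out));
		char *kaddr;

		kaddr = kmap_local_page(cur_page);
		memcpy(kaddr + offset_in_page(*cur_out), src + copied, copy_len);
		kunmap_local(kaddr);

		copied += copy_len;
		*cur_out += copy_len;
	}
}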
|
/linux-6.6.21/fs/sysv/ |
D | dir.c |
     88  char *kaddr, *limit;  in sysv_readdir() local
     92  kaddr = dir_get_page(inode, n, &page);  in sysv_readdir()
     93  if (IS_ERR(kaddr))  in sysv_readdir()
     95  de = (struct sysv_dir_entry *)(kaddr+offset);  in sysv_readdir()
     96  limit = kaddr + PAGE_SIZE - SYSV_DIRSIZE;  in sysv_readdir()
    106  unmap_and_put_page(page, kaddr);  in sysv_readdir()
    110  unmap_and_put_page(page, kaddr);  in sysv_readdir()
    157  char *kaddr = dir_get_page(dir, n, &page);  in sysv_find_entry() local
    159  if (!IS_ERR(kaddr)) {  in sysv_find_entry()
    160  de = (struct sysv_dir_entry *)kaddr;  in sysv_find_entry()
    [all …]
|
/linux-6.6.21/fs/ext2/ |
D | dir.c |
     99  static bool ext2_check_page(struct page *page, int quiet, char *kaddr)  in ext2_check_page() argument
    118  p = (ext2_dirent *)(kaddr + offs);  in ext2_check_page()
    170  p = (ext2_dirent *)(kaddr + offs);  in ext2_check_page()
    278  char *kaddr = ext2_get_page(inode, n, 0, &page);  in ext2_readdir() local
    281  if (IS_ERR(kaddr)) {  in ext2_readdir()
    286  return PTR_ERR(kaddr);  in ext2_readdir()
    290  offset = ext2_validate_entry(kaddr, offset, chunk_mask);  in ext2_readdir()
    296  de = (ext2_dirent *)(kaddr+offset);  in ext2_readdir()
    297  limit = kaddr + ext2_last_byte(inode, n) - EXT2_DIR_REC_LEN(1);  in ext2_readdir()
    320  ext2_put_page(page, kaddr);  in ext2_readdir()
    [all …]
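ext2_check_page() validates a freshly read directory page once, so later code can trust kaddr-relative offsets without per-access checks. A compressed sketch of the per-record checks, using an invented demo_dirent and record-length macro rather than the real ext2_dirent definitions:

#include <linux/kernel.h>
#include <linux/types.h>

#define DEMO_DIR_PAD			4
#define DEMO_DIR_REC_LEN(name_len)	ALIGN(8 + (name_len), DEMO_DIR_PAD)

/* Invented record layout; the real one is ext2_dirent (fs/ext2/ext2.h). */
struct demo_dirent {
	__le32	inode;
	__le16	rec_len;
	__u8	name_len;
	__u8	file_type;
	char	name[];
};

/* Reject the page unless every record up to last_byte is well formed. */
static bool demo_check_page(char *kaddr, unsigned int last_byte)
{
	unsigned int offs = 0;

	if (last_byte < DEMO_DIR_REC_LEN(1))
		return last_byte == 0;

	while (offs <= last_byte - DEMO_DIR_REC_LEN(1)) {
		struct demo_dirent *p = (struct demo_dirent *)(kaddr + offs);
		unsigned int rec_len = le16_to_cpu(p->rec_len);

		if (rec_len < DEMO_DIR_REC_LEN(1))		/* too small to be a record */
			return false;
		if (rec_len & (DEMO_DIR_PAD - 1))		/* misaligned length */
			return false;
		if (rec_len < DEMO_DIR_REC_LEN(p->name_len))	/* name would overrun the record */
			return false;
		if (offs + rec_len > last_byte)			/* record runs past the valid area */
			return false;
		offs += rec_len;
	}
	return offs == last_byte;	/* records must tile the page exactly */
}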
|
/linux-6.6.21/arch/m68k/sun3/ |
D | dvma.c |
     23  static unsigned long dvma_page(unsigned long kaddr, unsigned long vaddr)  in dvma_page() argument
     29  j = *(volatile unsigned long *)kaddr;  in dvma_page()
     30  *(volatile unsigned long *)kaddr = j;  in dvma_page()
     32  ptep = pfn_pte(virt_to_pfn((void *)kaddr), PAGE_KERNEL);  in dvma_page()
     40  return (vaddr + (kaddr & ~PAGE_MASK));  in dvma_page()
     44  int dvma_map_iommu(unsigned long kaddr, unsigned long baddr,  in dvma_map_iommu() argument
     56  dvma_page(kaddr, vaddr);  in dvma_map_iommu()
     57  kaddr += PAGE_SIZE;  in dvma_map_iommu()
|
D | sun3dvma.c |
    283  unsigned long dvma_map_align(unsigned long kaddr, int len, int align)  in dvma_map_align() argument
    292  if(!kaddr || !len) {  in dvma_map_align()
    298  pr_debug("dvma_map request %08x bytes from %08lx\n", len, kaddr);  in dvma_map_align()
    299  off = kaddr & ~DVMA_PAGE_MASK;  in dvma_map_align()
    300  kaddr &= PAGE_MASK;  in dvma_map_align()
    312  if(!dvma_map_iommu(kaddr, baddr, len))  in dvma_map_align()
    315  pr_crit("dvma_map failed kaddr %lx baddr %lx len %x\n", kaddr, baddr,  in dvma_map_align()
    340  unsigned long kaddr;  in dvma_malloc_align() local
    350  if((kaddr = __get_free_pages(GFP_ATOMIC, get_order(len))) == 0)  in dvma_malloc_align()
    353  if((baddr = (unsigned long)dvma_map_align(kaddr, len, align)) == 0) {  in dvma_malloc_align()
    [all …]
|
/linux-6.6.21/drivers/acpi/ |
D | nvs.c |
     78  void *kaddr;  member
    138  if (entry->kaddr) {  in suspend_nvs_free()
    140  iounmap(entry->kaddr);  in suspend_nvs_free()
    143  acpi_os_unmap_iomem(entry->kaddr,  in suspend_nvs_free()
    146  entry->kaddr = NULL;  in suspend_nvs_free()
    182  entry->kaddr = acpi_os_get_iomem(phys, size);  in suspend_nvs_save()
    183  if (!entry->kaddr) {  in suspend_nvs_save()
    184  entry->kaddr = acpi_os_ioremap(phys, size);  in suspend_nvs_save()
    185  entry->unmap = !!entry->kaddr;  in suspend_nvs_save()
    187  if (!entry->kaddr) {  in suspend_nvs_save()
    [all …]
|
/linux-6.6.21/arch/loongarch/include/asm/ |
D | page.h |
     86  #define virt_to_pfn(kaddr) PFN_DOWN(PHYSADDR(kaddr))  argument
     88  #define virt_to_page(kaddr) \  argument
     90  (likely((unsigned long)kaddr < vm_map_base)) ? \
     91  dmw_virt_to_page((unsigned long)kaddr) : tlb_virt_to_page((unsigned long)kaddr);\
     94  extern int __virt_addr_valid(volatile void *kaddr);
     95  #define virt_addr_valid(kaddr) __virt_addr_valid((volatile void *)(kaddr))  argument
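LoongArch's virt_to_page() has to pick between the DMW and TLB paths, but callers use these macros the same way on every architecture: a linear-map kernel virtual address converts to a pfn or a struct page, with virt_addr_valid() as the guard. A small architecture-neutral usage sketch:

#include <linux/mm.h>
#include <linux/printk.h>
#include <linux/slab.h>

static void demo_virt_conversions(void)
{
	void *buf = kmalloc(64, GFP_KERNEL);	/* linear-map ("kaddr") memory */
	struct page *page;
	unsigned long pfn;

	if (!buf)
		return;

	if (virt_addr_valid(buf)) {		/* only linear-map addresses qualify */
		pfn  = virt_to_pfn(buf);	/* kernel virtual address -> page frame number */
		page = virt_to_page(buf);	/* kernel virtual address -> struct page */
		pr_info("buf: pfn %lu, page %p\n", pfn, page);
	}

	kfree(buf);
}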
|
/linux-6.6.21/fs/minix/ |
D | dir.c |
    106  char *p, *kaddr, *limit;  in minix_readdir() local
    111  kaddr = (char *)page_address(page);  in minix_readdir()
    112  p = kaddr+offset;  in minix_readdir()
    113  limit = kaddr + minix_last_byte(inode, n) - chunk_size;  in minix_readdir()
    174  char *kaddr, *limit;  in minix_find_entry() local
    180  kaddr = (char*)page_address(page);  in minix_find_entry()
    181  limit = kaddr + minix_last_byte(dir, n) - sbi->s_dirsize;  in minix_find_entry()
    182  for (p = kaddr; p <= limit; p = minix_next_entry(p, sbi)) {  in minix_find_entry()
    216  char *kaddr, *p;  in minix_add_link() local
    237  kaddr = (char*)page_address(page);  in minix_add_link()
    [all …]
|
/linux-6.6.21/fs/ufs/ |
D | dir.c |
    120  char *kaddr = page_address(page);  in ufs_check_page() local
    135  p = (struct ufs_dir_entry *)(kaddr + offs);  in ufs_check_page()
    185  p = (struct ufs_dir_entry *)(kaddr + offs);  in ufs_check_page()
    283  char *kaddr;  in ufs_find_entry() local
    286  kaddr = page_address(page);  in ufs_find_entry()
    287  de = (struct ufs_dir_entry *) kaddr;  in ufs_find_entry()
    288  kaddr += ufs_last_byte(dir, n) - reclen;  in ufs_find_entry()
    289  while ((char *) de <= kaddr) {  in ufs_find_entry()
    324  char *kaddr;  in ufs_add_link() local
    343  kaddr = page_address(page);  in ufs_add_link()
    [all …]
|
/linux-6.6.21/fs/erofs/ |
D | xattr.c |
     15  void *kaddr;  member
     84  it.kaddr = erofs_bread(&it.buf, erofs_blknr(sb, it.pos), EROFS_KMAP);  in erofs_init_inode_xattrs()
     85  if (IS_ERR(it.kaddr)) {  in erofs_init_inode_xattrs()
     86  ret = PTR_ERR(it.kaddr);  in erofs_init_inode_xattrs()
     90  ih = it.kaddr + erofs_blkoff(sb, it.pos);  in erofs_init_inode_xattrs()
    105  it.kaddr = erofs_bread(&it.buf, erofs_blknr(sb, it.pos),  in erofs_init_inode_xattrs()
    107  if (IS_ERR(it.kaddr)) {  in erofs_init_inode_xattrs()
    110  ret = PTR_ERR(it.kaddr);  in erofs_init_inode_xattrs()
    114  (it.kaddr + erofs_blkoff(sb, it.pos)));  in erofs_init_inode_xattrs()
    188  it->kaddr = erofs_bread(&it->buf, erofs_blknr(sb, it->pos),  in erofs_xattr_copy_to_buffer()
    [all …]
|
D | inode.c |
     20  void *kaddr;  in erofs_read_inode() local
     29  kaddr = erofs_read_metabuf(buf, sb, blkaddr, EROFS_KMAP);  in erofs_read_inode()
     30  if (IS_ERR(kaddr)) {  in erofs_read_inode()
     32  vi->nid, PTR_ERR(kaddr));  in erofs_read_inode()
     33  return kaddr;  in erofs_read_inode()
     36  dic = kaddr + *ofs;  in erofs_read_inode()
     70  kaddr = erofs_read_metabuf(buf, sb, blkaddr + 1,  in erofs_read_inode()
     72  if (IS_ERR(kaddr)) {  in erofs_read_inode()
     74  vi->nid, PTR_ERR(kaddr));  in erofs_read_inode()
     76  return kaddr;  in erofs_read_inode()
    [all …]
|
/linux-6.6.21/arch/m68k/sun3x/ |
D | dvma.c |
     78  inline int dvma_map_cpu(unsigned long kaddr,  in dvma_map_cpu() argument
     87  kaddr &= PAGE_MASK;  in dvma_map_cpu()
     92  pr_debug("dvma: mapping kern %08lx to virt %08lx\n", kaddr, vaddr);  in dvma_map_cpu()
    127  __pa(kaddr), vaddr);  in dvma_map_cpu()
    128  set_pte(pte, pfn_pte(virt_to_pfn((void *)kaddr),  in dvma_map_cpu()
    131  kaddr += PAGE_SIZE;  in dvma_map_cpu()
    146  inline int dvma_map_iommu(unsigned long kaddr, unsigned long baddr,  in dvma_map_iommu() argument
    163  dvma_entry_set(index, __pa(kaddr));  in dvma_map_iommu()
    168  kaddr += DVMA_PAGE_SIZE;  in dvma_map_iommu()
|
/linux-6.6.21/arch/arm/include/asm/ |
D | tlbflush.h |
    472  static inline void __local_flush_tlb_kernel_page(unsigned long kaddr)  in __local_flush_tlb_kernel_page() argument
    477  tlb_op(TLB_V4_U_PAGE, "c8, c7, 1", kaddr);  in __local_flush_tlb_kernel_page()
    478  tlb_op(TLB_V4_D_PAGE, "c8, c6, 1", kaddr);  in __local_flush_tlb_kernel_page()
    479  tlb_op(TLB_V4_I_PAGE, "c8, c5, 1", kaddr);  in __local_flush_tlb_kernel_page()
    483  tlb_op(TLB_V6_U_PAGE, "c8, c7, 1", kaddr);  in __local_flush_tlb_kernel_page()
    484  tlb_op(TLB_V6_D_PAGE, "c8, c6, 1", kaddr);  in __local_flush_tlb_kernel_page()
    485  tlb_op(TLB_V6_I_PAGE, "c8, c5, 1", kaddr);  in __local_flush_tlb_kernel_page()
    488  static inline void local_flush_tlb_kernel_page(unsigned long kaddr)  in local_flush_tlb_kernel_page() argument
    492  kaddr &= PAGE_MASK;  in local_flush_tlb_kernel_page()
    497  __local_flush_tlb_kernel_page(kaddr);  in local_flush_tlb_kernel_page()
    [all …]
|
/linux-6.6.21/fs/xfs/scrub/ |
D | xfile.c |
    149  void *p, *kaddr;  in xfile_pread() local
    175  kaddr = kmap_local_page(page);  in xfile_pread()
    176  p = kaddr + offset_in_page(pos);  in xfile_pread()
    178  kunmap_local(kaddr);  in xfile_pread()
    228  void *p, *kaddr;  in xfile_pwrite() local
    250  kaddr = kmap_local_page(page);  in xfile_pwrite()
    252  memset(kaddr, 0, PAGE_SIZE);  in xfile_pwrite()
    255  p = kaddr + offset_in_page(pos);  in xfile_pwrite()
    257  kunmap_local(kaddr);  in xfile_pwrite()
    363  void *kaddr;  in xfile_get_page() local
    [all …]
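xfile_pread()/xfile_pwrite() move an arbitrary byte range one page at a time: kmap_local_page() the page, copy just the part that falls inside it (from offset_in_page(pos) up to the page end), unmap, advance; new pages are zero-filled before a partial write. A simplified write-side sketch over a caller-supplied page array (the shmem page lookup done by the real code is omitted):

#include <linux/highmem.h>
#include <linux/minmax.h>
#include <linux/pagemap.h>
#include <linux/string.h>

/* Write 'count' bytes from 'buf' into 'pages' at byte offset 'pos'. */
static void demo_pwrite(struct page **pages, const char *buf, size_t count,
			loff_t pos)
{
	while (count > 0) {
		struct page *page = pages[pos >> PAGE_SHIFT];
		unsigned int len = min_t(unsigned int, count,
					 PAGE_SIZE - offset_in_page(pos));
		void *kaddr;

		kaddr = kmap_local_page(page);
		if (!PageUptodate(page)) {
			memset(kaddr, 0, PAGE_SIZE);	/* don't expose stale data around a partial write */
			SetPageUptodate(page);
		}
		memcpy(kaddr + offset_in_page(pos), buf, len);
		kunmap_local(kaddr);

		buf += len;
		pos += len;
		count -= len;
	}
}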
|
/linux-6.6.21/fs/ntfs/ |
D | bitmap.c |
     40  u8 *kaddr;  in __ntfs_bitmap_set_bits_in_run() local
     68  kaddr = page_address(page);  in __ntfs_bitmap_set_bits_in_run()
     78  u8 *byte = kaddr + pos;  in __ntfs_bitmap_set_bits_in_run()
     98  memset(kaddr + pos, value ? 0xff : 0, len);  in __ntfs_bitmap_set_bits_in_run()
    116  kaddr = page_address(page);  in __ntfs_bitmap_set_bits_in_run()
    122  memset(kaddr, value ? 0xff : 0, len);  in __ntfs_bitmap_set_bits_in_run()
    136  byte = kaddr + len;  in __ntfs_bitmap_set_bits_in_run()
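__ntfs_bitmap_set_bits_in_run() sets or clears a run of bits that can start and end mid-byte: the leading partial byte is edited bit by bit, whole bytes in the middle go through memset(kaddr + pos, 0xff or 0x00, len), and the trailing partial byte is patched last. A self-contained sketch of the same byte-level logic on a plain buffer:

#include <linux/string.h>
#include <linux/types.h>

/* Set (value != 0) or clear a run of 'count' bits starting at 'start_bit'. */
static void demo_set_bits_in_run(u8 *kaddr, unsigned long start_bit,
				 unsigned long count, int value)
{
	unsigned long pos = start_bit >> 3;
	unsigned int head = start_bit & 7;

	/* Leading partial byte, if the run does not start on a byte boundary. */
	if (head) {
		u8 *byte = kaddr + pos;

		while (head < 8 && count) {
			if (value)
				*byte |= 1 << head;
			else
				*byte &= ~(1 << head);
			head++;
			count--;
		}
		pos++;
	}

	/* Whole bytes in the middle of the run. */
	if (count >= 8) {
		memset(kaddr + pos, value ? 0xff : 0, count >> 3);
		pos += count >> 3;
		count &= 7;
	}

	/* Trailing partial byte. */
	if (count) {
		u8 *byte = kaddr + pos;
		unsigned int bit;

		for (bit = 0; bit < count; bit++) {
			if (value)
				*byte |= 1 << bit;
			else
				*byte &= ~(1 << bit);
		}
	}
}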
|
/linux-6.6.21/arch/hexagon/include/asm/ |
D | page.h |
     93  #define virt_to_page(kaddr) pfn_to_page(PFN_DOWN(__pa(kaddr)))  argument
     98  #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)  argument
    128  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)  argument
|
/linux-6.6.21/arch/arc/include/asm/ |
D | page.h |
     94  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)  argument
    125  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))  argument
    126  #define virt_addr_valid(kaddr) pfn_valid(virt_to_pfn(kaddr))  argument
|
/linux-6.6.21/arch/m68k/include/asm/ |
D | page_no.h |
     22  static inline unsigned long virt_to_pfn(const void *kaddr)  in virt_to_pfn() argument
     24  return __pa(kaddr) >> PAGE_SHIFT;  in virt_to_pfn()
     35  #define virt_addr_valid(kaddr) (((unsigned long)(kaddr) >= PAGE_OFFSET) && \  argument
     36  ((unsigned long)(kaddr) < memory_end))
|
/linux-6.6.21/tools/testing/nvdimm/ |
D | pmem-dax.c |
     12  long nr_pages, enum dax_access_mode mode, void **kaddr,  in __pmem_direct_access() argument
     28  if (kaddr)  in __pmem_direct_access()
     29  *kaddr = pmem->virt_addr + offset;  in __pmem_direct_access()
     39  if (kaddr)  in __pmem_direct_access()
     40  *kaddr = pmem->virt_addr + offset;  in __pmem_direct_access()
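The nvdimm test copy of __pmem_direct_access() answers a DAX mapping request with plain offset arithmetic into the driver's pre-mapped range: *kaddr = virt_addr + offset, plus the matching pfn and a count of the pages addressable from there. A reduced sketch of that shape; struct demo_pmem and its fields are invented, and the dax_access_mode handling and error injection of the real harness are dropped:

#include <linux/minmax.h>
#include <linux/pfn.h>
#include <linux/pfn_t.h>
#include <linux/types.h>

/* Invented driver state; the real pmem device struct carries more fields. */
struct demo_pmem {
	void		*virt_addr;	/* kernel mapping of the whole device */
	phys_addr_t	phys_addr;
	size_t		size;
};

static long demo_direct_access(struct demo_pmem *pmem, pgoff_t pgoff,
			       long nr_pages, void **kaddr, pfn_t *pfn)
{
	resource_size_t offset = PFN_PHYS(pgoff);

	if (kaddr)
		*kaddr = pmem->virt_addr + offset;	/* kaddr is just base + page offset */
	if (pfn)
		*pfn = phys_to_pfn_t(pmem->phys_addr + offset, PFN_DEV | PFN_MAP);

	/* Report how many of the requested pages are addressable from this offset on. */
	return min_t(long, nr_pages, PHYS_PFN(pmem->size - offset));
}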
|