/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vf_error.c |
    41  mutex_lock(&adev->virt.vf_errors.lock);    in amdgpu_vf_error_put()
    42  index = adev->virt.vf_errors.write_count % AMDGPU_VF_ERROR_ENTRY_SIZE;    in amdgpu_vf_error_put()
    43  adev->virt.vf_errors.code [index] = error_code;    in amdgpu_vf_error_put()
    44  adev->virt.vf_errors.flags [index] = error_flags;    in amdgpu_vf_error_put()
    45  adev->virt.vf_errors.data [index] = error_data;    in amdgpu_vf_error_put()
    46  adev->virt.vf_errors.write_count ++;    in amdgpu_vf_error_put()
    47  mutex_unlock(&adev->virt.vf_errors.lock);    in amdgpu_vf_error_put()
    58  (!adev->virt.ops) || (!adev->virt.ops->trans_msg)) {    in amdgpu_vf_error_trans_all()
    69  mutex_lock(&adev->virt.vf_errors.lock);    in amdgpu_vf_error_trans_all()
    71  …if (adev->virt.vf_errors.write_count - adev->virt.vf_errors.read_count > AMDGPU_VF_ERROR_ENTRY_SIZ…    in amdgpu_vf_error_trans_all()
    [all …]
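The amdgpu_vf_error.c hits are a small mutex-guarded ring of error records: amdgpu_vf_error_put() uses write_count modulo the ring size as its slot and then bumps the counter, and the drain path (line 71) skips the oldest entries once the writer has lapped the reader by more than one ring. A minimal user-space sketch of the same pattern, with a pthread mutex in place of the kernel mutex and invented type names (not the driver's structures):

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

#define ERROR_ENTRY_SIZE 4   /* plays the role of AMDGPU_VF_ERROR_ENTRY_SIZE */

struct vf_error_log {
    pthread_mutex_t lock;
    uint16_t code[ERROR_ENTRY_SIZE];
    uint16_t flags[ERROR_ENTRY_SIZE];
    uint64_t data[ERROR_ENTRY_SIZE];
    uint32_t write_count;    /* total records ever written */
    uint32_t read_count;     /* records already drained */
};

/* Mirrors amdgpu_vf_error_put(): store one record in the next ring slot. */
static void error_put(struct vf_error_log *log, uint16_t code,
                      uint16_t flags, uint64_t data)
{
    pthread_mutex_lock(&log->lock);
    uint32_t index = log->write_count % ERROR_ENTRY_SIZE;
    log->code[index] = code;
    log->flags[index] = flags;
    log->data[index] = data;
    log->write_count++;
    pthread_mutex_unlock(&log->lock);
}

/* Mirrors the drain loop in amdgpu_vf_error_trans_all(): if the writer has
 * lapped the reader, skip ahead so only the newest ring's worth is reported. */
static void error_drain(struct vf_error_log *log)
{
    pthread_mutex_lock(&log->lock);
    if (log->write_count - log->read_count > ERROR_ENTRY_SIZE)
        log->read_count = log->write_count - ERROR_ENTRY_SIZE;
    while (log->read_count < log->write_count) {
        uint32_t index = log->read_count % ERROR_ENTRY_SIZE;
        printf("error code=%u flags=%u data=%llu\n",
               (unsigned int)log->code[index], (unsigned int)log->flags[index],
               (unsigned long long)log->data[index]);
        log->read_count++;
    }
    pthread_mutex_unlock(&log->lock);
}

int main(void)
{
    struct vf_error_log log = { .lock = PTHREAD_MUTEX_INITIALIZER };

    for (uint16_t i = 0; i < 6; i++)
        error_put(&log, i, 0, i * 100u);
    error_drain(&log);    /* reports only the last ERROR_ENTRY_SIZE records */
    return 0;
}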
|
D | amdgpu_virt.c |
    135  struct amdgpu_virt *virt = &adev->virt;    in amdgpu_virt_request_full_gpu() local
    138  if (virt->ops && virt->ops->req_full_gpu) {    in amdgpu_virt_request_full_gpu()
    139  r = virt->ops->req_full_gpu(adev, init);    in amdgpu_virt_request_full_gpu()
    143  adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME;    in amdgpu_virt_request_full_gpu()
    158  struct amdgpu_virt *virt = &adev->virt;    in amdgpu_virt_release_full_gpu() local
    161  if (virt->ops && virt->ops->rel_full_gpu) {    in amdgpu_virt_release_full_gpu()
    162  r = virt->ops->rel_full_gpu(adev, init);    in amdgpu_virt_release_full_gpu()
    166  adev->virt.caps |= AMDGPU_SRIOV_CAPS_RUNTIME;    in amdgpu_virt_release_full_gpu()
    179  struct amdgpu_virt *virt = &adev->virt;    in amdgpu_virt_reset_gpu() local
    182  if (virt->ops && virt->ops->reset_gpu) {    in amdgpu_virt_reset_gpu()
    [all …]
|
D | mxgpu_nv.c |
    187  adev->virt.req_init_data_ver = 0;    in xgpu_nv_send_access_requests()
    190  adev->virt.req_init_data_ver =    in xgpu_nv_send_access_requests()
    194  if (adev->virt.req_init_data_ver < 1)    in xgpu_nv_send_access_requests()
    195  adev->virt.req_init_data_ver = 1;    in xgpu_nv_send_access_requests()
    201  adev->virt.fw_reserve.checksum_key =    in xgpu_nv_send_access_requests()
    276  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);    in xgpu_nv_mailbox_flr_work() local
    277  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);    in xgpu_nv_mailbox_flr_work()
    350  &adev->virt.flr_work),    in xgpu_nv_mailbox_rcv_irq()
    380  adev->virt.ack_irq.num_types = 1;    in xgpu_nv_mailbox_set_irq_funcs()
    381  adev->virt.ack_irq.funcs = &xgpu_nv_mailbox_ack_irq_funcs;    in xgpu_nv_mailbox_set_irq_funcs()
    [all …]
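Lines 276-277 (and the matching lines in mxgpu_ai.c and mxgpu_vi.c below) recover the owning device from the work item with two container_of() steps: work to amdgpu_virt via the flr_work member, then amdgpu_virt to amdgpu_device via the virt member. A standalone sketch of that pointer arithmetic, using a user-space container_of built from offsetof and invented stand-in structs:

#include <stddef.h>
#include <stdio.h>

/* Same idea as the kernel macro, minus the type checking. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct work { int pending; };

struct virt_ctx {             /* stands in for struct amdgpu_virt */
    int caps;
    struct work flr_work;
};

struct device_ctx {           /* stands in for struct amdgpu_device */
    const char *name;
    struct virt_ctx virt;
};

/* A work handler only receives the embedded work pointer ... */
static void flr_work_handler(struct work *work)
{
    /* ... and walks back out to the enclosing structures. */
    struct virt_ctx *virt = container_of(work, struct virt_ctx, flr_work);
    struct device_ctx *adev = container_of(virt, struct device_ctx, virt);

    printf("FLR work for device %s\n", adev->name);
}

int main(void)
{
    struct device_ctx adev = { .name = "gpu0" };

    flr_work_handler(&adev.virt.flr_work);
    return 0;
}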
|
D | mxgpu_ai.c |
    181  adev->virt.fw_reserve.checksum_key =    in xgpu_ai_send_access_requests()
    189  adev->virt.req_init_data_ver = 0;    in xgpu_ai_send_access_requests()
    254  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);    in xgpu_ai_mailbox_flr_work() local
    255  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);    in xgpu_ai_mailbox_flr_work()
    322  &adev->virt.flr_work),    in xgpu_ai_mailbox_rcv_irq()
    355  adev->virt.ack_irq.num_types = 1;    in xgpu_ai_mailbox_set_irq_funcs()
    356  adev->virt.ack_irq.funcs = &xgpu_ai_mailbox_ack_irq_funcs;    in xgpu_ai_mailbox_set_irq_funcs()
    357  adev->virt.rcv_irq.num_types = 1;    in xgpu_ai_mailbox_set_irq_funcs()
    358  adev->virt.rcv_irq.funcs = &xgpu_ai_mailbox_rcv_irq_funcs;    in xgpu_ai_mailbox_set_irq_funcs()
    365  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt.rcv_irq);    in xgpu_ai_mailbox_add_irq_id()
    [all …]
|
D | amdgpu_virt.h |
    271  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_ENABLE_IOV)
    274  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_IS_VF)
    277  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)
    280  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_RUNTIME)
    287  ((adev)->virt.gim_feature & (AMDGIM_FEATURE_INDIRECT_REG_ACCESS)))
    291  ((adev)->virt.reg_access & (AMDGIM_FEATURE_IH_REG_PSP_EN)))
    295  ((adev)->virt.reg_access & (AMDGIM_FEATURE_MMHUB_REG_RLC_EN)))
    299  ((adev)->virt.reg_access & (AMDGIM_FEATURE_GC_REG_RLC_EN)))
    305  ((adev)->virt.caps & AMDGPU_PASSTHROUGH_MODE)
    308  ((adev)->virt.caps & AMDGPU_VF_MMIO_ACCESS_PROTECT)
    [all …]
|
D | mxgpu_vi.c |
    515  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);    in xgpu_vi_mailbox_flr_work() local
    516  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);    in xgpu_vi_mailbox_flr_work()
    565  &adev->virt.flr_work),    in xgpu_vi_mailbox_rcv_irq()
    585  adev->virt.ack_irq.num_types = 1;    in xgpu_vi_mailbox_set_irq_funcs()
    586  adev->virt.ack_irq.funcs = &xgpu_vi_mailbox_ack_irq_funcs;    in xgpu_vi_mailbox_set_irq_funcs()
    587  adev->virt.rcv_irq.num_types = 1;    in xgpu_vi_mailbox_set_irq_funcs()
    588  adev->virt.rcv_irq.funcs = &xgpu_vi_mailbox_rcv_irq_funcs;    in xgpu_vi_mailbox_set_irq_funcs()
    595  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 135, &adev->virt.rcv_irq);    in xgpu_vi_mailbox_add_irq_id()
    599  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 138, &adev->virt.ack_irq);    in xgpu_vi_mailbox_add_irq_id()
    601  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);    in xgpu_vi_mailbox_add_irq_id()
    [all …]
|
/linux-6.6.21/drivers/staging/media/atomisp/pci/hmm/ |
D | hmm.c |
    230  void hmm_free(ia_css_ptr virt)    in hmm_free() argument
    234  dev_dbg(atomisp_dev, "%s: free 0x%08x\n", __func__, virt);    in hmm_free()
    236  if (WARN_ON(virt == mmgr_EXCEPTION))    in hmm_free()
    239  bo = hmm_bo_device_search_start(&bo_device, (unsigned int)virt);    in hmm_free()
    244  (unsigned int)virt);    in hmm_free()
    278  static int load_and_flush_by_kmap(ia_css_ptr virt, void *data,    in load_and_flush_by_kmap() argument
    286  bo = hmm_bo_device_search_in_range(&bo_device, virt);    in load_and_flush_by_kmap()
    287  ret = hmm_check_bo(bo, virt);    in load_and_flush_by_kmap()
    293  idx = (virt - bo->start) >> PAGE_SHIFT;    in load_and_flush_by_kmap()
    294  offset = (virt - bo->start) - (idx << PAGE_SHIFT);    in load_and_flush_by_kmap()
    [all …]
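Lines 293-294 split the distance from the buffer object's start address into a page index and an in-page offset; that pair then drives the per-page kmap and memcpy loop. A tiny sketch of the arithmetic, assuming 4 KiB pages (PAGE_SHIFT = 12) and made-up addresses:

#include <stdio.h>

#define PAGE_SHIFT 12    /* 4 KiB pages assumed for the example */

int main(void)
{
    unsigned long bo_start = 0x10000;    /* first address of the buffer object */
    unsigned long virt = 0x13a20;        /* address being accessed */

    /* Same split as hmm.c lines 293-294. */
    unsigned long idx = (virt - bo_start) >> PAGE_SHIFT;
    unsigned long offset = (virt - bo_start) - (idx << PAGE_SHIFT);

    /* Prints: page index 3, offset 0xa20 */
    printf("page index %lu, offset 0x%lx\n", idx, offset);
    return 0;
}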
|
/linux-6.6.21/arch/m68k/mm/ |
D | sun3kmap.c |
    29  static inline void do_page_mapin(unsigned long phys, unsigned long virt,    in do_page_mapin() argument
    39  sun3_put_pte(virt, pte);    in do_page_mapin()
    43  print_pte_vaddr(virt);    in do_page_mapin()
    48  static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt,    in do_pmeg_mapin() argument
    52  if(sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)    in do_pmeg_mapin()
    53  mmu_emu_map_pmeg(sun3_get_context(), virt);    in do_pmeg_mapin()
    56  do_page_mapin(phys, virt, type);    in do_pmeg_mapin()
    58  virt += PAGE_SIZE;    in do_pmeg_mapin()
    67  unsigned long offset, virt, ret;    in sun3_ioremap() local
    88  virt = (unsigned long)area->addr;    in sun3_ioremap()
    [all …]
|
/linux-6.6.21/drivers/staging/media/atomisp/include/hmm/ |
D | hmm.h |
    43  int hmm_load(ia_css_ptr virt, void *data, unsigned int bytes);
    44  int hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes);
    45  int hmm_set(ia_css_ptr virt, int c, unsigned int bytes);
    46  int hmm_flush(ia_css_ptr virt, unsigned int bytes);
    51  phys_addr_t hmm_virt_to_phys(ia_css_ptr virt);
    60  void *hmm_vmap(ia_css_ptr virt, bool cached);
    61  void hmm_vunmap(ia_css_ptr virt);
    67  void hmm_flush_vmap(ia_css_ptr virt);
    77  int hmm_mmap(struct vm_area_struct *vma, ia_css_ptr virt);
|
/linux-6.6.21/drivers/gpio/ |
D | gpio-stp-xway.c |
    82  void __iomem *virt;    member
    105  return (xway_stp_r32(chip->virt, XWAY_STP_CPU0) & BIT(gpio));    in xway_stp_get()
    124  xway_stp_w32(chip->virt, chip->shadow, XWAY_STP_CPU0);    in xway_stp_set()
    126  xway_stp_w32_mask(chip->virt, 0, XWAY_STP_CON_SWU, XWAY_STP_CON0);    in xway_stp_set()
    170  xway_stp_w32(chip->virt, 0, XWAY_STP_AR);    in xway_stp_hw_init()
    171  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU0);    in xway_stp_hw_init()
    172  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU1);    in xway_stp_hw_init()
    173  xway_stp_w32(chip->virt, XWAY_STP_CON_SWU, XWAY_STP_CON0);    in xway_stp_hw_init()
    174  xway_stp_w32(chip->virt, 0, XWAY_STP_CON1);    in xway_stp_hw_init()
    177  xway_stp_w32_mask(chip->virt, XWAY_STP_EDGE_MASK,    in xway_stp_hw_init()
    [all …]
|
/linux-6.6.21/drivers/scsi/lpfc/ |
D | lpfc_mem.c |
    107  pool->elements[i].virt = dma_pool_alloc(phba->lpfc_mbuf_pool,    in lpfc_mem_alloc()
    109  if (!pool->elements[i].virt)    in lpfc_mem_alloc()
    180  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,    in lpfc_mem_alloc()
    252  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,    in lpfc_mem_free()
    297  lpfc_mbuf_free(phba, mp->virt, mp->phys);    in lpfc_mem_free_all()
    307  lpfc_mbuf_free(phba, mp->virt, mp->phys);    in lpfc_mem_free_all()
    321  lpfc_mbuf_free(phba, mp->virt, mp->phys);    in lpfc_mem_free_all()
    342  phba->cgn_i->virt, phba->cgn_i->phys);    in lpfc_mem_free_all()
    391  ret = pool->elements[pool->current_count].virt;    in lpfc_mbuf_alloc()
    413  __lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)    in __lpfc_mbuf_free() argument
    [all …]
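lpfc_mem_alloc() pre-allocates a small "safety pool" of mailbox buffers with dma_pool_alloc() (line 107), and lpfc_mbuf_alloc() falls back to one of those spares (line 391) when a normal allocation fails, so mailbox commands still get a buffer under memory pressure. A heap-based sketch of that fallback idea, with malloc standing in for the DMA pool; the real code pairs each virtual pointer with a DMA address and returns spares to the pool on free:

#include <stdlib.h>
#include <stdio.h>

#define SAFETY_POOL_SIZE 4
#define BUF_SIZE 1024

struct safety_pool {
    void *elements[SAFETY_POOL_SIZE];
    int current_count;    /* how many spare buffers are left */
};

/* Fill the spare pool up front, like lpfc_mem_alloc(). */
static int pool_fill(struct safety_pool *pool)
{
    for (int i = 0; i < SAFETY_POOL_SIZE; i++) {
        pool->elements[i] = malloc(BUF_SIZE);
        if (!pool->elements[i]) {
            while (i--)
                free(pool->elements[i]);
            return -1;
        }
    }
    pool->current_count = SAFETY_POOL_SIZE;
    return 0;
}

/* Like lpfc_mbuf_alloc(): try the normal allocator first, and only dip
 * into the safety pool when that fails and the caller allows it. */
static void *buf_alloc(struct safety_pool *pool, int allow_safety)
{
    void *ret = malloc(BUF_SIZE);

    if (!ret && allow_safety && pool->current_count > 0) {
        pool->current_count--;
        ret = pool->elements[pool->current_count];
    }
    return ret;
}

int main(void)
{
    struct safety_pool pool;

    if (pool_fill(&pool))
        return 1;
    void *buf = buf_alloc(&pool, 1);
    printf("got buffer %p, %d spares left\n", buf, pool.current_count);
    /* teardown of the pool and buffer omitted for brevity */
    return 0;
}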
|
/linux-6.6.21/include/asm-generic/ |
D | sections.h |
    91  static inline bool memory_contains(void *begin, void *end, void *virt,    in memory_contains() argument
    94  return virt >= begin && virt + size <= end;    in memory_contains()
    108  static inline bool memory_intersects(void *begin, void *end, void *virt,    in memory_intersects() argument
    111  void *vend = virt + size;    in memory_intersects()
    113  if (virt < end && vend > begin)    in memory_intersects()
    128  static inline bool init_section_contains(void *virt, size_t size)    in init_section_contains() argument
    130  return memory_contains(__init_begin, __init_end, virt, size);    in init_section_contains()
    142  static inline bool init_section_intersects(void *virt, size_t size)    in init_section_intersects() argument
    144  return memory_intersects(__init_begin, __init_end, virt, size);    in init_section_intersects()
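The two asm-generic helpers differ only in the test: memory_contains() requires the whole [virt, virt + size) range to sit inside [begin, end), while memory_intersects() only needs a non-empty overlap, which is the virt < end && vend > begin check on line 113. A compilable copy of both predicates with a couple of sanity checks (char * instead of void * so the pointer arithmetic is plain standard C):

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>

/* Same logic as the asm-generic helpers. */
static bool memory_contains(char *begin, char *end, char *virt, size_t size)
{
    return virt >= begin && virt + size <= end;
}

static bool memory_intersects(char *begin, char *end, char *virt, size_t size)
{
    char *vend = virt + size;

    return virt < end && vend > begin;
}

int main(void)
{
    char region[0x200];
    char *begin = region, *end = region + 0x100;   /* pretend section bounds */

    assert(memory_contains(begin, end, region + 0x10, 0x20));    /* fully inside */
    assert(!memory_contains(begin, end, region + 0xf0, 0x20));   /* runs past end */
    assert(memory_intersects(begin, end, region + 0xf0, 0x20));  /* but still overlaps */
    return 0;
}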
|
/linux-6.6.21/drivers/net/ethernet/qlogic/qed/ |
D | qed_chain.c |
    73  void *virt, *virt_next;    in qed_chain_free_next_ptr() local
    77  virt = chain->p_virt_addr;    in qed_chain_free_next_ptr()
    81  if (!virt)    in qed_chain_free_next_ptr()
    84  next = virt + size;    in qed_chain_free_next_ptr()
    88  dma_free_coherent(dev, chain->page_size, virt, phys);    in qed_chain_free_next_ptr()
    90  virt = virt_next;    in qed_chain_free_next_ptr()
    202  void *virt, *virt_prev = NULL;    in qed_chain_alloc_next_ptr() local
    207  virt = dma_alloc_coherent(dev, chain->page_size, &phys,    in qed_chain_alloc_next_ptr()
    209  if (!virt)    in qed_chain_alloc_next_ptr()
    213  qed_chain_init_mem(chain, virt, phys);    in qed_chain_alloc_next_ptr()
    [all …]
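In qed's "next pointer" chain mode each DMA page ends with a link element pointing at the next page; qed_chain_free_next_ptr() saves that link before freeing the current page, and qed_chain_alloc_next_ptr() builds the list one page at a time. A heap-based sketch of the same walk, with malloc/free standing in for dma_alloc_coherent()/dma_free_coherent() and an invented page layout:

#include <stdlib.h>
#include <stdio.h>

#define PAGE_PAYLOAD 4096

/* The link element qed places at the tail of each page. */
struct next_link {
    void *next_virt;
};

struct page_buf {
    char data[PAGE_PAYLOAD];
    struct next_link next;    /* tail element, as in qed's next-ptr mode */
};

/* Mirrors qed_chain_free_next_ptr(): remember the next page, then free. */
static void chain_free(void *first)
{
    void *virt = first;

    while (virt) {
        struct page_buf *page = virt;
        void *virt_next = page->next.next_virt;

        free(virt);
        virt = virt_next;
    }
}

/* Mirrors qed_chain_alloc_next_ptr(): allocate pages and link each new
 * page into the previous page's tail element. */
static void *chain_alloc(unsigned int num_pages)
{
    void *first = NULL;
    struct page_buf *prev = NULL;

    for (unsigned int i = 0; i < num_pages; i++) {
        struct page_buf *page = calloc(1, sizeof(*page));

        if (!page) {
            chain_free(first);
            return NULL;
        }
        if (!first)
            first = page;
        else
            prev->next.next_virt = page;
        prev = page;
    }
    return first;
}

int main(void)
{
    void *chain = chain_alloc(4);

    printf("allocated chain at %p\n", chain);
    chain_free(chain);
    return 0;
}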
|
/linux-6.6.21/drivers/media/platform/samsung/s5p-mfc/ |
D | s5p_mfc_opr.c |
    54  b->virt = dev->mem_virt + offset;    in s5p_mfc_alloc_priv_buf()
    61  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);    in s5p_mfc_alloc_priv_buf()
    62  if (!b->virt)    in s5p_mfc_alloc_priv_buf()
    67  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);    in s5p_mfc_alloc_priv_buf()
    72  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);    in s5p_mfc_alloc_priv_buf()
    87  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);    in s5p_mfc_alloc_generic_buf()
    88  if (!b->virt)    in s5p_mfc_alloc_generic_buf()
    91  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);    in s5p_mfc_alloc_generic_buf()
    109  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);    in s5p_mfc_release_priv_buf()
    111  b->virt = NULL;    in s5p_mfc_release_priv_buf()
    [all …]
|
/linux-6.6.21/drivers/mtd/maps/ |
D | nettel.c |
    220  nettel_amd_map.virt = ioremap(amdaddr, maxsize);    in nettel_init()
    221  if (!nettel_amd_map.virt) {    in nettel_init()
    280  iounmap(nettel_amd_map.virt);    in nettel_init()
    281  nettel_amd_map.virt = NULL;    in nettel_init()
    306  nettel_intel_map.virt = ioremap(intel0addr, maxsize);    in nettel_init()
    307  if (!nettel_intel_map.virt) {    in nettel_init()
    337  iounmap(nettel_intel_map.virt);    in nettel_init()
    340  nettel_intel_map.virt = ioremap(intel0addr, maxsize);    in nettel_init()
    341  if (!nettel_intel_map.virt) {    in nettel_init()
    412  iounmap(nettel_intel_map.virt);    in nettel_init()
    [all …]
|
D | dc21285.c |
    57  val.x[0] = *(uint8_t*)(map->virt + ofs);    in dc21285_read8()
    64  val.x[0] = *(uint16_t*)(map->virt + ofs);    in dc21285_read16()
    71  val.x[0] = *(uint32_t*)(map->virt + ofs);    in dc21285_read32()
    77  memcpy(to, (void*)(map->virt + from), len);    in dc21285_copy_from()
    86  *(uint8_t*)(map->virt + adr) = d.x[0];    in dc21285_write8()
    95  *(uint16_t*)(map->virt + adr) = d.x[0];    in dc21285_write16()
    102  *(uint32_t*)(map->virt + adr) = d.x[0];    in dc21285_write32()
    179  dc21285_map.virt = ioremap(DC21285_FLASH, 16*1024*1024);    in init_dc21285()
    180  if (!dc21285_map.virt) {    in init_dc21285()
    192  iounmap(dc21285_map.virt);    in init_dc21285()
    [all …]
|
D | ts5500_flash.c |
    59  ts5500_map.virt = ioremap(ts5500_map.phys, ts5500_map.size);    in init_ts5500_map()
    61  if (!ts5500_map.virt) {    in init_ts5500_map()
    84  iounmap(ts5500_map.virt);    in init_ts5500_map()
    96  if (ts5500_map.virt) {    in cleanup_ts5500_map()
    97  iounmap(ts5500_map.virt);    in cleanup_ts5500_map()
    98  ts5500_map.virt = NULL;    in cleanup_ts5500_map()
|
D | netsc520.c |
    85  netsc520_map.virt = ioremap(netsc520_map.phys, netsc520_map.size);    in init_netsc520()
    87  if (!netsc520_map.virt) {    in init_netsc520()
    101  iounmap(netsc520_map.virt);    in init_netsc520()
    116  if (netsc520_map.virt) {    in cleanup_netsc520()
    117  iounmap(netsc520_map.virt);    in cleanup_netsc520()
    118  netsc520_map.virt = NULL;    in cleanup_netsc520()
|
/linux-6.6.21/drivers/firmware/ |
D | iscsi_ibft_find.c |
    55  void *virt = NULL;    in reserve_ibft_region() local
    74  if (virt)    in reserve_ibft_region()
    75  early_memunmap(virt, PAGE_SIZE);    in reserve_ibft_region()
    76  virt = early_memremap_ro(pos, PAGE_SIZE);    in reserve_ibft_region()
    81  if (memcmp(virt + (pos - virt_pos), ibft_signs[i].sign,    in reserve_ibft_region()
    84  (unsigned long *)(virt + pos - virt_pos + 4);    in reserve_ibft_region()
    99  early_memunmap(virt, PAGE_SIZE);    in reserve_ibft_region()
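reserve_ibft_region() scans low memory for the iBFT by remapping one read-only window at a time with early_memremap_ro(), comparing each candidate position against the known signatures, and taking the table length from the 32-bit field right after the signature (line 84). A user-space sketch of the same scan over a plain buffer; the 16-byte step, the fake table, and the little-endian read are illustrative rather than the exact kernel bookkeeping:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Scan 'buf' at 16-byte steps for a 4-byte table signature; the table's
 * 32-bit length sits right after the signature, as in the iBFT layout. */
static long find_table(const uint8_t *buf, size_t len, const char *sign)
{
    for (size_t pos = 0; pos + 8 <= len; pos += 16) {
        if (memcmp(buf + pos, sign, 4) == 0) {
            uint32_t table_len;

            memcpy(&table_len, buf + pos + 4, sizeof(table_len));  /* little-endian assumed */
            printf("found %s at 0x%zx, length %u\n", sign, pos,
                   (unsigned int)table_len);
            return (long)pos;
        }
    }
    return -1;
}

int main(void)
{
    uint8_t mem[4096] = { 0 };

    memcpy(mem + 0x240, "iBFT\x30\x01\x00\x00", 8);   /* fake table, length 0x130 */
    find_table(mem, sizeof(mem), "iBFT");
    return 0;
}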
|
/linux-6.6.21/arch/parisc/kernel/ |
D | kexec.c |
    81  void *virt = (void *)__fix_to_virt(FIX_TEXT_KEXEC);    in machine_kexec() local
    90  desc.addr = (long long)virt;    in machine_kexec()
    92  reloc = (void *)virt;    in machine_kexec()
    95  memcpy(virt, dereference_function_descriptor(relocate_new_kernel),    in machine_kexec()
    98  *(unsigned long *)(virt + kexec_cmdline_offset) = arch->cmdline;    in machine_kexec()
    99  *(unsigned long *)(virt + kexec_initrd_start_offset) = arch->initrd_start;    in machine_kexec()
    100  *(unsigned long *)(virt + kexec_initrd_end_offset) = arch->initrd_end;    in machine_kexec()
    101  *(unsigned long *)(virt + kexec_free_mem_offset) = PAGE0->mem_free;    in machine_kexec()
|
/linux-6.6.21/arch/powerpc/mm/nohash/ |
D | e500.c |
    97  static void settlbcam(int index, unsigned long virt, phys_addr_t phys,    in settlbcam() argument
    111  TLBCAM[index].MAS2 = virt & PAGE_MASK;    in settlbcam()
    133  tlbcam_addrs[index].start = virt;    in settlbcam()
    134  tlbcam_addrs[index].limit = virt + size - 1;    in settlbcam()
    138  static unsigned long calc_cam_sz(unsigned long ram, unsigned long virt,    in calc_cam_sz() argument
    142  unsigned int align = __ffs(virt | phys);    in calc_cam_sz()
    163  static unsigned long map_mem_in_cams_addr(phys_addr_t phys, unsigned long virt,    in map_mem_in_cams_addr() argument
    181  cam_sz = calc_cam_sz(boundary, virt, phys);    in map_mem_in_cams_addr()
    183  settlbcam(i, virt, phys, cam_sz, pgprot_val(prot), 0);    in map_mem_in_cams_addr()
    187  virt += cam_sz;    in map_mem_in_cams_addr()
    [all …]
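calc_cam_sz() (line 142) caps the size of one TLB CAM entry by the common alignment of the virtual and physical addresses: __ffs(virt | phys) is the index of the lowest set bit, so 1 << that index is the largest naturally aligned block that can start there, and map_mem_in_cams_addr() then covers RAM as a sequence of such blocks. A simplified sketch of just the size calculation; the real routine additionally clamps to the page sizes the e500 MMU actually supports:

#include <stdio.h>

/* Index of the lowest set bit, like the kernel's __ffs() (x must be non-zero). */
static unsigned int lowest_set_bit(unsigned long x)
{
    return (unsigned int)__builtin_ctzl(x);
}

/* Largest naturally aligned power-of-two block that can start at virt/phys
 * without exceeding the remaining 'ram' bytes. */
static unsigned long max_block(unsigned long ram, unsigned long virt,
                               unsigned long phys)
{
    unsigned int align = lowest_set_bit(virt | phys);
    unsigned long size = 1UL << align;

    while (size > ram)
        size >>= 1;
    return size;
}

int main(void)
{
    /* 256 MiB of RAM at a 64 MiB-aligned virt/phys pair:
     * the first block is capped at 64 MiB by the alignment. */
    printf("%lu MiB\n", max_block(256UL << 20, 0xc4000000UL, 0x04000000UL) >> 20);
    return 0;
}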
|
/linux-6.6.21/arch/arm/mach-hisi/ |
D | platsmp.c |
    108  void __iomem *virt;    in hix5hd2_set_scu_boot_addr() local
    110  virt = ioremap(start_addr, PAGE_SIZE);    in hix5hd2_set_scu_boot_addr()
    112  writel_relaxed(0xe51ff004, virt); /* ldr pc, [pc, #-4] */    in hix5hd2_set_scu_boot_addr()
    113  writel_relaxed(jump_addr, virt + 4); /* pc jump phy address */    in hix5hd2_set_scu_boot_addr()
    114  iounmap(virt);    in hix5hd2_set_scu_boot_addr()
    144  void __iomem *virt;    in hip01_set_boot_addr() local
    146  virt = phys_to_virt(start_addr);    in hip01_set_boot_addr()
    148  writel_relaxed(0xe51ff004, virt);    in hip01_set_boot_addr()
    149  writel_relaxed(jump_addr, virt + 4);    in hip01_set_boot_addr()
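Both helpers drop a two-word trampoline at the secondary CPU's boot location: 0xe51ff004 is the ARM instruction ldr pc, [pc, #-4], and because the ARM PC reads as the current instruction plus 8, the [pc, #-4] operand points at the very next word, where the real jump target is stored. The only difference between the two paths is how the location is reached (ioremap() of a physical address versus phys_to_virt() for memory the kernel already maps). A trivial sketch that just assembles the two words into a buffer, with a made-up jump address:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t trampoline[2];
    uint32_t jump_addr = 0x40008000;    /* example secondary-entry physical address */

    trampoline[0] = 0xe51ff004;    /* ldr pc, [pc, #-4]: load PC from the next word */
    trampoline[1] = jump_addr;     /* the address the secondary core should jump to */

    printf("%08" PRIx32 " %08" PRIx32 "\n", trampoline[0], trampoline[1]);
    return 0;
}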
|
/linux-6.6.21/drivers/net/ipa/ |
D | gsi.c |
    202  iowrite32(val, gsi->virt + reg_offset(reg));    in gsi_irq_type_update()
    230  iowrite32(~0, gsi->virt + reg_offset(reg));    in gsi_irq_ev_ctrl_enable()
    233  iowrite32(val, gsi->virt + reg_offset(reg));    in gsi_irq_ev_ctrl_enable()
    245  iowrite32(0, gsi->virt + reg_offset(reg));    in gsi_irq_ev_ctrl_disable()
    263  iowrite32(~0, gsi->virt + reg_offset(reg));    in gsi_irq_ch_ctrl_enable()
    266  iowrite32(val, gsi->virt + reg_offset(reg));    in gsi_irq_ch_ctrl_enable()
    279  iowrite32(0, gsi->virt + reg_offset(reg));    in gsi_irq_ch_ctrl_disable()
    292  iowrite32(val, gsi->virt + reg_offset(reg));    in gsi_irq_ieob_enable_one()
    312  iowrite32(val, gsi->virt + reg_offset(reg));    in gsi_irq_ieob_disable()
    330  iowrite32(ERROR_INT, gsi->virt + reg_offset(reg));    in gsi_irq_enable()
    [all …]
|
/linux-6.6.21/crypto/ |
D | cfb.c |
    49  u8 *src = walk->src.virt.addr;    in crypto_cfb_final()
    50  u8 *dst = walk->dst.virt.addr;    in crypto_cfb_final()
    63  u8 *src = walk->src.virt.addr;    in crypto_cfb_encrypt_segment()
    64  u8 *dst = walk->dst.virt.addr;    in crypto_cfb_encrypt_segment()
    86  u8 *src = walk->src.virt.addr;    in crypto_cfb_encrypt_inplace()
    113  if (walk.src.virt.addr == walk.dst.virt.addr)    in crypto_cfb_encrypt()
    133  u8 *src = walk->src.virt.addr;    in crypto_cfb_decrypt_segment()
    134  u8 *dst = walk->dst.virt.addr;    in crypto_cfb_decrypt_segment()
    156  u8 *src = walk->src.virt.addr;    in crypto_cfb_decrypt_inplace()
    173  if (walk->src.virt.addr == walk->dst.virt.addr)    in crypto_cfb_decrypt_blocks()
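crypto_cfb_encrypt_segment() and crypto_cfb_decrypt_segment() walk the mapped source and destination pages (the walk->src.virt.addr pointers above) and apply the CFB rule: encrypt the IV, XOR it with the input block, and feed the resulting ciphertext block back as the next IV; only the cipher's encrypt direction is ever used, even when decrypting. A self-contained sketch of the two loops with a toy XOR "block cipher" standing in for crypto_cipher_encrypt_one() (it only illustrates the chaining, it is not a real cipher), including the in-place trick from crypto_cfb_decrypt_inplace() of saving the ciphertext before it can be overwritten:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define BLOCK 16

/* Toy stand-in for crypto_cipher_encrypt_one(): any keyed transform works
 * for showing the chaining; here it is just an XOR with the key. */
static void toy_encrypt_block(const uint8_t key[BLOCK], const uint8_t in[BLOCK],
                              uint8_t out[BLOCK])
{
    for (int i = 0; i < BLOCK; i++)
        out[i] = in[i] ^ key[i];
}

static void xor_block(uint8_t *dst, const uint8_t *a, const uint8_t *b)
{
    for (int i = 0; i < BLOCK; i++)
        dst[i] = a[i] ^ b[i];
}

/* CFB encryption over whole blocks, like crypto_cfb_encrypt_segment():
 * dst = E(iv) ^ src, and dst becomes the IV for the next block. */
static void cfb_encrypt(const uint8_t key[BLOCK], uint8_t iv[BLOCK],
                        const uint8_t *src, uint8_t *dst, size_t nbytes)
{
    uint8_t stream[BLOCK];

    while (nbytes >= BLOCK) {
        toy_encrypt_block(key, iv, stream);
        xor_block(dst, stream, src);
        memcpy(iv, dst, BLOCK);    /* ciphertext feeds back as the next IV */
        src += BLOCK;
        dst += BLOCK;
        nbytes -= BLOCK;
    }
}

/* CFB decryption: same keystream, XORed with the ciphertext; the
 * ciphertext block is what feeds back. */
static void cfb_decrypt(const uint8_t key[BLOCK], uint8_t iv[BLOCK],
                        const uint8_t *src, uint8_t *dst, size_t nbytes)
{
    uint8_t stream[BLOCK];

    while (nbytes >= BLOCK) {
        toy_encrypt_block(key, iv, stream);
        memcpy(iv, src, BLOCK);    /* save before writing dst: src may equal dst */
        xor_block(dst, stream, src);
        src += BLOCK;
        dst += BLOCK;
        nbytes -= BLOCK;
    }
}

int main(void)
{
    const uint8_t key[BLOCK] = "0123456789abcdef";
    const uint8_t iv0[BLOCK] = "fedcba9876543210";
    uint8_t iv[BLOCK];
    uint8_t msg[32] = "two blocks of plaintext data...";
    uint8_t enc[32], dec[32];

    memcpy(iv, iv0, BLOCK);
    cfb_encrypt(key, iv, msg, enc, sizeof(msg));
    memcpy(iv, iv0, BLOCK);    /* decryption starts from the same IV */
    cfb_decrypt(key, iv, enc, dec, sizeof(enc));
    printf("round trip %s\n", memcmp(msg, dec, sizeof(msg)) ? "failed" : "ok");
    return 0;
}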
|
/linux-6.6.21/drivers/media/platform/amphion/ |
D | vpu_helpers.c |
    242  void *virt;    in vpu_helper_copy_from_stream_buffer() local
    253  virt = stream_buffer->virt;    in vpu_helper_copy_from_stream_buffer()
    259  memcpy(dst, virt + (offset - start), size);    in vpu_helper_copy_from_stream_buffer()
    261  memcpy(dst, virt + (offset - start), end - offset);    in vpu_helper_copy_from_stream_buffer()
    262  memcpy(dst + end - offset, virt, size + offset - end);    in vpu_helper_copy_from_stream_buffer()
    276  void *virt;    in vpu_helper_copy_to_stream_buffer() local
    287  virt = stream_buffer->virt;    in vpu_helper_copy_to_stream_buffer()
    292  memcpy(virt + (offset - start), src, size);    in vpu_helper_copy_to_stream_buffer()
    294  memcpy(virt + (offset - start), src, end - offset);    in vpu_helper_copy_to_stream_buffer()
    295  memcpy(virt, src + end - offset, size + offset - end);    in vpu_helper_copy_to_stream_buffer()
    [all …]
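The vpu_helpers.c copy routines treat the stream buffer as a ring spanning device addresses [start, start + length): if the requested span ends before the ring's end it is a single memcpy (lines 259 and 292), otherwise it is split into a tail piece up to the end and a head piece that wraps back to the start of the CPU mapping (lines 261-262 and 294-295). A user-space sketch of the read side, with a simplified stand-in for the driver's buffer struct:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

struct stream_buffer {
    void *virt;         /* CPU mapping of the ring */
    uint32_t start;     /* device address of the first byte */
    uint32_t length;    /* ring size in bytes */
};

/* Copy 'size' bytes that begin at device address 'offset', wrapping at the
 * end of the ring, like vpu_helper_copy_from_stream_buffer(). */
static int copy_from_ring(struct stream_buffer *b, uint32_t offset,
                          uint32_t size, void *dst)
{
    uint32_t start = b->start;
    uint32_t end = b->start + b->length;
    uint8_t *virt = b->virt;

    if (offset < start || offset >= end || size > b->length)
        return -1;

    if (offset + size <= end) {
        memcpy(dst, virt + (offset - start), size);
    } else {
        memcpy(dst, virt + (offset - start), end - offset);
        memcpy((uint8_t *)dst + (end - offset), virt, size + offset - end);
    }
    return 0;
}

int main(void)
{
    uint8_t ring[16] = "ABCDEFGHIJKLMNOP";
    struct stream_buffer b = { .virt = ring, .start = 0x100, .length = sizeof(ring) };
    uint8_t out[8];

    /* Read 8 bytes starting 4 bytes before the end: "MNOP" then wraps to "ABCD". */
    copy_from_ring(&b, 0x10c, sizeof(out), out);
    printf("%.8s\n", (const char *)out);
    return 0;
}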
|