Lines matching refs:virt_addr
(Each entry gives the source line number, the matching line, and the enclosing function; a trailing "argument" or "local" tag marks how virt_addr is used on that line.)

28 bool hl_is_dram_va(struct hl_device *hdev, u64 virt_addr)  in hl_is_dram_va()  argument
32 return hl_mem_area_inside_range(virt_addr, prop->dmmu.page_size, in hl_is_dram_va()
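
The two lines above (28-32) show hl_is_dram_va() classifying a virtual address as device DRAM: it checks whether one DRAM-MMU (dmmu) page starting at virt_addr lies entirely inside the DRAM address range. Below is a minimal userspace sketch of that interval test; the window bounds and page size are invented for illustration, and mem_area_inside_range() models the driver's hl_mem_area_inside_range() helper.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Models hl_mem_area_inside_range(): the area [addr, addr + size) must
 * sit entirely inside [start, end) and must not wrap around zero. */
static bool mem_area_inside_range(uint64_t addr, uint64_t size,
				  uint64_t start, uint64_t end)
{
	uint64_t area_end = addr + size;

	return addr >= start && area_end <= end && area_end > addr;
}

int main(void)
{
	/* Hypothetical DRAM window and dmmu page size. */
	uint64_t dram_start = 0x20000000ULL, dram_end = 0x2000000000ULL;
	uint64_t dmmu_page_size = 2ULL << 20;	/* 2 MB */

	/* hl_is_dram_va(): does one dmmu page at virt_addr fit in DRAM? */
	printf("0x20200000 is DRAM VA: %d\n",
	       mem_area_inside_range(0x20200000ULL, dmmu_page_size,
				     dram_start, dram_end));
	return 0;
}
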
226 int hl_mmu_unmap_page(struct hl_ctx *ctx, u64 virt_addr, u32 page_size, bool flush_pte) in hl_mmu_unmap_page() argument
239 is_dram_addr = hl_is_dram_va(hdev, virt_addr); in hl_mmu_unmap_page()
251 real_virt_addr = virt_addr; in hl_mmu_unmap_page()
265 trace_habanalabs_mmu_unmap(hdev->dev, virt_addr, 0, page_size, flush_pte); in hl_mmu_unmap_page()
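
hl_mmu_unmap_page() (226-265) first classifies the VA (239), then walks real_virt_addr from virt_addr in hardware-page-size steps, because one driver-level page may be backed by several smaller hardware pages, and finally traces the unmap (265). A sketch of that split loop; asic_unmap() is an illustrative stand-in for the per-ASIC unmap hook, not the driver's API.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the per-ASIC hook the real driver invokes per hw page. */
static void asic_unmap(uint64_t va)
{
	printf("unmap hw page at 0x%llx\n", (unsigned long long)va);
}

/* The split loop: a driver-level page of page_size bytes is unmapped
 * as page_size / real_page_size hardware pages. */
static void unmap_page(uint64_t virt_addr, uint32_t page_size,
		       uint32_t real_page_size)
{
	uint64_t real_virt_addr = virt_addr;
	uint32_t npages = page_size / real_page_size;

	for (uint32_t i = 0; i < npages; i++) {
		asic_unmap(real_virt_addr);
		real_virt_addr += real_page_size;
	}
}

int main(void)
{
	unmap_page(0x1000000ULL, 64 * 1024, 4096);	/* 64 KB as 16 x 4 KB */
	return 0;
}
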
292 int hl_mmu_map_page(struct hl_ctx *ctx, u64 virt_addr, u64 phys_addr, u32 page_size, in hl_mmu_map_page() argument
307 is_dram_addr = hl_is_dram_va(hdev, virt_addr); in hl_mmu_map_page()
326 (hdev->asic_funcs->scramble_addr(hdev, virt_addr) & in hl_mmu_map_page()
329 (virt_addr & (real_page_size - 1))))) in hl_mmu_map_page()
332 phys_addr, virt_addr, real_page_size); in hl_mmu_map_page()
335 real_virt_addr = virt_addr; in hl_mmu_map_page()
352 trace_habanalabs_mmu_map(hdev->dev, virt_addr, phys_addr, page_size, flush_pte); in hl_mmu_map_page()
357 real_virt_addr = virt_addr; in hl_mmu_map_page()
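
hl_mmu_map_page() (292-357) runs the mirror-image split loop, but first validates alignment (326-329): the scrambled virtual address for DRAM mappings, or the raw virtual address otherwise, must be aligned to the page size. The second "real_virt_addr = virt_addr" at 357 is the error path rewinding to unmap whatever was already mapped. A compilable sketch of the alignment predicate; scramble_addr() is an identity stand-in for the ASIC-specific hdev->asic_funcs->scramble_addr(), and the DRAM/host split follows the shape suggested by lines 326-329.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Identity stand-in; real ASICs apply a hardware-specific transform. */
static uint64_t scramble_addr(uint64_t addr)
{
	return addr;
}

/* Scrambled addresses are checked for DRAM, raw addresses otherwise. */
static bool map_addrs_aligned(uint64_t phys_addr, uint64_t virt_addr,
			      uint64_t real_page_size, bool is_dram_addr)
{
	uint64_t pa = is_dram_addr ? scramble_addr(phys_addr) : phys_addr;
	uint64_t va = is_dram_addr ? scramble_addr(virt_addr) : virt_addr;

	return !(pa & (real_page_size - 1)) && !(va & (real_page_size - 1));
}

int main(void)
{
	/* A 4 KB-offset VA is rejected for a 2 MB page: prints 0. */
	printf("aligned: %d\n",
	       map_addrs_aligned(0x400000ULL, 0x601000ULL, 1ULL << 21, true));
	return 0;
}
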
381 int hl_mmu_map_contiguous(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_map_contiguous() argument
391 if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
394 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
397 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_map_contiguous()
404 curr_va = virt_addr + off; in hl_mmu_map_contiguous()
423 curr_va = virt_addr + off; in hl_mmu_map_contiguous()
442 int hl_mmu_unmap_contiguous(struct hl_ctx *ctx, u64 virt_addr, u32 size) in hl_mmu_unmap_contiguous() argument
451 if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
454 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
457 else if (hl_mem_area_inside_range(virt_addr, size, in hl_mmu_unmap_contiguous()
464 curr_va = virt_addr + off; in hl_mmu_unmap_contiguous()
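
hl_mmu_map_contiguous() (381-423) and hl_mmu_unmap_contiguous() (442-464) are block-level wrappers: each uses three hl_mem_area_inside_range() tests to find which VA window the whole block falls into (the driver's DRAM, host, and huge-host MMU ranges), takes that window's page size, and then iterates curr_va = virt_addr + off across the block. A sketch of the window dispatch; the window bounds and page sizes are invented, and in the driver they come from hdev->asic_prop.

#include <stdint.h>
#include <stdio.h>

#define DRAM_START	0x8000000000ULL
#define DRAM_END	0x10000000000ULL
#define HOST_START	0x1000000000ULL
#define HOST_END	0x2000000000ULL
#define HUGE_START	0x2000000000ULL
#define HUGE_END	0x3000000000ULL

static int inside(uint64_t addr, uint64_t size, uint64_t start, uint64_t end)
{
	return addr >= start && addr + size <= end && addr + size > addr;
}

/* The whole block must sit inside exactly one window; that window
 * fixes the page size used for every hl_mmu_{map,unmap}_page() call. */
static uint64_t pick_page_size(uint64_t virt_addr, uint32_t size)
{
	if (inside(virt_addr, size, DRAM_START, DRAM_END))
		return 2ULL << 20;	/* assumed DRAM page size */
	if (inside(virt_addr, size, HOST_START, HOST_END))
		return 4096;		/* regular host page */
	if (inside(virt_addr, size, HUGE_START, HUGE_END))
		return 2ULL << 20;	/* huge host page */
	return 0;			/* no window: the driver errors out */
}

int main(void)
{
	printf("page size: 0x%llx\n",
	       (unsigned long long)pick_page_size(0x1000000000ULL, 1 << 20));
	return 0;
}
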
515 static void hl_mmu_pa_page_with_offset(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_pa_page_with_offset() argument
551 abs_virt_addr = virt_addr - dram_base; in hl_mmu_pa_page_with_offset()
567 (virt_addr & offset_mask); in hl_mmu_pa_page_with_offset()
571 int hl_mmu_va_to_pa(struct hl_ctx *ctx, u64 virt_addr, u64 *phys_addr) in hl_mmu_va_to_pa() argument
578 rc = hl_mmu_get_tlb_info(ctx, virt_addr, &hops); in hl_mmu_va_to_pa()
582 hl_mmu_pa_page_with_offset(ctx, virt_addr, &hops, phys_addr); in hl_mmu_va_to_pa()
587 int hl_mmu_get_tlb_info(struct hl_ctx *ctx, u64 virt_addr, in hl_mmu_get_tlb_info() argument
601 hops->scrambled_vaddr = virt_addr; /* assume no scrambling */ in hl_mmu_get_tlb_info()
603 is_dram_addr = hl_mem_area_inside_range(virt_addr, prop->dmmu.page_size, in hl_mmu_get_tlb_info()
613 rc = mmu_funcs->get_tlb_info(ctx, virt_addr, hops); in hl_mmu_get_tlb_info()
621 hl_mmu_pa_page_with_offset(ctx, virt_addr, hops, &hops->unscrambled_paddr); in hl_mmu_get_tlb_info()
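
Lines 515-621 are the VA-to-PA path: hl_mmu_va_to_pa() asks hl_mmu_get_tlb_info() to walk the page-table hops (601 shows the default hops->scrambled_vaddr = virt_addr when no scrambling applies), then hl_mmu_pa_page_with_offset() merges the resolved page frame with the low VA bits (the "virt_addr & offset_mask" at 567). A worked sketch of that final combine step; the frame and page size below are invented.

#include <stdint.h>
#include <stdio.h>

/* Keep the frame bits from the walked PTE, splice in the page offset
 * from the original virtual address. page_size is a power of two. */
static uint64_t pa_with_offset(uint64_t pte_pa, uint64_t virt_addr,
			       uint64_t page_size)
{
	uint64_t offset_mask = page_size - 1;

	return (pte_pa & ~offset_mask) | (virt_addr & offset_mask);
}

int main(void)
{
	/* 4 KB page, frame 0xabcde000, VA offset 0x123 -> 0xabcde123. */
	printf("pa = 0x%llx\n",
	       (unsigned long long)pa_with_offset(0xabcde000ULL,
						  0x70000123ULL, 4096));
	return 0;
}
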
765 u8 hop_idx, u64 hop_addr, u64 virt_addr) in hl_mmu_get_hop_pte_phys_addr() argument
777 return hop_addr + ctx->hdev->asic_prop.mmu_pte_size * ((virt_addr & mask) >> shift); in hl_mmu_get_hop_pte_phys_addr()
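
hl_mmu_get_hop_pte_phys_addr() (765-777) is pure index arithmetic: the VA bits selected by the hop's mask, shifted down, index into the hop table in units of mmu_pte_size. A worked example; the 9-bit hop geometry and 8-byte PTE size are assumed, not read from a real ASIC.

#include <stdint.h>
#include <stdio.h>

/* Mirrors the return at 777:
 * hop_addr + pte_size * ((virt_addr & mask) >> shift) */
static uint64_t hop_pte_phys_addr(uint64_t hop_addr, uint64_t virt_addr,
				  uint64_t mask, uint32_t shift,
				  uint32_t pte_size)
{
	return hop_addr + (uint64_t)pte_size * ((virt_addr & mask) >> shift);
}

int main(void)
{
	/* Assumed geometry: VA bits [20:12] index this hop, 8-byte PTEs.
	 * (0x70123456 & 0x1FF000) >> 12 = 0x123 -> PTE index 291, so the
	 * PTE lives at hop_addr + 291 * 8 = hop_addr + 0x918. */
	printf("pte at 0x%llx\n",
	       (unsigned long long)hop_pte_phys_addr(0x1000000ULL,
						     0x70123456ULL,
						     0x1FF000, 12, 8));
	return 0;
}
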
822 if (ZERO_OR_NULL_PTR(hop0_pgt->virt_addr)) in hl_mmu_hr_pool_destroy()
825 gen_pool_free(*pool, (uintptr_t) hop0_pgt->virt_addr, hop_table_size); in hl_mmu_hr_pool_destroy()
859 u64 virt_addr; in hl_mmu_hr_init() local
882 virt_addr = (uintptr_t) hl_asic_dma_alloc_coherent(hdev, pool_chunk_size, in hl_mmu_hr_init()
885 if (ZERO_OR_NULL_PTR(virt_addr)) { in hl_mmu_hr_init()
892 rc = gen_pool_add_virt(hr_priv->mmu_pgt_pool, virt_addr, (phys_addr_t) dma_addr, in hl_mmu_hr_init()
902 hop0_pgt->virt_addr = (uintptr_t) in hl_mmu_hr_init()
907 if (!hop0_pgt->virt_addr) { in hl_mmu_hr_init()
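
Lines 822-907 cover the host-resident (HR) page-table pool. hl_mmu_hr_init() allocates DMA-coherent chunks (SZ_2M pool_chunk_size at 882-885), registers each chunk's kernel VA together with its DMA address in a gen_pool via gen_pool_add_virt() (892), and carves each context's hop0 table out of the pool (902-907); hl_mmu_hr_pool_destroy() (822-825) returns hop0 tables with gen_pool_free(). A toy bump allocator showing the carve-a-chunk-into-hop-tables pattern; the real code uses the kernel's genalloc API, and the hop table size here is assumed.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define POOL_CHUNK_SIZE	(2UL << 20)	/* SZ_2M, as in the listing */
#define HOP_TABLE_SIZE	4096		/* assumed hop table size */

/* Toy stand-in for the gen_pool: one chunk, bump allocation. The real
 * pool also records the chunk's DMA address (gen_pool_add_virt()) so
 * PTE physical addresses can be handed to the device. */
struct toy_pool {
	uint8_t *chunk;
	size_t off;
};

static void *pool_alloc_hop_table(struct toy_pool *p)
{
	void *pgt;

	if (p->off + HOP_TABLE_SIZE > POOL_CHUNK_SIZE)
		return NULL;	/* the real code can add more chunks */
	pgt = p->chunk + p->off;
	p->off += HOP_TABLE_SIZE;
	return pgt;
}

int main(void)
{
	struct toy_pool pool = { .chunk = calloc(1, POOL_CHUNK_SIZE) };

	if (!pool.chunk)
		return 1;
	/* One hop0 table per context, as hl_mmu_hr_init() sets up. */
	printf("hop0 at %p\n", pool_alloc_hop_table(&pool));
	free(pool.chunk);
	return 0;
}
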
964 gen_pool_free(hr_priv->mmu_pgt_pool, pgt_info->virt_addr, hop_table_size); in hl_mmu_hr_free_hop_remove_pgt()
987 return pgt->virt_addr + pte_offset; in hl_mmu_hr_pte_phys_to_virt()
1005 u64 virt_addr = hl_mmu_hr_pte_phys_to_virt(ctx, pgt_info, phys_pte_addr, hop_table_size); in hl_mmu_hr_write_pte() local
1007 *((u64 *) (uintptr_t) virt_addr) = val; in hl_mmu_hr_write_pte()
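
Because HR hop tables live in host memory, PTE updates are plain stores: hl_mmu_hr_pte_phys_to_virt() (987) rebases the PTE's offset within its hop table onto the table's kernel VA, and hl_mmu_hr_write_pte() (1005-1007) casts that VA and writes the 64-bit value (freed tables go back to the pool via gen_pool_free(), 964). A sketch of the offset arithmetic, assuming power-of-two hop tables.

#include <stdint.h>
#include <stdio.h>

/* The PTE keeps the same offset inside its hop table whether addressed
 * physically or virtually, so mask the offset and rebase onto the VA. */
static uint64_t pte_phys_to_virt(uint64_t pgt_virt, uint64_t phys_pte_addr,
				 uint32_t hop_table_size)
{
	uint64_t pte_offset = phys_pte_addr & (hop_table_size - 1);

	return pgt_virt + pte_offset;
}

int main(void)
{
	uint64_t pte[512] = { 0 };	/* a fake 4 KB hop table */

	/* Translate a PTE's "physical" address, then store, as
	 * hl_mmu_hr_write_pte() does: 0x...018 -> offset 0x18 -> pte[3]. */
	uint64_t va = pte_phys_to_virt((uintptr_t)pte, 0x5000018ULL, 4096);
	*(uint64_t *)(uintptr_t)va = 0xdeadULL;

	printf("pte[3] = 0x%llx\n", (unsigned long long)pte[3]);
	return 0;
}
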
1099 void *virt_addr; in hl_mmu_hr_alloc_hop() local
1107 virt_addr = gen_pool_dma_zalloc_align(hr_priv->mmu_pgt_pool, in hl_mmu_hr_alloc_hop()
1111 if (virt_addr) in hl_mmu_hr_alloc_hop()
1115 virt_addr = hl_asic_dma_alloc_coherent(hdev, SZ_2M, &phys_addr, in hl_mmu_hr_alloc_hop()
1117 if (ZERO_OR_NULL_PTR(virt_addr)) in hl_mmu_hr_alloc_hop()
1120 if (gen_pool_add_virt(hr_priv->mmu_pgt_pool, (unsigned long)virt_addr, in hl_mmu_hr_alloc_hop()
1122 hl_asic_dma_free_coherent(hdev, SZ_2M, virt_addr, phys_addr); in hl_mmu_hr_alloc_hop()
1123 virt_addr = NULL; in hl_mmu_hr_alloc_hop()
1128 if (ZERO_OR_NULL_PTR(virt_addr)) { in hl_mmu_hr_alloc_hop()
1135 pgt_info->virt_addr = (unsigned long)virt_addr; in hl_mmu_hr_alloc_hop()
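
hl_mmu_hr_alloc_hop() (1099-1135) tries the pool first (gen_pool_dma_zalloc_align at 1107) and grows it on failure: allocate a fresh SZ_2M DMA-coherent chunk (1115), register it with gen_pool_add_virt() (1120), free the chunk again if registration fails (1122-1123), then retry the pool. A runnable sketch of that try/grow/retry shape; the one-chunk "pool" and every name below are illustrative stand-ins for genalloc and the driver's DMA allocator.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define CHUNK_SIZE	(2UL << 20)	/* SZ_2M */

static unsigned char *pool_chunk;	/* toy pool: a single chunk */
static size_t pool_off = CHUNK_SIZE;	/* starts "empty" to force a grow */

static void *pool_zalloc(size_t size)	/* gen_pool_dma_zalloc_align() */
{
	void *p;

	if (!pool_chunk || pool_off + size > CHUNK_SIZE)
		return NULL;
	p = pool_chunk + pool_off;
	pool_off += size;
	return memset(p, 0, size);
}

static int pool_add_chunk(void *chunk)	/* gen_pool_add_virt() */
{
	pool_chunk = chunk;
	pool_off = 0;
	return 0;
}

static void *alloc_hop(size_t hop_table_size)
{
	void *hop = pool_zalloc(hop_table_size);
	void *chunk;

	if (hop)
		return hop;

	chunk = malloc(CHUNK_SIZE);	/* hl_asic_dma_alloc_coherent() */
	if (!chunk)
		return NULL;
	if (pool_add_chunk(chunk)) {
		free(chunk);		/* undo on registration failure */
		return NULL;
	}
	return pool_zalloc(hop_table_size);
}

int main(void)
{
	printf("hop at %p\n", alloc_hop(4096));
	return 0;
}
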
1183 int hl_mmu_hr_get_tlb_info(struct hl_ctx *ctx, u64 virt_addr, struct hl_mmu_hop_info *hops, in hl_mmu_hr_get_tlb_info() argument
1193 rc = hr_func->get_tlb_mapping_params(hdev, &mmu_prop, hops, virt_addr, &is_huge); in hl_mmu_hr_get_tlb_info()
1203 hops->scrambled_vaddr = hdev->asic_funcs->scramble_addr(hdev, virt_addr); in hl_mmu_hr_get_tlb_info()
1236 if (hops->scrambled_vaddr != virt_addr) in hl_mmu_hr_get_tlb_info()
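
Finally, hl_mmu_hr_get_tlb_info() (1183-1236) records the scrambled VA it actually walked (hops->scrambled_vaddr = scramble_addr(virt_addr) at 1203) and compares it with the caller's VA at 1236, so consumers of the hop info can tell whether scrambling changed the address. A minimal sketch of that report-both-forms pattern; the XOR scrambler and the boolean flag are invented for illustration.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct tlb_info {			/* tiny subset of hl_mmu_hop_info */
	uint64_t scrambled_vaddr;
	bool vaddr_scrambled;		/* stand-in for the check at 1236 */
};

/* Toy scrambler: folds one high address bit down; real ASICs differ. */
static uint64_t scramble_addr(uint64_t addr)
{
	return addr ^ ((addr & (1ULL << 40)) >> 8);
}

static void get_tlb_info(uint64_t virt_addr, struct tlb_info *info)
{
	info->scrambled_vaddr = scramble_addr(virt_addr);
	/* ...the hop walk would use the scrambled form here... */
	info->vaddr_scrambled = info->scrambled_vaddr != virt_addr;
}

int main(void)
{
	struct tlb_info info;

	get_tlb_info(1ULL << 40, &info);
	printf("scrambled=0x%llx changed=%d\n",
	       (unsigned long long)info.scrambled_vaddr,
	       info.vaddr_scrambled);
	return 0;
}
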