Lines matching refs:ua — cross-reference hits for the userspace-address variable ua across the KVM PPC TCE code. Each hit shows the kernel source line number, the matching line, and the enclosing function; a short reconstruction sketch follows each function's hits.
355 unsigned long *ua) in kvmppc_tce_to_ua() argument
364 *ua = __gfn_to_hva_memslot(memslot, gfn) | in kvmppc_tce_to_ua()
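Lines 355 and 364 are the core of the TCE-to-userspace translation: the guest frame number is pulled out of the TCE, resolved through the memslots, and combined with the in-page bits to form a host virtual (userspace) address. A minimal sketch consistent with those fragments; the memslot lookup call and the error code are assumptions, since they are not visible in the hits above:

/* Sketch: convert a guest TCE (guest-physical address plus
 * TCE_PCI_READ/TCE_PCI_WRITE permission bits) into a host userspace
 * address via the KVM memslots. */
static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
		unsigned long *ua)
{
	unsigned long gfn = tce >> PAGE_SHIFT;
	struct kvm_memory_slot *memslot;

	memslot = search_memslots(kvm_memslots(kvm), gfn);
	if (!memslot)
		return -EINVAL;

	/* Line 364: the memslot's HVA for the frame, OR'd with the
	 * low bits carried over from the TCE (an assumption for the
	 * continuation line, which the listing truncates). */
	*ua = __gfn_to_hva_memslot(memslot, gfn) |
		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

	return 0;
}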
376 unsigned long ua = 0; in kvmppc_tce_validate() local
385 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua)) in kvmppc_tce_validate()
394 mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift); in kvmppc_tce_validate()
395 if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa)) { in kvmppc_tce_validate()
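Lines 376-395 show validation: before a TCE is accepted, its ua must be backed by memory preregistered with the host IOMMU and must translate to a host physical address for every attached hardware table. A sketch built around those fragments; the RCU list walk, the elided early exits (e.g. for DMA_NONE), and the H_* return codes are assumptions:

/* Sketch: reject a TCE unless its ua is covered by preregistered
 * memory and translates to an hpa for each attached IOMMU table. */
static long kvmppc_tce_validate(struct kvmppc_spapr_tce_table *stt,
		unsigned long tce)
{
	unsigned long ua = 0;
	struct kvmppc_spapr_tce_iommu_table *stit;

	if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))	/* line 385 */
		return H_TOO_HARD;

	list_for_each_entry_rcu(stit, &stt->iommu_tables, next) {
		unsigned long hpa = 0;
		struct mm_iommu_table_group_mem_t *mem;
		long shift = stit->tbl->it_page_shift;

		/* Lines 394-395: look up the preregistered region for
		 * ua and make sure it yields a host physical address. */
		mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
		if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
			return H_TOO_HARD;
	}

	return H_SUCCESS;
}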
512 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_do_map() argument
524 mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift); in kvmppc_tce_iommu_do_map()
529 if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa))) in kvmppc_tce_iommu_do_map()
544 *pua = cpu_to_be64(ua); in kvmppc_tce_iommu_do_map()
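Lines 512-544 are the single-entry map: look up the preregistered region for ua, translate it to a host physical address, program the hardware table, and cache ua in the table's userspace view so the backing page can be unpinned on unmap. In this sketch the pua lookup macro, the TCE exchange call, and the pin refcounting (elided here) are assumptions:

/* Sketch: map one host IOMMU entry for a given ua. */
static long kvmppc_tce_iommu_do_map(struct kvm *kvm, struct iommu_table *tbl,
		unsigned long entry, unsigned long ua,
		enum dma_data_direction dir)
{
	unsigned long hpa;
	struct mm_iommu_table_group_mem_t *mem;
	__be64 *pua = IOMMU_TABLE_USERSPACE_ENTRY(tbl, entry);

	if (!pua)
		/* the userspace view may be allocated lazily */
		return H_TOO_HARD;

	mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
	if (!mem)
		return H_TOO_HARD;

	/* Line 529: a lookup hit that then fails to translate is a bug. */
	if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa)))
		return H_TOO_HARD;

	if (iommu_tce_xchg(tbl, entry, &hpa, &dir))
		return H_TOO_HARD;

	/* Line 544: remember ua so a later unmap can find and unpin it. */
	*pua = cpu_to_be64(ua);
	return H_SUCCESS;
}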
551 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_map() argument
562 io_entry + i, ua + pgoff, dir); in kvmppc_tce_iommu_map()
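Lines 551-562 wrap the above for the case where one guest TCE page spans several smaller host IOMMU pages: ua advances by one host IOMMU page per sub-entry. A sketch; the subpages/io_entry arithmetic is inferred from the visible io_entry + i and ua + pgoff at line 562:

/* Sketch: split one guest-sized TCE into host-IOMMU-sized subpages. */
static long kvmppc_tce_iommu_map(struct kvm *kvm,
		struct kvmppc_spapr_tce_table *stt, struct iommu_table *tbl,
		unsigned long entry, unsigned long ua,
		enum dma_data_direction dir)
{
	unsigned long i, pgoff, ret = H_SUCCESS;
	unsigned long subpages = 1ULL << (stt->page_shift - tbl->it_page_shift);
	unsigned long io_entry = entry * subpages;

	for (i = 0, pgoff = 0; i < subpages;
			++i, pgoff += IOMMU_PAGE_SIZE(tbl)) {
		/* Line 562: map host entry io_entry + i at ua + pgoff. */
		ret = kvmppc_tce_iommu_do_map(kvm, tbl,
				io_entry + i, ua + pgoff, dir);
		if (ret != H_SUCCESS)
			break;
	}

	return ret;
}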
578 unsigned long entry, ua = 0; in kvmppc_h_put_tce() local
600 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce()
613 entry, ua, dir); in kvmppc_h_put_tce()
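Lines 578-613 are the H_PUT_TCE hypercall path: the TCE is translated to ua only when the DMA direction implies a real mapping (line 600), and then every attached host IOMMU table is programmed (line 613). A skeleton sketch; the table lookup, TCE validation, SRCU locking, and the DMA_NONE unmap branch are elided or assumed:

/* Sketch: the per-call H_PUT_TCE flow, reduced to the ua handling. */
long kvmppc_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
		unsigned long ioba, unsigned long tce)
{
	struct kvmppc_spapr_tce_table *stt = kvmppc_find_table(vcpu->kvm, liobn);
	struct kvmppc_spapr_tce_iommu_table *stit;
	unsigned long entry, ua = 0;
	enum dma_data_direction dir = iommu_tce_direction(tce);
	long ret;

	if (!stt)
		return H_TOO_HARD;

	/* Line 600: only a real mapping needs the userspace address. */
	if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
		return H_PARAMETER;

	entry = ioba >> stt->page_shift;
	list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
		/* Line 613: program each attached host IOMMU table. */
		ret = kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl,
				entry, ua, dir);
		if (ret != H_SUCCESS)
			return ret;
	}

	kvmppc_tce_put(stt, entry, tce);
	return H_SUCCESS;
}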
637 unsigned long entry, ua = 0; in kvmppc_h_put_tce_indirect() local
662 if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) { in kvmppc_h_put_tce_indirect()
666 tces = (u64 __user *) ua; in kvmppc_h_put_tce_indirect()
697 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce_indirect()
704 stit->tbl, entry + i, ua, in kvmppc_h_put_tce_indirect()
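Lines 637-704 are the indirect variant: tce_list names a guest page full of TCEs, so ua is used twice, first to reach the list itself (lines 662, 666), then once per TCE in the list (lines 697, 704). A sketch with the same caveats as above; the table lookup, per-TCE validation, SRCU locking, and H_* codes are elided or assumed:

/* Sketch: H_PUT_TCE_INDIRECT, reduced to the two uses of ua. */
long kvmppc_h_put_tce_indirect(struct kvm_vcpu *vcpu,
		unsigned long liobn, unsigned long ioba,
		unsigned long tce_list, unsigned long npages)
{
	struct kvmppc_spapr_tce_table *stt = kvmppc_find_table(vcpu->kvm, liobn);
	struct kvmppc_spapr_tce_iommu_table *stit;
	unsigned long entry, ua = 0;
	u64 __user *tces;
	u64 tce;
	long i, ret;

	if (!stt)
		return H_TOO_HARD;
	entry = ioba >> stt->page_shift;

	/* Lines 662/666: the TCE list lives in guest memory, so it is
	 * translated to a userspace pointer and read with get_user(). */
	if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua))
		return H_TOO_HARD;
	tces = (u64 __user *) ua;

	for (i = 0; i < npages; ++i) {
		if (get_user(tce, tces + i))
			return H_TOO_HARD;
		tce = be64_to_cpu(tce);

		/* Line 697: each TCE in the list gets its own translation. */
		if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
			return H_PARAMETER;

		list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
			/* Line 704: map entry + i at this iteration's ua. */
			ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
					stit->tbl, entry + i, ua,
					iommu_tce_direction(tce));
			if (ret != H_SUCCESS)
				return ret;
		}
		kvmppc_tce_put(stt, entry + i, tce);
	}

	return H_SUCCESS;
}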