Lines Matching refs:v0

In nvkm_ummu_heap():
   61        struct nvif_mmu_heap_v0 v0;
   66    if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   67        if ((index = args->v0.index) >= mmu->heap_nr)
   69        args->v0.size = mmu->heap[index].size;
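
For context, the four hits above all sit inside one short method handler. The sketch below shows how they plausibly fit together; everything outside the matched lines (the function signature, local declarations and the error returns) is an assumption based on the usual nvkm handler shape, not copied from the file:

        static int
        nvkm_ummu_heap(struct nvkm_ummu *ummu, void *argv, u32 argc)
        {
                struct nvkm_mmu *mmu = ummu->mmu;        /* assumed plumbing */
                union {
                        struct nvif_mmu_heap_v0 v0;      /* line 61 */
                } *args = argv;
                int ret = -ENOSYS;
                u8 index;

                /* Consume and validate the versioned request header (no trailing data). */
                if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
                        if ((index = args->v0.index) >= mmu->heap_nr)
                                return -EINVAL;          /* assumed error code */
                        /* Reply fields are written back into the same buffer. */
                        args->v0.size = mmu->heap[index].size;
                }

                return ret;
        }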

In nvkm_ummu_type():
   81        struct nvif_mmu_type_v0 v0;
   86    if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   87        if ((index = args->v0.index) >= mmu->type_nr)
   90        args->v0.heap = mmu->type[index].heap;
   91        args->v0.vram = !!(type & NVKM_MEM_VRAM);
   92        args->v0.host = !!(type & NVKM_MEM_HOST);
   93        args->v0.comp = !!(type & NVKM_MEM_COMP);
   94        args->v0.disp = !!(type & NVKM_MEM_DISP);
   95        args->v0.kind = !!(type & NVKM_MEM_KIND);
   96        args->v0.mappable = !!(type & NVKM_MEM_MAPPABLE);
   97        args->v0.coherent = !!(type & NVKM_MEM_COHERENT);
   98        args->v0.uncached = !!(type & NVKM_MEM_UNCACHED);
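
One thing the match list hides: the type value tested on lines 91-98 is loaded by a line that does not mention v0, so it never shows up here. Presumably something along the following lines sits between the index check and the flag translation (an assumption inferred from the mmu->type[index] lookup on the neighbouring lines):

                type = mmu->type[index].type;    /* assumed: fetch the flag bits tested on lines 91-98 */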

In nvkm_ummu_kind():
  110        struct nvif_mmu_kind_v0 v0;
  119    if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, true))) {
  120        if (argc != args->v0.count * sizeof(*args->v0.data))
  122        if (args->v0.count > count)
  124        args->v0.kind_inv = kind_inv;
  125        memcpy(args->v0.data, kind, args->v0.count);
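
Note that this is the only handler that passes true as the final argument to nvif_unpack(); the others pass false. Judging from the checks on lines 120-125, that flag lets the request carry extra bytes after the v0 header: argc is left holding the size of that trailing payload, which must exactly cover args->v0.count entries of args->v0.data before the kind table is copied into the reply. A hedged restatement, with the surrounding declarations assumed rather than copied:

                /* 'kind', 'count' and 'kind_inv' are assumed to come from the mmu backend
                 * earlier in the function; only the v0 lines appear in the match list. */
                if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, true))) {
                        if (argc != args->v0.count * sizeof(*args->v0.data))
                                return -EINVAL;   /* assumed: trailing bytes must match count */
                        if (args->v0.count > count)
                                return -EINVAL;   /* assumed: cannot ask for more kinds than exist */
                        args->v0.kind_inv = kind_inv;
                        memcpy(args->v0.data, kind, args->v0.count);   /* copy kind table to the reply */
                }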

In nvkm_ummu_new():
  157        struct nvif_mmu_v0 v0;
  167    if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
  168        args->v0.dmabits = mmu->dma_bits;
  169        args->v0.heap_nr = mmu->heap_nr;
  170        args->v0.type_nr = mmu->type_nr;
  171        args->v0.kind_nr = kinds;
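
All four handlers share the same shape: a union wrapping the versioned v0 request struct, one nvif_unpack() call, and reply fields written back in place. From the call sites alone, nvif_unpack(ret, &argv, &argc, args->v0, vl, vh, more) appears to validate the header size and the version range (here vl and vh are both 0, so only version 0 is accepted), consume the header from (argv, argc), and reject leftover bytes unless more is true. The function below is only an illustration of those inferred semantics in kernel-style C, not the actual nouveau macro:

        /* Illustrative stand-in for the inferred nvif_unpack() behaviour. */
        static int
        example_unpack(int ret, void **argv, u32 *argc,
                       u32 hdr_size, u8 version, u8 vl, u8 vh, bool more)
        {
                if (ret != -ENOSYS || *argc < hdr_size ||
                    version < vl || version > vh)
                        return ret;                      /* leave the previous error in place */
                *argv  = (u8 *)*argv + hdr_size;         /* step past the v0 header... */
                *argc -= hdr_size;                       /* ...so argc counts trailing bytes */
                if (!more && *argc)
                        return -E2BIG;                   /* assumed: no extra payload allowed */
                return 0;
        }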