Lines Matching refs:area
73 dmab->area = __snd_dma_alloc_pages(dmab, size); in snd_dma_alloc_dir_pages()
74 if (!dmab->area) in snd_dma_alloc_dir_pages()
109 if (! dmab->area) in snd_dma_alloc_pages_fallback()
186 struct vm_area_struct *area) in snd_dma_buffer_mmap() argument
194 return ops->mmap(dmab, area); in snd_dma_buffer_mmap()
252 return virt_to_page(dmab->area + offset); in snd_sgbuf_get_page()
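A minimal sketch (not part of the listing) of how a caller might use the generic helpers matched above, assuming a device pointer and VMA supplied by the caller: snd_dma_alloc_dir_pages() fills dmab->area with the kernel-virtual address of the buffer, and snd_dma_buffer_mmap() forwards the VMA to the per-type ops->mmap callback seen on line 194.

#include <linux/dma-mapping.h>
#include <sound/memalloc.h>

/*
 * Hedged sketch: allocate a device-type DMA buffer and hand it to user
 * space through the generic mmap dispatcher.  "dev" and "vma" are
 * placeholders supplied by the caller, not names from the listing.
 */
static int example_alloc_and_mmap(struct device *dev,
                                  struct vm_area_struct *vma,
                                  struct snd_dma_buffer *dmab)
{
        int err;

        err = snd_dma_alloc_dir_pages(SNDRV_DMA_TYPE_DEV, dev,
                                      DMA_BIDIRECTIONAL, 64 * 1024, dmab);
        if (err < 0)
                return err;

        /* dispatches to the buffer type's ops->mmap (line 194) */
        return snd_dma_buffer_mmap(dmab, vma);
}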
327 do_free_pages(dmab->area, dmab->bytes, false); in snd_dma_continuous_free()
331 struct vm_area_struct *area) in snd_dma_continuous_mmap() argument
333 return remap_pfn_range(area, area->vm_start, in snd_dma_continuous_mmap()
335 area->vm_end - area->vm_start, in snd_dma_continuous_mmap()
336 area->vm_page_prot); in snd_dma_continuous_mmap()
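A sketch of the remap_pfn_range() pattern behind lines 333-336: a physically contiguous buffer is exposed to user space by remapping its page frame number across the whole VMA. Deriving the pfn from virt_to_page() is an assumption that only holds for directly mapped (lowmem) buffers; the real code may derive it from the DMA address instead.

#include <linux/mm.h>

/*
 * Hedged sketch: map a physically contiguous kernel buffer ("buf" stands
 * in for dmab->area) into the user VMA in one go.
 */
static int example_contig_mmap(void *buf, struct vm_area_struct *area)
{
        return remap_pfn_range(area, area->vm_start,
                               page_to_pfn(virt_to_page(buf)),
                               area->vm_end - area->vm_start,
                               area->vm_page_prot);
}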
355 vfree(dmab->area); in snd_dma_vmalloc_free()
359 struct vm_area_struct *area) in snd_dma_vmalloc_mmap() argument
361 return remap_vmalloc_range(area, dmab->area, 0); in snd_dma_vmalloc_mmap()
365 page_to_phys(vmalloc_to_page((dmab)->area + (offset)))
376 return vmalloc_to_page(dmab->area + offset); in snd_dma_vmalloc_get_page()
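A sketch of the vmalloc-backed variant shown in lines 355-376: the buffer is a plain vmalloc() area, user mappings are built with remap_vmalloc_range(), and individual pages are resolved with vmalloc_to_page() for the page/addr accessors. "buf" stands in for dmab->area.

#include <linux/mm.h>
#include <linux/vmalloc.h>

/*
 * Hedged sketch: map a vmalloc'ed buffer into user space; the helper
 * walks the vmalloc page tables internally, so no pfn math is needed.
 */
static int example_vmalloc_mmap(void *buf, struct vm_area_struct *area)
{
        return remap_vmalloc_range(area, buf, 0);
}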
443 if (pool && dmab->area) in snd_dma_iram_free()
444 gen_pool_free(pool, (unsigned long)dmab->area, dmab->bytes); in snd_dma_iram_free()
448 struct vm_area_struct *area) in snd_dma_iram_mmap() argument
450 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_iram_mmap()
451 return remap_pfn_range(area, area->vm_start, in snd_dma_iram_mmap()
453 area->vm_end - area->vm_start, in snd_dma_iram_mmap()
454 area->vm_page_prot); in snd_dma_iram_mmap()
474 dma_free_coherent(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_dev_free()
478 struct vm_area_struct *area) in snd_dma_dev_mmap() argument
480 return dma_mmap_coherent(dmab->dev.dev, area, in snd_dma_dev_mmap()
481 dmab->area, dmab->addr, dmab->bytes); in snd_dma_dev_mmap()
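A sketch of the coherent-DMA variant in lines 474-481: the buffer comes from dma_alloc_coherent(), and dma_mmap_coherent() builds the user mapping from the same CPU address / DMA handle pair, so the user view stays coherent with the device.

#include <linux/dma-mapping.h>

/*
 * Hedged sketch: cpu_addr/dma_addr are assumed to come from a prior
 * dma_alloc_coherent() call (dmab->area and dmab->addr in the listing).
 */
static int example_dev_mmap(struct device *dev, struct vm_area_struct *area,
                            void *cpu_addr, dma_addr_t dma_addr, size_t size)
{
        return dma_mmap_coherent(dev, area, cpu_addr, dma_addr, size);
}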
502 do_free_pages(dmab->area, dmab->bytes, true); in snd_dma_wc_free()
506 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
508 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_wc_mmap()
509 return snd_dma_continuous_mmap(dmab, area); in snd_dma_wc_mmap()
519 dma_free_wc(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_wc_free()
523 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
525 return dma_mmap_wc(dmab->dev.dev, area, in snd_dma_wc_mmap()
526 dmab->area, dmab->addr, dmab->bytes); in snd_dma_wc_mmap()
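A sketch of the write-combined variants above: when the buffer was obtained with dma_alloc_wc(), the matching dma_mmap_wc() helper sets up the user mapping (lines 525-526); the page-based variant instead switches the VMA protection to write-combining before the ordinary remap (lines 508-509).

#include <linux/dma-mapping.h>

/*
 * Hedged sketch of the dma_alloc_wc()/dma_mmap_wc() pairing; the helper
 * applies the write-combine attribute itself, so no pgprot fixup is
 * needed here.
 */
static int example_wc_mmap(struct device *dev, struct vm_area_struct *area,
                           void *cpu_addr, dma_addr_t dma_addr, size_t size)
{
        return dma_mmap_wc(dev, area, cpu_addr, dma_addr, size);
}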
572 dma_vunmap_noncontiguous(dmab->dev.dev, dmab->area); in snd_dma_noncontig_free()
578 struct vm_area_struct *area) in snd_dma_noncontig_mmap() argument
580 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_noncontig_mmap()
590 invalidate_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
596 flush_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
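A sketch of the cache maintenance behind lines 590 and 596: the non-contiguous buffer is accessed through a vmap'ed kernel alias, so that alias is flushed before the device reads CPU-written data and invalidated before the CPU reads device-written data. "vaddr" and "bytes" stand in for dmab->area and dmab->bytes; the scatterlist-level sync done by the real code is omitted.

#include <linux/highmem.h>

/* Hedged sketch: CPU has written the buffer, device will read it next. */
static void example_sync_for_device(void *vaddr, size_t bytes)
{
        flush_kernel_vmap_range(vaddr, bytes);
}

/* Hedged sketch: device has written the buffer, CPU will read it next. */
static void example_sync_for_cpu(void *vaddr, size_t bytes)
{
        invalidate_kernel_vmap_range(vaddr, bytes);
}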
700 struct vm_area_struct *area) in snd_dma_sg_wc_mmap() argument
702 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_sg_wc_mmap()
703 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_sg_wc_mmap()
832 vunmap(dmab->area); in snd_dma_sg_fallback_free()
846 struct vm_area_struct *area) in snd_dma_sg_fallback_mmap() argument
851 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_sg_fallback_mmap()
852 return vm_map_pages(area, sgbuf->pages, sgbuf->count); in snd_dma_sg_fallback_mmap()
882 dma_free_noncoherent(dmab->dev.dev, dmab->bytes, dmab->area, in snd_dma_noncoherent_free()
887 struct vm_area_struct *area) in snd_dma_noncoherent_mmap() argument
889 area->vm_page_prot = vm_get_page_prot(area->vm_flags); in snd_dma_noncoherent_mmap()
890 return dma_mmap_pages(dmab->dev.dev, area, in snd_dma_noncoherent_mmap()
891 area->vm_end - area->vm_start, in snd_dma_noncoherent_mmap()
892 virt_to_page(dmab->area)); in snd_dma_noncoherent_mmap()
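A sketch of the non-coherent mapping suggested by lines 887-892: the CPU address is assumed to come from dma_alloc_noncoherent() (and to be freed with dma_free_noncoherent() at teardown, line 882); dma_mmap_pages() then maps the first page of that physically contiguous allocation across the VMA with the default, cached protection.

#include <linux/dma-mapping.h>
#include <linux/mm.h>

/*
 * Hedged sketch: "cpu_addr" stands in for dmab->area and is assumed to
 * be a physically contiguous, directly mapped allocation.
 */
static int example_noncoherent_mmap(struct device *dev,
                                    struct vm_area_struct *area,
                                    void *cpu_addr)
{
        /* default protection: the user mapping is cached, like the kernel view */
        area->vm_page_prot = vm_get_page_prot(area->vm_flags);
        return dma_mmap_pages(dev, area, area->vm_end - area->vm_start,
                              virt_to_page(cpu_addr));
}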