Lines matching refs:dmab in sound/core/memalloc.c (ALSA DMA buffer allocation helpers)

27 static const struct snd_malloc_ops *snd_dma_get_ops(struct snd_dma_buffer *dmab);
30 static void *snd_dma_sg_fallback_alloc(struct snd_dma_buffer *dmab, size_t size);
33 static void *__snd_dma_alloc_pages(struct snd_dma_buffer *dmab, size_t size) in __snd_dma_alloc_pages() argument
35 const struct snd_malloc_ops *ops = snd_dma_get_ops(dmab); in __snd_dma_alloc_pages()
39 return ops->alloc(dmab, size); in __snd_dma_alloc_pages()
59 struct snd_dma_buffer *dmab) in snd_dma_alloc_dir_pages() argument
63 if (WARN_ON(!dmab)) in snd_dma_alloc_dir_pages()
67 dmab->dev.type = type; in snd_dma_alloc_dir_pages()
68 dmab->dev.dev = device; in snd_dma_alloc_dir_pages()
69 dmab->dev.dir = dir; in snd_dma_alloc_dir_pages()
70 dmab->bytes = 0; in snd_dma_alloc_dir_pages()
71 dmab->addr = 0; in snd_dma_alloc_dir_pages()
72 dmab->private_data = NULL; in snd_dma_alloc_dir_pages()
73 dmab->area = __snd_dma_alloc_pages(dmab, size); in snd_dma_alloc_dir_pages()
74 if (!dmab->area) in snd_dma_alloc_dir_pages()
76 dmab->bytes = size; in snd_dma_alloc_dir_pages()
97 struct snd_dma_buffer *dmab) in snd_dma_alloc_pages_fallback() argument
101 while ((err = snd_dma_alloc_pages(type, device, size, dmab)) < 0) { in snd_dma_alloc_pages_fallback()
109 if (! dmab->area) in snd_dma_alloc_pages_fallback()
121 void snd_dma_free_pages(struct snd_dma_buffer *dmab) in snd_dma_free_pages() argument
123 const struct snd_malloc_ops *ops = snd_dma_get_ops(dmab); in snd_dma_free_pages()
126 ops->free(dmab); in snd_dma_free_pages()
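
The lines above cover the generic entry points: snd_dma_alloc_dir_pages() initializes the snd_dma_buffer and dispatches to the per-type ops, and snd_dma_free_pages() releases through the same ops. A minimal usage sketch, assuming the signatures exported by <sound/memalloc.h>; the device pointer, size, and helper names are illustrative:

#include <linux/dma-mapping.h>
#include <sound/memalloc.h>

/* Illustrative helper: allocate a 64 KiB bidirectional device buffer. */
static int example_alloc_buffer(struct device *dev, struct snd_dma_buffer *dmab)
{
	return snd_dma_alloc_dir_pages(SNDRV_DMA_TYPE_DEV, dev,
				       DMA_BIDIRECTIONAL, 64 * 1024, dmab);
}

/* Release through the matching ops->free() callback. */
static void example_free_buffer(struct snd_dma_buffer *dmab)
{
	snd_dma_free_pages(dmab);
}
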
156 struct snd_dma_buffer *dmab; in snd_devm_alloc_dir_pages() local
163 dmab = devres_alloc(__snd_release_pages, sizeof(*dmab), GFP_KERNEL); in snd_devm_alloc_dir_pages()
164 if (!dmab) in snd_devm_alloc_dir_pages()
167 err = snd_dma_alloc_dir_pages(type, dev, dir, size, dmab); in snd_devm_alloc_dir_pages()
169 devres_free(dmab); in snd_devm_alloc_dir_pages()
173 devres_add(dev, dmab); in snd_devm_alloc_dir_pages()
174 return dmab; in snd_devm_alloc_dir_pages()
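
The devres-managed variant above ties the buffer's lifetime to the device; a sketch assuming snd_devm_alloc_dir_pages() returns NULL on failure (as the devres_free() error path above suggests), with illustrative names:

/* Illustrative only: allocate a device-managed DMA buffer during probe;
 * it is freed automatically by devres when the device is unbound. */
static int example_probe_alloc(struct device *dev)
{
	struct snd_dma_buffer *dmab;

	dmab = snd_devm_alloc_dir_pages(dev, SNDRV_DMA_TYPE_DEV,
					DMA_TO_DEVICE, 128 * 1024);
	if (!dmab)
		return -ENOMEM;

	/* dmab->area and dmab->addr stay valid until the device goes away. */
	return 0;
}
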
185 int snd_dma_buffer_mmap(struct snd_dma_buffer *dmab, in snd_dma_buffer_mmap() argument
190 if (!dmab) in snd_dma_buffer_mmap()
192 ops = snd_dma_get_ops(dmab); in snd_dma_buffer_mmap()
194 return ops->mmap(dmab, area); in snd_dma_buffer_mmap()
206 void snd_dma_buffer_sync(struct snd_dma_buffer *dmab, in snd_dma_buffer_sync() argument
211 if (!dmab || !dmab->dev.need_sync) in snd_dma_buffer_sync()
213 ops = snd_dma_get_ops(dmab); in snd_dma_buffer_sync()
215 ops->sync(dmab, mode); in snd_dma_buffer_sync()
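
snd_dma_buffer_mmap() and snd_dma_buffer_sync() above are thin dispatchers; the sync call is a no-op unless dmab->dev.need_sync was set at allocation time. A hedged sketch of the usual ordering for non-coherent buffer types, assuming the SNDRV_DMA_SYNC_CPU / SNDRV_DMA_SYNC_DEVICE modes from <sound/memalloc.h>:

/* Illustrative only: CPU writes, then hands the buffer to the device. */
static void example_cpu_fill(struct snd_dma_buffer *dmab)
{
	memset(dmab->area, 0, dmab->bytes);
	snd_dma_buffer_sync(dmab, SNDRV_DMA_SYNC_DEVICE);
}

/* Illustrative only: make device-written data visible to the CPU. */
static void example_cpu_read(struct snd_dma_buffer *dmab)
{
	snd_dma_buffer_sync(dmab, SNDRV_DMA_SYNC_CPU);
	/* ... read dmab->area here ... */
}
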
227 dma_addr_t snd_sgbuf_get_addr(struct snd_dma_buffer *dmab, size_t offset) in snd_sgbuf_get_addr() argument
229 const struct snd_malloc_ops *ops = snd_dma_get_ops(dmab); in snd_sgbuf_get_addr()
232 return ops->get_addr(dmab, offset); in snd_sgbuf_get_addr()
234 return dmab->addr + offset; in snd_sgbuf_get_addr()
245 struct page *snd_sgbuf_get_page(struct snd_dma_buffer *dmab, size_t offset) in snd_sgbuf_get_page() argument
247 const struct snd_malloc_ops *ops = snd_dma_get_ops(dmab); in snd_sgbuf_get_page()
250 return ops->get_page(dmab, offset); in snd_sgbuf_get_page()
252 return virt_to_page(dmab->area + offset); in snd_sgbuf_get_page()
265 unsigned int snd_sgbuf_get_chunk_size(struct snd_dma_buffer *dmab, in snd_sgbuf_get_chunk_size() argument
268 const struct snd_malloc_ops *ops = snd_dma_get_ops(dmab); in snd_sgbuf_get_chunk_size()
271 return ops->get_chunk_size(dmab, ofs, size); in snd_sgbuf_get_chunk_size()
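
The three accessors above (snd_sgbuf_get_addr(), snd_sgbuf_get_page(), snd_sgbuf_get_chunk_size()) let a driver walk a possibly scatter-gather buffer in physically contiguous runs. A minimal sketch; program_descriptor() is a hypothetical stand-in for driver-specific descriptor setup:

/* Hypothetical placeholder for hardware descriptor programming. */
static void program_descriptor(dma_addr_t addr, unsigned int len) { }

/* Describe 'bytes' of the buffer starting at 'ofs' as contiguous chunks. */
static void example_walk_chunks(struct snd_dma_buffer *dmab,
				unsigned int ofs, unsigned int bytes)
{
	while (bytes) {
		unsigned int chunk = snd_sgbuf_get_chunk_size(dmab, ofs, bytes);

		program_descriptor(snd_sgbuf_get_addr(dmab, ofs), chunk);
		ofs += chunk;
		bytes -= chunk;
	}
}
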
320 static void *snd_dma_continuous_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_continuous_alloc() argument
322 return do_alloc_pages(dmab->dev.dev, size, &dmab->addr, false); in snd_dma_continuous_alloc()
325 static void snd_dma_continuous_free(struct snd_dma_buffer *dmab) in snd_dma_continuous_free() argument
327 do_free_pages(dmab->area, dmab->bytes, false); in snd_dma_continuous_free()
330 static int snd_dma_continuous_mmap(struct snd_dma_buffer *dmab, in snd_dma_continuous_mmap() argument
334 dmab->addr >> PAGE_SHIFT, in snd_dma_continuous_mmap()
348 static void *snd_dma_vmalloc_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_vmalloc_alloc() argument
353 static void snd_dma_vmalloc_free(struct snd_dma_buffer *dmab) in snd_dma_vmalloc_free() argument
355 vfree(dmab->area); in snd_dma_vmalloc_free()
358 static int snd_dma_vmalloc_mmap(struct snd_dma_buffer *dmab, in snd_dma_vmalloc_mmap() argument
361 return remap_vmalloc_range(area, dmab->area, 0); in snd_dma_vmalloc_mmap()
364 #define get_vmalloc_page_addr(dmab, offset) \ argument
365 page_to_phys(vmalloc_to_page((dmab)->area + (offset)))
367 static dma_addr_t snd_dma_vmalloc_get_addr(struct snd_dma_buffer *dmab, in snd_dma_vmalloc_get_addr() argument
370 return get_vmalloc_page_addr(dmab, offset) + offset % PAGE_SIZE; in snd_dma_vmalloc_get_addr()
373 static struct page *snd_dma_vmalloc_get_page(struct snd_dma_buffer *dmab, in snd_dma_vmalloc_get_page() argument
376 return vmalloc_to_page(dmab->area + offset); in snd_dma_vmalloc_get_page()
380 snd_dma_vmalloc_get_chunk_size(struct snd_dma_buffer *dmab, in snd_dma_vmalloc_get_chunk_size() argument
389 addr = get_vmalloc_page_addr(dmab, start); in snd_dma_vmalloc_get_chunk_size()
395 if (get_vmalloc_page_addr(dmab, start) != addr) in snd_dma_vmalloc_get_chunk_size()
416 static void *snd_dma_iram_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_iram_alloc() argument
418 struct device *dev = dmab->dev.dev; in snd_dma_iram_alloc()
425 dmab->private_data = pool; in snd_dma_iram_alloc()
427 p = gen_pool_dma_alloc_align(pool, size, &dmab->addr, PAGE_SIZE); in snd_dma_iram_alloc()
435 dmab->dev.type = SNDRV_DMA_TYPE_DEV; in snd_dma_iram_alloc()
436 return __snd_dma_alloc_pages(dmab, size); in snd_dma_iram_alloc()
439 static void snd_dma_iram_free(struct snd_dma_buffer *dmab) in snd_dma_iram_free() argument
441 struct gen_pool *pool = dmab->private_data; in snd_dma_iram_free()
443 if (pool && dmab->area) in snd_dma_iram_free()
444 gen_pool_free(pool, (unsigned long)dmab->area, dmab->bytes); in snd_dma_iram_free()
447 static int snd_dma_iram_mmap(struct snd_dma_buffer *dmab, in snd_dma_iram_mmap() argument
452 dmab->addr >> PAGE_SHIFT, in snd_dma_iram_mmap()
467 static void *snd_dma_dev_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_dev_alloc() argument
469 return dma_alloc_coherent(dmab->dev.dev, size, &dmab->addr, DEFAULT_GFP); in snd_dma_dev_alloc()
472 static void snd_dma_dev_free(struct snd_dma_buffer *dmab) in snd_dma_dev_free() argument
474 dma_free_coherent(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_dev_free()
477 static int snd_dma_dev_mmap(struct snd_dma_buffer *dmab, in snd_dma_dev_mmap() argument
480 return dma_mmap_coherent(dmab->dev.dev, area, in snd_dma_dev_mmap()
481 dmab->area, dmab->addr, dmab->bytes); in snd_dma_dev_mmap()
495 static void *snd_dma_wc_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_wc_alloc() argument
497 return do_alloc_pages(dmab->dev.dev, size, &dmab->addr, true); in snd_dma_wc_alloc()
500 static void snd_dma_wc_free(struct snd_dma_buffer *dmab) in snd_dma_wc_free() argument
502 do_free_pages(dmab->area, dmab->bytes, true); in snd_dma_wc_free()
505 static int snd_dma_wc_mmap(struct snd_dma_buffer *dmab, in snd_dma_wc_mmap() argument
509 return snd_dma_continuous_mmap(dmab, area); in snd_dma_wc_mmap()
512 static void *snd_dma_wc_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_wc_alloc() argument
514 return dma_alloc_wc(dmab->dev.dev, size, &dmab->addr, DEFAULT_GFP); in snd_dma_wc_alloc()
517 static void snd_dma_wc_free(struct snd_dma_buffer *dmab) in snd_dma_wc_free() argument
519 dma_free_wc(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_wc_free()
522 static int snd_dma_wc_mmap(struct snd_dma_buffer *dmab, in snd_dma_wc_mmap() argument
525 return dma_mmap_wc(dmab->dev.dev, area, in snd_dma_wc_mmap()
526 dmab->area, dmab->addr, dmab->bytes); in snd_dma_wc_mmap()
539 static void *snd_dma_noncontig_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_noncontig_alloc() argument
546 return snd_dma_sg_fallback_alloc(dmab, size); in snd_dma_noncontig_alloc()
548 sgt = dma_alloc_noncontiguous(dmab->dev.dev, size, dmab->dev.dir, in snd_dma_noncontig_alloc()
551 if (!sgt && !get_dma_ops(dmab->dev.dev)) in snd_dma_noncontig_alloc()
552 return snd_dma_sg_fallback_alloc(dmab, size); in snd_dma_noncontig_alloc()
557 dmab->dev.need_sync = dma_need_sync(dmab->dev.dev, in snd_dma_noncontig_alloc()
559 p = dma_vmap_noncontiguous(dmab->dev.dev, size, sgt); in snd_dma_noncontig_alloc()
561 dmab->private_data = sgt; in snd_dma_noncontig_alloc()
563 dmab->addr = snd_sgbuf_get_addr(dmab, 0); in snd_dma_noncontig_alloc()
565 dma_free_noncontiguous(dmab->dev.dev, size, sgt, dmab->dev.dir); in snd_dma_noncontig_alloc()
570 static void snd_dma_noncontig_free(struct snd_dma_buffer *dmab) in snd_dma_noncontig_free() argument
572 dma_vunmap_noncontiguous(dmab->dev.dev, dmab->area); in snd_dma_noncontig_free()
573 dma_free_noncontiguous(dmab->dev.dev, dmab->bytes, dmab->private_data, in snd_dma_noncontig_free()
574 dmab->dev.dir); in snd_dma_noncontig_free()
577 static int snd_dma_noncontig_mmap(struct snd_dma_buffer *dmab, in snd_dma_noncontig_mmap() argument
580 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_noncontig_mmap()
581 dmab->bytes, dmab->private_data); in snd_dma_noncontig_mmap()
584 static void snd_dma_noncontig_sync(struct snd_dma_buffer *dmab, in snd_dma_noncontig_sync() argument
588 if (dmab->dev.dir == DMA_TO_DEVICE) in snd_dma_noncontig_sync()
590 invalidate_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
591 dma_sync_sgtable_for_cpu(dmab->dev.dev, dmab->private_data, in snd_dma_noncontig_sync()
592 dmab->dev.dir); in snd_dma_noncontig_sync()
594 if (dmab->dev.dir == DMA_FROM_DEVICE) in snd_dma_noncontig_sync()
596 flush_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
597 dma_sync_sgtable_for_device(dmab->dev.dev, dmab->private_data, in snd_dma_noncontig_sync()
598 dmab->dev.dir); in snd_dma_noncontig_sync()
602 static inline void snd_dma_noncontig_iter_set(struct snd_dma_buffer *dmab, in snd_dma_noncontig_iter_set() argument
606 struct sg_table *sgt = dmab->private_data; in snd_dma_noncontig_iter_set()
612 static dma_addr_t snd_dma_noncontig_get_addr(struct snd_dma_buffer *dmab, in snd_dma_noncontig_get_addr() argument
617 snd_dma_noncontig_iter_set(dmab, &iter.base, offset); in snd_dma_noncontig_get_addr()
622 static struct page *snd_dma_noncontig_get_page(struct snd_dma_buffer *dmab, in snd_dma_noncontig_get_page() argument
627 snd_dma_noncontig_iter_set(dmab, &iter, offset); in snd_dma_noncontig_get_page()
633 snd_dma_noncontig_get_chunk_size(struct snd_dma_buffer *dmab, in snd_dma_noncontig_get_chunk_size() argument
642 snd_dma_noncontig_iter_set(dmab, &iter.base, start); in snd_dma_noncontig_get_chunk_size()
674 static void *snd_dma_sg_wc_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_sg_wc_alloc() argument
676 void *p = snd_dma_noncontig_alloc(dmab, size); in snd_dma_sg_wc_alloc()
677 struct sg_table *sgt = dmab->private_data; in snd_dma_sg_wc_alloc()
682 if (dmab->dev.type != SNDRV_DMA_TYPE_DEV_WC_SG) in snd_dma_sg_wc_alloc()
689 static void snd_dma_sg_wc_free(struct snd_dma_buffer *dmab) in snd_dma_sg_wc_free() argument
691 struct sg_table *sgt = dmab->private_data; in snd_dma_sg_wc_free()
696 snd_dma_noncontig_free(dmab); in snd_dma_sg_wc_free()
699 static int snd_dma_sg_wc_mmap(struct snd_dma_buffer *dmab, in snd_dma_sg_wc_mmap() argument
703 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_sg_wc_mmap()
704 dmab->bytes, dmab->private_data); in snd_dma_sg_wc_mmap()
726 static void __snd_dma_sg_fallback_free(struct snd_dma_buffer *dmab, in __snd_dma_sg_fallback_free() argument
740 dma_free_coherent(dmab->dev.dev, size << PAGE_SHIFT, in __snd_dma_sg_fallback_free()
754 static void *snd_dma_sg_fallback_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_sg_fallback_alloc() argument
764 if (dmab->dev.type == SNDRV_DMA_TYPE_DEV_SG) in snd_dma_sg_fallback_alloc()
765 dmab->dev.type = SNDRV_DMA_TYPE_DEV_SG_FALLBACK; in snd_dma_sg_fallback_alloc()
766 else if (dmab->dev.type == SNDRV_DMA_TYPE_DEV_WC_SG) in snd_dma_sg_fallback_alloc()
767 dmab->dev.type = SNDRV_DMA_TYPE_DEV_WC_SG_FALLBACK; in snd_dma_sg_fallback_alloc()
786 p = dma_alloc_coherent(dmab->dev.dev, chunk, &addr, DEFAULT_GFP); in snd_dma_sg_fallback_alloc()
788 p = do_alloc_pages(dmab->dev.dev, chunk, &addr, false); in snd_dma_sg_fallback_alloc()
813 if (dmab->dev.type == SNDRV_DMA_TYPE_DEV_WC_SG_FALLBACK) in snd_dma_sg_fallback_alloc()
816 dmab->private_data = sgbuf; in snd_dma_sg_fallback_alloc()
818 dmab->addr = sgbuf->addrs[0] & PAGE_MASK; in snd_dma_sg_fallback_alloc()
822 __snd_dma_sg_fallback_free(dmab, sgbuf); in snd_dma_sg_fallback_alloc()
826 static void snd_dma_sg_fallback_free(struct snd_dma_buffer *dmab) in snd_dma_sg_fallback_free() argument
828 struct snd_dma_sg_fallback *sgbuf = dmab->private_data; in snd_dma_sg_fallback_free()
830 if (dmab->dev.type == SNDRV_DMA_TYPE_DEV_WC_SG_FALLBACK) in snd_dma_sg_fallback_free()
832 vunmap(dmab->area); in snd_dma_sg_fallback_free()
833 __snd_dma_sg_fallback_free(dmab, dmab->private_data); in snd_dma_sg_fallback_free()
836 static dma_addr_t snd_dma_sg_fallback_get_addr(struct snd_dma_buffer *dmab, in snd_dma_sg_fallback_get_addr() argument
839 struct snd_dma_sg_fallback *sgbuf = dmab->private_data; in snd_dma_sg_fallback_get_addr()
845 static int snd_dma_sg_fallback_mmap(struct snd_dma_buffer *dmab, in snd_dma_sg_fallback_mmap() argument
848 struct snd_dma_sg_fallback *sgbuf = dmab->private_data; in snd_dma_sg_fallback_mmap()
850 if (dmab->dev.type == SNDRV_DMA_TYPE_DEV_WC_SG_FALLBACK) in snd_dma_sg_fallback_mmap()
869 static void *snd_dma_noncoherent_alloc(struct snd_dma_buffer *dmab, size_t size) in snd_dma_noncoherent_alloc() argument
873 p = dma_alloc_noncoherent(dmab->dev.dev, size, &dmab->addr, in snd_dma_noncoherent_alloc()
874 dmab->dev.dir, DEFAULT_GFP); in snd_dma_noncoherent_alloc()
876 dmab->dev.need_sync = dma_need_sync(dmab->dev.dev, dmab->addr); in snd_dma_noncoherent_alloc()
880 static void snd_dma_noncoherent_free(struct snd_dma_buffer *dmab) in snd_dma_noncoherent_free() argument
882 dma_free_noncoherent(dmab->dev.dev, dmab->bytes, dmab->area, in snd_dma_noncoherent_free()
883 dmab->addr, dmab->dev.dir); in snd_dma_noncoherent_free()
886 static int snd_dma_noncoherent_mmap(struct snd_dma_buffer *dmab, in snd_dma_noncoherent_mmap() argument
890 return dma_mmap_pages(dmab->dev.dev, area, in snd_dma_noncoherent_mmap()
892 virt_to_page(dmab->area)); in snd_dma_noncoherent_mmap()
895 static void snd_dma_noncoherent_sync(struct snd_dma_buffer *dmab, in snd_dma_noncoherent_sync() argument
899 if (dmab->dev.dir != DMA_TO_DEVICE) in snd_dma_noncoherent_sync()
900 dma_sync_single_for_cpu(dmab->dev.dev, dmab->addr, in snd_dma_noncoherent_sync()
901 dmab->bytes, dmab->dev.dir); in snd_dma_noncoherent_sync()
903 if (dmab->dev.dir != DMA_FROM_DEVICE) in snd_dma_noncoherent_sync()
904 dma_sync_single_for_device(dmab->dev.dev, dmab->addr, in snd_dma_noncoherent_sync()
905 dmab->bytes, dmab->dev.dir); in snd_dma_noncoherent_sync()
942 static const struct snd_malloc_ops *snd_dma_get_ops(struct snd_dma_buffer *dmab) in snd_dma_get_ops() argument
944 if (WARN_ON_ONCE(!dmab)) in snd_dma_get_ops()
946 if (WARN_ON_ONCE(dmab->dev.type <= SNDRV_DMA_TYPE_UNKNOWN || in snd_dma_get_ops()
947 dmab->dev.type >= ARRAY_SIZE(snd_dma_ops))) in snd_dma_get_ops()
949 return snd_dma_ops[dmab->dev.type]; in snd_dma_get_ops()
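
snd_dma_get_ops() above is a bounds-checked lookup in a table indexed by dmab->dev.type. An abridged sketch of that dispatch pattern; the struct fields and table entries shown are a subset chosen for illustration, not the kernel's full snd_malloc_ops definition:

/* Abridged sketch of the type-indexed ops dispatch. */
struct example_malloc_ops {
	void *(*alloc)(struct snd_dma_buffer *dmab, size_t size);
	void (*free)(struct snd_dma_buffer *dmab);
	int (*mmap)(struct snd_dma_buffer *dmab, struct vm_area_struct *area);
};

static const struct example_malloc_ops example_continuous_ops = {
	.alloc = snd_dma_continuous_alloc,	/* callbacks listed above */
	.free  = snd_dma_continuous_free,
	.mmap  = snd_dma_continuous_mmap,
};

static const struct example_malloc_ops *example_ops_table[] = {
	[SNDRV_DMA_TYPE_CONTINUOUS] = &example_continuous_ops,
	/* ... one entry per SNDRV_DMA_TYPE_* backend ... */
};

static const struct example_malloc_ops *example_get_ops(struct snd_dma_buffer *dmab)
{
	if (dmab->dev.type <= SNDRV_DMA_TYPE_UNKNOWN ||
	    dmab->dev.type >= ARRAY_SIZE(example_ops_table))
		return NULL;
	return example_ops_table[dmab->dev.type];
}
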