Searched refs:__GFP_DMA (Results 1 – 16 of 16) sorted by relevance
70 #define __GFP_DMA ((__force gfp_t)___GFP_DMA) macro
74 #define GFP_ZONEMASK (__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
332 #define GFP_DMA __GFP_DMA
395 (IS_ENABLED(CONFIG_ZONE_DMA) ? __GFP_DMA : 0) | \
422 if (IS_ENABLED(CONFIG_ZONE_DMA) && (flags & __GFP_DMA)) in kmalloc_type()
35 return __GFP_DMA; in xen_swiotlb_gfp()
448 if (IS_ENABLED(CONFIG_ZONE_DMA) && (gfp_mask & __GFP_DMA)) in swiotlb_init_late()
629 gfp |= __GFP_DMA; in swiotlb_alloc_tlb()
636 !(gfp & (__GFP_DMA32 | __GFP_DMA))) in swiotlb_alloc_tlb()
639 !(gfp & __GFP_DMA)) in swiotlb_alloc_tlb()
640 gfp = (gfp & ~__GFP_DMA32) | __GFP_DMA; in swiotlb_alloc_tlb()
48 if (gfp & __GFP_DMA) in dma_atomic_pool_size_add()
517 flag &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM); in dma_alloc_attrs()
565 if (WARN_ON_ONCE(gfp & (__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM))) in __dma_alloc_pages()
288 as->pd = alloc_page(GFP_KERNEL | __GFP_DMA | __GFP_ZERO); in tegra_smmu_domain_alloc()
678 page = alloc_page(gfp | __GFP_DMA | __GFP_ZERO); in as_get_pde_page()
2514 if (WARN_ON_ONCE(gfp & (__GFP_COMP | __GFP_DMA | __GFP_DMA32 | in iommu_map()
2628 if (WARN_ON_ONCE(gfp & (__GFP_COMP | __GFP_DMA | __GFP_DMA32 | in iommu_map_sg()
878 gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM | __GFP_COMP); in __iommu_dma_alloc_noncontiguous()
687 gfp = GFP_ATOMIC|__GFP_DMA; in flags()
514 flag &= ~(__GFP_DMA | __GFP_HIGHMEM); in ps3_alloc_coherent()
89 flags = __GFP_DMA | __GFP_HIGH | __GFP_KSWAPD_RECLAIM; in vmlfb_alloc_vram_area()
562 gfp & ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM)); in __dma_alloc()
1114 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
3235 ((gfp_mask & __GFP_DMA) && !has_managed_dma())) in warn_alloc()
2867 #define OBJCGS_CLEAR_MASK (__GFP_DMA | __GFP_RECLAIMABLE | \