Lines Matching full:mem
33 struct dma_coherent_mem * mem) in dma_get_device_base() argument
35 if (mem->use_dev_dma_pfn_offset) in dma_get_device_base()
36 return (mem->pfn_base - dev->dma_pfn_offset) << PAGE_SHIFT; in dma_get_device_base()
38 return mem->device_base; in dma_get_device_base()
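
All of these hits live in the kernel's per-device coherent DMA pool allocator (kernel/dma/coherent.c in recent trees, drivers/base/dma-coherent.c in older ones), and every match dereferences the same struct dma_coherent_mem. A minimal sketch of that structure, reconstructed only from the fields the hits use; the exact layout and types in any given tree may differ:

    /* Sketch only: fields inferred from the matches in this listing. */
    struct dma_coherent_mem {
        void          *virt_base;             /* kernel virtual address of the pool */
        dma_addr_t     device_base;           /* bus/device address of the pool start */
        unsigned long  pfn_base;              /* first PFN backing the pool */
        int            size;                  /* pool size, in pages */
        int            flags;                 /* e.g. DMA_MEMORY_EXCLUSIVE */
        unsigned long *bitmap;                /* one bit per page, tracks allocations */
        spinlock_t     spinlock;              /* protects the bitmap */
        bool           use_dev_dma_pfn_offset; /* honour dev->dma_pfn_offset in dma_get_device_base() */
    };
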
43 struct dma_coherent_mem **mem) in dma_init_coherent_memory() argument
79 *mem = dma_mem; in dma_init_coherent_memory()
89 static void dma_release_coherent_memory(struct dma_coherent_mem *mem) in dma_release_coherent_memory() argument
91 if (!mem) in dma_release_coherent_memory()
94 memunmap(mem->virt_base); in dma_release_coherent_memory()
95 kfree(mem->bitmap); in dma_release_coherent_memory()
96 kfree(mem); in dma_release_coherent_memory()
100 struct dma_coherent_mem *mem) in dma_assign_coherent_memory() argument
108 dev->dma_mem = mem; in dma_assign_coherent_memory()
115 struct dma_coherent_mem *mem; in dma_declare_coherent_memory() local
118 ret = dma_init_coherent_memory(phys_addr, device_addr, size, flags, &mem); in dma_declare_coherent_memory()
122 ret = dma_assign_coherent_memory(dev, mem); in dma_declare_coherent_memory()
124 dma_release_coherent_memory(mem); in dma_declare_coherent_memory()
131 struct dma_coherent_mem *mem = dev->dma_mem; in dma_release_declared_memory() local
133 if (!mem) in dma_release_declared_memory()
135 dma_release_coherent_memory(mem); in dma_release_declared_memory()
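
dma_declare_coherent_memory() wires such a pool onto a device and dma_release_declared_memory() tears it down again; once declared, ordinary dma_alloc_coherent() calls on that device are served from the pool via dma_alloc_from_dev_coherent(). A hedged driver-side sketch of that lifecycle; the device, addresses and sizes are made up purely for illustration:

    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    /* Hypothetical driver: expose a 1 MiB on-chip SRAM window as a
     * per-device coherent pool, allocate one page from it, free it,
     * and release the declaration again. */
    static int foo_setup_pool(struct device *dev)
    {
        dma_addr_t handle;
        void *buf;
        int rc;

        rc = dma_declare_coherent_memory(dev, 0x40000000 /* phys */,
                                         0x40000000 /* device addr */,
                                         SZ_1M, DMA_MEMORY_EXCLUSIVE);
        if (rc)
            return rc;

        /* Served from the declared pool via dma_alloc_from_dev_coherent(). */
        buf = dma_alloc_coherent(dev, PAGE_SIZE, &handle, GFP_KERNEL);
        if (!buf) {
            dma_release_declared_memory(dev);
            return -ENOMEM;
        }

        dma_free_coherent(dev, PAGE_SIZE, buf, handle);
        dma_release_declared_memory(dev);
        return 0;
    }
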
143 struct dma_coherent_mem *mem = dev->dma_mem; in dma_mark_declared_memory_occupied() local
149 if (!mem) in dma_mark_declared_memory_occupied()
152 spin_lock_irqsave(&mem->spinlock, flags); in dma_mark_declared_memory_occupied()
153 pos = PFN_DOWN(device_addr - dma_get_device_base(dev, mem)); in dma_mark_declared_memory_occupied()
154 err = bitmap_allocate_region(mem->bitmap, pos, get_order(size)); in dma_mark_declared_memory_occupied()
155 spin_unlock_irqrestore(&mem->spinlock, flags); in dma_mark_declared_memory_occupied()
159 return mem->virt_base + (pos << PAGE_SHIFT); in dma_mark_declared_memory_occupied()
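
dma_mark_declared_memory_occupied() reserves a fixed window inside an already-declared pool: the hits above turn the device address into a page index, claim the matching bitmap region, and return the kernel virtual address of that window. A short usage sketch; FOO_FW_DEV_ADDR and FOO_FW_SIZE are hypothetical constants:

        void *virt;

        /* Pin the firmware window at a fixed device address inside the
         * pool declared earlier with dma_declare_coherent_memory(). */
        virt = dma_mark_declared_memory_occupied(dev, FOO_FW_DEV_ADDR,
                                                 FOO_FW_SIZE);
        if (IS_ERR(virt))
            return PTR_ERR(virt);
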
163 static void *__dma_alloc_from_coherent(struct dma_coherent_mem *mem, in __dma_alloc_from_coherent() argument
171 spin_lock_irqsave(&mem->spinlock, flags); in __dma_alloc_from_coherent()
173 if (unlikely(size > (mem->size << PAGE_SHIFT))) in __dma_alloc_from_coherent()
176 pageno = bitmap_find_free_region(mem->bitmap, mem->size, order); in __dma_alloc_from_coherent()
183 *dma_handle = mem->device_base + (pageno << PAGE_SHIFT); in __dma_alloc_from_coherent()
184 ret = mem->virt_base + (pageno << PAGE_SHIFT); in __dma_alloc_from_coherent()
185 spin_unlock_irqrestore(&mem->spinlock, flags); in __dma_alloc_from_coherent()
189 spin_unlock_irqrestore(&mem->spinlock, flags); in __dma_alloc_from_coherent()
210 struct dma_coherent_mem *mem = dev_get_coherent_memory(dev); in dma_alloc_from_dev_coherent() local
212 if (!mem) in dma_alloc_from_dev_coherent()
215 *ret = __dma_alloc_from_coherent(mem, size, dma_handle); in dma_alloc_from_dev_coherent()
224 return mem->flags & DMA_MEMORY_EXCLUSIVE; in dma_alloc_from_dev_coherent()
237 static int __dma_release_from_coherent(struct dma_coherent_mem *mem, in __dma_release_from_coherent() argument
240 if (mem && vaddr >= mem->virt_base && vaddr < in __dma_release_from_coherent()
241 (mem->virt_base + (mem->size << PAGE_SHIFT))) { in __dma_release_from_coherent()
242 int page = (vaddr - mem->virt_base) >> PAGE_SHIFT; in __dma_release_from_coherent()
245 spin_lock_irqsave(&mem->spinlock, flags); in __dma_release_from_coherent()
246 bitmap_release_region(mem->bitmap, page, order); in __dma_release_from_coherent()
247 spin_unlock_irqrestore(&mem->spinlock, flags); in __dma_release_from_coherent()
267 struct dma_coherent_mem *mem = dev_get_coherent_memory(dev); in dma_release_from_dev_coherent() local
269 return __dma_release_from_coherent(mem, order, vaddr); in dma_release_from_dev_coherent()
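
dma_alloc_from_dev_coherent() and dma_release_from_dev_coherent() are meant to be consulted before the generic DMA allocator; a non-zero return means "do not fall back", either because the pool satisfied the request or because the pool was declared DMA_MEMORY_EXCLUSIVE. A simplified sketch of that calling pattern, modelled on dma_alloc_attrs()/dma_free_attrs(); foo_dma_alloc/foo_dma_free are illustrative names:

    #include <linux/dma-mapping.h>

    static void *foo_dma_alloc(struct device *dev, size_t size,
                               dma_addr_t *dma_handle)
    {
        void *cpu_addr;

        if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr))
            return cpu_addr;    /* pool handled it (NULL if it refused) */

        /* ... otherwise fall back to the generic allocator ... */
        return NULL;
    }

    static void foo_dma_free(struct device *dev, size_t size, void *cpu_addr)
    {
        if (dma_release_from_dev_coherent(dev, get_order(size), cpu_addr))
            return;             /* buffer came from the per-device pool */

        /* ... otherwise free through the generic allocator ... */
    }
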
282 static int __dma_mmap_from_coherent(struct dma_coherent_mem *mem, in __dma_mmap_from_coherent() argument
285 if (mem && vaddr >= mem->virt_base && vaddr + size <= in __dma_mmap_from_coherent()
286 (mem->virt_base + (mem->size << PAGE_SHIFT))) { in __dma_mmap_from_coherent()
288 int start = (vaddr - mem->virt_base) >> PAGE_SHIFT; in __dma_mmap_from_coherent()
294 unsigned long pfn = mem->pfn_base + start + off; in __dma_mmap_from_coherent()
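
The pfn computed in the last hit is what __dma_mmap_from_coherent() hands to remap_pfn_range(); roughly, and treating the surrounding bounds checks as given, the mapping step looks like this (a reconstruction, not a verbatim quote):

        /* 'start' is the page offset of vaddr inside the pool and
         * vma->vm_pgoff the user-requested offset into the mapping. */
        unsigned long pfn = mem->pfn_base + start + vma->vm_pgoff;

        *ret = remap_pfn_range(vma, vma->vm_start, pfn,
                               vma_pages(vma) << PAGE_SHIFT,
                               vma->vm_page_prot);
        return 1;    /* handled by the per-device pool */
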
322 struct dma_coherent_mem *mem = dev_get_coherent_memory(dev); in dma_mmap_from_dev_coherent() local
324 return __dma_mmap_from_coherent(mem, vma, vaddr, size, ret); in dma_mmap_from_dev_coherent()
350 struct dma_coherent_mem *mem = rmem->priv; in rmem_dma_device_init() local
353 if (!mem) { in rmem_dma_device_init()
356 DMA_MEMORY_EXCLUSIVE, &mem); in rmem_dma_device_init()
363 mem->use_dev_dma_pfn_offset = true; in rmem_dma_device_init()
364 rmem->priv = mem; in rmem_dma_device_init()
365 dma_assign_coherent_memory(dev, mem); in rmem_dma_device_init()
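
rmem_dma_device_init() runs when a device is attached to a "shared-dma-pool" reserved-memory region: it builds (or reuses) the dma_coherent_mem seen above, marks it as using the device's DMA PFN offset, and assigns it to the device. A hedged sketch of the driver-side attach; foo_probe is a hypothetical probe function:

    #include <linux/of_reserved_mem.h>
    #include <linux/platform_device.h>

    static int foo_probe(struct platform_device *pdev)
    {
        int ret;

        /* Looks up the device's "memory-region" phandle; for a node with
         * compatible = "shared-dma-pool" this ends up in
         * rmem_dma_device_init() -> dma_assign_coherent_memory(). */
        ret = of_reserved_mem_device_init(&pdev->dev);
        if (ret)
            return ret;

        /* dma_alloc_coherent() on &pdev->dev now draws from that pool. */
        return 0;
    }
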