Lines matching refs: dma_map
121 struct dma_mapping *dma_map) in __genwqe_add_mapping() argument
126 list_add(&dma_map->card_list, &cfile->map_list); in __genwqe_add_mapping()
131 struct dma_mapping *dma_map) in __genwqe_del_mapping() argument
136 list_del(&dma_map->card_list); in __genwqe_del_mapping()
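Taken together, the two helpers above do nothing more than link and unlink a mapping on the per-file map_list. A minimal sketch of how they plausibly look, assuming the list is protected by a per-file spinlock; the field name map_lock and the irqsave locking are assumptions, only the list_add()/list_del() calls appear in the listing:

#include <linux/list.h>
#include <linux/spinlock.h>
/* struct genwqe_file and struct dma_mapping come from the driver's
 * private headers and are not reproduced here. */

static void __genwqe_add_mapping(struct genwqe_file *cfile,
                                 struct dma_mapping *dma_map)
{
        unsigned long flags;

        spin_lock_irqsave(&cfile->map_lock, flags);      /* lock name assumed */
        list_add(&dma_map->card_list, &cfile->map_list); /* line 126 */
        spin_unlock_irqrestore(&cfile->map_lock, flags);
}

static void __genwqe_del_mapping(struct genwqe_file *cfile,
                                 struct dma_mapping *dma_map)
{
        unsigned long flags;

        spin_lock_irqsave(&cfile->map_lock, flags);
        list_del(&dma_map->card_list);                   /* line 136 */
        spin_unlock_irqrestore(&cfile->map_lock, flags);
}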
193 struct dma_mapping *dma_map; in genwqe_remove_mappings() local
198 dma_map = list_entry(node, struct dma_mapping, card_list); in genwqe_remove_mappings()
200 list_del_init(&dma_map->card_list); in genwqe_remove_mappings()
211 __func__, i++, dma_map->u_vaddr, in genwqe_remove_mappings()
212 (unsigned long)dma_map->k_vaddr, in genwqe_remove_mappings()
213 (unsigned long)dma_map->dma_addr); in genwqe_remove_mappings()
215 if (dma_map->type == GENWQE_MAPPING_RAW) { in genwqe_remove_mappings()
217 __genwqe_free_consistent(cd, dma_map->size, in genwqe_remove_mappings()
218 dma_map->k_vaddr, in genwqe_remove_mappings()
219 dma_map->dma_addr); in genwqe_remove_mappings()
220 kfree(dma_map); in genwqe_remove_mappings()
221 } else if (dma_map->type == GENWQE_MAPPING_SGL_TEMP) { in genwqe_remove_mappings()
223 genwqe_user_vunmap(cd, dma_map); in genwqe_remove_mappings()
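The references at lines 193-223 are the per-file cleanup path that walks map_list on release and disposes of anything still queued. A sketch of that loop, assuming a list_for_each_safe() walk; the diagnostic message text, the return type, and the cd = cfile->cd field access are assumptions not present in the listing:

/* Kernel list helpers and the driver's private functions
 * (__genwqe_free_consistent, genwqe_user_vunmap, ...) are assumed. */
static int genwqe_remove_mappings(struct genwqe_file *cfile)
{
        int i = 0;
        struct list_head *node, *next;
        struct dma_mapping *dma_map;
        struct genwqe_dev *cd = cfile->cd;              /* field name assumed */

        list_for_each_safe(node, next, &cfile->map_list) {
                dma_map = list_entry(node, struct dma_mapping, card_list);
                list_del_init(&dma_map->card_list);     /* line 200 */

                /* Leftover mappings indicate missing earlier cleanup;
                 * report the fields printed at lines 211-213. */
                pr_err("%s: %d. cleanup mapping: u_vaddr=%p k_vaddr=%016lx dma_addr=%lx\n",
                       __func__, i++, dma_map->u_vaddr,
                       (unsigned long)dma_map->k_vaddr,
                       (unsigned long)dma_map->dma_addr);

                if (dma_map->type == GENWQE_MAPPING_RAW) {
                        /* RAW mappings own coherent DMA memory and the
                         * dma_mapping struct itself. */
                        __genwqe_free_consistent(cd, dma_map->size,
                                                 dma_map->k_vaddr,
                                                 dma_map->dma_addr);
                        kfree(dma_map);
                } else if (dma_map->type == GENWQE_MAPPING_SGL_TEMP) {
                        /* SGL_TEMP mappings are embedded elsewhere; only
                         * the user mapping is undone here. */
                        genwqe_user_vunmap(cd, dma_map);
                }
        }
        return 0;
}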
231 struct dma_mapping *dma_map; in genwqe_remove_pinnings() local
235 dma_map = list_entry(node, struct dma_mapping, pin_list); in genwqe_remove_pinnings()
245 list_del_init(&dma_map->pin_list); in genwqe_remove_pinnings()
246 genwqe_user_vunmap(cd, dma_map); in genwqe_remove_pinnings()
247 kfree(dma_map); in genwqe_remove_pinnings()
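The pin_list cleanup at lines 231-247 follows the same pattern, but every entry was allocated by the pin ioctl, so each one is unmapped and kfree()d unconditionally. A sketch; the safe-iteration boilerplate and the return value are assumptions:

static int genwqe_remove_pinnings(struct genwqe_file *cfile)
{
        struct list_head *node, *next;
        struct dma_mapping *dma_map;
        struct genwqe_dev *cd = cfile->cd;              /* field name assumed */

        list_for_each_safe(node, next, &cfile->pin_list) {
                dma_map = list_entry(node, struct dma_mapping, pin_list);

                /* A killed process may never have issued the unpin ioctl,
                 * so drop whatever is still pinned. */
                list_del_init(&dma_map->pin_list);      /* line 245 */
                genwqe_user_vunmap(cd, dma_map);        /* line 246 */
                kfree(dma_map);                         /* line 247 */
        }
        return 0;
}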
394 struct dma_mapping *dma_map; in genwqe_vma_close() local
401 dma_map = __genwqe_search_mapping(cfile, vma->vm_start, vsize, in genwqe_vma_close()
403 if (dma_map == NULL) { in genwqe_vma_close()
410 __genwqe_del_mapping(cfile, dma_map); in genwqe_vma_close()
411 __genwqe_free_consistent(cd, dma_map->size, dma_map->k_vaddr, in genwqe_vma_close()
412 dma_map->dma_addr); in genwqe_vma_close()
413 kfree(dma_map); in genwqe_vma_close()
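Lines 394-413 form the vm_operations close handler: it looks the mapping up by user address and size, unlinks it and releases the coherent buffer. A sketch; the trailing arguments of __genwqe_search_mapping() (cut off in the listing at line 401) and the error message are assumptions:

static void genwqe_vma_close(struct vm_area_struct *vma)
{
        unsigned long vsize = vma->vm_end - vma->vm_start;
        struct genwqe_file *cfile = vma->vm_private_data;       /* assumed */
        struct genwqe_dev *cd = cfile->cd;                      /* assumed */
        struct dma_mapping *dma_map;
        dma_addr_t d_addr = 0;

        dma_map = __genwqe_search_mapping(cfile, vma->vm_start, vsize,
                                          &d_addr, NULL);       /* tail args assumed */
        if (dma_map == NULL) {
                pr_err("%s: mapping not found: v=%lx s=%lx\n",
                       __func__, vma->vm_start, vsize);
                return;
        }

        __genwqe_del_mapping(cfile, dma_map);                   /* line 410 */
        __genwqe_free_consistent(cd, dma_map->size, dma_map->k_vaddr,
                                 dma_map->dma_addr);            /* lines 411-412 */
        kfree(dma_map);                                         /* line 413 */
}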
441 struct dma_mapping *dma_map; in genwqe_mmap() local
449 dma_map = kzalloc(sizeof(struct dma_mapping), GFP_KERNEL); in genwqe_mmap()
450 if (dma_map == NULL) in genwqe_mmap()
453 genwqe_mapping_init(dma_map, GENWQE_MAPPING_RAW); in genwqe_mmap()
454 dma_map->u_vaddr = (void *)vma->vm_start; in genwqe_mmap()
455 dma_map->size = vsize; in genwqe_mmap()
456 dma_map->nr_pages = DIV_ROUND_UP(vsize, PAGE_SIZE); in genwqe_mmap()
457 dma_map->k_vaddr = __genwqe_alloc_consistent(cd, vsize, in genwqe_mmap()
458 &dma_map->dma_addr); in genwqe_mmap()
459 if (dma_map->k_vaddr == NULL) { in genwqe_mmap()
465 *(dma_addr_t *)dma_map->k_vaddr = dma_map->dma_addr; in genwqe_mmap()
467 pfn = virt_to_phys(dma_map->k_vaddr) >> PAGE_SHIFT; in genwqe_mmap()
480 __genwqe_add_mapping(cfile, dma_map); in genwqe_mmap()
485 __genwqe_free_consistent(cd, dma_map->size, in genwqe_mmap()
486 dma_map->k_vaddr, in genwqe_mmap()
487 dma_map->dma_addr); in genwqe_mmap()
489 kfree(dma_map); in genwqe_mmap()
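Lines 441-489 are the mmap() handler: allocate a dma_mapping, back it with coherent DMA memory, optionally publish the bus address in the first bytes of the buffer (line 465), remap the pages into the vma and register the mapping on map_list. A sketch built around those lines; the size check, the condition guarding line 465, the vm_private_data hookup and the error-label names are assumptions:

static int genwqe_mmap(struct file *filp, struct vm_area_struct *vma)
{
        int rc;
        unsigned long pfn, vsize = vma->vm_end - vma->vm_start;
        struct genwqe_file *cfile = filp->private_data;
        struct genwqe_dev *cd = cfile->cd;                      /* assumed */
        struct dma_mapping *dma_map;

        if (vsize == 0)                                         /* check assumed */
                return -EINVAL;

        dma_map = kzalloc(sizeof(struct dma_mapping), GFP_KERNEL); /* line 449 */
        if (dma_map == NULL)
                return -ENOMEM;

        genwqe_mapping_init(dma_map, GENWQE_MAPPING_RAW);       /* line 453 */
        dma_map->u_vaddr = (void *)vma->vm_start;
        dma_map->size = vsize;
        dma_map->nr_pages = DIV_ROUND_UP(vsize, PAGE_SIZE);
        dma_map->k_vaddr = __genwqe_alloc_consistent(cd, vsize,
                                                     &dma_map->dma_addr);
        if (dma_map->k_vaddr == NULL) {
                rc = -ENOMEM;
                goto free_dma_map;
        }

        /* Line 465: expose the bus address at the start of the buffer.
         * The guarding privilege/size condition is an assumption. */
        if (capable(CAP_SYS_ADMIN) && vsize > sizeof(dma_addr_t))
                *(dma_addr_t *)dma_map->k_vaddr = dma_map->dma_addr;

        pfn = virt_to_phys(dma_map->k_vaddr) >> PAGE_SHIFT;     /* line 467 */
        rc = remap_pfn_range(vma, vma->vm_start, pfn, vsize,
                             vma->vm_page_prot);
        if (rc != 0) {
                rc = -EFAULT;
                goto free_dma_mem;
        }

        vma->vm_private_data = cfile;                           /* assumed */
        __genwqe_add_mapping(cfile, dma_map);                   /* line 480 */
        return 0;

 free_dma_mem:
        __genwqe_free_consistent(cd, dma_map->size,             /* lines 485-487 */
                                 dma_map->k_vaddr,
                                 dma_map->dma_addr);
 free_dma_map:
        kfree(dma_map);                                         /* line 489 */
        return rc;
}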
774 struct dma_mapping *dma_map; in genwqe_pin_mem() local
786 dma_map = kzalloc(sizeof(struct dma_mapping), GFP_KERNEL); in genwqe_pin_mem()
787 if (dma_map == NULL) in genwqe_pin_mem()
790 genwqe_mapping_init(dma_map, GENWQE_MAPPING_SGL_PINNED); in genwqe_pin_mem()
791 rc = genwqe_user_vmap(cd, dma_map, (void *)map_addr, map_size); in genwqe_pin_mem()
795 kfree(dma_map); in genwqe_pin_mem()
799 genwqe_add_pin(cfile, dma_map); in genwqe_pin_mem()
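Lines 774-799 are the pin ioctl handler: it allocates a dma_mapping, maps the user range with genwqe_user_vmap() and records it on the per-file pin list. A sketch; the genwqe_mem argument struct with addr/size fields, the input validation and the page-alignment of map_addr/map_size are assumptions not visible in the listing:

static int genwqe_pin_mem(struct genwqe_file *cfile, struct genwqe_mem *m)
{
        int rc;
        struct genwqe_dev *cd = cfile->cd;                      /* assumed */
        struct dma_mapping *dma_map;
        unsigned long map_addr, map_size;

        if (m->addr == 0x0 || m->size == 0)                     /* checks assumed */
                return -EINVAL;

        /* Page-align the requested range; exact rounding is an assumption. */
        map_addr = m->addr & PAGE_MASK;
        map_size = round_up(m->size + (m->addr & ~PAGE_MASK), PAGE_SIZE);

        dma_map = kzalloc(sizeof(struct dma_mapping), GFP_KERNEL);      /* line 786 */
        if (dma_map == NULL)
                return -ENOMEM;

        genwqe_mapping_init(dma_map, GENWQE_MAPPING_SGL_PINNED);        /* line 790 */
        rc = genwqe_user_vmap(cd, dma_map, (void *)map_addr, map_size); /* line 791 */
        if (rc != 0) {
                kfree(dma_map);                                 /* line 795 */
                return rc;
        }

        genwqe_add_pin(cfile, dma_map);                         /* line 799 */
        return 0;
}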
806 struct dma_mapping *dma_map; in genwqe_unpin_mem() local
816 dma_map = genwqe_search_pin(cfile, map_addr, map_size, NULL); in genwqe_unpin_mem()
817 if (dma_map == NULL) in genwqe_unpin_mem()
820 genwqe_del_pin(cfile, dma_map); in genwqe_unpin_mem()
821 genwqe_user_vunmap(cd, dma_map); in genwqe_unpin_mem()
822 kfree(dma_map); in genwqe_unpin_mem()
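The unpin handler at lines 806-822 is the mirror image of the pin sketch above: find the pinning by address and size, take it off the pin list, undo the user mapping and free it. A sketch under the same assumptions about the argument struct and address rounding:

static int genwqe_unpin_mem(struct genwqe_file *cfile, struct genwqe_mem *m)
{
        struct genwqe_dev *cd = cfile->cd;                      /* assumed */
        struct dma_mapping *dma_map;
        unsigned long map_addr, map_size;

        if (m->addr == 0x0)                                     /* check assumed */
                return -EINVAL;

        map_addr = m->addr & PAGE_MASK;
        map_size = round_up(m->size + (m->addr & ~PAGE_MASK), PAGE_SIZE);

        dma_map = genwqe_search_pin(cfile, map_addr, map_size, NULL);   /* line 816 */
        if (dma_map == NULL)
                return -ENOENT;                                 /* errno assumed */

        genwqe_del_pin(cfile, dma_map);                         /* line 820 */
        genwqe_user_vunmap(cd, dma_map);                        /* line 821 */
        kfree(dma_map);                                         /* line 822 */
        return 0;
}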
836 struct dma_mapping *dma_map; in ddcb_cmd_cleanup() local
840 dma_map = &req->dma_mappings[i]; in ddcb_cmd_cleanup()
842 if (dma_mapping_used(dma_map)) { in ddcb_cmd_cleanup()
843 __genwqe_del_mapping(cfile, dma_map); in ddcb_cmd_cleanup()
844 genwqe_user_vunmap(cd, dma_map); in ddcb_cmd_cleanup()
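Lines 836-844 are the post-command cleanup of the per-request DMA fixups: each used slot of req->dma_mappings[] is unlinked from the file's map list and its user mapping undone; the slots are embedded in the request, so nothing is kfree()d here. A sketch; the loop bound DDCB_FIXUPS and the return value are assumptions:

static int ddcb_cmd_cleanup(struct genwqe_file *cfile, struct ddcb_requ *req)
{
        unsigned int i;
        struct dma_mapping *dma_map;
        struct genwqe_dev *cd = cfile->cd;                      /* assumed */

        for (i = 0; i < DDCB_FIXUPS; i++) {                     /* bound assumed */
                dma_map = &req->dma_mappings[i];                /* line 840 */

                if (dma_mapping_used(dma_map)) {                /* line 842 */
                        __genwqe_del_mapping(cfile, dma_map);   /* line 843 */
                        genwqe_user_vunmap(cd, dma_map);        /* line 844 */
                }
        }
        return 0;
}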