Lines matching refs: iovad (the generic IOVA allocator, drivers/iommu/iova.c; each line shows its source line number, the matching code, and the enclosing function — "argument" marks lines where iovad is a function parameter)
25 init_iova_domain(struct iova_domain *iovad, unsigned long granule, in init_iova_domain() argument
35 spin_lock_init(&iovad->iova_rbtree_lock); in init_iova_domain()
36 iovad->rbroot = RB_ROOT; in init_iova_domain()
37 iovad->cached32_node = NULL; in init_iova_domain()
38 iovad->granule = granule; in init_iova_domain()
39 iovad->start_pfn = start_pfn; in init_iova_domain()
40 iovad->dma_32bit_pfn = pfn_32bit; in init_iova_domain()
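The constructor lines above initialize every field the rest of the file relies on. Reconstructed from those assignments, the two structures look roughly like this; the field comments and ordering are mine, and the exact layout in include/linux/iova.h of this vintage may differ slightly:

	#include <linux/rbtree.h>
	#include <linux/spinlock.h>

	struct iova {
		struct rb_node	node;	/* linkage in iovad->rbroot */
		unsigned long	pfn_hi;	/* highest pfn of the allocated range */
		unsigned long	pfn_lo;	/* lowest pfn of the allocated range */
	};

	struct iova_domain {
		spinlock_t	iova_rbtree_lock; /* protects rbroot and cached32_node */
		struct rb_root	rbroot;		/* rbtree of ranges, sorted by pfn */
		struct rb_node	*cached32_node;	/* last node allocated below dma_32bit_pfn */
		unsigned long	granule;	/* allocation granularity, in bytes */
		unsigned long	start_pfn;	/* lower bound of allocatable pfns */
		unsigned long	dma_32bit_pfn;	/* pfn at the 32-bit DMA boundary */
	};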
45 __get_cached_rbnode(struct iova_domain *iovad, unsigned long *limit_pfn) in __get_cached_rbnode() argument
47 if ((*limit_pfn != iovad->dma_32bit_pfn) || in __get_cached_rbnode()
48 (iovad->cached32_node == NULL)) in __get_cached_rbnode()
49 return rb_last(&iovad->rbroot); in __get_cached_rbnode()
51 struct rb_node *prev_node = rb_prev(iovad->cached32_node); in __get_cached_rbnode()
53 container_of(iovad->cached32_node, struct iova, node); in __get_cached_rbnode()
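Allocation scans the tree right-to-left, and this helper decides where the scan starts: for a request limited to the 32-bit boundary it resumes just left of the cached node instead of at rb_last(). Lines 46, 50, 52 and 54-56 are hidden by the match filter; the *limit_pfn update below is my reconstruction of those elided lines:

	/* Sketch: pick the starting node for a right-to-left search. */
	static struct rb_node *
	__get_cached_rbnode(struct iova_domain *iovad, unsigned long *limit_pfn)
	{
		if ((*limit_pfn != iovad->dma_32bit_pfn) ||
		    (iovad->cached32_node == NULL))
			return rb_last(&iovad->rbroot); /* no usable cache */
		else {
			struct rb_node *prev_node = rb_prev(iovad->cached32_node);
			struct iova *curr_iova =
				container_of(iovad->cached32_node, struct iova, node);

			/* Assumption for the elided lines: resume the search
			 * below the cached allocation. */
			*limit_pfn = curr_iova->pfn_lo - 1;
			return prev_node;
		}
	}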
60 __cached_rbnode_insert_update(struct iova_domain *iovad, in __cached_rbnode_insert_update() argument
63 if (limit_pfn != iovad->dma_32bit_pfn) in __cached_rbnode_insert_update()
65 iovad->cached32_node = &new->node; in __cached_rbnode_insert_update()
69 __cached_rbnode_delete_update(struct iova_domain *iovad, struct iova *free) in __cached_rbnode_delete_update() argument
74 if (!iovad->cached32_node) in __cached_rbnode_delete_update()
76 curr = iovad->cached32_node; in __cached_rbnode_delete_update()
84 if (node && iova->pfn_lo < iovad->dma_32bit_pfn) in __cached_rbnode_delete_update()
85 iovad->cached32_node = node; in __cached_rbnode_delete_update()
87 iovad->cached32_node = NULL; in __cached_rbnode_delete_update()
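The matching delete hook keeps the cache valid when a range is freed: the visible lines 84-87 show the repair step (re-point the cache at the freed node's successor if that successor is still below the 32-bit boundary, otherwise drop it). A hedged reconstruction of the elided middle, lines 70-83; the guard condition is my assumption about when the cache can go stale:

	static void
	__cached_rbnode_delete_update(struct iova_domain *iovad, struct iova *free)
	{
		struct iova *cached_iova;
		struct rb_node *curr;

		if (!iovad->cached32_node)
			return;
		curr = iovad->cached32_node;
		cached_iova = container_of(curr, struct iova, node);

		/* Assumption: only ranges at or right of the cached node
		 * can invalidate it. */
		if (free->pfn_lo >= cached_iova->pfn_lo) {
			struct rb_node *node = rb_next(&free->node);
			struct iova *iova = container_of(node, struct iova, node);

			if (node && iova->pfn_lo < iovad->dma_32bit_pfn)
				iovad->cached32_node = node;	/* line 85 */
			else
				iovad->cached32_node = NULL;	/* line 87 */
		}
	}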
101 static int __alloc_and_insert_iova_range(struct iova_domain *iovad, in __alloc_and_insert_iova_range() argument
111 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in __alloc_and_insert_iova_range()
113 curr = __get_cached_rbnode(iovad, &limit_pfn); in __alloc_and_insert_iova_range()
138 if ((iovad->start_pfn + size + pad_size) > limit_pfn) { in __alloc_and_insert_iova_range()
139 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in __alloc_and_insert_iova_range()
158 entry = &iovad->rbroot.rb_node; in __alloc_and_insert_iova_range()
176 rb_insert_color(&new->node, &iovad->rbroot); in __alloc_and_insert_iova_range()
178 __cached_rbnode_insert_update(iovad, saved_pfn, new); in __alloc_and_insert_iova_range()
180 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in __alloc_and_insert_iova_range()
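Between taking the lock at line 111 and the out-of-space check at line 138 sits the search loop the filter hides: starting from the cached node, the allocator walks left through the tree looking for a gap of `size` pfns below the running `limit_pfn`. A simplified sketch of that walk, with the `size_aligned` padding logic deliberately omitted:

	/* Sketch of the elided search loop (alignment padding omitted). */
	while (curr) {
		struct iova *curr_iova = container_of(curr, struct iova, node);

		/* Do `size` pfns fit between this range and limit_pfn? */
		if (curr_iova->pfn_hi + size <= limit_pfn)
			break;			/* yes: allocate just below limit_pfn */

		limit_pfn = curr_iova->pfn_lo - 1;	/* no: slide the window left */
		curr = rb_prev(curr);
	}
	/* If the window has shrunk below start_pfn + size, line 138 fails the
	 * request; otherwise new->pfn_hi = limit_pfn, new->pfn_lo follows, and
	 * lines 158-176 link `new` into the rbtree and rebalance it. */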
271 alloc_iova(struct iova_domain *iovad, unsigned long size, in alloc_iova() argument
282 ret = __alloc_and_insert_iova_range(iovad, size, limit_pfn, in alloc_iova()
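alloc_iova() is the public entry point wrapping the range allocator. A minimal caller sketch; it assumes the signature of this era, alloc_iova(iovad, size, limit_pfn, size_aligned), plus the iova_dma_addr() helper from include/linux/iova.h, and `nrpages` is an illustrative variable:

	/* Sketch: allocate nrpages of IOVA space below the 32-bit boundary. */
	struct iova *iova;
	dma_addr_t dma_addr;

	iova = alloc_iova(iovad, nrpages, iovad->dma_32bit_pfn, true);
	if (!iova)
		return 0;	/* illustrative failure handling */

	dma_addr = iova_dma_addr(iovad, iova);	/* pfn_lo << iova_shift(iovad) */

Passing true for size_aligned asks the allocator to align the range to its own size, which is what DMA mapping callers typically want.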
301 struct iova *find_iova(struct iova_domain *iovad, unsigned long pfn) in find_iova() argument
307 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in find_iova()
308 node = iovad->rbroot.rb_node; in find_iova()
314 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in find_iova()
330 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in find_iova()
342 __free_iova(struct iova_domain *iovad, struct iova *iova) in __free_iova() argument
346 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in __free_iova()
347 __cached_rbnode_delete_update(iovad, iova); in __free_iova()
348 rb_erase(&iova->node, &iovad->rbroot); in __free_iova()
349 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in __free_iova()
362 free_iova(struct iova_domain *iovad, unsigned long pfn) in free_iova() argument
364 struct iova *iova = find_iova(iovad, pfn); in free_iova()
367 __free_iova(iovad, iova); in free_iova()
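find_iova(), __free_iova() and the free_iova() wrapper form the teardown half of the API: free_iova(iovad, pfn) at lines 362-367 is literally find-then-free, while callers that already hold the struct iova can call __free_iova() directly. A sketch of a typical unmap path; iommu_unmap(), iova_shift() and iova_size() are real kernel APIs of this era, but `domain` and `dma_addr` are illustrative:

	/* Sketch: release the IOVA range covering dma_addr. */
	unsigned long pfn = dma_addr >> iova_shift(iovad);
	struct iova *iova = find_iova(iovad, pfn);

	if (WARN_ON(!iova))
		return;

	/* Unmap the pages before returning the range to the allocator. */
	iommu_unmap(domain, iova_dma_addr(iovad, iova),
		    iova_size(iova) << iova_shift(iovad));
	__free_iova(iovad, iova);	/* erases the node, repairs the cache */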
377 void put_iova_domain(struct iova_domain *iovad) in put_iova_domain() argument
382 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in put_iova_domain()
383 node = rb_first(&iovad->rbroot); in put_iova_domain()
387 rb_erase(node, &iovad->rbroot); in put_iova_domain()
389 node = rb_first(&iovad->rbroot); in put_iova_domain()
391 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in put_iova_domain()
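put_iova_domain() tears the whole tree down node by node: under the lock it repeatedly takes rb_first(), erases it, and frees the entry until the tree is empty. Put together, the lifecycle looks like this sketch; the granule, start offset and sizes are illustrative values, not requirements:

	/* Sketch: full lifecycle of an iova_domain. */
	struct iova_domain iovad;
	struct iova *iova;

	init_iova_domain(&iovad, SZ_4K,			/* granule: 4K pages */
			 SZ_1M >> 12,			/* start_pfn: skip low 1MB */
			 DMA_BIT_MASK(32) >> 12);	/* dma_32bit_pfn */

	iova = alloc_iova(&iovad, 16, iovad.dma_32bit_pfn, false);
	if (iova)
		__free_iova(&iovad, iova);

	put_iova_domain(&iovad);	/* frees every remaining node */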
421 __insert_new_range(struct iova_domain *iovad, in __insert_new_range() argument
428 iova_insert_rbtree(&iovad->rbroot, iova); in __insert_new_range()
452 reserve_iova(struct iova_domain *iovad, in reserve_iova() argument
460 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in reserve_iova()
461 for (node = rb_first(&iovad->rbroot); node; node = rb_next(node)) { in reserve_iova()
477 iova = __insert_new_range(iovad, pfn_lo, pfn_hi); in reserve_iova()
480 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in reserve_iova()
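reserve_iova() walks every node under the lock (the loop at line 461) checking for overlap before inserting the new range via __insert_new_range(); its typical use is carving out address windows the allocator must never hand to a device, such as the RMRR regions the Intel VT-d driver reserves. A hedged sketch; IOVA_PFN() is the addr-to-pfn macro as used by the VT-d driver, and rmrr_base/rmrr_end are illustrative:

	/* Sketch: keep the allocator away from a platform-reserved window. */
	unsigned long lo = IOVA_PFN(rmrr_base);
	unsigned long hi = IOVA_PFN(rmrr_end);
	struct iova *resv = reserve_iova(iovad, lo, hi);

	if (!resv)
		pr_err("failed to reserve iova [%lx, %lx]\n", lo, hi);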
513 split_and_remove_iova(struct iova_domain *iovad, struct iova *iova, in split_and_remove_iova() argument
519 spin_lock_irqsave(&iovad->iova_rbtree_lock, flags); in split_and_remove_iova()
531 __cached_rbnode_delete_update(iovad, iova); in split_and_remove_iova()
532 rb_erase(&iova->node, &iovad->rbroot); in split_and_remove_iova()
535 iova_insert_rbtree(&iovad->rbroot, prev); in split_and_remove_iova()
539 iova_insert_rbtree(&iovad->rbroot, next); in split_and_remove_iova()
542 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in split_and_remove_iova()
547 spin_unlock_irqrestore(&iovad->iova_rbtree_lock, flags); in split_and_remove_iova()
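split_and_remove_iova() punches the hole [pfn_lo, pfn_hi] out of an existing range: it invalidates the cache (line 531), erases the original node (line 532), then re-inserts whatever survives on the left and/or right (lines 535 and 539); the two unlock sites at lines 542 and 547 are the success and error exits. A sketch of the elided split logic; alloc_and_init_iova() is my assumed name for the static helper that builds a node, and the error path is summarized in a comment:

	/* Sketch: carve [pfn_lo, pfn_hi] out of *iova (elided lines, reconstructed). */
	struct iova *prev = NULL, *next = NULL;

	if (iova->pfn_lo < pfn_lo)	/* a piece survives on the left */
		prev = alloc_and_init_iova(iova->pfn_lo, pfn_lo - 1);
	if (iova->pfn_hi > pfn_hi)	/* a piece survives on the right */
		next = alloc_and_init_iova(pfn_hi + 1, iova->pfn_hi);
	/* If either allocation fails: unlock at line 547 and return NULL. */

	__cached_rbnode_delete_update(iovad, iova);	/* line 531 */
	rb_erase(&iova->node, &iovad->rbroot);		/* line 532 */
	if (prev)
		iova_insert_rbtree(&iovad->rbroot, prev);	/* line 535 */
	if (next)
		iova_insert_rbtree(&iovad->rbroot, next);	/* line 539 */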