Lines matching refs:node (identifier cross-reference: source line number, matching code, enclosing function; all hits are in the GK20A instance-memory code, drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c)
171 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_acquire_dma() local
172 struct gk20a_instmem *imem = node->imem; in gk20a_instobj_acquire_dma()
177 return node->vaddr; in gk20a_instobj_acquire_dma()
183 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_acquire_iommu() local
184 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_acquire_iommu()
192 if (node->base.vaddr) { in gk20a_instobj_acquire_iommu()
193 if (!node->use_cpt) { in gk20a_instobj_acquire_iommu()
195 list_del(&node->vaddr_node); in gk20a_instobj_acquire_iommu()
204 node->base.vaddr = vmap(node->pages, size >> PAGE_SHIFT, VM_MAP, in gk20a_instobj_acquire_iommu()
206 if (!node->base.vaddr) { in gk20a_instobj_acquire_iommu()
217 node->use_cpt++; in gk20a_instobj_acquire_iommu()
220 return node->base.vaddr; in gk20a_instobj_acquire_iommu()
226 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_release_dma() local
227 struct gk20a_instmem *imem = node->imem; in gk20a_instobj_release_dma()
238 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_release_iommu() local
239 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_release_iommu()
245 if (WARN_ON(node->use_cpt == 0)) in gk20a_instobj_release_iommu()
249 if (--node->use_cpt == 0) in gk20a_instobj_release_iommu()
250 list_add_tail(&node->vaddr_node, &imem->vaddr_lru); in gk20a_instobj_release_iommu()
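The acquire/release pairs above implement a small cache of CPU mappings. The DMA flavor is trivial because its buffer stays permanently mapped; the IOMMU flavor vmap()s the pages on first acquire, counts concurrent users in use_cpt, and once the count drops back to zero parks the still-mapped object on imem->vaddr_lru so the mapping can be reused or reclaimed later. A minimal user-space sketch of that refcount-plus-LRU pattern, with hypothetical obj/acquire/release names and malloc() standing in for vmap():

    #include <stdlib.h>

    struct obj {
        void *vaddr;          /* cached CPU mapping, NULL when unmapped */
        int use_cpt;          /* concurrent acquirers of the mapping */
        size_t size;
        struct obj *lru_next; /* linked below only while use_cpt == 0 */
    };

    static struct obj *lru_head; /* stands in for imem->vaddr_lru */

    static void lru_del(struct obj *o)
    {
        struct obj **p = &lru_head;

        while (*p && *p != o)
            p = &(*p)->lru_next;
        if (*p)
            *p = o->lru_next;
    }

    static void lru_add_tail(struct obj *o)
    {
        struct obj **p = &lru_head;

        while (*p)
            p = &(*p)->lru_next;
        o->lru_next = NULL;
        *p = o;
    }

    void *obj_acquire(struct obj *o)
    {
        if (o->vaddr) {
            /* mapping still cached; an unused one sits on the LRU
             * and must be taken off before it gains a user again */
            if (!o->use_cpt)
                lru_del(o);
        } else {
            o->vaddr = malloc(o->size); /* stands in for vmap() */
            if (!o->vaddr)
                return NULL;
        }
        o->use_cpt++;
        return o->vaddr;
    }

    void obj_release(struct obj *o)
    {
        if (o->use_cpt == 0)
            return; /* mirrors the WARN_ON(node->use_cpt == 0) */
        /* keep the mapping; a zero count just parks it on the LRU */
        if (--o->use_cpt == 0)
            lru_add_tail(o);
    }

The real paths additionally serialize on the instmem lock and make room by unmapping LRU entries when the vmap budget is exceeded; both are omitted here.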
262 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_rd32() local
264 return node->vaddr[offset / 4]; in gk20a_instobj_rd32()
270 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_wr32() local
272 node->vaddr[offset / 4] = data; in gk20a_instobj_wr32()
278 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_map() local
280 nvkm_vm_map_at(vma, offset, &node->mem); in gk20a_instobj_map()
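The accessors are thin: rd32/wr32 treat the acquired CPU mapping as an array of 32-bit words, so a byte offset is scaled by 4, and map just forwards the object's mem description to nvkm_vm_map_at(). A trivial sketch of the word-indexed access (hypothetical inst_rd32/inst_wr32 names):

    #include <stdint.h>

    /* the CPU side sees instance memory as an array of 32-bit
     * words, so a byte offset becomes an index via offset / 4 */
    uint32_t inst_rd32(const uint32_t *vaddr, uint64_t offset)
    {
        return vaddr[offset / 4];
    }

    void inst_wr32(uint32_t *vaddr, uint64_t offset, uint32_t data)
    {
        vaddr[offset / 4] = data;
    }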
286 struct gk20a_instobj_dma *node = gk20a_instobj_dma(memory); in gk20a_instobj_dtor_dma() local
287 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_dma()
290 if (unlikely(!node->base.vaddr)) in gk20a_instobj_dtor_dma()
293 dma_free_attrs(dev, node->base.mem.size << PAGE_SHIFT, node->base.vaddr, in gk20a_instobj_dtor_dma()
294 node->handle, imem->attrs); in gk20a_instobj_dtor_dma()
297 return node; in gk20a_instobj_dtor_dma()
303 struct gk20a_instobj_iommu *node = gk20a_instobj_iommu(memory); in gk20a_instobj_dtor_iommu() local
304 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_iommu()
306 struct nvkm_mm_node *r = node->base.mem.mem; in gk20a_instobj_dtor_iommu()
315 if (node->base.vaddr) in gk20a_instobj_dtor_iommu()
316 gk20a_instobj_iommu_recycle_vaddr(node); in gk20a_instobj_dtor_iommu()
324 for (i = 0; i < node->base.mem.size; i++) { in gk20a_instobj_dtor_iommu()
327 dma_unmap_page(dev, node->dma_addrs[i], PAGE_SIZE, in gk20a_instobj_dtor_iommu()
329 __free_page(node->pages[i]); in gk20a_instobj_dtor_iommu()
338 return node; in gk20a_instobj_dtor_iommu()
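Each destructor unwinds its constructor: the DMA flavor hands the whole contiguous buffer back with dma_free_attrs() (skipping everything if the allocation never happened), while the IOMMU flavor first recycles any cached CPU mapping, then unmaps and frees the backing pages one by one. A user-space sketch of the two teardown shapes, with free() standing in for dma_unmap_page() plus __free_page():

    #include <stdlib.h>

    struct page_obj {
        void *vaddr;   /* cached CPU mapping, may be NULL */
        void **pages;  /* one entry per backing page */
        size_t npages;
    };

    /* full teardown: every page is assumed to have been set up */
    void obj_dtor(struct page_obj *o)
    {
        size_t i;

        free(o->vaddr);              /* recycle the cached mapping */
        for (i = 0; i < o->npages; i++)
            free(o->pages[i]);       /* stands in for dma_unmap_page()
                                      * plus __free_page() per page */
    }

    /* partial teardown for a half-built object: stop at the first
     * page that was never allocated */
    void obj_unwind(struct page_obj *o)
    {
        size_t i;

        for (i = 0; i < o->npages && o->pages[i]; i++)
            free(o->pages[i]);
    }

The pages[i] != NULL guard reappears in the constructor error path shown further down (the line-487 loop).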
371 struct gk20a_instobj_dma *node; in gk20a_instobj_ctor_dma() local
375 if (!(node = kzalloc(sizeof(*node), GFP_KERNEL))) in gk20a_instobj_ctor_dma()
377 *_node = &node->base; in gk20a_instobj_ctor_dma()
379 nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.memory); in gk20a_instobj_ctor_dma()
381 node->base.vaddr = dma_alloc_attrs(dev, npages << PAGE_SHIFT, in gk20a_instobj_ctor_dma()
382 &node->handle, GFP_KERNEL, in gk20a_instobj_ctor_dma()
384 if (!node->base.vaddr) { in gk20a_instobj_ctor_dma()
390 if (unlikely(node->handle & (align - 1))) in gk20a_instobj_ctor_dma()
393 &node->handle, align); in gk20a_instobj_ctor_dma()
396 node->r.type = 12; in gk20a_instobj_ctor_dma()
397 node->r.offset = node->handle >> 12; in gk20a_instobj_ctor_dma()
398 node->r.length = (npages << PAGE_SHIFT) >> 12; in gk20a_instobj_ctor_dma()
400 node->base.mem.offset = node->handle; in gk20a_instobj_ctor_dma()
401 node->base.mem.mem = &node->r; in gk20a_instobj_ctor_dma()
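The DMA constructor takes one physically contiguous buffer from dma_alloc_attrs(), warns when the returned handle misses the requested alignment (the API takes no alignment argument, so the ctor can only check after the fact), and then fabricates a single nvkm_mm_node covering the buffer: type 12 marks 4 KiB pages, and offset/length are expressed in 4 KiB units. A self-contained sketch of just that alignment check and address arithmetic, using a made-up handle and a simplified stand-in struct (assumes PAGE_SHIFT == 12):

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12 /* assumed 4 KiB pages, as on GK20A */

    struct fake_mm_node {  /* stand-in for struct nvkm_mm_node */
        unsigned type;     /* 12 => the node is in 4 KiB pages */
        uint64_t offset;   /* start address, in 4 KiB units */
        uint32_t length;   /* size, in 4 KiB units */
    };

    int main(void)
    {
        uint64_t handle = 0x80001000; /* made-up DMA address */
        uint64_t npages = 4;
        uint64_t align = 0x10000;     /* made-up 64 KiB request */
        struct fake_mm_node r;

        /* illustrative message only; the real ctor warns through
         * the nvkm logging machinery */
        if (handle & (align - 1))
            fprintf(stderr, "handle 0x%llx not aligned to 0x%llx\n",
                    (unsigned long long)handle,
                    (unsigned long long)align);

        /* describe the contiguous buffer as one fake mm_node */
        r.type = 12;
        r.offset = handle >> 12;
        r.length = (npages << PAGE_SHIFT) >> 12;

        printf("type=%u offset=0x%llx length=0x%x\n", r.type,
               (unsigned long long)r.offset, (unsigned)r.length);
        return 0;
    }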
409 struct gk20a_instobj_iommu *node; in gk20a_instobj_ctor_iommu() local
420 if (!(node = kzalloc(sizeof(*node) + ((sizeof(node->pages[0]) + in gk20a_instobj_ctor_iommu()
421 sizeof(*node->dma_addrs)) * npages), GFP_KERNEL))) in gk20a_instobj_ctor_iommu()
423 *_node = &node->base; in gk20a_instobj_ctor_iommu()
424 node->dma_addrs = (void *)(node->pages + npages); in gk20a_instobj_ctor_iommu()
426 nvkm_memory_ctor(&gk20a_instobj_func_iommu, &node->base.memory); in gk20a_instobj_ctor_iommu()
437 node->pages[i] = p; in gk20a_instobj_ctor_iommu()
444 node->dma_addrs[i] = dma_adr; in gk20a_instobj_ctor_iommu()
461 ret = iommu_map(imem->domain, offset, node->dma_addrs[i], in gk20a_instobj_ctor_iommu()
477 node->base.mem.offset = ((u64)r->offset) << imem->iommu_pgshift; in gk20a_instobj_ctor_iommu()
478 node->base.mem.mem = r; in gk20a_instobj_ctor_iommu()
487 for (i = 0; i < npages && node->pages[i] != NULL; i++) { in gk20a_instobj_ctor_iommu()
488 dma_addr_t dma_addr = node->dma_addrs[i]; in gk20a_instobj_ctor_iommu()
492 __free_page(node->pages[i]); in gk20a_instobj_ctor_iommu()
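The IOMMU constructor works page by page: each page is allocated and made device-visible with dma_map_page(), then mapped at consecutive addresses in the IOMMU domain with iommu_map(), after which the GPU-visible offset is the allocated IOMMU range's offset shifted by imem->iommu_pgshift. On any failure the error path walks pages[] back, stopping at the first NULL entry. A compressed user-space sketch of that construct-or-unwind shape, with hypothetical stub_* functions standing in for the kernel APIs (the real ctor keeps the DMA and IOMMU passes as two separate loops):

    #include <stdlib.h>

    /* hypothetical stubs standing in for alloc_page(),
     * dma_map_page() and iommu_map() */
    static void *stub_alloc_page(void) { return malloc(4096); }
    static int stub_dma_map(void *page) { (void)page; return 0; }
    static int stub_iommu_map(size_t slot, void *page)
    {
        (void)slot; (void)page;
        return 0;
    }

    int obj_ctor(void **pages, size_t npages)
    {
        size_t i;

        for (i = 0; i < npages; i++) {
            pages[i] = stub_alloc_page();
            if (!pages[i])
                goto unwind;
            if (stub_dma_map(pages[i]) ||
                stub_iommu_map(i, pages[i])) {
                free(pages[i]);
                pages[i] = NULL;
                goto unwind;
            }
        }
        return 0;

    unwind:
        /* free only what was fully set up: the first NULL entry
         * marks where construction stopped (same guard as the
         * line-487 loop above) */
        for (i = 0; i < npages && pages[i]; i++) {
            free(pages[i]);
            pages[i] = NULL;
        }
        return -1;
    }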
504 struct gk20a_instobj *node = NULL; in gk20a_instobj_new() local
516 align, &node); in gk20a_instobj_new()
519 align, &node); in gk20a_instobj_new()
520 *pmemory = node ? &node->memory : NULL; in gk20a_instobj_new()
524 node->imem = imem; in gk20a_instobj_new()
527 node->mem.size = size >> 12; in gk20a_instobj_new()
528 node->mem.memtype = 0; in gk20a_instobj_new()
529 node->mem.page_shift = 12; in gk20a_instobj_new()
532 size, align, node->mem.offset); in gk20a_instobj_new()
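Finally, gk20a_instobj_new() dispatches between the two flavors, preferring the page-based IOMMU path whenever a domain is available and falling back to contiguous DMA otherwise, then fills the bookkeeping fields common to both (size and page_shift in 4 KiB units). Note that *pmemory is published even on constructor failure so the caller's cleanup can destroy a partially built object. A sketch of that dispatch under those assumptions, with hypothetical fake_* types and trivial stub constructors:

    #include <stdlib.h>

    struct fake_imem {
        void *domain; /* non-NULL when an IOMMU is available */
    };

    struct fake_obj {
        struct fake_imem *imem;
        unsigned long size;  /* in 4 KiB units */
        unsigned memtype;
        unsigned page_shift;
    };

    /* trivial stand-ins for gk20a_instobj_ctor_iommu()/_ctor_dma() */
    static int ctor_iommu(struct fake_imem *imem, size_t bytes,
                          struct fake_obj **obj)
    {
        (void)imem; (void)bytes;
        *obj = calloc(1, sizeof(**obj));
        return *obj ? 0 : -1;
    }

    static int ctor_dma(struct fake_imem *imem, size_t bytes,
                        struct fake_obj **obj)
    {
        (void)imem; (void)bytes;
        *obj = calloc(1, sizeof(**obj));
        return *obj ? 0 : -1;
    }

    int instobj_new(struct fake_imem *imem, size_t size,
                    struct fake_obj **pobj)
    {
        struct fake_obj *obj = NULL;
        int ret;

        /* prefer per-page IOMMU backing when a domain exists */
        if (imem->domain)
            ret = ctor_iommu(imem, size, &obj);
        else
            ret = ctor_dma(imem, size, &obj);
        /* published even on failure so cleanup can reach the
         * partially built object, as in the real code */
        *pobj = obj;
        if (ret)
            return ret;

        obj->imem = imem;
        obj->size = size >> 12; /* bookkeeping in 4 KiB units */
        obj->memtype = 0;
        obj->page_shift = 12;
        return 0;
    }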