/kernel/linux/linux-5.10/arch/powerpc/mm/book3s64/ |
D | iommu_api.c |
  60 struct mm_iommu_table_group_mem_t *mem, *mem2; in mm_iommu_do_alloc() local
  73 mem = kzalloc(sizeof(*mem), GFP_KERNEL); in mm_iommu_do_alloc()
  74 if (!mem) { in mm_iommu_do_alloc()
  80 mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT)); in mm_iommu_do_alloc()
  81 mem->dev_hpa = dev_hpa; in mm_iommu_do_alloc()
  84 mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA; in mm_iommu_do_alloc()
  91 mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT)); in mm_iommu_do_alloc()
  92 mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0]))); in mm_iommu_do_alloc()
  93 if (!mem->hpas) { in mm_iommu_do_alloc()
  94 kfree(mem); in mm_iommu_do_alloc()
  [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/ |
D | intel_memory_region.c |
  34 intel_memory_region_free_pages(struct intel_memory_region *mem, in intel_memory_region_free_pages() argument
  41 size += i915_buddy_block_size(&mem->mm, block); in intel_memory_region_free_pages()
  42 i915_buddy_free(&mem->mm, block); in intel_memory_region_free_pages()
  50 __intel_memory_region_put_pages_buddy(struct intel_memory_region *mem, in __intel_memory_region_put_pages_buddy() argument
  53 mutex_lock(&mem->mm_lock); in __intel_memory_region_put_pages_buddy()
  54 mem->avail += intel_memory_region_free_pages(mem, blocks); in __intel_memory_region_put_pages_buddy()
  55 mutex_unlock(&mem->mm_lock); in __intel_memory_region_put_pages_buddy()
  69 __intel_memory_region_get_pages_buddy(struct intel_memory_region *mem, in __intel_memory_region_get_pages_buddy() argument
  77 GEM_BUG_ON(!IS_ALIGNED(size, mem->mm.chunk_size)); in __intel_memory_region_get_pages_buddy()
  81 min_order = ilog2(mem->min_page_size) - in __intel_memory_region_get_pages_buddy()
  [all …]
|
D | intel_region_lmem.c |
  12 static int init_fake_lmem_bar(struct intel_memory_region *mem) in init_fake_lmem_bar() argument
  14 struct drm_i915_private *i915 = mem->i915; in init_fake_lmem_bar()
  21 mem->fake_mappable.start = 0; in init_fake_lmem_bar()
  22 mem->fake_mappable.size = resource_size(&mem->region); in init_fake_lmem_bar()
  23 mem->fake_mappable.color = I915_COLOR_UNEVICTABLE; in init_fake_lmem_bar()
  25 ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable); in init_fake_lmem_bar()
  29 mem->remap_addr = dma_map_resource(&i915->drm.pdev->dev, in init_fake_lmem_bar()
  30 mem->region.start, in init_fake_lmem_bar()
  31 mem->fake_mappable.size, in init_fake_lmem_bar()
  34 if (dma_mapping_error(&i915->drm.pdev->dev, mem->remap_addr)) { in init_fake_lmem_bar()
  [all …]
|
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/ |
D | rxe_mr.c |
  27 int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length) in mem_check_range() argument
  29 switch (mem->type) { in mem_check_range()
  35 if (iova < mem->iova || in mem_check_range()
  36 length > mem->length || in mem_check_range()
  37 iova > mem->iova + mem->length - length) in mem_check_range()
  50 static void rxe_mem_init(int access, struct rxe_mem *mem) in rxe_mem_init() argument
  52 u32 lkey = mem->pelem.index << 8 | rxe_get_key(); in rxe_mem_init()
  55 mem->ibmr.lkey = lkey; in rxe_mem_init()
  56 mem->ibmr.rkey = rkey; in rxe_mem_init()
  57 mem->state = RXE_MEM_STATE_INVALID; in rxe_mem_init()
  [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
D | mem.c |
  35 struct page **mem; member
  57 struct nvkm_mem *mem = nvkm_mem(memory); in nvkm_mem_addr() local
  58 if (mem->pages == 1 && mem->mem) in nvkm_mem_addr()
  59 return mem->dma[0]; in nvkm_mem_addr()
  73 struct nvkm_mem *mem = nvkm_mem(memory); in nvkm_mem_map_dma() local
  75 .memory = &mem->memory, in nvkm_mem_map_dma()
  77 .dma = mem->dma, in nvkm_mem_map_dma()
  85 struct nvkm_mem *mem = nvkm_mem(memory); in nvkm_mem_dtor() local
  86 if (mem->mem) { in nvkm_mem_dtor()
  87 while (mem->pages--) { in nvkm_mem_dtor()
  [all …]
|
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/runtime/isp_param/src/ |
D | isp_param.c |
  27 enum ia_css_isp_memories mem, in ia_css_isp_param_set_mem_init() argument
  30 mem_init->params[pclass][mem].address = address; in ia_css_isp_param_set_mem_init()
  31 mem_init->params[pclass][mem].size = (uint32_t)size; in ia_css_isp_param_set_mem_init()
  38 enum ia_css_isp_memories mem, in ia_css_isp_param_set_css_mem_init() argument
  41 mem_init->params[pclass][mem].address = address; in ia_css_isp_param_set_css_mem_init()
  42 mem_init->params[pclass][mem].size = (uint32_t)size; in ia_css_isp_param_set_css_mem_init()
  49 enum ia_css_isp_memories mem, in ia_css_isp_param_set_isp_mem_init() argument
  52 mem_init->params[pclass][mem].address = address; in ia_css_isp_param_set_isp_mem_init()
  53 mem_init->params[pclass][mem].size = (uint32_t)size; in ia_css_isp_param_set_isp_mem_init()
  61 enum ia_css_isp_memories mem) in ia_css_isp_param_get_mem_init() argument
  [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/ |
D | nouveau_mem.c |
  36 nouveau_mem_map(struct nouveau_mem *mem, in nouveau_mem_map() argument
  54 args.nv50.kind = mem->kind; in nouveau_mem_map()
  55 args.nv50.comp = mem->comp; in nouveau_mem_map()
  62 if (mem->mem.type & NVIF_MEM_VRAM) in nouveau_mem_map()
  68 args.gf100.kind = mem->kind; in nouveau_mem_map()
  78 ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, in nouveau_mem_map()
  79 &mem->mem, 0); in nouveau_mem_map()
  85 nouveau_mem_fini(struct nouveau_mem *mem) in nouveau_mem_fini() argument
  87 nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]); in nouveau_mem_fini()
  88 nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]); in nouveau_mem_fini()
  [all …]
|
D | nouveau_vmm.c |
  31 if (vma->mem) { in nouveau_vma_unmap()
  33 vma->mem = NULL; in nouveau_vma_unmap()
  38 nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem) in nouveau_vma_map() argument
  41 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); in nouveau_vma_map()
  44 vma->mem = mem; in nouveau_vma_map()
  80 struct nouveau_mem *mem = nouveau_mem(&nvbo->bo.mem); in nouveau_vma_new() local
  95 vma->mem = NULL; in nouveau_vma_new()
  99 if (nvbo->bo.mem.mem_type != TTM_PL_SYSTEM && in nouveau_vma_new()
  100 mem->mem.page == nvbo->page) { in nouveau_vma_new()
  101 ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0, in nouveau_vma_new()
  [all …]
|
/kernel/linux/linux-5.10/drivers/pci/endpoint/ |
D | pci-epc-mem.c |
  23 static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size) in pci_epc_mem_get_order() argument
  26 unsigned int page_shift = ilog2(mem->window.page_size); in pci_epc_mem_get_order()
  51 struct pci_epc_mem *mem = NULL; in pci_epc_multi_mem_init() local
  77 mem = kzalloc(sizeof(*mem), GFP_KERNEL); in pci_epc_multi_mem_init()
  78 if (!mem) { in pci_epc_multi_mem_init()
  87 kfree(mem); in pci_epc_multi_mem_init()
  92 mem->window.phys_base = windows[i].phys_base; in pci_epc_multi_mem_init()
  93 mem->window.size = windows[i].size; in pci_epc_multi_mem_init()
  94 mem->window.page_size = page_size; in pci_epc_multi_mem_init()
  95 mem->bitmap = bitmap; in pci_epc_multi_mem_init()
  [all …]
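Note on the first two hits above: the endpoint memory allocator derives a power-of-two allocation order from the window's page_size (ilog2 of the page size, then a rounded page count). A rough standalone sketch of that style of size-to-order rounding is shown below; the helper name and the use of a GCC/Clang builtin in place of the kernel's ilog2()/fls() are assumptions, not the kernel code itself.

    #include <stdio.h>

    /* Smallest order such that (page_size << order) >= size; page_size must be
     * a power of two. Hypothetical stand-in for the kernel's order helpers. */
    static int mem_get_order(unsigned long page_size, unsigned long size)
    {
        unsigned int page_shift = __builtin_ctzl(page_size); /* log2 of a power of two */
        unsigned long pages = (size + page_size - 1) >> page_shift; /* round up to pages */
        int order = 0;

        while ((1UL << order) < pages)
            order++;
        return order;
    }

    int main(void)
    {
        /* With a 4 KiB window page size, a 6 KiB request needs order 1 (2 pages). */
        printf("order = %d\n", mem_get_order(4096, 6 * 1024));
        return 0;
    }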
|
/kernel/linux/linux-5.10/drivers/media/v4l2-core/ |
D | videobuf-dma-contig.c |
  38 struct videobuf_dma_contig_memory *mem, in __videobuf_dc_alloc() argument
  41 mem->size = size; in __videobuf_dc_alloc()
  42 mem->vaddr = dma_alloc_coherent(dev, mem->size, in __videobuf_dc_alloc()
  43 &mem->dma_handle, flags); in __videobuf_dc_alloc()
  45 if (!mem->vaddr) { in __videobuf_dc_alloc()
  46 dev_err(dev, "memory alloc size %ld failed\n", mem->size); in __videobuf_dc_alloc()
  50 dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size); in __videobuf_dc_alloc()
  56 struct videobuf_dma_contig_memory *mem) in __videobuf_dc_free() argument
  58 dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle); in __videobuf_dc_free()
  60 mem->vaddr = NULL; in __videobuf_dc_free()
  [all …]
|
D | videobuf-vmalloc.c |
  72 struct videobuf_vmalloc_memory *mem; in videobuf_vm_close() local
  88 mem = q->bufs[i]->priv; in videobuf_vm_close()
  89 if (mem) { in videobuf_vm_close()
  96 MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM); in videobuf_vm_close()
  102 __func__, i, mem->vaddr); in videobuf_vm_close()
  104 vfree(mem->vaddr); in videobuf_vm_close()
  105 mem->vaddr = NULL; in videobuf_vm_close()
  137 struct videobuf_vmalloc_memory *mem; in __videobuf_alloc_vb() local
  140 vb = kzalloc(size + sizeof(*mem), GFP_KERNEL); in __videobuf_alloc_vb()
  144 mem = vb->priv = ((char *)vb) + size; in __videobuf_alloc_vb()
  [all …]
|
/kernel/linux/linux-5.10/kernel/dma/ |
D | coherent.c |
  33 struct dma_coherent_mem * mem) in dma_get_device_base() argument
  35 if (mem->use_dev_dma_pfn_offset) in dma_get_device_base()
  36 return phys_to_dma(dev, PFN_PHYS(mem->pfn_base)); in dma_get_device_base()
  37 return mem->device_base; in dma_get_device_base()
  42 struct dma_coherent_mem **mem) in dma_init_coherent_memory() argument
  77 *mem = dma_mem; in dma_init_coherent_memory()
  87 static void dma_release_coherent_memory(struct dma_coherent_mem *mem) in dma_release_coherent_memory() argument
  89 if (!mem) in dma_release_coherent_memory()
  92 memunmap(mem->virt_base); in dma_release_coherent_memory()
  93 kfree(mem->bitmap); in dma_release_coherent_memory()
  [all …]
|
/kernel/linux/linux-5.10/drivers/infiniband/sw/siw/ |
D | siw_mem.c |
  50 struct siw_mem *mem; in siw_mem_id2obj() local
  53 mem = xa_load(&sdev->mem_xa, stag_index); in siw_mem_id2obj()
  54 if (likely(mem && kref_get_unless_zero(&mem->ref))) { in siw_mem_id2obj()
  56 return mem; in siw_mem_id2obj()
  93 struct siw_mem *mem = kzalloc(sizeof(*mem), GFP_KERNEL); in siw_mr_add_mem() local
  97 if (!mem) in siw_mr_add_mem()
  100 mem->mem_obj = mem_obj; in siw_mr_add_mem()
  101 mem->stag_valid = 0; in siw_mr_add_mem()
  102 mem->sdev = sdev; in siw_mr_add_mem()
  103 mem->va = start; in siw_mr_add_mem()
  [all …]
|
/kernel/linux/linux-5.10/drivers/net/ipa/ |
D | ipa_qmi.c |
  288 const struct ipa_mem *mem; in init_modem_driver_req() local
  301 mem = &ipa->mem[IPA_MEM_MODEM_HEADER]; in init_modem_driver_req()
  302 if (mem->size) { in init_modem_driver_req()
  304 req.hdr_tbl_info.start = ipa->mem_offset + mem->offset; in init_modem_driver_req()
  305 req.hdr_tbl_info.end = req.hdr_tbl_info.start + mem->size - 1; in init_modem_driver_req()
  308 mem = &ipa->mem[IPA_MEM_V4_ROUTE]; in init_modem_driver_req()
  310 req.v4_route_tbl_info.start = ipa->mem_offset + mem->offset; in init_modem_driver_req()
  311 req.v4_route_tbl_info.count = mem->size / IPA_TABLE_ENTRY_SIZE; in init_modem_driver_req()
  313 mem = &ipa->mem[IPA_MEM_V6_ROUTE]; in init_modem_driver_req()
  315 req.v6_route_tbl_info.start = ipa->mem_offset + mem->offset; in init_modem_driver_req()
  [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_amdkfd_gpuvm.c |
  71 struct kgd_mem *mem) in check_if_add_bo_to_vm() argument
  75 list_for_each_entry(entry, &mem->bo_va_list, bo_list) in check_if_add_bo_to_vm()
  89 uint64_t mem; in amdgpu_amdkfd_gpuvm_init_mem_limits() local
  92 mem = si.totalram - si.totalhigh; in amdgpu_amdkfd_gpuvm_init_mem_limits()
  93 mem *= si.mem_unit; in amdgpu_amdkfd_gpuvm_init_mem_limits()
  96 kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4); in amdgpu_amdkfd_gpuvm_init_mem_limits()
  97 kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3); in amdgpu_amdkfd_gpuvm_init_mem_limits()
  390 static uint64_t get_pte_flags(struct amdgpu_device *adev, struct kgd_mem *mem) in get_pte_flags() argument
  392 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in get_pte_flags()
  393 bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT; in get_pte_flags()
  [all …]
|
/kernel/linux/linux-5.10/drivers/base/ |
D | memory.c |
  101 struct memory_block *mem = to_memory_block(dev); in memory_block_release() local
  103 kfree(mem); in memory_block_release()
  118 struct memory_block *mem = to_memory_block(dev); in phys_index_show() local
  121 phys_index = mem->start_section_nr / sections_per_block; in phys_index_show()
  142 struct memory_block *mem = to_memory_block(dev); in state_show() local
  149 switch (mem->state) { in state_show()
  161 return sysfs_emit(buf, "ERROR-UNKNOWN-%ld\n", mem->state); in state_show()
  202 static int memory_block_change_state(struct memory_block *mem, in memory_block_change_state() argument
  207 if (mem->state != from_state_req) in memory_block_change_state()
  211 mem->state = MEM_GOING_OFFLINE; in memory_block_change_state()
  [all …]
|
/kernel/liteos_a/testsuites/unittest/basic/mem/vm/smoke/ |
D | mmap_test_006.cpp |
  42 void *mem = NULL; in Testcase() local
  53 mem = mmap(invalueAddr, len, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, fd, 0); in Testcase()
  54 ICUNIT_GOTO_EQUAL(mem, MAP_FAILED, mem, EXIT); in Testcase()
  57 mem = mmap((void *)INVALID_VADDR, len, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, fd, 0); in Testcase()
  58 ICUNIT_GOTO_EQUAL(mem, MAP_FAILED, mem, EXIT); in Testcase()
  61 mem = mmap((void *)INVALID_VADDR, len, PROT_READ | PROT_WRITE, MAP_ANON | MAP_SHARED, -1, 0); in Testcase()
  62 ICUNIT_GOTO_NOT_EQUAL(mem, MAP_FAILED, mem, EXIT); in Testcase()
  63 ret = munmap(mem, len); in Testcase()
  67 mem = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0); in Testcase()
  68 ICUNIT_GOTO_EQUAL(mem, MAP_FAILED, mem, EXIT); in Testcase()
  [all …]
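Note on the hits above: this LiteOS-A smoke test probes mmap() failure paths (MAP_FIXED at addresses the kernel cannot honour must return MAP_FAILED) and one success path (an anonymous shared mapping that is then unmapped). A minimal standalone sketch of the same pattern follows; it uses plain assert() instead of the ICUNIT_* macros and a misaligned fixed address as the "invalid" case, both of which are assumptions for illustration only.

    #define _DEFAULT_SOURCE
    #include <assert.h>
    #include <stddef.h>
    #include <sys/mman.h>

    int main(void)
    {
        const size_t len = 0x1000;        /* one page, hypothetical length */
        void *bad = (void *)0x1001;       /* not page-aligned: MAP_FIXED must fail */
        void *mem;

        /* A misaligned MAP_FIXED request is rejected (EINVAL on Linux). */
        mem = mmap(bad, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
        assert(mem == MAP_FAILED);

        /* An anonymous shared mapping succeeds and unmaps cleanly. */
        mem = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_SHARED | MAP_ANONYMOUS, -1, 0);
        assert(mem != MAP_FAILED);
        assert(munmap(mem, len) == 0);
        return 0;
    }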
|
/kernel/linux/linux-5.10/drivers/gpu/drm/ttm/ |
D | ttm_agp_backend.c |
  47 struct agp_memory *mem; member
  56 struct agp_memory *mem; in ttm_agp_bind() local
  60 if (agp_be->mem) in ttm_agp_bind()
  63 mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY); in ttm_agp_bind()
  64 if (unlikely(mem == NULL)) in ttm_agp_bind()
  67 mem->page_count = 0; in ttm_agp_bind()
  74 mem->pages[mem->page_count++] = page; in ttm_agp_bind()
  76 agp_be->mem = mem; in ttm_agp_bind()
  78 mem->is_flushed = 1; in ttm_agp_bind()
  79 mem->type = (cached) ? AGP_USER_CACHED_MEMORY : AGP_USER_MEMORY; in ttm_agp_bind()
  [all …]
|
/kernel/linux/linux-5.10/drivers/spi/ |
D | spi-mem.c |
  102 static int spi_check_buswidth_req(struct spi_mem *mem, u8 buswidth, bool tx) in spi_check_buswidth_req() argument
  104 u32 mode = mem->spi->mode; in spi_check_buswidth_req()
  140 bool spi_mem_default_supports_op(struct spi_mem *mem, in spi_mem_default_supports_op() argument
  143 if (spi_check_buswidth_req(mem, op->cmd.buswidth, true)) in spi_mem_default_supports_op()
  147 spi_check_buswidth_req(mem, op->addr.buswidth, true)) in spi_mem_default_supports_op()
  151 spi_check_buswidth_req(mem, op->dummy.buswidth, true)) in spi_mem_default_supports_op()
  155 spi_check_buswidth_req(mem, op->data.buswidth, in spi_mem_default_supports_op()
  196 static bool spi_mem_internal_supports_op(struct spi_mem *mem, in spi_mem_internal_supports_op() argument
  199 struct spi_controller *ctlr = mem->spi->controller; in spi_mem_internal_supports_op()
  202 return ctlr->mem_ops->supports_op(mem, op); in spi_mem_internal_supports_op()
  [all …]
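Note on the hits above: spi_check_buswidth_req() validates each phase's bus width (command, address, dummy, data) against the device's SPI mode bits. One plausible shape of such a check is sketched below as a standalone program; the DUAL/QUAD/OCTAL flag names and values are hypothetical stand-ins, not the constants from <linux/spi/spi.h>, and the exact capability rules are an assumption for illustration.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical mode bits standing in for the kernel's TX/RX width flags. */
    #define TX_DUAL  (1u << 0)
    #define TX_QUAD  (1u << 1)
    #define TX_OCTAL (1u << 2)
    #define RX_DUAL  (1u << 3)
    #define RX_QUAD  (1u << 4)
    #define RX_OCTAL (1u << 5)

    /* Return true if a transfer of the given bus width is allowed by 'mode';
     * a wider capability (quad, octal) is assumed to imply the narrower ones. */
    static bool buswidth_supported(uint32_t mode, uint8_t buswidth, bool tx)
    {
        switch (buswidth) {
        case 1:
            return true; /* single-wire transfers are always possible */
        case 2:
            return tx ? (mode & (TX_DUAL | TX_QUAD | TX_OCTAL)) != 0
                      : (mode & (RX_DUAL | RX_QUAD | RX_OCTAL)) != 0;
        case 4:
            return tx ? (mode & (TX_QUAD | TX_OCTAL)) != 0
                      : (mode & (RX_QUAD | RX_OCTAL)) != 0;
        case 8:
            return tx ? (mode & TX_OCTAL) != 0 : (mode & RX_OCTAL) != 0;
        default:
            return false;
        }
    }

    int main(void)
    {
        uint32_t mode = TX_QUAD | RX_QUAD; /* e.g. a quad-capable device */

        printf("4-wire tx: %d\n", buswidth_supported(mode, 4, true));  /* 1 */
        printf("8-wire rx: %d\n", buswidth_supported(mode, 8, false)); /* 0 */
        return 0;
    }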
|
/kernel/linux/linux-5.10/drivers/firmware/efi/ |
D | cper.c |
  214 static int cper_mem_err_location(struct cper_mem_err_compact *mem, char *msg) in cper_mem_err_location() argument
  223 if (mem->validation_bits & CPER_MEM_VALID_NODE) in cper_mem_err_location()
  224 n += scnprintf(msg + n, len - n, "node: %d ", mem->node); in cper_mem_err_location()
  225 if (mem->validation_bits & CPER_MEM_VALID_CARD) in cper_mem_err_location()
  226 n += scnprintf(msg + n, len - n, "card: %d ", mem->card); in cper_mem_err_location()
  227 if (mem->validation_bits & CPER_MEM_VALID_MODULE) in cper_mem_err_location()
  228 n += scnprintf(msg + n, len - n, "module: %d ", mem->module); in cper_mem_err_location()
  229 if (mem->validation_bits & CPER_MEM_VALID_RANK_NUMBER) in cper_mem_err_location()
  230 n += scnprintf(msg + n, len - n, "rank: %d ", mem->rank); in cper_mem_err_location()
  231 if (mem->validation_bits & CPER_MEM_VALID_BANK) in cper_mem_err_location()
  [all …]
|
/kernel/linux/linux-5.10/mm/ |
D | cma_debug.c |
  71 static void cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem) in cma_add_to_cma_mem_list() argument
  74 hlist_add_head(&mem->node, &cma->mem_head); in cma_add_to_cma_mem_list()
  80 struct cma_mem *mem = NULL; in cma_get_entry_from_list() local
  84 mem = hlist_entry(cma->mem_head.first, struct cma_mem, node); in cma_get_entry_from_list()
  85 hlist_del_init(&mem->node); in cma_get_entry_from_list()
  89 return mem; in cma_get_entry_from_list()
  94 struct cma_mem *mem = NULL; in cma_free_mem() local
  97 mem = cma_get_entry_from_list(cma); in cma_free_mem()
  98 if (mem == NULL) in cma_free_mem()
  101 if (mem->n <= count) { in cma_free_mem()
  [all …]
|
/kernel/linux/linux-5.10/drivers/char/hw_random/ |
D | intel-rng.c |
  155 static inline u8 hwstatus_get(void __iomem *mem) in hwstatus_get() argument
  157 return readb(mem + INTEL_RNG_HW_STATUS); in hwstatus_get()
  160 static inline u8 hwstatus_set(void __iomem *mem, in hwstatus_set() argument
  163 writeb(hw_status, mem + INTEL_RNG_HW_STATUS); in hwstatus_set()
  164 return hwstatus_get(mem); in hwstatus_set()
  169 void __iomem *mem = (void __iomem *)rng->priv; in intel_rng_data_present() local
  173 data = !!(readb(mem + INTEL_RNG_STATUS) & in intel_rng_data_present()
  184 void __iomem *mem = (void __iomem *)rng->priv; in intel_rng_data_read() local
  186 *data = readb(mem + INTEL_RNG_DATA); in intel_rng_data_read()
  193 void __iomem *mem = (void __iomem *)rng->priv; in intel_rng_init() local
  [all …]
|
/kernel/linux/linux-5.10/tools/perf/ |
D | builtin-mem.c |
  40 struct perf_mem *mem = *(struct perf_mem **)opt->value; in parse_record_events() local
  49 mem->operation = 0; in parse_record_events()
  61 static int __cmd_record(int argc, const char **argv, struct perf_mem *mem) in __cmd_record() argument
  68 OPT_CALLBACK('e', "event", &mem, "event", in __cmd_record()
  89 if (mem->operation & MEM_OPERATION_LOAD) in __cmd_record()
  92 if (mem->operation & MEM_OPERATION_STORE) in __cmd_record()
  100 if (mem->phys_addr) in __cmd_record()
  148 struct perf_mem *mem = container_of(tool, struct perf_mem, tool); in dump_raw_samples() local
  158 if (al.filtered || (mem->hide_unresolved && al.sym == NULL)) in dump_raw_samples()
  164 if (mem->phys_addr) { in dump_raw_samples()
  [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvif/ |
D | mem.c |
  29 struct nvif_mem *mem) in nvif_mem_ctor_map() argument
  31 int ret = nvif_mem_ctor(mmu, name, mmu->mem, NVIF_MEM_MAPPABLE | type, in nvif_mem_ctor_map()
  32 0, size, NULL, 0, mem); in nvif_mem_ctor_map()
  34 ret = nvif_object_map(&mem->object, NULL, 0); in nvif_mem_ctor_map()
  36 nvif_mem_dtor(mem); in nvif_mem_ctor_map()
  42 nvif_mem_dtor(struct nvif_mem *mem) in nvif_mem_dtor() argument
  44 nvif_object_dtor(&mem->object); in nvif_mem_dtor()
  50 struct nvif_mem *mem) in nvif_mem_ctor_type() argument
  56 mem->object.client = NULL; in nvif_mem_ctor_type()
  73 args, sizeof(*args) + argc, &mem->object); in nvif_mem_ctor_type()
  [all …]
|
/kernel/linux/linux-5.10/drivers/media/platform/mtk-vcodec/ |
D | mtk_vcodec_util.c |
  37 struct mtk_vcodec_mem *mem) in mtk_vcodec_mem_alloc() argument
  39 unsigned long size = mem->size; in mtk_vcodec_mem_alloc()
  43 mem->va = dma_alloc_coherent(dev, size, &mem->dma_addr, GFP_KERNEL); in mtk_vcodec_mem_alloc()
  44 if (!mem->va) { in mtk_vcodec_mem_alloc()
  50 mtk_v4l2_debug(3, "[%d] - va = %p", ctx->id, mem->va); in mtk_vcodec_mem_alloc()
  52 (unsigned long)mem->dma_addr); in mtk_vcodec_mem_alloc()
  60 struct mtk_vcodec_mem *mem) in mtk_vcodec_mem_free() argument
  62 unsigned long size = mem->size; in mtk_vcodec_mem_free()
  66 if (!mem->va) { in mtk_vcodec_mem_free()
  72 mtk_v4l2_debug(3, "[%d] - va = %p", ctx->id, mem->va); in mtk_vcodec_mem_free()
  [all …]
|