| /kernel/linux/linux-5.10/drivers/gpu/drm/i915/ |
| D | intel_memory_region.c |
      34: intel_memory_region_free_pages(struct intel_memory_region *mem,    (mem: argument)
      41:     size += i915_buddy_block_size(&mem->mm, block);
      42:     i915_buddy_free(&mem->mm, block);
      50: __intel_memory_region_put_pages_buddy(struct intel_memory_region *mem,    (mem: argument)
      53:     mutex_lock(&mem->mm_lock);
      54:     mem->avail += intel_memory_region_free_pages(mem, blocks);
      55:     mutex_unlock(&mem->mm_lock);
      69: __intel_memory_region_get_pages_buddy(struct intel_memory_region *mem,    (mem: argument)
      77:     GEM_BUG_ON(!IS_ALIGNED(size, mem->mm.chunk_size));
      81:     min_order = ilog2(mem->min_page_size) -
      [all …]
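
The last hit above is the start of a minimum-order computation for the i915 buddy allocator (the line is truncated in the search result). As a rough illustration of the arithmetic only, not the driver's actual code, the smallest order whose block covers the region's minimum page size is the difference of the two logarithms; the names min_page_size and chunk_size are taken from the excerpt, everything else is assumed:

    #include <linux/log2.h>
    #include <linux/types.h>

    /*
     * Sketch only: smallest buddy order that can back an allocation of
     * min_page_size, assuming both values are powers of two and that
     * chunk_size is the order-0 block size.
     */
    static unsigned int example_min_order(u64 min_page_size, u64 chunk_size)
    {
            return ilog2(min_page_size) - ilog2(chunk_size);
    }

For example, a 64 KiB minimum page size over 4 KiB chunks gives order 4, i.e. blocks of 16 chunks.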
|
| D | intel_region_lmem.c |
      12: static int init_fake_lmem_bar(struct intel_memory_region *mem)    (mem: argument)
      14:     struct drm_i915_private *i915 = mem->i915;
      21:     mem->fake_mappable.start = 0;
      22:     mem->fake_mappable.size = resource_size(&mem->region);
      23:     mem->fake_mappable.color = I915_COLOR_UNEVICTABLE;
      25:     ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable);
      29:     mem->remap_addr = dma_map_resource(&i915->drm.pdev->dev,
      30:                                        mem->region.start,
      31:                                        mem->fake_mappable.size,
      34:     if (dma_mapping_error(&i915->drm.pdev->dev, mem->remap_addr)) {
      [all …]
|
| /kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/ |
| D | rxe_mr.c |
      27: int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length)    (mem: argument)
      29:     switch (mem->type) {
      35:     if (iova < mem->iova ||
      36:         length > mem->length ||
      37:         iova > mem->iova + mem->length - length)
      50: static void rxe_mem_init(int access, struct rxe_mem *mem)    (mem: argument)
      52:     u32 lkey = mem->pelem.index << 8 | rxe_get_key();
      55:     mem->ibmr.lkey = lkey;
      56:     mem->ibmr.rkey = rkey;
      57:     mem->state = RXE_MEM_STATE_INVALID;
      [all …]
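
The mem_check_range() hits show the usual overflow-safe bounds test for a registered memory region. A standalone sketch of the same idea, with illustrative names rather than the rxe structures:

    #include <linux/types.h>

    /*
     * Return true if [iova, iova + length) lies inside [start, start + len).
     * The comparison is arranged so that iova + length is never computed;
     * that sum is attacker-influenced and could wrap around, whereas
     * start + len - length cannot underflow once length <= len has passed.
     */
    static bool range_ok(u64 start, size_t len, u64 iova, size_t length)
    {
            if (iova < start || length > len)
                    return false;
            return iova <= start + len - length;
    }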
|
| /kernel/linux/linux-5.10/arch/powerpc/mm/book3s64/ |
| D | iommu_api.c |
      60:     struct mm_iommu_table_group_mem_t *mem, *mem2;    (mem: local in mm_iommu_do_alloc())
      73:     mem = kzalloc(sizeof(*mem), GFP_KERNEL);
      74:     if (!mem) {
      80:     mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));
      81:     mem->dev_hpa = dev_hpa;
      84:     mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA;
      91:     mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));
      92:     mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0])));
      93:     if (!mem->hpas) {
      94:         kfree(mem);
      [all …]
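
The allocation pattern in mm_iommu_do_alloc() (kzalloc for the small descriptor, vzalloc(array_size(...)) for a potentially large table, kfree of the descriptor on failure) is a common one. A minimal, self-contained sketch of that pattern with hypothetical names:

    #include <linux/slab.h>
    #include <linux/vmalloc.h>
    #include <linux/overflow.h>
    #include <linux/types.h>

    struct example_desc {
            unsigned long nr;
            u64 *table;
    };

    static struct example_desc *example_alloc(unsigned long entries)
    {
            struct example_desc *d = kzalloc(sizeof(*d), GFP_KERNEL);

            if (!d)
                    return NULL;

            /* array_size() saturates on overflow instead of wrapping. */
            d->table = vzalloc(array_size(entries, sizeof(d->table[0])));
            if (!d->table) {
                    kfree(d);
                    return NULL;
            }
            d->nr = entries;
            return d;
    }

    static void example_free(struct example_desc *d)
    {
            if (!d)
                    return;
            vfree(d->table);
            kfree(d);
    }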
|
| /kernel/linux/linux-4.19/drivers/infiniband/sw/rxe/ |
| D | rxe_mr.c |
      54: int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length)    (mem: argument)
      56:     switch (mem->type) {
      62:     if (iova < mem->iova ||
      63:         length > mem->length ||
      64:         iova > mem->iova + mem->length - length)
      77: static void rxe_mem_init(int access, struct rxe_mem *mem)    (mem: argument)
      79:     u32 lkey = mem->pelem.index << 8 | rxe_get_key();
      82:     if (mem->pelem.pool->type == RXE_TYPE_MR) {
      83:         mem->ibmr.lkey = lkey;
      84:         mem->ibmr.rkey = rkey;
      [all …]
|
| /kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
| D | mem.c |
      23: #include "mem.h"
      35:     struct page **mem;    (mem: member)
      57:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_addr())
      58:     if (mem->pages == 1 && mem->mem)
      59:         return mem->dma[0];
      73:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_map_dma())
      75:         .memory = &mem->memory,
      77:         .dma = mem->dma,
      85:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_dtor())
      86:     if (mem->mem) {
      [all …]
|
| /kernel/linux/linux-4.19/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
| D | mem.c |
      23: #include "mem.h"
      35:     struct page **mem;    (mem: member)
      57:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_addr())
      58:     if (mem->pages == 1 && mem->mem)
      59:         return mem->dma[0];
      73:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_map_dma())
      75:         .memory = &mem->memory,
      77:         .dma = mem->dma,
      85:     struct nvkm_mem *mem = nvkm_mem(memory);    (mem: local in nvkm_mem_dtor())
      86:     if (mem->mem) {
      [all …]
|
| /kernel/linux/linux-4.19/arch/powerpc/mm/ |
| D | mmu_context_iommu.c |
      132:     struct mm_iommu_table_group_mem_t *mem;    (mem: local in mm_iommu_get())
      141:     list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list,
      143:         if ((mem->ua == ua) && (mem->entries == entries)) {
      144:             ++mem->used;
      145:             *pmem = mem;
      150:         if ((mem->ua < (ua + (entries << PAGE_SHIFT))) &&
      151:             (ua < (mem->ua +
      152:                    (mem->entries << PAGE_SHIFT)))) {
      165:     mem = kzalloc(sizeof(*mem), GFP_KERNEL);
      166:     if (!mem) {
      [all …]
|
| /kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/runtime/isp_param/src/ |
| D | isp_param.c |
      27:     enum ia_css_isp_memories mem,    (mem: argument of ia_css_isp_param_set_mem_init())
      30:     mem_init->params[pclass][mem].address = address;
      31:     mem_init->params[pclass][mem].size = (uint32_t)size;
      38:     enum ia_css_isp_memories mem,    (mem: argument of ia_css_isp_param_set_css_mem_init())
      41:     mem_init->params[pclass][mem].address = address;
      42:     mem_init->params[pclass][mem].size = (uint32_t)size;
      49:     enum ia_css_isp_memories mem,    (mem: argument of ia_css_isp_param_set_isp_mem_init())
      52:     mem_init->params[pclass][mem].address = address;
      53:     mem_init->params[pclass][mem].size = (uint32_t)size;
      61:     enum ia_css_isp_memories mem)    (mem: argument of ia_css_isp_param_get_mem_init())
      [all …]
|
| /kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/ |
| D | nouveau_mem.c |
      36: nouveau_mem_map(struct nouveau_mem *mem,    (mem: argument)
      54:     args.nv50.kind = mem->kind;
      55:     args.nv50.comp = mem->comp;
      62:     if (mem->mem.type & NVIF_MEM_VRAM)
      68:     args.gf100.kind = mem->kind;
      78:     ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,
      79:                        &mem->mem, 0);
      85: nouveau_mem_fini(struct nouveau_mem *mem)    (mem: argument)
      87:     nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);
      88:     nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);
      [all …]
|
| /kernel/linux/linux-4.19/drivers/gpu/drm/nouveau/ |
| D | nouveau_mem.c |
      36: nouveau_mem_map(struct nouveau_mem *mem,    (mem: argument)
      54:     args.nv50.kind = mem->kind;
      55:     args.nv50.comp = mem->comp;
      62:     if (mem->mem.type & NVIF_MEM_VRAM)
      68:     args.gf100.kind = mem->kind;
      78:     ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,
      79:                        &mem->mem, 0);
      85: nouveau_mem_fini(struct nouveau_mem *mem)    (mem: argument)
      87:     nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);
      88:     nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);
      [all …]
|
| /kernel/linux/linux-5.10/drivers/pci/endpoint/ |
| D | pci-epc-mem.c |
      17:  * @mem: address space of the endpoint controller
      20:  * Reimplement get_order() for mem->page_size since the generic get_order
      23: static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size)    (mem: argument)
      26:     unsigned int page_shift = ilog2(mem->window.page_size);
      51:     struct pci_epc_mem *mem = NULL;    (mem: local in pci_epc_multi_mem_init())
      77:     mem = kzalloc(sizeof(*mem), GFP_KERNEL);
      78:     if (!mem) {
      87:         kfree(mem);
      92:     mem->window.phys_base = windows[i].phys_base;
      93:     mem->window.size = windows[i].size;
      [all …]
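
The comment at lines 17-20 explains why the driver reimplements get_order(): the generic helper only works relative to PAGE_SIZE, while an endpoint memory window may use a different page size. A sketch of such a helper, assuming the window page size is a power of two and size > 0; this is illustrative and not necessarily the driver's exact code:

    #include <linux/bitops.h>
    #include <linux/log2.h>

    /* Order of the smallest power-of-two multiple of page_size >= size. */
    static int example_mem_get_order(size_t page_size, size_t size)
    {
            unsigned int page_shift = ilog2(page_size);

            size--;
            size >>= page_shift;
    #if BITS_PER_LONG == 32
            return fls(size);
    #else
            return fls64(size);
    #endif
    }

With a 64 KiB window page size, a 64 KiB request gives order 0 and a 65 KiB request gives order 1.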
|
| /kernel/linux/linux-4.19/drivers/pci/endpoint/ |
| D | pci-epc-mem.c |
      17:  * @mem: address space of the endpoint controller
      20:  * Reimplement get_order() for mem->page_size since the generic get_order
      23: static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size)    (mem: argument)
      26:     unsigned int page_shift = ilog2(mem->page_size);
      52:     struct pci_epc_mem *mem;    (mem: local in __pci_epc_mem_init())
      65:     mem = kzalloc(sizeof(*mem), GFP_KERNEL);
      66:     if (!mem) {
      77:     mem->bitmap = bitmap;
      78:     mem->phys_base = phys_base;
      79:     mem->page_size = page_size;
      [all …]
|
| /kernel/linux/linux-5.10/drivers/infiniband/sw/siw/ |
| D | siw_mem.c |
      36:     siw_dbg_mem(m, "new MEM object\n");    (in siw_mem_add())
      50:     struct siw_mem *mem;    (mem: local in siw_mem_id2obj())
      53:     mem = xa_load(&sdev->mem_xa, stag_index);
      54:     if (likely(mem && kref_get_unless_zero(&mem->ref))) {
      56:         return mem;
      93:     struct siw_mem *mem = kzalloc(sizeof(*mem), GFP_KERNEL);    (mem: local in siw_mr_add_mem())
      97:     if (!mem)
      100:     mem->mem_obj = mem_obj;
      101:     mem->stag_valid = 0;
      102:     mem->sdev = sdev;
      [all …]
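
siw_mem_id2obj() illustrates the standard lookup-and-take-reference idiom over an XArray: look the object up under RCU protection and only accept it if its refcount can still be raised. A generic sketch of that idiom; the structure and xarray names are placeholders, not the siw types:

    #include <linux/xarray.h>
    #include <linux/kref.h>
    #include <linux/rcupdate.h>

    struct example_obj {
            struct kref ref;
            /* ... payload ... */
    };

    static struct example_obj *example_lookup(struct xarray *xa, unsigned long id)
    {
            struct example_obj *obj;

            rcu_read_lock();
            obj = xa_load(xa, id);
            /* A zero refcount means the object is already being torn down. */
            if (obj && !kref_get_unless_zero(&obj->ref))
                    obj = NULL;
            rcu_read_unlock();

            return obj;
    }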
|
| /kernel/linux/linux-4.19/kernel/dma/ |
| D | coherent.c |
      33:     struct dma_coherent_mem *mem)    (mem: argument of dma_get_device_base())
      35:     if (mem->use_dev_dma_pfn_offset)
      36:         return (mem->pfn_base - dev->dma_pfn_offset) << PAGE_SHIFT;
      38:     return mem->device_base;
      43:     struct dma_coherent_mem **mem)    (mem: argument of dma_init_coherent_memory())
      79:     *mem = dma_mem;
      89: static void dma_release_coherent_memory(struct dma_coherent_mem *mem)    (mem: argument)
      91:     if (!mem)
      94:     memunmap(mem->virt_base);
      95:     kfree(mem->bitmap);
      [all …]
|
| /kernel/linux/linux-5.10/drivers/media/v4l2-core/ |
| D | videobuf-dma-contig.c |
      38:     struct videobuf_dma_contig_memory *mem,    (mem: argument of __videobuf_dc_alloc())
      41:     mem->size = size;
      42:     mem->vaddr = dma_alloc_coherent(dev, mem->size,
      43:                                     &mem->dma_handle, flags);
      45:     if (!mem->vaddr) {
      46:         dev_err(dev, "memory alloc size %ld failed\n", mem->size);
      50:     dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);
      56:     struct videobuf_dma_contig_memory *mem)    (mem: argument of __videobuf_dc_free())
      58:     dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle);
      60:     mem->vaddr = NULL;
      [all …]
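
__videobuf_dc_alloc() and __videobuf_dc_free() are thin wrappers around the coherent DMA API. A minimal, self-contained pairing of those calls, independent of the videobuf structures (the device pointer and size come from the caller):

    #include <linux/dma-mapping.h>
    #include <linux/string.h>
    #include <linux/errno.h>

    static int example_dma_roundtrip(struct device *dev, size_t size)
    {
            dma_addr_t dma_handle;
            void *vaddr;

            vaddr = dma_alloc_coherent(dev, size, &dma_handle, GFP_KERNEL);
            if (!vaddr)
                    return -ENOMEM;

            /* vaddr is the CPU mapping; dma_handle is what the device sees. */
            memset(vaddr, 0xa5, size);

            dma_free_coherent(dev, size, vaddr, dma_handle);
            return 0;
    }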
|
| D | videobuf-vmalloc.c |
      72:     struct videobuf_vmalloc_memory *mem;    (mem: local in videobuf_vm_close())
      88:     mem = q->bufs[i]->priv;
      89:     if (mem) {
      96:         MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
      102:             __func__, i, mem->vaddr);
      104:         vfree(mem->vaddr);
      105:         mem->vaddr = NULL;
      137:     struct videobuf_vmalloc_memory *mem;    (mem: local in __videobuf_alloc_vb())
      140:     vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
      144:     mem = vb->priv = ((char *)vb) + size;
      [all …]
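
__videobuf_alloc_vb() carves out the buffer object and its private memory descriptor in a single kzalloc, then points vb->priv just past the first `size` bytes. A stripped-down sketch of that single-allocation layout trick; the struct names here are made up, and `size` must be at least sizeof(*vb):

    #include <linux/slab.h>

    struct example_priv {
            void *vaddr;
    };

    struct example_buf {
            int index;
            void *priv;
    };

    static struct example_buf *example_alloc(size_t size)
    {
            struct example_buf *vb;
            struct example_priv *mem;

            /* One allocation: the caller-visible object followed by the
             * private area appended right after the first `size` bytes. */
            vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
            if (!vb)
                    return NULL;

            mem = vb->priv = (char *)vb + size;
            return vb;
    }

Freeing is then a single kfree(vb), since both pieces live in the same block.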
|
| /kernel/linux/linux-4.19/drivers/media/v4l2-core/ |
| D | videobuf-dma-contig.c |
      41:     struct videobuf_dma_contig_memory *mem,    (mem: argument of __videobuf_dc_alloc())
      44:     mem->size = size;
      45:     mem->vaddr = dma_alloc_coherent(dev, mem->size,
      46:                                     &mem->dma_handle, flags);
      48:     if (!mem->vaddr) {
      49:         dev_err(dev, "memory alloc size %ld failed\n", mem->size);
      53:     dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);
      59:     struct videobuf_dma_contig_memory *mem)    (mem: argument of __videobuf_dc_free())
      61:     dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle);
      63:     mem->vaddr = NULL;
      [all …]
|
| D | videobuf-vmalloc.c |
      75:     struct videobuf_vmalloc_memory *mem;    (mem: local in videobuf_vm_close())
      91:     mem = q->bufs[i]->priv;
      92:     if (mem) {
      99:         MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
      105:             __func__, i, mem->vaddr);
      107:         vfree(mem->vaddr);
      108:         mem->vaddr = NULL;
      140:     struct videobuf_vmalloc_memory *mem;    (mem: local in __videobuf_alloc_vb())
      143:     vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
      147:     mem = vb->priv = ((char *)vb) + size;
      [all …]
|
| /kernel/linux/linux-5.10/kernel/dma/ |
| D | coherent.c |
      33:     struct dma_coherent_mem *mem)    (mem: argument of dma_get_device_base())
      35:     if (mem->use_dev_dma_pfn_offset)
      36:         return phys_to_dma(dev, PFN_PHYS(mem->pfn_base));
      37:     return mem->device_base;
      42:     struct dma_coherent_mem **mem)    (mem: argument of dma_init_coherent_memory())
      77:     *mem = dma_mem;
      87: static void dma_release_coherent_memory(struct dma_coherent_mem *mem)    (mem: argument)
      89:     if (!mem)
      92:     memunmap(mem->virt_base);
      93:     kfree(mem->bitmap);
      [all …]
|
| /kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_amdkfd_gpuvm.c |
      71:     struct kgd_mem *mem)    (mem: argument of check_if_add_bo_to_vm())
      75:     list_for_each_entry(entry, &mem->bo_va_list, bo_list)
      89:     uint64_t mem;    (mem: local in amdgpu_amdkfd_gpuvm_init_mem_limits())
      92:     mem = si.totalram - si.totalhigh;
      93:     mem *= si.mem_unit;
      96:     kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4);
      97:     kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3);
      390: static uint64_t get_pte_flags(struct amdgpu_device *adev, struct kgd_mem *mem)    (mem: argument)
      392:     struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev);
      393:     bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT;
      [all …]
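
amdgpu_amdkfd_gpuvm_init_mem_limits() derives its limits from si_meminfo(): total low RAM in bytes, then fixed fractions of it (mem - (mem >> 4) is 15/16, (mem >> 1) - (mem >> 3) is 3/8). A small self-contained sketch of the same arithmetic; the limit names below are placeholders:

    #include <linux/mm.h>
    #include <linux/types.h>

    static void example_init_mem_limits(u64 *system_limit, u64 *ttm_limit)
    {
            struct sysinfo si;
            u64 mem;

            si_meminfo(&si);
            mem = si.totalram - si.totalhigh;   /* low memory, in mem_unit units */
            mem *= si.mem_unit;                 /* ... converted to bytes */

            *system_limit = mem - (mem >> 4);       /* 15/16 of low RAM */
            *ttm_limit = (mem >> 1) - (mem >> 3);   /* 3/8 of low RAM */
    }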
|
| /kernel/linux/linux-4.19/drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_amdkfd_gpuvm.c |
      81:     struct kgd_mem *mem)    (mem: argument of check_if_add_bo_to_vm())
      85:     list_for_each_entry(entry, &mem->bo_va_list, bo_list)
      99:     uint64_t mem;    (mem: local in amdgpu_amdkfd_gpuvm_init_mem_limits())
      102:     mem = si.totalram - si.totalhigh;
      103:     mem *= si.mem_unit;
      106:     kfd_mem_limit.max_system_mem_limit = (mem >> 1) - (mem >> 3);
      107:     kfd_mem_limit.max_userptr_mem_limit = mem - (mem >> 2);
      443: static int add_bo_to_vm(struct amdgpu_device *adev, struct kgd_mem *mem,    (mem: argument)
      450:     struct amdgpu_bo *bo = mem->bo;
      451:     uint64_t va = mem->va;
      [all …]
|
| /kernel/linux/linux-5.10/mm/ |
| D | cma_debug.c |
      71: static void cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem)    (mem: argument)
      74:     hlist_add_head(&mem->node, &cma->mem_head);
      80:     struct cma_mem *mem = NULL;    (mem: local in cma_get_entry_from_list())
      84:     mem = hlist_entry(cma->mem_head.first, struct cma_mem, node);
      85:     hlist_del_init(&mem->node);
      89:     return mem;
      94:     struct cma_mem *mem = NULL;    (mem: local in cma_free_mem())
      97:     mem = cma_get_entry_from_list(cma);
      98:     if (mem == NULL)
      101:     if (mem->n <= count) {
      [all …]
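
The cma_debug helpers keep test allocations on an hlist protected by a lock: cma_add_to_cma_mem_list() pushes at the head, cma_get_entry_from_list() pops the first entry. A minimal sketch of the same push/pop pattern on an hlist; the types and lock below are illustrative, not the cma structures:

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct example_mem {
            struct hlist_node node;
            unsigned long count;
    };

    static HLIST_HEAD(example_head);
    static DEFINE_SPINLOCK(example_lock);

    static void example_push(struct example_mem *m)
    {
            spin_lock(&example_lock);
            hlist_add_head(&m->node, &example_head);
            spin_unlock(&example_lock);
    }

    static struct example_mem *example_pop(void)
    {
            struct example_mem *m = NULL;

            spin_lock(&example_lock);
            if (!hlist_empty(&example_head)) {
                    m = hlist_entry(example_head.first, struct example_mem, node);
                    hlist_del_init(&m->node);
            }
            spin_unlock(&example_lock);
            return m;
    }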
|
| /kernel/linux/linux-4.19/mm/ |
| D | cma_debug.c |
      73: static void cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem)    (mem: argument)
      76:     hlist_add_head(&mem->node, &cma->mem_head);
      82:     struct cma_mem *mem = NULL;    (mem: local in cma_get_entry_from_list())
      86:     mem = hlist_entry(cma->mem_head.first, struct cma_mem, node);
      87:     hlist_del_init(&mem->node);
      91:     return mem;
      96:     struct cma_mem *mem = NULL;    (mem: local in cma_free_mem())
      99:     mem = cma_get_entry_from_list(cma);
      100:     if (mem == NULL)
      103:     if (mem->n <= count) {
      [all …]
|
| /kernel/linux/linux-5.10/drivers/spi/ |
| D | spi-mem.c |
      11: #include <linux/spi/spi-mem.h>
      102: static int spi_check_buswidth_req(struct spi_mem *mem, u8 buswidth, bool tx)    (mem: argument)
      104:     u32 mode = mem->spi->mode;
      140: bool spi_mem_default_supports_op(struct spi_mem *mem,    (mem: argument)
      143:     if (spi_check_buswidth_req(mem, op->cmd.buswidth, true))
      147:         spi_check_buswidth_req(mem, op->addr.buswidth, true))
      151:         spi_check_buswidth_req(mem, op->dummy.buswidth, true))
      155:         spi_check_buswidth_req(mem, op->data.buswidth,
      196: static bool spi_mem_internal_supports_op(struct spi_mem *mem,    (mem: argument)
      199:     struct spi_controller *ctlr = mem->spi->controller;
      [all …]
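
spi_mem_default_supports_op() validates each phase's bus width against the SPI device mode. From a caller's point of view those checks sit behind spi_mem_supports_op() and spi_mem_exec_op(); a short usage sketch reading a JEDEC ID, assuming a struct spi_mem *mem handle is already available (the 0x9f opcode and single-bit widths are just an example):

    #include <linux/spi/spi-mem.h>
    #include <linux/errno.h>

    static int example_read_jedec_id(struct spi_mem *mem, u8 *id, size_t len)
    {
            struct spi_mem_op op = SPI_MEM_OP(SPI_MEM_OP_CMD(0x9f, 1),
                                              SPI_MEM_OP_NO_ADDR,
                                              SPI_MEM_OP_NO_DUMMY,
                                              SPI_MEM_OP_DATA_IN(len, id, 1));

            if (!spi_mem_supports_op(mem, &op))
                    return -EOPNOTSUPP;

            return spi_mem_exec_op(mem, &op);
    }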
|