
Searched refs:memory (Results 1 – 25 of 225) sorted by relevance


/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
base.c
32 #define nvkm_instobj(p) container_of((p), struct nvkm_instobj, memory)
35 struct nvkm_memory memory; member
44 nvkm_instobj_target(struct nvkm_memory *memory) in nvkm_instobj_target() argument
46 memory = nvkm_instobj(memory)->parent; in nvkm_instobj_target()
47 return nvkm_memory_target(memory); in nvkm_instobj_target()
51 nvkm_instobj_addr(struct nvkm_memory *memory) in nvkm_instobj_addr() argument
53 memory = nvkm_instobj(memory)->parent; in nvkm_instobj_addr()
54 return nvkm_memory_addr(memory); in nvkm_instobj_addr()
58 nvkm_instobj_size(struct nvkm_memory *memory) in nvkm_instobj_size() argument
60 memory = nvkm_instobj(memory)->parent; in nvkm_instobj_size()
[all …]
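
The base.c hits above show the kernel's container_of downcast idiom: struct nvkm_instobj embeds a struct nvkm_memory, and each accessor recovers the outer object from a pointer to the embedded member, then forwards the call to the parent memory. A minimal userspace sketch of the same idiom (type and field names are hypothetical, not nouveau's):

    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct memory {                        /* base object, embedded in wrappers */
        unsigned long long addr;
    };

    struct instobj {                       /* wrapper embedding the base object */
        struct memory memory;
        struct memory *parent;             /* delegation target */
    };

    /* Recover the wrapper from the embedded member, then forward the
     * query to the parent, as nvkm_instobj_addr() does above. */
    static unsigned long long instobj_addr(struct memory *memory)
    {
        return container_of(memory, struct instobj, memory)->parent->addr;
    }

    int main(void)
    {
        struct memory backing = { .addr = 0x1000 };
        struct instobj iobj = { .parent = &backing };
        printf("addr = %#llx\n", instobj_addr(&iobj.memory));
        return 0;
    }
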
nv50.c
42 #define nv50_instobj(p) container_of((p), struct nv50_instobj, memory)
45 struct nvkm_memory memory; member
53 nv50_instobj_target(struct nvkm_memory *memory) in nv50_instobj_target() argument
59 nv50_instobj_addr(struct nvkm_memory *memory) in nv50_instobj_addr() argument
61 return nv50_instobj(memory)->mem->offset; in nv50_instobj_addr()
65 nv50_instobj_size(struct nvkm_memory *memory) in nv50_instobj_size() argument
67 return (u64)nv50_instobj(memory)->mem->size << NVKM_RAM_MM_SHIFT; in nv50_instobj_size()
71 nv50_instobj_boot(struct nvkm_memory *memory, struct nvkm_vm *vm) in nv50_instobj_boot() argument
73 struct nv50_instobj *iobj = nv50_instobj(memory); in nv50_instobj_boot()
76 u64 size = nvkm_memory_size(memory); in nv50_instobj_boot()
[all …]
nv04.c
38 #define nv04_instobj(p) container_of((p), struct nv04_instobj, memory)
41 struct nvkm_memory memory; member
47 nv04_instobj_target(struct nvkm_memory *memory) in nv04_instobj_target() argument
53 nv04_instobj_addr(struct nvkm_memory *memory) in nv04_instobj_addr() argument
55 return nv04_instobj(memory)->node->offset; in nv04_instobj_addr()
59 nv04_instobj_size(struct nvkm_memory *memory) in nv04_instobj_size() argument
61 return nv04_instobj(memory)->node->length; in nv04_instobj_size()
65 nv04_instobj_acquire(struct nvkm_memory *memory) in nv04_instobj_acquire() argument
67 struct nv04_instobj *iobj = nv04_instobj(memory); in nv04_instobj_acquire()
73 nv04_instobj_release(struct nvkm_memory *memory) in nv04_instobj_release() argument
[all …]
nv40.c
40 #define nv40_instobj(p) container_of((p), struct nv40_instobj, memory)
43 struct nvkm_memory memory; member
49 nv40_instobj_target(struct nvkm_memory *memory) in nv40_instobj_target() argument
55 nv40_instobj_addr(struct nvkm_memory *memory) in nv40_instobj_addr() argument
57 return nv40_instobj(memory)->node->offset; in nv40_instobj_addr()
61 nv40_instobj_size(struct nvkm_memory *memory) in nv40_instobj_size() argument
63 return nv40_instobj(memory)->node->length; in nv40_instobj_size()
67 nv40_instobj_acquire(struct nvkm_memory *memory) in nv40_instobj_acquire() argument
69 struct nv40_instobj *iobj = nv40_instobj(memory); in nv40_instobj_acquire()
74 nv40_instobj_release(struct nvkm_memory *memory) in nv40_instobj_release() argument
[all …]
gk20a.c
53 struct nvkm_memory memory; member
60 #define gk20a_instobj(p) container_of((p), struct gk20a_instobj, memory)
117 gk20a_instobj_target(struct nvkm_memory *memory) in gk20a_instobj_target() argument
123 gk20a_instobj_addr(struct nvkm_memory *memory) in gk20a_instobj_addr() argument
125 return gk20a_instobj(memory)->mem.offset; in gk20a_instobj_addr()
129 gk20a_instobj_size(struct nvkm_memory *memory) in gk20a_instobj_size() argument
131 return (u64)gk20a_instobj(memory)->mem.size << 12; in gk20a_instobj_size()
146 imem->vaddr_use -= nvkm_memory_size(&obj->base.memory); in gk20a_instobj_iommu_recycle_vaddr()
169 gk20a_instobj_acquire_dma(struct nvkm_memory *memory) in gk20a_instobj_acquire_dma() argument
171 struct gk20a_instobj *node = gk20a_instobj(memory); in gk20a_instobj_acquire_dma()
[all …]
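
gk20a_instobj_size() above keeps the size in page units and shifts by 12 (4 KiB pages) to return bytes; nv50.c does the same through NVKM_RAM_MM_SHIFT. The cast to u64 before the shift matters, as this sketch shows:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                  /* 4 KiB pages, matching the << 12 above */

    /* Widen to 64 bits before shifting: a page count of 0x100000
     * (4 GiB worth of pages) would overflow a 32-bit byte count. */
    static uint64_t pages_to_bytes(uint32_t npages)
    {
        return (uint64_t)npages << PAGE_SHIFT;
    }

    int main(void)
    {
        printf("%llu bytes\n", (unsigned long long)pages_to_bytes(0x100000));
        return 0;
    }
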
/drivers/gpu/drm/nouveau/nvkm/core/
memory.c
29 struct nvkm_memory *memory) in nvkm_memory_ctor() argument
31 memory->func = func; in nvkm_memory_ctor()
37 struct nvkm_memory *memory = *pmemory; in nvkm_memory_del() local
38 if (memory && !WARN_ON(!memory->func)) { in nvkm_memory_del()
39 if (memory->func->dtor) in nvkm_memory_del()
40 *pmemory = memory->func->dtor(memory); in nvkm_memory_del()
52 struct nvkm_memory *memory; in nvkm_memory_new() local
58 ret = nvkm_instobj_new(imem, size, align, zero, &memory); in nvkm_memory_new()
62 *pmemory = memory; in nvkm_memory_new()
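
The memory.c hits sketch the nvkm object lifecycle: the constructor binds a per-backend function table, and deletion dispatches through func->dtor, freeing whatever the dtor hands back. A compact, self-contained version of that ops-table pattern (simplified; the real nvkm_memory carries more state):

    #include <stdlib.h>

    struct memory;

    struct memory_func {                   /* per-backend operations table */
        void *(*dtor)(struct memory *);
    };

    struct memory {
        const struct memory_func *func;
    };

    static void memory_ctor(const struct memory_func *func, struct memory *memory)
    {
        memory->func = func;               /* bind the ops table at construction */
    }

    /* The dtor hands back the allocation to free, mirroring how
     * nvkm_memory_del() frees whatever memory->func->dtor() returns. */
    static void *memory_dtor(struct memory *memory)
    {
        return memory;
    }

    static const struct memory_func memory_ops = { .dtor = memory_dtor };

    static void memory_del(struct memory **pmemory)
    {
        struct memory *memory = *pmemory;
        if (memory && memory->func && memory->func->dtor)
            free(memory->func->dtor(memory));
        *pmemory = NULL;
    }

    int main(void)
    {
        struct memory *mem = malloc(sizeof(*mem));
        if (!mem)
            return 1;
        memory_ctor(&memory_ops, mem);
        memory_del(&mem);                  /* mem is NULL afterwards */
        return 0;
    }
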
gpuobj.c
48 return nvkm_ro32(gpuobj->memory, offset); in nvkm_gpuobj_heap_rd32()
54 nvkm_wo32(gpuobj->memory, offset, data); in nvkm_gpuobj_heap_wr32()
62 nvkm_done(gpuobj->memory); in nvkm_gpuobj_heap_release()
82 gpuobj->map = nvkm_kmap(gpuobj->memory); in nvkm_gpuobj_heap_acquire()
179 abs(align), zero, &gpuobj->memory); in nvkm_gpuobj_ctor()
184 gpuobj->addr = nvkm_memory_addr(gpuobj->memory); in nvkm_gpuobj_ctor()
185 gpuobj->size = nvkm_memory_size(gpuobj->memory); in nvkm_gpuobj_ctor()
199 nvkm_memory_del(&gpuobj->memory); in nvkm_gpuobj_del()
225 struct nvkm_memory *memory = gpuobj->memory; in nvkm_gpuobj_map() local
228 nvkm_memory_map(memory, vma, 0); in nvkm_gpuobj_map()
[all …]
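
gpuobj.c brackets CPU access in an acquire/release pair: nvkm_kmap() maps the backing memory, nvkm_ro32()/nvkm_wo32() read and write 32-bit words at byte offsets, and nvkm_done() drops the mapping. A toy model of that bracketing (plain heap memory standing in for GPU-visible memory):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct mem { uint32_t *map; };         /* stand-in for nvkm_memory */

    static uint32_t *mem_kmap(struct mem *m) { return m->map; }
    static void mem_done(struct mem *m) { (void)m; /* unmap would go here */ }

    /* 32-bit accessors take byte offsets, like nvkm_ro32()/nvkm_wo32(). */
    static uint32_t mem_rd32(struct mem *m, size_t off) { return m->map[off / 4]; }
    static void mem_wr32(struct mem *m, size_t off, uint32_t v) { m->map[off / 4] = v; }

    int main(void)
    {
        struct mem m = { .map = calloc(4, sizeof(uint32_t)) };
        if (!mem_kmap(&m))                 /* acquire the mapping first */
            return 1;
        mem_wr32(&m, 0x8, 0xdeadbeef);
        printf("%#x\n", mem_rd32(&m, 0x8));
        mem_done(&m);                      /* release when finished */
        free(m.map);
        return 0;
    }
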
/drivers/staging/octeon/
ethernet-mem.c
55 char *memory; in cvm_oct_free_hw_skbuff() local
58 memory = cvmx_fpa_alloc(pool); in cvm_oct_free_hw_skbuff()
59 if (memory) { in cvm_oct_free_hw_skbuff()
61 *(struct sk_buff **)(memory - sizeof(void *)); in cvm_oct_free_hw_skbuff()
65 } while (memory); in cvm_oct_free_hw_skbuff()
85 char *memory; in cvm_oct_fill_hw_memory() local
100 memory = kmalloc(size + 256, GFP_ATOMIC); in cvm_oct_fill_hw_memory()
101 if (unlikely(!memory)) { in cvm_oct_fill_hw_memory()
106 fpa = (char *)(((unsigned long)memory + 256) & ~0x7fUL); in cvm_oct_fill_hw_memory()
107 *((char **)fpa - 1) = memory; in cvm_oct_fill_hw_memory()
[all …]
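
cvm_oct_fill_hw_memory() over-allocates with kmalloc, rounds the pointer up to the pool's 128-byte alignment, and stashes the original pointer in the word just below the aligned block so the free path can recover it. The same trick in portable C (padding and mask copied from the snippet):

    #include <stdint.h>
    #include <stdlib.h>

    #define ALIGN_MASK 0x7fUL              /* 128-byte alignment, as in the snippet */

    /* Over-allocate, align within the padding, and stash the raw pointer
     * in the word just below the aligned block. */
    static void *aligned_alloc_stashed(size_t size)
    {
        char *raw = malloc(size + 256);
        char *aligned;

        if (!raw)
            return NULL;
        aligned = (char *)(((uintptr_t)raw + 256) & ~ALIGN_MASK);
        *((char **)aligned - 1) = raw;     /* the free path reads this back */
        return aligned;
    }

    static void aligned_free_stashed(void *aligned)
    {
        if (aligned)
            free(*((char **)aligned - 1));
    }

    int main(void)
    {
        void *buf = aligned_alloc_stashed(2048);
        aligned_free_stashed(buf);
        return 0;
    }
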
/drivers/gpu/drm/
drm_agpsupport.c
200 struct agp_memory *memory; in drm_agp_alloc() local
211 if (!(memory = agp_allocate_memory(dev->agp->bridge, pages, type))) { in drm_agp_alloc()
216 entry->handle = (unsigned long)memory->key + 1; in drm_agp_alloc()
217 entry->memory = memory; in drm_agp_alloc()
220 list_add(&entry->head, &dev->agp->memory); in drm_agp_alloc()
223 request->physical = memory->physical; in drm_agp_alloc()
252 list_for_each_entry(entry, &dev->agp->memory, head) { in drm_agp_lookup_entry()
282 ret = drm_unbind_agp(entry->memory); in drm_agp_unbind()
324 if ((retcode = drm_bind_agp(entry->memory, page))) in drm_agp_bind()
365 drm_unbind_agp(entry->memory); in drm_agp_free()
[all …]
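
drm_agp_lookup_entry() above resolves a userspace handle by walking the dev->agp->memory list that drm_agp_alloc() populates (handle = memory->key + 1). The lookup idiom, reduced to a kernel-style sketch (the real entry also carries the agp_memory pointer and bind state):

    #include <linux/list.h>

    struct agp_mem_entry {
        unsigned long handle;              /* memory->key + 1, per drm_agp_alloc() */
        struct list_head head;             /* linked into dev->agp->memory */
    };

    static struct agp_mem_entry *lookup_entry(struct list_head *pool,
                                              unsigned long handle)
    {
        struct agp_mem_entry *entry;

        list_for_each_entry(entry, pool, head) {
            if (entry->handle == handle)
                return entry;
        }
        return NULL;
    }
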
/drivers/nvdimm/
Kconfig
7 Generic support for non-volatile memory devices including
10 bus is registered to advertise PMEM (persistent memory)
13 memory resource that may span multiple DIMMs and support DAX
16 mode to non-volatile memory.
21 tristate "PMEM: Persistent memory block device support"
28 non-standard OEM-specific E820 memory type (type-12, see
32 these persistent memory ranges into block devices that are
44 access capability. BLK-mode access uses memory-mapped-i/o
63 update semantics for persistent memory devices, so that
77 bool "PFN: Map persistent (device) memory"
[all …]
/drivers/char/agp/
compat_ioctl.c
150 struct agp_memory *memory; in compat_agpioc_allocate_wrap() local
157 memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type); in compat_agpioc_allocate_wrap()
159 if (memory == NULL) in compat_agpioc_allocate_wrap()
162 alloc.key = memory->key; in compat_agpioc_allocate_wrap()
163 alloc.physical = memory->physical; in compat_agpioc_allocate_wrap()
166 agp_free_memory_wrap(memory); in compat_agpioc_allocate_wrap()
175 struct agp_memory *memory; in compat_agpioc_bind_wrap() local
181 memory = agp_find_mem_by_key(bind_info.key); in compat_agpioc_bind_wrap()
183 if (memory == NULL) in compat_agpioc_bind_wrap()
186 return agp_bind_memory(memory, bind_info.pg_start); in compat_agpioc_bind_wrap()
[all …]
frontend.c
270 void agp_free_memory_wrap(struct agp_memory *memory) in agp_free_memory_wrap() argument
272 agp_remove_from_pool(memory); in agp_free_memory_wrap()
273 agp_free_memory(memory); in agp_free_memory_wrap()
278 struct agp_memory *memory; in agp_allocate_memory_wrap() local
280 memory = agp_allocate_memory(agp_bridge, pg_count, type); in agp_allocate_memory_wrap()
281 if (memory == NULL) in agp_allocate_memory_wrap()
284 agp_insert_into_pool(memory); in agp_allocate_memory_wrap()
285 return memory; in agp_allocate_memory_wrap()
360 struct agp_memory *memory; in agp_remove_all_memory() local
363 memory = controller->pool; in agp_remove_all_memory()
[all …]
/drivers/xen/
Kconfig
5 bool "Xen memory balloon driver"
8 The balloon driver allows the Xen domain to request more memory from
9 the system to expand the domain's memory allocation, or alternatively
10 return unneeded memory to the system.
13 bool "Dynamically self-balloon kernel memory to target"
17 Self-ballooning dynamically balloons available kernel memory driven
18 by the current usage of anonymous memory ("committed AS") and
33 Memory hotplug support for the Xen balloon driver allows expanding memory
40 1) target domain: ensure that memory auto online policy is in
41 effect by checking /sys/devices/system/memory/auto_online_blocks
[all …]
/drivers/dax/
Kconfig
2 tristate "DAX: direct access to differentiated memory"
8 latency...) memory via an mmap(2) capable character
10 platform memory resource that is differentiated from the
11 baseline memory pool. Mappings of a /dev/daxX.Y device impose
17 tristate "PMEM DAX: direct access to persistent memory"
21 Support raw access to persistent memory. Note that this
22 driver consumes memory ranges allocated and exported by the
/drivers/media/platform/exynos4-is/
fimc-is.c
245 buf = is->memory.vaddr + is->setfile.base; in fimc_is_load_setfile()
250 pr_debug("mem vaddr: %p, setfile buf: %p\n", is->memory.vaddr, buf); in fimc_is_load_setfile()
275 mcuctl_write(is->memory.paddr, is, MCUCTL_REG_BBOAR); in fimc_is_cpu_set_power()
321 memcpy(is->memory.vaddr, is->fw.f_w->data, is->fw.f_w->size); in fimc_is_start_firmware()
341 is->memory.vaddr = dma_alloc_coherent(dev, FIMC_IS_CPU_MEM_SIZE, in fimc_is_alloc_cpu_memory()
342 &is->memory.paddr, GFP_KERNEL); in fimc_is_alloc_cpu_memory()
343 if (is->memory.vaddr == NULL) in fimc_is_alloc_cpu_memory()
346 is->memory.size = FIMC_IS_CPU_MEM_SIZE; in fimc_is_alloc_cpu_memory()
347 memset(is->memory.vaddr, 0, is->memory.size); in fimc_is_alloc_cpu_memory()
349 dev_info(dev, "FIMC-IS CPU memory base: %#x\n", (u32)is->memory.paddr); in fimc_is_alloc_cpu_memory()
[all …]
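
fimc_is_alloc_cpu_memory() above uses the coherent DMA API: a single dma_alloc_coherent() call yields both a CPU virtual address (for the firmware memcpy) and a bus address (programmed into MCUCTL_REG_BBOAR). A minimal kernel-style sketch of that pairing (size and function names are placeholders):

    #include <linux/dma-mapping.h>

    #define FW_MEM_SIZE (1 << 20)          /* illustrative; the driver uses FIMC_IS_CPU_MEM_SIZE */

    static int alloc_fw_memory(struct device *dev, void **vaddr, dma_addr_t *paddr)
    {
        /* One call returns a kernel mapping for the CPU and a DMA
         * address for the device; both refer to the same buffer. */
        *vaddr = dma_alloc_coherent(dev, FW_MEM_SIZE, paddr, GFP_KERNEL);
        if (!*vaddr)
            return -ENOMEM;
        return 0;
    }

    static void free_fw_memory(struct device *dev, void *vaddr, dma_addr_t paddr)
    {
        dma_free_coherent(dev, FW_MEM_SIZE, vaddr, paddr);
    }
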
/drivers/staging/android/
Kconfig
10 The ashmem subsystem is a new shared memory allocator, similar to
14 It is, in theory, a good memory allocator for low-memory devices,
15 because it can discard shared memory units when under memory pressure.
20 Registers processes to be killed under low-memory conditions; this is useful
24 scripts (/init.rc), and it defines priority values with minimum free memory size
/drivers/staging/android/ion/
devicetree.txt
3 Ion is a memory manager that allows for sharing of buffers via dma-buf.
5 a 'heap'. A heap represents a specific type of memory. Each heap has
30 - memory-region: A phandle to a memory region. Required for DMA heap type
31 (see reserved-memory.txt for details on the reservation)
44 memory-region = <&camera_region>;
49 memory-region = <&fb_region>;
/drivers/media/v4l2-core/
videobuf2-v4l2.c
97 length = (b->memory == VB2_MEMORY_USERPTR || in __verify_length()
98 b->memory == VB2_MEMORY_DMABUF) in __verify_length()
112 length = (b->memory == VB2_MEMORY_USERPTR) in __verify_length()
177 if (b->memory != q->memory) { in vb2_queue_or_prepare_buf()
199 b->memory = vb->memory; in __fill_v4l2_buffer()
222 if (q->memory == VB2_MEMORY_MMAP) in __fill_v4l2_buffer()
224 else if (q->memory == VB2_MEMORY_USERPTR) in __fill_v4l2_buffer()
226 else if (q->memory == VB2_MEMORY_DMABUF) in __fill_v4l2_buffer()
238 if (q->memory == VB2_MEMORY_MMAP) in __fill_v4l2_buffer()
240 else if (q->memory == VB2_MEMORY_USERPTR) in __fill_v4l2_buffer()
[all …]
videobuf-core.c
329 b->memory = vb->memory; in videobuf_status()
330 switch (b->memory) { in videobuf_status()
388 enum v4l2_memory memory) in __videobuf_mmap_setup() argument
407 q->bufs[i]->memory = memory; in __videobuf_mmap_setup()
409 switch (memory) { in __videobuf_mmap_setup()
432 enum v4l2_memory memory) in videobuf_mmap_setup() argument
436 ret = __videobuf_mmap_setup(q, bcount, bsize, memory); in videobuf_mmap_setup()
448 if (req->memory != V4L2_MEMORY_MMAP && in videobuf_reqbufs()
449 req->memory != V4L2_MEMORY_USERPTR && in videobuf_reqbufs()
450 req->memory != V4L2_MEMORY_OVERLAY) { in videobuf_reqbufs()
[all …]
videobuf2-core.c
329 static int __vb2_queue_alloc(struct vb2_queue *q, enum vb2_memory memory, in __vb2_queue_alloc() argument
354 vb->memory = memory; in __vb2_queue_alloc()
362 if (memory == VB2_MEMORY_MMAP) { in __vb2_queue_alloc()
410 if (q->memory == VB2_MEMORY_MMAP) in __vb2_free_mem()
412 else if (q->memory == VB2_MEMORY_DMABUF) in __vb2_free_mem()
529 q->memory = 0; in __vb2_queue_free()
614 enum vb2_memory memory, unsigned int type) in vb2_verify_memory_type() argument
616 if (memory != VB2_MEMORY_MMAP && memory != VB2_MEMORY_USERPTR && in vb2_verify_memory_type()
617 memory != VB2_MEMORY_DMABUF) { in vb2_verify_memory_type()
631 if (memory == VB2_MEMORY_MMAP && __verify_mmap_ops(q)) { in vb2_verify_memory_type()
[all …]
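
vb2_verify_memory_type() above gates buffer setup on the three memory models videobuf2 implements. The shape of that check, extracted as a sketch (enum names mirror VB2_MEMORY_MMAP/USERPTR/DMABUF; the real function also validates the queue's ops for the chosen model):

    #include <stdbool.h>

    enum vb2_memory_model {
        MEM_MMAP    = 1,                   /* kernel-allocated, mmap()ed by userspace */
        MEM_USERPTR = 2,                   /* userspace-allocated, pinned by the kernel */
        MEM_DMABUF  = 4,                   /* imported from another device via dma-buf */
    };

    static bool verify_memory_type(enum vb2_memory_model memory)
    {
        return memory == MEM_MMAP || memory == MEM_USERPTR ||
               memory == MEM_DMABUF;
    }
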
/drivers/base/
memory.c
629 int register_memory(struct memory_block *memory) in register_memory() argument
631 memory->dev.bus = &memory_subsys; in register_memory()
632 memory->dev.id = memory->start_section_nr / sections_per_block; in register_memory()
633 memory->dev.release = memory_block_release; in register_memory()
634 memory->dev.groups = memory_memblk_attr_groups; in register_memory()
635 memory->dev.offline = memory->state == MEM_OFFLINE; in register_memory()
637 return device_register(&memory->dev); in register_memory()
640 static int init_memory_block(struct memory_block **memory, in init_memory_block() argument
662 *memory = mem; in init_memory_block()
732 unregister_memory(struct memory_block *memory) in unregister_memory() argument
[all …]
/drivers/mtd/maps/
gpio-addr-flash.c
208 struct resource *memory; in gpio_flash_probe() local
213 memory = platform_get_resource(pdev, IORESOURCE_MEM, 0); in gpio_flash_probe()
216 if (!memory || !gpios || !gpios->end) in gpio_flash_probe()
231 state->win_size = resource_size(memory); in gpio_flash_probe()
241 state->map.virt = ioremap_nocache(memory->start, state->map.size); in gpio_flash_probe()
262 state->mtd = do_map_probe(memory->name, &state->map); in gpio_flash_probe()
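
gpio_flash_probe() above follows the standard platform MMIO pattern: fetch the IORESOURCE_MEM resource, size the window with resource_size(), and map it (ioremap_nocache() was this tree's spelling; later kernels use plain ioremap()). A condensed kernel-style sketch:

    #include <linux/platform_device.h>
    #include <linux/io.h>

    static int mmio_probe(struct platform_device *pdev)
    {
        struct resource *memory;
        void __iomem *virt;

        memory = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        if (!memory)
            return -ENXIO;

        /* resource_size() is end - start + 1: map the whole window. */
        virt = ioremap_nocache(memory->start, resource_size(memory));
        if (!virt)
            return -ENOMEM;

        /* ... probe the chip through virt, e.g. do_map_probe() ... */
        return 0;
    }
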
bfin-async-flash.c
130 struct resource *memory = platform_get_resource(pdev, IORESOURCE_MEM, 0); in bfin_flash_probe() local
144 state->map.size = resource_size(memory); in bfin_flash_probe()
145 state->map.virt = (void __iomem *)memory->start; in bfin_flash_probe()
146 state->map.phys = memory->start; in bfin_flash_probe()
160 state->mtd = do_map_probe(memory->name, &state->map); in bfin_flash_probe()
/drivers/dma/
DKconfig11 used to offload memory copies in the network stack and
422 16 to 32 channels for peripheral to memory or memory to memory
459 The DMA controller can transfer data from memory to peripheral,
460 peripheral to memory, peripheral to peripheral, and memory to memory.
479 This DMA controller transfers data from memory to peripheral fifo
480 or vice versa. It does not support memory to memory data transfer.
491 (APE). This DMA controller transfers data from memory to
492 peripheral and vice versa. It does not support memory to
493 memory data transfer.
540 AXI VDMA engine provides high-bandwidth direct memory access
[all …]
/drivers/mtd/onenand/
DKconfig31 via the GPMC memory controller.
44 One Block of the NAND Flash Array memory is reserved as
45 a One-Time Programmable Block memory area.
49 operations as any other NAND Flash Array memory block.
59 Flash memory array; these two components enable simultaneous program
