Searched refs:man (Results 1 – 25 of 49) sorted by relevance

/drivers/gpu/drm/vmwgfx/
vmwgfx_cmdbuf.c
154 struct vmw_cmdbuf_man *man; member
196 static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context,
198 static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context);
206 static int vmw_cmdbuf_cur_lock(struct vmw_cmdbuf_man *man, bool interruptible) in vmw_cmdbuf_cur_lock() argument
209 if (mutex_lock_interruptible(&man->cur_mutex)) in vmw_cmdbuf_cur_lock()
212 mutex_lock(&man->cur_mutex); in vmw_cmdbuf_cur_lock()
223 static void vmw_cmdbuf_cur_unlock(struct vmw_cmdbuf_man *man) in vmw_cmdbuf_cur_unlock() argument
225 mutex_unlock(&man->cur_mutex); in vmw_cmdbuf_cur_unlock()
244 dma_pool_free(header->man->dheaders, dheader, header->handle); in vmw_cmdbuf_header_inline_free()
258 struct vmw_cmdbuf_man *man = header->man; in __vmw_cmdbuf_header_free() local
[all …]
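Note: vmw_cmdbuf_cur_lock() above (like ttm_mem_io_lock() in the TTM results further down) follows a common kernel locking idiom: a single helper takes an interruptible flag so some callers can have their sleep broken by a signal while others block unconditionally. A minimal sketch of the idiom with hypothetical names, not the driver's code:

	#include <linux/errno.h>
	#include <linux/mutex.h>
	#include <linux/types.h>

	/* Hypothetical helper: acquire @lock, optionally allowing the sleep to
	 * be interrupted by a signal. Returns 0 on success, -ERESTARTSYS if an
	 * interruptible wait was broken by a signal (as in the fragments above). */
	static int example_lock(struct mutex *lock, bool interruptible)
	{
		if (interruptible)
			return mutex_lock_interruptible(lock) ? -ERESTARTSYS : 0;

		mutex_lock(lock);
		return 0;
	}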
vmwgfx_cmdbuf_res.c
48 struct vmw_cmdbuf_res_manager *man; member
80 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_lookup() argument
88 ret = drm_ht_find_item(&man->resources, key, &hash); in vmw_cmdbuf_res_lookup()
104 static void vmw_cmdbuf_res_free(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_free() argument
108 WARN_ON(drm_ht_remove_item(&man->resources, &entry->hash)); in vmw_cmdbuf_res_free()
135 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_commit()
168 vmw_cmdbuf_res_free(entry->man, entry); in vmw_cmdbuf_res_revert()
171 ret = drm_ht_insert_item(&entry->man->resources, in vmw_cmdbuf_res_revert()
174 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_revert()
197 int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_add() argument
[all …]
vmwgfx_gmrid_manager.c
47 static int vmw_gmrid_man_get_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_get_node() argument
53 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_get_node()
84 static void vmw_gmrid_man_put_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_put_node() argument
88 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_put_node()
99 static int vmw_gmrid_man_init(struct ttm_mem_type_manager *man, in vmw_gmrid_man_init() argument
103 container_of(man->bdev, struct vmw_private, bdev); in vmw_gmrid_man_init()
126 man->priv = (void *) gman; in vmw_gmrid_man_init()
130 static int vmw_gmrid_man_takedown(struct ttm_mem_type_manager *man) in vmw_gmrid_man_takedown() argument
133 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_takedown()
142 static void vmw_gmrid_man_debug(struct ttm_mem_type_manager *man, in vmw_gmrid_man_debug() argument
vmwgfx_ttm_buffer.c
745 struct ttm_mem_type_manager *man) in vmw_init_mem_type() argument
751 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
752 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
753 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
757 man->func = &ttm_bo_manager_func; in vmw_init_mem_type()
758 man->gpu_offset = 0; in vmw_init_mem_type()
759 man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
760 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
761 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
770 man->func = &vmw_gmrid_manager_func; in vmw_init_mem_type()
[all …]
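Note: vmw_init_mem_type() above is one instance of a pattern repeated by the drivers below (amdgpu, virtio, qxl, radeon, nouveau): an init_mem_type() callback fills in the ttm_mem_type_manager fields (flags, available_caching, default_caching, func, gpu_offset) for each placement type. A hedged sketch of such a callback; the bdev/type parameters and the exact flag combinations are inferred from the fragments and may differ between kernel versions:

	#include <linux/errno.h>
	#include <drm/ttm/ttm_bo_driver.h>

	static int example_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
					 struct ttm_mem_type_manager *man)
	{
		switch (type) {
		case TTM_PL_SYSTEM:
			/* System memory: mappable and cached, no allocator needed. */
			man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;
			man->available_caching = TTM_PL_MASK_CACHING;
			man->default_caching = TTM_PL_FLAG_CACHED;
			break;
		case TTM_PL_VRAM:
			/* Fixed device memory handled by the generic range manager,
			 * write-combined by default. */
			man->func = &ttm_bo_manager_func;
			man->gpu_offset = 0;
			man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE;
			man->available_caching = TTM_PL_FLAG_UNCACHED | TTM_PL_FLAG_WC;
			man->default_caching = TTM_PL_FLAG_WC;
			break;
		default:
			return -EINVAL;
		}
		return 0;
	}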
vmwgfx_drv.h
394 struct vmw_cmdbuf_res_manager *man; member
1253 struct vmw_cmdbuf_res_manager *man,
1258 extern int vmw_shader_remove(struct vmw_cmdbuf_res_manager *man,
1261 extern int vmw_dx_shader_add(struct vmw_cmdbuf_res_manager *man,
1271 vmw_shader_lookup(struct vmw_cmdbuf_res_manager *man,
1280 extern void vmw_cmdbuf_res_man_destroy(struct vmw_cmdbuf_res_manager *man);
1283 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,
1288 extern int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man,
1293 extern int vmw_cmdbuf_res_remove(struct vmw_cmdbuf_res_manager *man,
1319 extern int vmw_cmdbuf_set_pool_size(struct vmw_cmdbuf_man *man,
[all …]
vmwgfx_so.h
137 extern int vmw_view_add(struct vmw_cmdbuf_res_manager *man,
146 extern int vmw_view_remove(struct vmw_cmdbuf_res_manager *man,
157 extern struct vmw_resource *vmw_view_lookup(struct vmw_cmdbuf_res_manager *man,
vmwgfx_so.c
310 int vmw_view_add(struct vmw_cmdbuf_res_manager *man, in vmw_view_add() argument
378 ret = vmw_cmdbuf_res_add(man, vmw_cmdbuf_res_view, in vmw_view_add()
405 int vmw_view_remove(struct vmw_cmdbuf_res_manager *man, in vmw_view_remove() argument
415 return vmw_cmdbuf_res_remove(man, vmw_cmdbuf_res_view, in vmw_view_remove()
487 struct vmw_resource *vmw_view_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_view_lookup() argument
491 return vmw_cmdbuf_res_lookup(man, vmw_cmdbuf_res_view, in vmw_view_lookup()
/drivers/gpu/drm/ttm/
ttm_bo.c
82 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_mem_type_debug() local
84 drm_printf(p, " has_type: %d\n", man->has_type); in ttm_mem_type_debug()
85 drm_printf(p, " use_type: %d\n", man->use_type); in ttm_mem_type_debug()
86 drm_printf(p, " flags: 0x%08X\n", man->flags); in ttm_mem_type_debug()
87 drm_printf(p, " gpu_offset: 0x%08llX\n", man->gpu_offset); in ttm_mem_type_debug()
88 drm_printf(p, " size: %llu\n", man->size); in ttm_mem_type_debug()
89 drm_printf(p, " available_caching: 0x%08X\n", man->available_caching); in ttm_mem_type_debug()
90 drm_printf(p, " default_caching: 0x%08X\n", man->default_caching); in ttm_mem_type_debug()
92 (*man->func->debug)(man, p); in ttm_mem_type_debug()
174 struct ttm_mem_type_manager *man; in ttm_bo_add_mem_to_lru() local
[all …]
ttm_bo_util.c
94 int ttm_mem_io_lock(struct ttm_mem_type_manager *man, bool interruptible) in ttm_mem_io_lock() argument
96 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_lock()
100 return mutex_lock_interruptible(&man->io_reserve_mutex); in ttm_mem_io_lock()
102 mutex_lock(&man->io_reserve_mutex); in ttm_mem_io_lock()
107 void ttm_mem_io_unlock(struct ttm_mem_type_manager *man) in ttm_mem_io_unlock() argument
109 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_unlock()
112 mutex_unlock(&man->io_reserve_mutex); in ttm_mem_io_unlock()
116 static int ttm_mem_io_evict(struct ttm_mem_type_manager *man) in ttm_mem_io_evict() argument
120 if (!man->use_io_reserve_lru || list_empty(&man->io_reserve_lru)) in ttm_mem_io_evict()
123 bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
[all …]
ttm_bo_manager.c
51 static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man, in ttm_bo_man_get_node() argument
56 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_get_node()
65 lpfn = man->size; in ttm_bo_man_get_node()
92 static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man, in ttm_bo_man_put_node() argument
95 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_put_node()
107 static int ttm_bo_man_init(struct ttm_mem_type_manager *man, in ttm_bo_man_init() argument
118 man->priv = rman; in ttm_bo_man_init()
122 static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man) in ttm_bo_man_takedown() argument
124 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_takedown()
132 man->priv = NULL; in ttm_bo_man_takedown()
[all …]
/drivers/gpu/drm/amd/amdgpu/
amdgpu_gtt_mgr.c
53 (adev->mman.bdev.man[TTM_PL_TT].size) * PAGE_SIZE); in amdgpu_mem_info_gtt_total_show()
71 amdgpu_gtt_mgr_usage(&adev->mman.bdev.man[TTM_PL_TT])); in amdgpu_mem_info_gtt_used_show()
87 static int amdgpu_gtt_mgr_init(struct ttm_mem_type_manager *man, in amdgpu_gtt_mgr_init() argument
90 struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev); in amdgpu_gtt_mgr_init()
104 man->priv = mgr; in amdgpu_gtt_mgr_init()
128 static int amdgpu_gtt_mgr_fini(struct ttm_mem_type_manager *man) in amdgpu_gtt_mgr_fini() argument
130 struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev); in amdgpu_gtt_mgr_fini()
131 struct amdgpu_gtt_mgr *mgr = man->priv; in amdgpu_gtt_mgr_fini()
136 man->priv = NULL; in amdgpu_gtt_mgr_fini()
168 static int amdgpu_gtt_mgr_alloc(struct ttm_mem_type_manager *man, in amdgpu_gtt_mgr_alloc() argument
[all …]
amdgpu_vram_mgr.c
83 amdgpu_vram_mgr_usage(&adev->mman.bdev.man[TTM_PL_VRAM])); in amdgpu_mem_info_vram_used_show()
101 amdgpu_vram_mgr_vis_usage(&adev->mman.bdev.man[TTM_PL_VRAM])); in amdgpu_mem_info_vis_vram_used_show()
121 static int amdgpu_vram_mgr_init(struct ttm_mem_type_manager *man, in amdgpu_vram_mgr_init() argument
124 struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev); in amdgpu_vram_mgr_init()
134 man->priv = mgr; in amdgpu_vram_mgr_init()
169 static int amdgpu_vram_mgr_fini(struct ttm_mem_type_manager *man) in amdgpu_vram_mgr_fini() argument
171 struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev); in amdgpu_vram_mgr_fini()
172 struct amdgpu_vram_mgr *mgr = man->priv; in amdgpu_vram_mgr_fini()
178 man->priv = NULL; in amdgpu_vram_mgr_fini()
267 static int amdgpu_vram_mgr_new(struct ttm_mem_type_manager *man, in amdgpu_vram_mgr_new() argument
[all …]
amdgpu_ttm.c
86 struct ttm_mem_type_manager *man) in amdgpu_init_mem_type() argument
95 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in amdgpu_init_mem_type()
96 man->available_caching = TTM_PL_MASK_CACHING; in amdgpu_init_mem_type()
97 man->default_caching = TTM_PL_FLAG_CACHED; in amdgpu_init_mem_type()
101 man->func = &amdgpu_gtt_mgr_func; in amdgpu_init_mem_type()
102 man->gpu_offset = adev->gmc.gart_start; in amdgpu_init_mem_type()
103 man->available_caching = TTM_PL_MASK_CACHING; in amdgpu_init_mem_type()
104 man->default_caching = TTM_PL_FLAG_CACHED; in amdgpu_init_mem_type()
105 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; in amdgpu_init_mem_type()
109 man->func = &amdgpu_vram_mgr_func; in amdgpu_init_mem_type()
[all …]
amdgpu_ttm.h
73 uint64_t amdgpu_gtt_mgr_usage(struct ttm_mem_type_manager *man);
74 int amdgpu_gtt_mgr_recover(struct ttm_mem_type_manager *man);
77 uint64_t amdgpu_vram_mgr_usage(struct ttm_mem_type_manager *man);
78 uint64_t amdgpu_vram_mgr_vis_usage(struct ttm_mem_type_manager *man);
/drivers/gpu/drm/virtio/
virtgpu_ttm.c
76 static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man, in ttm_bo_man_get_node() argument
85 static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man, in ttm_bo_man_put_node() argument
91 static int ttm_bo_man_init(struct ttm_mem_type_manager *man, in ttm_bo_man_init() argument
97 static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man) in ttm_bo_man_takedown() argument
102 static void ttm_bo_man_debug(struct ttm_mem_type_manager *man, in ttm_bo_man_debug() argument
116 struct ttm_mem_type_manager *man) in virtio_gpu_init_mem_type() argument
121 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in virtio_gpu_init_mem_type()
122 man->available_caching = TTM_PL_MASK_CACHING; in virtio_gpu_init_mem_type()
123 man->default_caching = TTM_PL_FLAG_CACHED; in virtio_gpu_init_mem_type()
126 man->func = &virtio_gpu_bo_manager_func; in virtio_gpu_init_mem_type()
[all …]
/drivers/gpu/drm/
drm_vram_mm_helper.c
60 struct ttm_mem_type_manager *man) in bo_driver_init_mem_type() argument
64 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in bo_driver_init_mem_type()
65 man->available_caching = TTM_PL_MASK_CACHING; in bo_driver_init_mem_type()
66 man->default_caching = TTM_PL_FLAG_CACHED; in bo_driver_init_mem_type()
69 man->func = &ttm_bo_manager_func; in bo_driver_init_mem_type()
70 man->flags = TTM_MEMTYPE_FLAG_FIXED | in bo_driver_init_mem_type()
72 man->available_caching = TTM_PL_FLAG_UNCACHED | in bo_driver_init_mem_type()
74 man->default_caching = TTM_PL_FLAG_WC; in bo_driver_init_mem_type()
104 struct ttm_mem_type_manager *man = bdev->man + mem->mem_type; in bo_driver_io_mem_reserve() local
107 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in bo_driver_io_mem_reserve()
/drivers/gpu/drm/qxl/
qxl_ttm.c
98 struct ttm_mem_type_manager *man) in qxl_init_mem_type() argument
108 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in qxl_init_mem_type()
109 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
110 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
118 man->func = &ttm_bo_manager_func; in qxl_init_mem_type()
119 man->gpu_offset = slot->gpu_offset; in qxl_init_mem_type()
120 man->flags = TTM_MEMTYPE_FLAG_FIXED | in qxl_init_mem_type()
122 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
123 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
165 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in qxl_ttm_io_mem_reserve() local
[all …]
qxl_object.c
151 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kmap_atomic_page() local
163 (void) ttm_mem_io_lock(man, false); in qxl_bo_kmap_atomic_page()
165 ttm_mem_io_unlock(man); in qxl_bo_kmap_atomic_page()
196 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kunmap_atomic_page() local
204 (void) ttm_mem_io_lock(man, false); in qxl_bo_kunmap_atomic_page()
206 ttm_mem_io_unlock(man); in qxl_bo_kunmap_atomic_page()
/drivers/gpu/drm/radeon/
radeon_ttm.c
75 struct ttm_mem_type_manager *man) in radeon_init_mem_type() argument
84 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
85 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
86 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
89 man->func = &ttm_bo_manager_func; in radeon_init_mem_type()
90 man->gpu_offset = rdev->mc.gtt_start; in radeon_init_mem_type()
91 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
92 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
93 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; in radeon_init_mem_type()
102 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
[all …]
/drivers/atm/
firestream.c
440 unsigned int man = -1; /* hush gcc */ in make_rate() local
467 man = 511; in make_rate()
474 man = rate; in make_rate()
477 while (!(man & (1<<31))) { in make_rate()
479 man = man<<1; in make_rate()
486 man = man<<1; in make_rate()
487 man &= 0xffffffffU; /* a nop on 32-bit systems */ in make_rate()
500 man = man>>(32-9); in make_rate()
505 if (man & (~0U>>9)) { in make_rate()
506 man = (man>>(32-9)) + 1; in make_rate()
[all …]
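Note: make_rate() in firestream.c (and the near-identical copy in ambassador.c below) packs a cell rate into an exponent/mantissa pair: the value is left-shifted until bit 31 is set while the exponent counts down, the top 9 bits are kept as the mantissa, and the result is rounded up if any discarded bit was set. A standalone sketch of that normalization; the field widths follow the shifts visible above, while the rounding-overflow handling and the example value are assumptions:

	#include <stdio.h>

	/* Illustrative only, not the driver code: split @rate into a 9-bit
	 * mantissa and a power-of-two exponent so that rate ~= man * 2^(exp - 8). */
	static int rate_to_exp_man(unsigned int rate, unsigned int *exp, unsigned int *man)
	{
		unsigned int m = rate;
		int e = 31;

		if (!rate)
			return -1;			/* nothing to encode */

		while (!(m & (1u << 31))) {		/* left-justify, track exponent */
			m <<= 1;
			e--;
		}

		if (m & (~0u >> 9)) {			/* discarded bits set: round up */
			m = (m >> (32 - 9)) + 1;
			if (m & (1u << 9)) {		/* rounding overflowed 9 bits */
				m >>= 1;
				e++;
			}
		} else {
			m >>= 32 - 9;
		}

		*exp = (unsigned int)e;
		*man = m & 0x1ff;
		return 0;
	}

	int main(void)
	{
		unsigned int exp, man;

		if (!rate_to_exp_man(100000, &exp, &man))
			printf("exp=%u man=0x%03x (~%u)\n", exp, man, man << (exp - 8));
		return 0;
	}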
ambassador.c
877 unsigned int man = -1; // hush gcc in make_rate() local
902 man = 511; in make_rate()
909 man = rate; in make_rate()
912 while (!(man & (1<<31))) { in make_rate()
914 man = man<<1; in make_rate()
920 man = man<<1; in make_rate()
921 man &= 0xffffffffU; // a nop on 32-bit systems in make_rate()
933 man = man>>(32-9); in make_rate()
938 if (man & (~0U>>9)) { in make_rate()
939 man = (man>>(32-9)) + 1; in make_rate()
[all …]
/drivers/media/i2c/smiapp/
smiapp-regs.c
21 uint64_t man; in float_to_u32_mul_1000000() local
50 man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL; in float_to_u32_mul_1000000()
53 man >>= -exp; in float_to_u32_mul_1000000()
55 man <<= exp; in float_to_u32_mul_1000000()
57 man >>= 23; /* Remove mantissa bias */ in float_to_u32_mul_1000000()
59 return man & 0xffffffff; in float_to_u32_mul_1000000()
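Note: float_to_u32_mul_1000000() handles a sensor register that holds an IEEE-754 single-precision bit pattern without using floating point: it reinserts the implicit leading one into the 23-bit mantissa, scales by 1,000,000 in 64-bit integer arithmetic, shifts by the unbiased exponent, and finally drops the 2^23 mantissa scaling. A user-space sketch of the same arithmetic; the function name and the simplifications (no sign, NaN, or infinity handling) are mine, not the driver's:

	#include <stdint.h>
	#include <stdio.h>

	static uint32_t float_bits_to_micro(uint32_t phloat)
	{
		int exp = (int)((phloat >> 23) & 0xff) - 127;	/* unbias the exponent */
		uint64_t man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL;

		if (exp < 0)
			man >>= -exp;
		else
			man <<= exp;

		man >>= 23;				/* remove mantissa scaling */
		return (uint32_t)(man & 0xffffffff);
	}

	int main(void)
	{
		printf("%u\n", float_bits_to_micro(0x3fc00000));	/* 1.5f -> 1500000 */
		return 0;
	}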
/drivers/gpu/drm/nouveau/
nouveau_ttm.c
35 nouveau_manager_init(struct ttm_mem_type_manager *man, unsigned long psize) in nouveau_manager_init() argument
41 nouveau_manager_fini(struct ttm_mem_type_manager *man) in nouveau_manager_fini() argument
47 nouveau_manager_del(struct ttm_mem_type_manager *man, struct ttm_mem_reg *reg) in nouveau_manager_del() argument
53 nouveau_manager_debug(struct ttm_mem_type_manager *man, in nouveau_manager_debug() argument
59 nouveau_vram_manager_new(struct ttm_mem_type_manager *man, in nouveau_vram_manager_new() argument
99 nouveau_gart_manager_new(struct ttm_mem_type_manager *man, in nouveau_gart_manager_new() argument
127 nv04_gart_manager_new(struct ttm_mem_type_manager *man, in nv04_gart_manager_new() argument
nouveau_bo.c
658 struct ttm_mem_type_manager *man) in nouveau_bo_init_mem_type() argument
665 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in nouveau_bo_init_mem_type()
666 man->available_caching = TTM_PL_MASK_CACHING; in nouveau_bo_init_mem_type()
667 man->default_caching = TTM_PL_FLAG_CACHED; in nouveau_bo_init_mem_type()
670 man->flags = TTM_MEMTYPE_FLAG_FIXED | in nouveau_bo_init_mem_type()
672 man->available_caching = TTM_PL_FLAG_UNCACHED | in nouveau_bo_init_mem_type()
674 man->default_caching = TTM_PL_FLAG_WC; in nouveau_bo_init_mem_type()
680 man->available_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
681 man->default_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
684 man->func = &nouveau_vram_manager; in nouveau_bo_init_mem_type()
[all …]
/drivers/hwmon/
lochnagar-hwmon.c
74 u64 man = data & 0x007FFFFF; in float_to_long() local
79 man = (man + (1 << 23)) * precision; in float_to_long()
81 if (fls64(man) + exp > (int)sizeof(long) * 8 - 1) in float_to_long()
84 result = (man + (1ull << (-exp - 1))) >> -exp; in float_to_long()
86 result = man << exp; in float_to_long()
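Note: float_to_long() in lochnagar-hwmon.c adds two refinements over the smiapp conversion above: it rejects values whose bit length plus exponent would not fit in a long, and it rounds to nearest when shifting right by adding half of the divisor first. A hedged user-space sketch of just that guard-and-round step (fls64() is approximated with __builtin_clzll() here; names and the example value are mine):

	#include <stdint.h>
	#include <stdio.h>

	static int shift_round_checked(uint64_t man, int exp, long *result)
	{
		int bits = man ? 64 - __builtin_clzll(man) : 0;	/* like fls64(man) */

		if (bits + exp > (int)sizeof(long) * 8 - 1)
			return -1;			/* would overflow a long */

		if (exp < 0)
			*result = (long)((man + (1ull << (-exp - 1))) >> -exp);
		else
			*result = (long)(man << exp);
		return 0;
	}

	int main(void)
	{
		long v;

		if (!shift_round_checked(1500000, -4, &v))	/* 1500000 / 16, rounded */
			printf("%ld\n", v);			/* 93750 */
		return 0;
	}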
