Home
last modified time | relevance | path

Searched refs: man (Results 1 – 25 of 47) sorted by relevance

12

/drivers/gpu/drm/vmwgfx/
Dvmwgfx_cmdbuf.c145 struct vmw_cmdbuf_man *man; member
187 static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, bool enable);
196 static int vmw_cmdbuf_cur_lock(struct vmw_cmdbuf_man *man, bool interruptible) in vmw_cmdbuf_cur_lock() argument
199 if (mutex_lock_interruptible(&man->cur_mutex)) in vmw_cmdbuf_cur_lock()
202 mutex_lock(&man->cur_mutex); in vmw_cmdbuf_cur_lock()
213 static void vmw_cmdbuf_cur_unlock(struct vmw_cmdbuf_man *man) in vmw_cmdbuf_cur_unlock() argument
215 mutex_unlock(&man->cur_mutex); in vmw_cmdbuf_cur_unlock()
234 dma_pool_free(header->man->dheaders, dheader, header->handle); in vmw_cmdbuf_header_inline_free()
248 struct vmw_cmdbuf_man *man = header->man; in __vmw_cmdbuf_header_free() local
250 lockdep_assert_held_once(&man->lock); in __vmw_cmdbuf_header_free()
[all …]
Dvmwgfx_cmdbuf_res.c48 struct vmw_cmdbuf_res_manager *man; member
80 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_lookup() argument
88 ret = drm_ht_find_item(&man->resources, key, &hash); in vmw_cmdbuf_res_lookup()
105 static void vmw_cmdbuf_res_free(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_free() argument
109 WARN_ON(drm_ht_remove_item(&man->resources, &entry->hash)); in vmw_cmdbuf_res_free()
136 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_commit()
169 vmw_cmdbuf_res_free(entry->man, entry); in vmw_cmdbuf_res_revert()
172 ret = drm_ht_insert_item(&entry->man->resources, in vmw_cmdbuf_res_revert()
175 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_revert()
198 int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_add() argument
[all …]
Dvmwgfx_gmrid_manager.c47 static int vmw_gmrid_man_get_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_get_node() argument
53 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_get_node()
101 static void vmw_gmrid_man_put_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_put_node() argument
105 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_put_node()
116 static int vmw_gmrid_man_init(struct ttm_mem_type_manager *man, in vmw_gmrid_man_init() argument
120 container_of(man->bdev, struct vmw_private, bdev); in vmw_gmrid_man_init()
143 man->priv = (void *) gman; in vmw_gmrid_man_init()
147 static int vmw_gmrid_man_takedown(struct ttm_mem_type_manager *man) in vmw_gmrid_man_takedown() argument
150 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_takedown()
159 static void vmw_gmrid_man_debug(struct ttm_mem_type_manager *man, in vmw_gmrid_man_debug() argument
Dvmwgfx_buffer.c728 struct ttm_mem_type_manager *man) in vmw_init_mem_type() argument
734 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
735 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
736 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
740 man->func = &ttm_bo_manager_func; in vmw_init_mem_type()
741 man->gpu_offset = 0; in vmw_init_mem_type()
742 man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
743 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
744 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
753 man->func = &vmw_gmrid_manager_func; in vmw_init_mem_type()
[all …]
Dvmwgfx_drv.h317 struct vmw_cmdbuf_res_manager *man; member
1075 struct vmw_cmdbuf_res_manager *man,
1080 extern int vmw_shader_remove(struct vmw_cmdbuf_res_manager *man,
1083 extern int vmw_dx_shader_add(struct vmw_cmdbuf_res_manager *man,
1093 vmw_shader_lookup(struct vmw_cmdbuf_res_manager *man,
1102 extern void vmw_cmdbuf_res_man_destroy(struct vmw_cmdbuf_res_manager *man);
1105 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,
1110 extern int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man,
1115 extern int vmw_cmdbuf_res_remove(struct vmw_cmdbuf_res_manager *man,
1141 extern int vmw_cmdbuf_set_pool_size(struct vmw_cmdbuf_man *man,
[all …]
Dvmwgfx_so.h137 extern int vmw_view_add(struct vmw_cmdbuf_res_manager *man,
146 extern int vmw_view_remove(struct vmw_cmdbuf_res_manager *man,
157 extern struct vmw_resource *vmw_view_lookup(struct vmw_cmdbuf_res_manager *man,
Dvmwgfx_so.c314 int vmw_view_add(struct vmw_cmdbuf_res_manager *man, in vmw_view_add() argument
378 ret = vmw_cmdbuf_res_add(man, vmw_cmdbuf_res_view, in vmw_view_add()
405 int vmw_view_remove(struct vmw_cmdbuf_res_manager *man, in vmw_view_remove() argument
415 return vmw_cmdbuf_res_remove(man, vmw_cmdbuf_res_view, in vmw_view_remove()
487 struct vmw_resource *vmw_view_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_view_lookup() argument
491 return vmw_cmdbuf_res_lookup(man, vmw_cmdbuf_res_view, in vmw_view_lookup()
/drivers/gpu/drm/ttm/
Dttm_bo.c72 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_mem_type_debug() local
74 pr_err(" has_type: %d\n", man->has_type); in ttm_mem_type_debug()
75 pr_err(" use_type: %d\n", man->use_type); in ttm_mem_type_debug()
76 pr_err(" flags: 0x%08X\n", man->flags); in ttm_mem_type_debug()
77 pr_err(" gpu_offset: 0x%08llX\n", man->gpu_offset); in ttm_mem_type_debug()
78 pr_err(" size: %llu\n", man->size); in ttm_mem_type_debug()
79 pr_err(" available_caching: 0x%08X\n", man->available_caching); in ttm_mem_type_debug()
80 pr_err(" default_caching: 0x%08X\n", man->default_caching); in ttm_mem_type_debug()
82 (*man->func->debug)(man, TTM_PFX); in ttm_mem_type_debug()
245 return bo->bdev->man[bo->mem.mem_type].lru.prev; in ttm_bo_default_lru_tail()
[all …]
Dttm_bo_util.c88 int ttm_mem_io_lock(struct ttm_mem_type_manager *man, bool interruptible) in ttm_mem_io_lock() argument
90 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_lock()
94 return mutex_lock_interruptible(&man->io_reserve_mutex); in ttm_mem_io_lock()
96 mutex_lock(&man->io_reserve_mutex); in ttm_mem_io_lock()
101 void ttm_mem_io_unlock(struct ttm_mem_type_manager *man) in ttm_mem_io_unlock() argument
103 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_unlock()
106 mutex_unlock(&man->io_reserve_mutex); in ttm_mem_io_unlock()
110 static int ttm_mem_io_evict(struct ttm_mem_type_manager *man) in ttm_mem_io_evict() argument
114 if (!man->use_io_reserve_lru || list_empty(&man->io_reserve_lru)) in ttm_mem_io_evict()
117 bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
[all …]
Dttm_bo_manager.c50 static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man, in ttm_bo_man_get_node() argument
55 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_get_node()
65 lpfn = man->size; in ttm_bo_man_get_node()
93 static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man, in ttm_bo_man_put_node() argument
96 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_put_node()
108 static int ttm_bo_man_init(struct ttm_mem_type_manager *man, in ttm_bo_man_init() argument
119 man->priv = rman; in ttm_bo_man_init()
123 static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man) in ttm_bo_man_takedown() argument
125 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_takedown()
133 man->priv = NULL; in ttm_bo_man_takedown()
[all …]
/drivers/gpu/drm/amd/amdgpu/
Damdgpu_gtt_mgr.c42 static int amdgpu_gtt_mgr_init(struct ttm_mem_type_manager *man, in amdgpu_gtt_mgr_init() argument
54 man->priv = mgr; in amdgpu_gtt_mgr_init()
66 static int amdgpu_gtt_mgr_fini(struct ttm_mem_type_manager *man) in amdgpu_gtt_mgr_fini() argument
68 struct amdgpu_gtt_mgr *mgr = man->priv; in amdgpu_gtt_mgr_fini()
79 man->priv = NULL; in amdgpu_gtt_mgr_fini()
93 int amdgpu_gtt_mgr_alloc(struct ttm_mem_type_manager *man, in amdgpu_gtt_mgr_alloc() argument
98 struct amdgpu_gtt_mgr *mgr = man->priv; in amdgpu_gtt_mgr_alloc()
116 lpfn = man->size; in amdgpu_gtt_mgr_alloc()
133 tbo->bdev->man[tbo->mem.mem_type].gpu_offset; in amdgpu_gtt_mgr_alloc()
149 static int amdgpu_gtt_mgr_new(struct ttm_mem_type_manager *man, in amdgpu_gtt_mgr_new() argument
[all …]
Damdgpu_ttm.c149 struct ttm_mem_type_manager *man) in amdgpu_init_mem_type() argument
158 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in amdgpu_init_mem_type()
159 man->available_caching = TTM_PL_MASK_CACHING; in amdgpu_init_mem_type()
160 man->default_caching = TTM_PL_FLAG_CACHED; in amdgpu_init_mem_type()
163 man->func = &amdgpu_gtt_mgr_func; in amdgpu_init_mem_type()
164 man->gpu_offset = adev->mc.gtt_start; in amdgpu_init_mem_type()
165 man->available_caching = TTM_PL_MASK_CACHING; in amdgpu_init_mem_type()
166 man->default_caching = TTM_PL_FLAG_CACHED; in amdgpu_init_mem_type()
167 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; in amdgpu_init_mem_type()
171 man->func = &ttm_bo_manager_func; in amdgpu_init_mem_type()
[all …]
/drivers/gpu/drm/nouveau/
Dnouveau_ttm.c36 nouveau_vram_manager_init(struct ttm_mem_type_manager *man, unsigned long psize) in nouveau_vram_manager_init() argument
38 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_init()
40 man->priv = fb; in nouveau_vram_manager_init()
45 nouveau_vram_manager_fini(struct ttm_mem_type_manager *man) in nouveau_vram_manager_fini() argument
47 man->priv = NULL; in nouveau_vram_manager_fini()
66 nouveau_vram_manager_del(struct ttm_mem_type_manager *man, in nouveau_vram_manager_del() argument
69 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_del()
76 nouveau_vram_manager_new(struct ttm_mem_type_manager *man, in nouveau_vram_manager_new() argument
81 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_new()
117 nouveau_gart_manager_init(struct ttm_mem_type_manager *man, unsigned long psize) in nouveau_gart_manager_init() argument
[all …]
Dnouveau_bo.c565 struct ttm_mem_type_manager *man) in nouveau_bo_init_mem_type() argument
571 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in nouveau_bo_init_mem_type()
572 man->available_caching = TTM_PL_MASK_CACHING; in nouveau_bo_init_mem_type()
573 man->default_caching = TTM_PL_FLAG_CACHED; in nouveau_bo_init_mem_type()
576 man->flags = TTM_MEMTYPE_FLAG_FIXED | in nouveau_bo_init_mem_type()
578 man->available_caching = TTM_PL_FLAG_UNCACHED | in nouveau_bo_init_mem_type()
580 man->default_caching = TTM_PL_FLAG_WC; in nouveau_bo_init_mem_type()
585 man->available_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
586 man->default_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
589 man->func = &nouveau_vram_manager; in nouveau_bo_init_mem_type()
[all …]
/drivers/gpu/drm/virtio/
Dvirtgpu_ttm.c168 static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man, in ttm_bo_man_get_node() argument
177 static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man, in ttm_bo_man_put_node() argument
184 static int ttm_bo_man_init(struct ttm_mem_type_manager *man, in ttm_bo_man_init() argument
190 static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man) in ttm_bo_man_takedown() argument
195 static void ttm_bo_man_debug(struct ttm_mem_type_manager *man, in ttm_bo_man_debug() argument
209 struct ttm_mem_type_manager *man) in virtio_gpu_init_mem_type() argument
218 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in virtio_gpu_init_mem_type()
219 man->available_caching = TTM_PL_MASK_CACHING; in virtio_gpu_init_mem_type()
220 man->default_caching = TTM_PL_FLAG_CACHED; in virtio_gpu_init_mem_type()
223 man->func = &virtio_gpu_bo_manager_func; in virtio_gpu_init_mem_type()
[all …]
/drivers/gpu/drm/qxl/
Dqxl_ttm.c161 struct ttm_mem_type_manager *man) in qxl_init_mem_type() argument
166 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in qxl_init_mem_type()
167 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
168 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
173 man->func = &ttm_bo_manager_func; in qxl_init_mem_type()
174 man->gpu_offset = 0; in qxl_init_mem_type()
175 man->flags = TTM_MEMTYPE_FLAG_FIXED | in qxl_init_mem_type()
177 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
178 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
220 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in qxl_ttm_io_mem_reserve() local
[all …]
Dqxl_object.c147 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kmap_atomic_page() local
159 (void) ttm_mem_io_lock(man, false); in qxl_bo_kmap_atomic_page()
161 ttm_mem_io_unlock(man); in qxl_bo_kmap_atomic_page()
189 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kunmap_atomic_page() local
201 (void) ttm_mem_io_lock(man, false); in qxl_bo_kunmap_atomic_page()
203 ttm_mem_io_unlock(man); in qxl_bo_kunmap_atomic_page()
/drivers/gpu/drm/mgag200/
Dmgag200_ttm.c114 struct ttm_mem_type_manager *man) in mgag200_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in mgag200_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in mgag200_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in mgag200_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in mgag200_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in mgag200_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in mgag200_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in mgag200_bo_init_mem_type()
160 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in mgag200_ttm_io_mem_reserve() local
168 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in mgag200_ttm_io_mem_reserve()
/drivers/gpu/drm/cirrus/
Dcirrus_ttm.c114 struct ttm_mem_type_manager *man) in cirrus_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in cirrus_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in cirrus_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in cirrus_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in cirrus_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in cirrus_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in cirrus_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in cirrus_bo_init_mem_type()
160 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in cirrus_ttm_io_mem_reserve() local
168 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in cirrus_ttm_io_mem_reserve()
/drivers/gpu/drm/ast/
Dast_ttm.c114 struct ttm_mem_type_manager *man) in ast_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in ast_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in ast_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in ast_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in ast_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in ast_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in ast_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in ast_bo_init_mem_type()
160 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in ast_ttm_io_mem_reserve() local
168 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in ast_ttm_io_mem_reserve()
/drivers/gpu/drm/radeon/
Dradeon_ttm.c128 struct ttm_mem_type_manager *man) in radeon_init_mem_type() argument
137 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
138 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
139 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
142 man->func = &ttm_bo_manager_func; in radeon_init_mem_type()
143 man->gpu_offset = rdev->mc.gtt_start; in radeon_init_mem_type()
144 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
145 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
146 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; in radeon_init_mem_type()
155 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
[all …]
/drivers/atm/
Dfirestream.c455 unsigned int man = -1; /* hush gcc */ in make_rate() local
482 man = 511; in make_rate()
489 man = rate; in make_rate()
492 while (!(man & (1<<31))) { in make_rate()
494 man = man<<1; in make_rate()
501 man = man<<1; in make_rate()
502 man &= 0xffffffffU; /* a nop on 32-bit systems */ in make_rate()
515 man = man>>(32-9); in make_rate()
520 if (man & (~0U>>9)) { in make_rate()
521 man = (man>>(32-9)) + 1; in make_rate()
[all …]
Dambassador.c892 unsigned int man = -1; // hush gcc in make_rate() local
917 man = 511; in make_rate()
924 man = rate; in make_rate()
927 while (!(man & (1<<31))) { in make_rate()
929 man = man<<1; in make_rate()
935 man = man<<1; in make_rate()
936 man &= 0xffffffffU; // a nop on 32-bit systems in make_rate()
948 man = man>>(32-9); in make_rate()
953 if (man & (~0U>>9)) { in make_rate()
954 man = (man>>(32-9)) + 1; in make_rate()
[all …]
/drivers/gpu/drm/bochs/
Dbochs_mm.c91 struct ttm_mem_type_manager *man) in bochs_bo_init_mem_type() argument
95 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in bochs_bo_init_mem_type()
96 man->available_caching = TTM_PL_MASK_CACHING; in bochs_bo_init_mem_type()
97 man->default_caching = TTM_PL_FLAG_CACHED; in bochs_bo_init_mem_type()
100 man->func = &ttm_bo_manager_func; in bochs_bo_init_mem_type()
101 man->flags = TTM_MEMTYPE_FLAG_FIXED | in bochs_bo_init_mem_type()
103 man->available_caching = TTM_PL_FLAG_UNCACHED | in bochs_bo_init_mem_type()
105 man->default_caching = TTM_PL_FLAG_WC; in bochs_bo_init_mem_type()
138 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in bochs_ttm_io_mem_reserve() local
146 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in bochs_ttm_io_mem_reserve()
/drivers/media/i2c/smiapp/
Dsmiapp-regs.c29 uint64_t man; in float_to_u32_mul_1000000() local
58 man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL; in float_to_u32_mul_1000000()
61 man >>= -exp; in float_to_u32_mul_1000000()
63 man <<= exp; in float_to_u32_mul_1000000()
65 man >>= 23; /* Remove mantissa bias */ in float_to_u32_mul_1000000()
67 return man & 0xffffffff; in float_to_u32_mul_1000000()

12