
Searched refs:man (Results 1 – 25 of 35) sorted by relevance

/drivers/gpu/drm/vmwgfx/
vmwgfx_cmdbuf_res.c
53 struct vmw_cmdbuf_res_manager *man; member
85 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_lookup() argument
93 ret = drm_ht_find_item(&man->resources, key, &hash); in vmw_cmdbuf_res_lookup()
110 static void vmw_cmdbuf_res_free(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_free() argument
114 WARN_ON(drm_ht_remove_item(&man->resources, &entry->hash)); in vmw_cmdbuf_res_free()
138 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_commit()
171 vmw_cmdbuf_res_free(entry->man, entry); in vmw_cmdbuf_res_revert()
174 ret = drm_ht_insert_item(&entry->man->resources, in vmw_cmdbuf_res_revert()
177 list_add_tail(&entry->head, &entry->man->list); in vmw_cmdbuf_res_revert()
200 int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man, in vmw_cmdbuf_res_add() argument
[all …]
vmwgfx_gmrid_manager.c
47 static int vmw_gmrid_man_get_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_get_node() argument
53 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_get_node()
101 static void vmw_gmrid_man_put_node(struct ttm_mem_type_manager *man, in vmw_gmrid_man_put_node() argument
105 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_put_node()
116 static int vmw_gmrid_man_init(struct ttm_mem_type_manager *man, in vmw_gmrid_man_init() argument
120 container_of(man->bdev, struct vmw_private, bdev); in vmw_gmrid_man_init()
143 man->priv = (void *) gman; in vmw_gmrid_man_init()
147 static int vmw_gmrid_man_takedown(struct ttm_mem_type_manager *man) in vmw_gmrid_man_takedown() argument
150 (struct vmwgfx_gmrid_man *)man->priv; in vmw_gmrid_man_takedown()
159 static void vmw_gmrid_man_debug(struct ttm_mem_type_manager *man, in vmw_gmrid_man_debug() argument
vmwgfx_buffer.c
715 struct ttm_mem_type_manager *man) in vmw_init_mem_type() argument
721 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
722 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
723 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
727 man->func = &ttm_bo_manager_func; in vmw_init_mem_type()
728 man->gpu_offset = 0; in vmw_init_mem_type()
729 man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE; in vmw_init_mem_type()
730 man->available_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
731 man->default_caching = TTM_PL_FLAG_CACHED; in vmw_init_mem_type()
740 man->func = &vmw_gmrid_manager_func; in vmw_init_mem_type()
[all …]
vmwgfx_shader.c
554 int vmw_compat_shader_remove(struct vmw_cmdbuf_res_manager *man, in vmw_compat_shader_remove() argument
561 return vmw_cmdbuf_res_remove(man, vmw_cmdbuf_res_compat_shader, in vmw_compat_shader_remove()
582 struct vmw_cmdbuf_res_manager *man, in vmw_compat_shader_add() argument
631 ret = vmw_cmdbuf_res_add(man, vmw_cmdbuf_res_compat_shader, in vmw_compat_shader_add()
653 vmw_compat_shader_lookup(struct vmw_cmdbuf_res_manager *man, in vmw_compat_shader_lookup() argument
660 return vmw_cmdbuf_res_lookup(man, vmw_cmdbuf_res_compat_shader, in vmw_compat_shader_lookup()
vmwgfx_context.c
36 struct vmw_cmdbuf_res_manager *man; member
118 vmw_cmdbuf_res_man_destroy(uctx->man); in vmw_hw_context_destroy()
161 uctx->man = vmw_cmdbuf_res_man_create(dev_priv); in vmw_gb_context_init()
162 if (unlikely(IS_ERR(uctx->man))) { in vmw_gb_context_init()
163 ret = PTR_ERR(uctx->man); in vmw_gb_context_init()
164 uctx->man = NULL; in vmw_gb_context_init()
920 return container_of(ctx, struct vmw_user_context, res)->man; in vmw_context_res_man()
vmwgfx_drv.h
1040 struct vmw_cmdbuf_res_manager *man,
1045 extern int vmw_compat_shader_remove(struct vmw_cmdbuf_res_manager *man,
1049 vmw_compat_shader_lookup(struct vmw_cmdbuf_res_manager *man,
1058 extern void vmw_cmdbuf_res_man_destroy(struct vmw_cmdbuf_res_manager *man);
1061 vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,
1066 extern int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man,
1071 extern int vmw_cmdbuf_res_remove(struct vmw_cmdbuf_res_manager *man,
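
Taken together, the vmwgfx hits above sketch a small transactional pattern: vmw_cmdbuf_res_add() stages a resource in a hash table keyed by a user handle, vmw_cmdbuf_res_commit() moves staged entries onto the manager's list, and vmw_cmdbuf_res_revert() undoes whatever was staged. The following is only a rough, hypothetical illustration of that staging idea in plain C, using simple lists instead of the drm hash table; none of these names are the vmwgfx API.

#include <stdio.h>
#include <stdlib.h>

/* Toy stand-in for a command-buffer-managed resource entry. */
struct res_entry {
	unsigned long key;		/* handle the entry would be looked up by */
	struct res_entry *next;
};

struct res_manager {
	struct res_entry *staged;	/* added since the last commit */
	struct res_entry *committed;	/* owned by the manager */
};

/* Stage a new resource; it only becomes permanent on commit. */
static int res_add(struct res_manager *man, unsigned long key)
{
	struct res_entry *e = malloc(sizeof(*e));

	if (!e)
		return -1;
	e->key = key;
	e->next = man->staged;
	man->staged = e;
	return 0;
}

/* Commit: hand all staged entries over to the manager. */
static void res_commit(struct res_manager *man)
{
	struct res_entry *e;

	while ((e = man->staged) != NULL) {
		man->staged = e->next;
		e->next = man->committed;
		man->committed = e;
	}
}

/* Revert: drop everything staged since the last commit. */
static void res_revert(struct res_manager *man)
{
	struct res_entry *e;

	while ((e = man->staged) != NULL) {
		man->staged = e->next;
		free(e);
	}
}

int main(void)
{
	struct res_manager man = { NULL, NULL };

	res_add(&man, 42);
	res_commit(&man);		/* key 42 now belongs to the manager */
	res_add(&man, 43);
	res_revert(&man);		/* key 43 is discarded, 42 survives */
	printf("committed key: %lu\n", man.committed->key);
	free(man.committed);
	return 0;
}
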
/drivers/gpu/drm/ttm/
ttm_bo_util.c
80 int ttm_mem_io_lock(struct ttm_mem_type_manager *man, bool interruptible) in ttm_mem_io_lock() argument
82 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_lock()
86 return mutex_lock_interruptible(&man->io_reserve_mutex); in ttm_mem_io_lock()
88 mutex_lock(&man->io_reserve_mutex); in ttm_mem_io_lock()
93 void ttm_mem_io_unlock(struct ttm_mem_type_manager *man) in ttm_mem_io_unlock() argument
95 if (likely(man->io_reserve_fastpath)) in ttm_mem_io_unlock()
98 mutex_unlock(&man->io_reserve_mutex); in ttm_mem_io_unlock()
102 static int ttm_mem_io_evict(struct ttm_mem_type_manager *man) in ttm_mem_io_evict() argument
106 if (!man->use_io_reserve_lru || list_empty(&man->io_reserve_lru)) in ttm_mem_io_evict()
109 bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
[all …]
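
The ttm_mem_io_lock()/ttm_mem_io_unlock() hits above show a fastpath-lock idiom: when man->io_reserve_fastpath is set the lock is skipped entirely, otherwise io_reserve_mutex is taken (optionally interruptibly). Below is a minimal userspace sketch of the same idiom, using pthreads rather than the kernel mutex API; the struct and function names here are illustrative, not TTM's.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for the manager's io-reserve locking fields. */
struct io_manager {
	bool io_reserve_fastpath;	/* true: reservations need no locking */
	pthread_mutex_t io_reserve_mutex;
};

/*
 * Fastpath-or-mutex idiom: skip locking entirely when the fastpath flag is
 * set, otherwise take the mutex.  The kernel version can also take the lock
 * interruptibly; that variant is omitted here.
 */
static int io_lock(struct io_manager *man)
{
	if (man->io_reserve_fastpath)
		return 0;
	return pthread_mutex_lock(&man->io_reserve_mutex);
}

static void io_unlock(struct io_manager *man)
{
	if (man->io_reserve_fastpath)
		return;
	pthread_mutex_unlock(&man->io_reserve_mutex);
}

int main(void)
{
	struct io_manager man = {
		.io_reserve_fastpath = false,
		.io_reserve_mutex = PTHREAD_MUTEX_INITIALIZER,
	};

	io_lock(&man);
	printf("holding io_reserve_mutex\n");
	io_unlock(&man);
	return 0;
}
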
ttm_bo.c
72 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_mem_type_debug() local
74 pr_err(" has_type: %d\n", man->has_type); in ttm_mem_type_debug()
75 pr_err(" use_type: %d\n", man->use_type); in ttm_mem_type_debug()
76 pr_err(" flags: 0x%08X\n", man->flags); in ttm_mem_type_debug()
77 pr_err(" gpu_offset: 0x%08lX\n", man->gpu_offset); in ttm_mem_type_debug()
78 pr_err(" size: %llu\n", man->size); in ttm_mem_type_debug()
79 pr_err(" available_caching: 0x%08X\n", man->available_caching); in ttm_mem_type_debug()
80 pr_err(" default_caching: 0x%08X\n", man->default_caching); in ttm_mem_type_debug()
82 (*man->func->debug)(man, TTM_PFX); in ttm_mem_type_debug()
167 struct ttm_mem_type_manager *man; in ttm_bo_add_to_lru() local
[all …]
ttm_bo_manager.c
50 static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man, in ttm_bo_man_get_node() argument
55 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_get_node()
64 lpfn = man->size; in ttm_bo_man_get_node()
91 static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man, in ttm_bo_man_put_node() argument
94 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_put_node()
106 static int ttm_bo_man_init(struct ttm_mem_type_manager *man, in ttm_bo_man_init() argument
117 man->priv = rman; in ttm_bo_man_init()
121 static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man) in ttm_bo_man_takedown() argument
123 struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv; in ttm_bo_man_takedown()
131 man->priv = NULL; in ttm_bo_man_takedown()
[all …]
ttm_bo_vm.c
103 struct ttm_mem_type_manager *man = in ttm_bo_vm_fault() local
104 &bdev->man[bo->mem.mem_type]; in ttm_bo_vm_fault()
178 ret = ttm_mem_io_lock(man, true); in ttm_bo_vm_fault()
266 ttm_mem_io_unlock(man); in ttm_bo_vm_fault()
/drivers/gpu/drm/nouveau/
nouveau_ttm.c
33 nouveau_vram_manager_init(struct ttm_mem_type_manager *man, unsigned long psize) in nouveau_vram_manager_init() argument
35 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_init()
37 man->priv = pfb; in nouveau_vram_manager_init()
42 nouveau_vram_manager_fini(struct ttm_mem_type_manager *man) in nouveau_vram_manager_fini() argument
44 man->priv = NULL; in nouveau_vram_manager_fini()
63 nouveau_vram_manager_del(struct ttm_mem_type_manager *man, in nouveau_vram_manager_del() argument
66 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_del()
73 nouveau_vram_manager_new(struct ttm_mem_type_manager *man, in nouveau_vram_manager_new() argument
78 struct nouveau_drm *drm = nouveau_bdev(man->bdev); in nouveau_vram_manager_new()
104 nouveau_vram_manager_debug(struct ttm_mem_type_manager *man, const char *prefix) in nouveau_vram_manager_debug() argument
[all …]
nouveau_bo.c
495 struct ttm_mem_type_manager *man) in nouveau_bo_init_mem_type() argument
501 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in nouveau_bo_init_mem_type()
502 man->available_caching = TTM_PL_MASK_CACHING; in nouveau_bo_init_mem_type()
503 man->default_caching = TTM_PL_FLAG_CACHED; in nouveau_bo_init_mem_type()
506 man->flags = TTM_MEMTYPE_FLAG_FIXED | in nouveau_bo_init_mem_type()
508 man->available_caching = TTM_PL_FLAG_UNCACHED | in nouveau_bo_init_mem_type()
510 man->default_caching = TTM_PL_FLAG_WC; in nouveau_bo_init_mem_type()
515 man->available_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
516 man->default_caching = TTM_PL_FLAG_UNCACHED; in nouveau_bo_init_mem_type()
519 man->func = &nouveau_vram_manager; in nouveau_bo_init_mem_type()
[all …]
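
Across vmwgfx, nouveau and the qxl/ast/cirrus/mgag200/bochs entries below, the *_init_mem_type() hits all fill in the same ttm_mem_type_manager fields per placement type: flags, available_caching, default_caching and, for managed domains, func. The values differ per driver, but the shape is a switch on the memory type. A self-contained sketch of that shape follows, with simplified stand-in types and flag values rather than the real TTM definitions.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the TTM placement types and flags seen above. */
enum { PL_SYSTEM, PL_TT, PL_VRAM };

#define MEMTYPE_FLAG_MAPPABLE  (1u << 0)
#define MEMTYPE_FLAG_FIXED     (1u << 1)
#define PL_FLAG_CACHED         (1u << 2)
#define PL_FLAG_UNCACHED       (1u << 3)
#define PL_FLAG_WC             (1u << 4)
#define PL_MASK_CACHING        (PL_FLAG_CACHED | PL_FLAG_UNCACHED | PL_FLAG_WC)

struct mem_type_manager {
	uint32_t flags;
	uint32_t available_caching;
	uint32_t default_caching;
};

/* Per-memory-type setup, mirroring the switch shape of the drivers above. */
static int init_mem_type(unsigned int type, struct mem_type_manager *man)
{
	switch (type) {
	case PL_SYSTEM:		/* ordinary system pages */
	case PL_TT:		/* GART/GTT-style aperture */
		man->flags = MEMTYPE_FLAG_MAPPABLE;
		man->available_caching = PL_MASK_CACHING;
		man->default_caching = PL_FLAG_CACHED;
		break;
	case PL_VRAM:		/* fixed on-device memory, write-combined */
		man->flags = MEMTYPE_FLAG_FIXED | MEMTYPE_FLAG_MAPPABLE;
		man->available_caching = PL_FLAG_UNCACHED | PL_FLAG_WC;
		man->default_caching = PL_FLAG_WC;
		break;
	default:
		return -1;
	}
	return 0;
}

int main(void)
{
	struct mem_type_manager vram = { 0 };

	init_mem_type(PL_VRAM, &vram);
	printf("VRAM default caching: 0x%x\n", (unsigned int)vram.default_caching);
	return 0;
}
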
/drivers/gpu/drm/qxl/
qxl_ttm.c
161 struct ttm_mem_type_manager *man) in qxl_init_mem_type() argument
166 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in qxl_init_mem_type()
167 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
168 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
173 man->func = &ttm_bo_manager_func; in qxl_init_mem_type()
174 man->gpu_offset = 0; in qxl_init_mem_type()
175 man->flags = TTM_MEMTYPE_FLAG_FIXED | in qxl_init_mem_type()
177 man->available_caching = TTM_PL_MASK_CACHING; in qxl_init_mem_type()
178 man->default_caching = TTM_PL_FLAG_CACHED; in qxl_init_mem_type()
219 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in qxl_ttm_io_mem_reserve() local
[all …]
qxl_object.c
147 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kmap_atomic_page() local
159 (void) ttm_mem_io_lock(man, false); in qxl_bo_kmap_atomic_page()
161 ttm_mem_io_unlock(man); in qxl_bo_kmap_atomic_page()
189 struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type]; in qxl_bo_kunmap_atomic_page() local
201 (void) ttm_mem_io_lock(man, false); in qxl_bo_kunmap_atomic_page()
203 ttm_mem_io_unlock(man); in qxl_bo_kunmap_atomic_page()
/drivers/gpu/drm/ast/
ast_ttm.c
114 struct ttm_mem_type_manager *man) in ast_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in ast_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in ast_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in ast_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in ast_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in ast_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in ast_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in ast_bo_init_mem_type()
159 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in ast_ttm_io_mem_reserve() local
167 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in ast_ttm_io_mem_reserve()
/drivers/gpu/drm/cirrus/
cirrus_ttm.c
114 struct ttm_mem_type_manager *man) in cirrus_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in cirrus_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in cirrus_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in cirrus_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in cirrus_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in cirrus_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in cirrus_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in cirrus_bo_init_mem_type()
159 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in cirrus_ttm_io_mem_reserve() local
167 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in cirrus_ttm_io_mem_reserve()
/drivers/gpu/drm/mgag200/
mgag200_ttm.c
114 struct ttm_mem_type_manager *man) in mgag200_bo_init_mem_type() argument
118 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in mgag200_bo_init_mem_type()
119 man->available_caching = TTM_PL_MASK_CACHING; in mgag200_bo_init_mem_type()
120 man->default_caching = TTM_PL_FLAG_CACHED; in mgag200_bo_init_mem_type()
123 man->func = &ttm_bo_manager_func; in mgag200_bo_init_mem_type()
124 man->flags = TTM_MEMTYPE_FLAG_FIXED | in mgag200_bo_init_mem_type()
126 man->available_caching = TTM_PL_FLAG_UNCACHED | in mgag200_bo_init_mem_type()
128 man->default_caching = TTM_PL_FLAG_WC; in mgag200_bo_init_mem_type()
159 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in mgag200_ttm_io_mem_reserve() local
167 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in mgag200_ttm_io_mem_reserve()
/drivers/gpu/drm/radeon/
radeon_ttm.c
128 struct ttm_mem_type_manager *man) in radeon_init_mem_type() argument
137 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
138 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
139 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
142 man->func = &ttm_bo_manager_func; in radeon_init_mem_type()
143 man->gpu_offset = rdev->mc.gtt_start; in radeon_init_mem_type()
144 man->available_caching = TTM_PL_MASK_CACHING; in radeon_init_mem_type()
145 man->default_caching = TTM_PL_FLAG_CACHED; in radeon_init_mem_type()
146 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; in radeon_init_mem_type()
155 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in radeon_init_mem_type()
[all …]
radeon_gem.c
218 struct ttm_mem_type_manager *man; in radeon_gem_info_ioctl() local
220 man = &rdev->mman.bdev.man[TTM_PL_VRAM]; in radeon_gem_info_ioctl()
223 args->vram_visible = (u64)man->size << PAGE_SHIFT; in radeon_gem_info_ioctl()
/drivers/atm/
firestream.c
451 unsigned int man = -1; /* hush gcc */ in make_rate() local
478 man = 511; in make_rate()
485 man = rate; in make_rate()
488 while (!(man & (1<<31))) { in make_rate()
490 man = man<<1; in make_rate()
497 man = man<<1; in make_rate()
498 man &= 0xffffffffU; /* a nop on 32-bit systems */ in make_rate()
511 man = man>>(32-9); in make_rate()
516 if (man & (~0U>>9)) { in make_rate()
517 man = (man>>(32-9)) + 1; in make_rate()
[all …]
ambassador.c
892 unsigned int man = -1; // hush gcc in make_rate() local
917 man = 511; in make_rate()
924 man = rate; in make_rate()
927 while (!(man & (1<<31))) { in make_rate()
929 man = man<<1; in make_rate()
935 man = man<<1; in make_rate()
936 man &= 0xffffffffU; // a nop on 32-bit systems in make_rate()
948 man = man>>(32-9); in make_rate()
953 if (man & (~0U>>9)) { in make_rate()
954 man = (man>>(32-9)) + 1; in make_rate()
[all …]
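
Both make_rate() implementations above (firestream.c and ambassador.c) encode an ATM cell rate into a compact floating-point field: the value is normalized by shifting left until bit 31 is set, the top 9 bits become the mantissa (truncated or rounded up), and the shift count determines the exponent. Below is a standalone sketch of that normalize-and-round step; the helper is hypothetical and ignores the drivers' special cases (minimum/maximum rates, exponent limits, hardware field layout).

#include <stdint.h>
#include <stdio.h>

#define MANT_BITS 9

/*
 * Encode a non-zero rate as a MANT_BITS-wide mantissa plus exponent so that
 * value ~= mantissa * 2^exponent.  This mirrors the normalize/shift/round
 * structure of the make_rate() hits above; the real drivers add limits and
 * special cases that are omitted here.
 */
static int encode_rate(uint32_t rate, int round_up, uint32_t *mant, int *exp)
{
	uint32_t man;
	int e;

	if (!rate)
		return -1;

	/* Normalize: shift left until bit 31 is set, tracking the shift. */
	man = rate;
	e = 32 - MANT_BITS;		/* exponent if no shifting were needed */
	while (!(man & (1u << 31))) {
		man <<= 1;
		e--;
	}

	if (round_up && (man & (~0u >> MANT_BITS))) {
		/* Bits would be lost below the mantissa: round up. */
		man = (man >> (32 - MANT_BITS)) + 1;
		if (man == (1u << MANT_BITS)) {	/* rounding overflowed the mantissa */
			man >>= 1;
			e++;
		}
	} else {
		man >>= (32 - MANT_BITS);	/* plain truncation */
	}

	*mant = man;
	*exp = e;
	return 0;
}

int main(void)
{
	uint32_t mant;
	int exp;

	encode_rate(353207, 1, &mant, &exp);	/* roughly an OC-3 cell rate */
	printf("mantissa=%u exponent=%d value=%u\n",
	       (unsigned int)mant, exp, (unsigned int)(mant << exp));
	return 0;
}
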
/drivers/gpu/drm/bochs/
bochs_mm.c
91 struct ttm_mem_type_manager *man) in bochs_bo_init_mem_type() argument
95 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE; in bochs_bo_init_mem_type()
96 man->available_caching = TTM_PL_MASK_CACHING; in bochs_bo_init_mem_type()
97 man->default_caching = TTM_PL_FLAG_CACHED; in bochs_bo_init_mem_type()
100 man->func = &ttm_bo_manager_func; in bochs_bo_init_mem_type()
101 man->flags = TTM_MEMTYPE_FLAG_FIXED | in bochs_bo_init_mem_type()
103 man->available_caching = TTM_PL_FLAG_UNCACHED | in bochs_bo_init_mem_type()
105 man->default_caching = TTM_PL_FLAG_WC; in bochs_bo_init_mem_type()
137 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in bochs_ttm_io_mem_reserve() local
145 if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE)) in bochs_ttm_io_mem_reserve()
/drivers/media/i2c/smiapp/
smiapp-regs.c
35 uint64_t man; in float_to_u32_mul_1000000() local
64 man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL; in float_to_u32_mul_1000000()
67 man >>= -exp; in float_to_u32_mul_1000000()
69 man <<= exp; in float_to_u32_mul_1000000()
71 man >>= 23; /* Remove mantissa bias */ in float_to_u32_mul_1000000()
73 return man & 0xffffffff; in float_to_u32_mul_1000000()
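
float_to_u32_mul_1000000() above turns raw IEEE 754 single-precision register bits into an unsigned integer scaled by 10^6: the implicit leading 1 is OR-ed back into the 23-bit mantissa, the result is multiplied by 1000000, shifted by the unbiased exponent, and finally shifted right by 23 to remove the mantissa scaling. A standalone sketch of that conversion follows; the function name is hypothetical, and sign, NaN/Inf, denormal and overflow handling are left out.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/*
 * Convert IEEE 754 single-precision bits to value * 1,000,000 as a 32-bit
 * unsigned integer, mirroring the mantissa/exponent arithmetic in the
 * float_to_u32_mul_1000000() hits above.  Sign, NaN/Inf, denormals and
 * overflow are not handled in this sketch.
 */
static uint32_t float_bits_to_u32_mul_1e6(uint32_t bits)
{
	uint64_t man;
	int exp;

	if (!(bits & 0x7fffffff))
		return 0;				/* +/- 0.0 */

	/* Restore the implicit leading 1 and scale by 10^6 up front. */
	man = ((bits & 0x7fffff) | 0x800000) * 1000000ULL;

	/* Apply the unbiased exponent of the value. */
	exp = (int)((bits >> 23) & 0xff) - 127;
	if (exp < 0)
		man >>= -exp;
	else
		man <<= exp;

	man >>= 23;					/* remove the mantissa scaling */
	return man & 0xffffffff;
}

int main(void)
{
	float f = 2.5f;
	uint32_t bits;

	memcpy(&bits, &f, sizeof(bits));		/* reinterpret without UB */
	printf("%f -> %u\n", (double)f,
	       (unsigned int)float_bits_to_u32_mul_1e6(bits));
	return 0;
}
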
/drivers/media/i2c/
as3645a.c
565 int rval, man, model, rfu, version; in as3645a_registered() local
579 man = AS_DESIGN_INFO_FACTORY(rval); in as3645a_registered()
597 switch (man) { in as3645a_registered()
618 man, version); in as3645a_registered()
/drivers/tty/
Kconfig
27 The setterm command ("man setterm") can be used to change the
29 man page console_codes(4) ("man console_codes") contains the special
32 the setfont ("man setfont") command and the key bindings are defined
33 with the loadkeys ("man loadkeys") command.
68 would use the third virtual terminal as system console. (Try "man
