/drivers/gpu/drm/vmwgfx/

D | vmwgfx_cmdbuf.c
    149   struct vmw_cmdbuf_man *man;    member
    191   static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context,
    193   static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context);
    201   static int vmw_cmdbuf_cur_lock(struct vmw_cmdbuf_man *man, bool interruptible)    in vmw_cmdbuf_cur_lock() argument
    204   if (mutex_lock_interruptible(&man->cur_mutex))    in vmw_cmdbuf_cur_lock()
    207   mutex_lock(&man->cur_mutex);    in vmw_cmdbuf_cur_lock()
    218   static void vmw_cmdbuf_cur_unlock(struct vmw_cmdbuf_man *man)    in vmw_cmdbuf_cur_unlock() argument
    220   mutex_unlock(&man->cur_mutex);    in vmw_cmdbuf_cur_unlock()
    239   dma_pool_free(header->man->dheaders, dheader, header->handle);    in vmw_cmdbuf_header_inline_free()
    253   struct vmw_cmdbuf_man *man = header->man;    in __vmw_cmdbuf_header_free() local
    [all …]

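The vmw_cmdbuf_cur_lock()/vmw_cmdbuf_cur_unlock() hits above are the usual
"optionally interruptible" mutex helper pattern. A minimal sketch in C,
reconstructed from those fragments (the -ERESTARTSYS return value is an
assumption based on common kernel practice, not shown in the hits):

    static int vmw_cmdbuf_cur_lock(struct vmw_cmdbuf_man *man, bool interruptible)
    {
        if (interruptible) {
            /* Caller may be interrupted by a signal while waiting. */
            if (mutex_lock_interruptible(&man->cur_mutex))
                return -ERESTARTSYS;
        } else {
            mutex_lock(&man->cur_mutex);
        }
        return 0;
    }

    static void vmw_cmdbuf_cur_unlock(struct vmw_cmdbuf_man *man)
    {
        mutex_unlock(&man->cur_mutex);
    }
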
D | vmwgfx_cmdbuf_res.c
    48    struct vmw_cmdbuf_res_manager *man;    member
    80    vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,    in vmw_cmdbuf_res_lookup() argument
    88    ret = drm_ht_find_item(&man->resources, key, &hash);    in vmw_cmdbuf_res_lookup()
    105   static void vmw_cmdbuf_res_free(struct vmw_cmdbuf_res_manager *man,    in vmw_cmdbuf_res_free() argument
    109   WARN_ON(drm_ht_remove_item(&man->resources, &entry->hash));    in vmw_cmdbuf_res_free()
    136   list_add_tail(&entry->head, &entry->man->list);    in vmw_cmdbuf_res_commit()
    169   vmw_cmdbuf_res_free(entry->man, entry);    in vmw_cmdbuf_res_revert()
    172   ret = drm_ht_insert_item(&entry->man->resources,    in vmw_cmdbuf_res_revert()
    175   list_add_tail(&entry->head, &entry->man->list);    in vmw_cmdbuf_res_revert()
    198   int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man,    in vmw_cmdbuf_res_add() argument
    [all …]

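The vmwgfx_cmdbuf_res.c hits revolve around a drm open hash table that maps a
user key to a command-buffer-managed resource. A hedged sketch of what the
lookup roughly looks like, built only from the drm_ht_find_item() call above;
the exact key packing (user_key | (res_type << 24)) and the entry layout are
assumptions:

    struct vmw_resource *
    vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,
                          enum vmw_cmdbuf_res_type res_type,
                          u32 user_key)
    {
        struct drm_hash_item *hash;
        unsigned long key = user_key | (res_type << 24);  /* assumed packing */
        int ret;

        ret = drm_ht_find_item(&man->resources, key, &hash);
        if (unlikely(ret != 0))
            return ERR_PTR(ret);

        return drm_hash_entry(hash, struct vmw_cmdbuf_res, hash)->res;
    }
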
D | vmwgfx_gmrid_manager.c
    47    static int vmw_gmrid_man_get_node(struct ttm_mem_type_manager *man,    in vmw_gmrid_man_get_node() argument
    53    (struct vmwgfx_gmrid_man *)man->priv;    in vmw_gmrid_man_get_node()
    101   static void vmw_gmrid_man_put_node(struct ttm_mem_type_manager *man,    in vmw_gmrid_man_put_node() argument
    105   (struct vmwgfx_gmrid_man *)man->priv;    in vmw_gmrid_man_put_node()
    116   static int vmw_gmrid_man_init(struct ttm_mem_type_manager *man,    in vmw_gmrid_man_init() argument
    120   container_of(man->bdev, struct vmw_private, bdev);    in vmw_gmrid_man_init()
    143   man->priv = (void *) gman;    in vmw_gmrid_man_init()
    147   static int vmw_gmrid_man_takedown(struct ttm_mem_type_manager *man)    in vmw_gmrid_man_takedown() argument
    150   (struct vmwgfx_gmrid_man *)man->priv;    in vmw_gmrid_man_takedown()
    159   static void vmw_gmrid_man_debug(struct ttm_mem_type_manager *man,    in vmw_gmrid_man_debug() argument

D | vmwgfx_buffer.c
    728   struct ttm_mem_type_manager *man)    in vmw_init_mem_type() argument
    734   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in vmw_init_mem_type()
    735   man->available_caching = TTM_PL_FLAG_CACHED;    in vmw_init_mem_type()
    736   man->default_caching = TTM_PL_FLAG_CACHED;    in vmw_init_mem_type()
    740   man->func = &ttm_bo_manager_func;    in vmw_init_mem_type()
    741   man->gpu_offset = 0;    in vmw_init_mem_type()
    742   man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE;    in vmw_init_mem_type()
    743   man->available_caching = TTM_PL_FLAG_CACHED;    in vmw_init_mem_type()
    744   man->default_caching = TTM_PL_FLAG_CACHED;    in vmw_init_mem_type()
    753   man->func = &vmw_gmrid_manager_func;    in vmw_init_mem_type()
    [all …]

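vmw_init_mem_type() is the standard TTM memory-type setup switch: system
memory is cached and CPU-mappable with no backing allocator, VRAM is a fixed
aperture handed to the generic range manager, and GMR/MOB ids go to the
vmwgfx-specific manager. A sketch assembled from the fragments above; only the
man->func assignment is shown for the GMR/MOB case, so its other fields here
are assumptions:

    static int vmw_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
                                 struct ttm_mem_type_manager *man)
    {
        switch (type) {
        case TTM_PL_SYSTEM:
            /* System memory: cached, mappable, no manager callbacks. */
            man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;
            man->available_caching = TTM_PL_FLAG_CACHED;
            man->default_caching = TTM_PL_FLAG_CACHED;
            break;
        case TTM_PL_VRAM:
            /* VRAM: fixed aperture managed by the generic range manager. */
            man->func = &ttm_bo_manager_func;
            man->gpu_offset = 0;
            man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE;
            man->available_caching = TTM_PL_FLAG_CACHED;
            man->default_caching = TTM_PL_FLAG_CACHED;
            break;
        case VMW_PL_GMR:
        case VMW_PL_MOB:
            /* Id-based guest memory regions / mobs (fields assumed). */
            man->func = &vmw_gmrid_manager_func;
            man->available_caching = TTM_PL_FLAG_CACHED;
            man->default_caching = TTM_PL_FLAG_CACHED;
            break;
        default:
            return -EINVAL;
        }
        return 0;
    }
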
D | vmwgfx_drv.h
    318    struct vmw_cmdbuf_res_manager *man;    member
    1083   struct vmw_cmdbuf_res_manager *man,
    1088   extern int vmw_shader_remove(struct vmw_cmdbuf_res_manager *man,
    1091   extern int vmw_dx_shader_add(struct vmw_cmdbuf_res_manager *man,
    1101   vmw_shader_lookup(struct vmw_cmdbuf_res_manager *man,
    1110   extern void vmw_cmdbuf_res_man_destroy(struct vmw_cmdbuf_res_manager *man);
    1113   vmw_cmdbuf_res_lookup(struct vmw_cmdbuf_res_manager *man,
    1118   extern int vmw_cmdbuf_res_add(struct vmw_cmdbuf_res_manager *man,
    1123   extern int vmw_cmdbuf_res_remove(struct vmw_cmdbuf_res_manager *man,
    1149   extern int vmw_cmdbuf_set_pool_size(struct vmw_cmdbuf_man *man,
    [all …]

/drivers/gpu/drm/ttm/

D | ttm_bo.c
    72    struct ttm_mem_type_manager *man = &bdev->man[mem_type];    in ttm_mem_type_debug() local
    75    pr_err(" has_type: %d\n", man->has_type);    in ttm_mem_type_debug()
    76    pr_err(" use_type: %d\n", man->use_type);    in ttm_mem_type_debug()
    77    pr_err(" flags: 0x%08X\n", man->flags);    in ttm_mem_type_debug()
    78    pr_err(" gpu_offset: 0x%08llX\n", man->gpu_offset);    in ttm_mem_type_debug()
    79    pr_err(" size: %llu\n", man->size);    in ttm_mem_type_debug()
    80    pr_err(" available_caching: 0x%08X\n", man->available_caching);    in ttm_mem_type_debug()
    81    pr_err(" default_caching: 0x%08X\n", man->default_caching);    in ttm_mem_type_debug()
    83    (*man->func->debug)(man, &p);    in ttm_mem_type_debug()
    166   struct ttm_mem_type_manager *man;    in ttm_bo_add_to_lru() local
    [all …]

D | ttm_bo_util.c
    88    int ttm_mem_io_lock(struct ttm_mem_type_manager *man, bool interruptible)    in ttm_mem_io_lock() argument
    90    if (likely(man->io_reserve_fastpath))    in ttm_mem_io_lock()
    94    return mutex_lock_interruptible(&man->io_reserve_mutex);    in ttm_mem_io_lock()
    96    mutex_lock(&man->io_reserve_mutex);    in ttm_mem_io_lock()
    101   void ttm_mem_io_unlock(struct ttm_mem_type_manager *man)    in ttm_mem_io_unlock() argument
    103   if (likely(man->io_reserve_fastpath))    in ttm_mem_io_unlock()
    106   mutex_unlock(&man->io_reserve_mutex);    in ttm_mem_io_unlock()
    110   static int ttm_mem_io_evict(struct ttm_mem_type_manager *man)    in ttm_mem_io_evict() argument
    114   if (!man->use_io_reserve_lru || list_empty(&man->io_reserve_lru))    in ttm_mem_io_evict()
    117   bo = list_first_entry(&man->io_reserve_lru,    in ttm_mem_io_evict()
    [all …]

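ttm_mem_io_lock()/ttm_mem_io_unlock() serialise per-memory-type I/O
reservations, with a lock-free fastpath for managers that never need to evict
them. Roughly, from the fragments above:

    int ttm_mem_io_lock(struct ttm_mem_type_manager *man, bool interruptible)
    {
        /* Managers that never evict io reservations skip the mutex. */
        if (likely(man->io_reserve_fastpath))
            return 0;

        if (interruptible)
            return mutex_lock_interruptible(&man->io_reserve_mutex);

        mutex_lock(&man->io_reserve_mutex);
        return 0;
    }

    void ttm_mem_io_unlock(struct ttm_mem_type_manager *man)
    {
        if (likely(man->io_reserve_fastpath))
            return;

        mutex_unlock(&man->io_reserve_mutex);
    }

The qxl_object.c hits further down are a typical caller, wrapping an atomic
kmap in (void) ttm_mem_io_lock(man, false); ... ttm_mem_io_unlock(man);.
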
D | ttm_bo_manager.c
    50    static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man,    in ttm_bo_man_get_node() argument
    55    struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv;    in ttm_bo_man_get_node()
    64    lpfn = man->size;    in ttm_bo_man_get_node()
    91    static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man,    in ttm_bo_man_put_node() argument
    94    struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv;    in ttm_bo_man_put_node()
    106   static int ttm_bo_man_init(struct ttm_mem_type_manager *man,    in ttm_bo_man_init() argument
    117   man->priv = rman;    in ttm_bo_man_init()
    121   static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man)    in ttm_bo_man_takedown() argument
    123   struct ttm_range_manager *rman = (struct ttm_range_manager *) man->priv;    in ttm_bo_man_takedown()
    131   man->priv = NULL;    in ttm_bo_man_takedown()
    [all …]

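ttm_bo_manager.c is the generic backend behind ttm_bo_manager_func: man->priv
points at a small wrapper around a drm_mm range allocator. A sketch of the
init/takedown pair, assuming the wrapper layout implied by the casts above (a
drm_mm plus a spinlock):

    struct ttm_range_manager {
        struct drm_mm mm;
        spinlock_t lock;
    };

    static int ttm_bo_man_init(struct ttm_mem_type_manager *man,
                               unsigned long p_size)
    {
        struct ttm_range_manager *rman;

        rman = kzalloc(sizeof(*rman), GFP_KERNEL);
        if (!rman)
            return -ENOMEM;

        drm_mm_init(&rman->mm, 0, p_size);
        spin_lock_init(&rman->lock);
        man->priv = rman;
        return 0;
    }

    static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man)
    {
        struct ttm_range_manager *rman = (struct ttm_range_manager *)man->priv;

        spin_lock(&rman->lock);
        if (!drm_mm_clean(&rman->mm)) {
            /* Nodes still allocated: refuse to tear the manager down. */
            spin_unlock(&rman->lock);
            return -EBUSY;
        }
        drm_mm_takedown(&rman->mm);
        spin_unlock(&rman->lock);

        kfree(rman);
        man->priv = NULL;
        return 0;
    }
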
/drivers/gpu/drm/amd/amdgpu/

D | amdgpu_gtt_mgr.c
    42    static int amdgpu_gtt_mgr_init(struct ttm_mem_type_manager *man,    in amdgpu_gtt_mgr_init() argument
    45    struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev);    in amdgpu_gtt_mgr_init()
    58    man->priv = mgr;    in amdgpu_gtt_mgr_init()
    70    static int amdgpu_gtt_mgr_fini(struct ttm_mem_type_manager *man)    in amdgpu_gtt_mgr_fini() argument
    72    struct amdgpu_gtt_mgr *mgr = man->priv;    in amdgpu_gtt_mgr_fini()
    83    man->priv = NULL;    in amdgpu_gtt_mgr_fini()
    111   static int amdgpu_gtt_mgr_alloc(struct ttm_mem_type_manager *man,    in amdgpu_gtt_mgr_alloc() argument
    116   struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev);    in amdgpu_gtt_mgr_alloc()
    117   struct amdgpu_gtt_mgr *mgr = man->priv;    in amdgpu_gtt_mgr_alloc()
    162   static int amdgpu_gtt_mgr_new(struct ttm_mem_type_manager *man,    in amdgpu_gtt_mgr_new() argument
    [all …]

D | amdgpu_vram_mgr.c
    43    static int amdgpu_vram_mgr_init(struct ttm_mem_type_manager *man,    in amdgpu_vram_mgr_init() argument
    54    man->priv = mgr;    in amdgpu_vram_mgr_init()
    66    static int amdgpu_vram_mgr_fini(struct ttm_mem_type_manager *man)    in amdgpu_vram_mgr_fini() argument
    68    struct amdgpu_vram_mgr *mgr = man->priv;    in amdgpu_vram_mgr_fini()
    79    man->priv = NULL;    in amdgpu_vram_mgr_fini()
    130   static int amdgpu_vram_mgr_new(struct ttm_mem_type_manager *man,    in amdgpu_vram_mgr_new() argument
    135   struct amdgpu_device *adev = amdgpu_ttm_adev(man->bdev);    in amdgpu_vram_mgr_new()
    136   struct amdgpu_vram_mgr *mgr = man->priv;    in amdgpu_vram_mgr_new()
    147   lpfn = man->size;    in amdgpu_vram_mgr_new()
    229   static void amdgpu_vram_mgr_del(struct ttm_mem_type_manager *man,    in amdgpu_vram_mgr_del() argument
    [all …]

D | amdgpu_ttm.c
    147   struct ttm_mem_type_manager *man)    in amdgpu_init_mem_type() argument
    156   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in amdgpu_init_mem_type()
    157   man->available_caching = TTM_PL_MASK_CACHING;    in amdgpu_init_mem_type()
    158   man->default_caching = TTM_PL_FLAG_CACHED;    in amdgpu_init_mem_type()
    161   man->func = &amdgpu_gtt_mgr_func;    in amdgpu_init_mem_type()
    162   man->gpu_offset = adev->mc.gart_start;    in amdgpu_init_mem_type()
    163   man->available_caching = TTM_PL_MASK_CACHING;    in amdgpu_init_mem_type()
    164   man->default_caching = TTM_PL_FLAG_CACHED;    in amdgpu_init_mem_type()
    165   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA;    in amdgpu_init_mem_type()
    169   man->func = &amdgpu_vram_mgr_func;    in amdgpu_init_mem_type()
    [all …]

/drivers/gpu/drm/nouveau/

D | nouveau_ttm.c
    36    nouveau_vram_manager_init(struct ttm_mem_type_manager *man, unsigned long psize)    in nouveau_vram_manager_init() argument
    38    struct nouveau_drm *drm = nouveau_bdev(man->bdev);    in nouveau_vram_manager_init()
    40    man->priv = fb;    in nouveau_vram_manager_init()
    45    nouveau_vram_manager_fini(struct ttm_mem_type_manager *man)    in nouveau_vram_manager_fini() argument
    47    man->priv = NULL;    in nouveau_vram_manager_fini()
    66    nouveau_vram_manager_del(struct ttm_mem_type_manager *man,    in nouveau_vram_manager_del() argument
    69    struct nouveau_drm *drm = nouveau_bdev(man->bdev);    in nouveau_vram_manager_del()
    76    nouveau_vram_manager_new(struct ttm_mem_type_manager *man,    in nouveau_vram_manager_new() argument
    81    struct nouveau_drm *drm = nouveau_bdev(man->bdev);    in nouveau_vram_manager_new()
    117   nouveau_gart_manager_init(struct ttm_mem_type_manager *man, unsigned long psize)    in nouveau_gart_manager_init() argument
    [all …]

/drivers/gpu/drm/virtio/

D | virtgpu_ttm.c
    167   static int ttm_bo_man_get_node(struct ttm_mem_type_manager *man,    in ttm_bo_man_get_node() argument
    176   static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man,    in ttm_bo_man_put_node() argument
    183   static int ttm_bo_man_init(struct ttm_mem_type_manager *man,    in ttm_bo_man_init() argument
    189   static int ttm_bo_man_takedown(struct ttm_mem_type_manager *man)    in ttm_bo_man_takedown() argument
    194   static void ttm_bo_man_debug(struct ttm_mem_type_manager *man,    in ttm_bo_man_debug() argument
    208   struct ttm_mem_type_manager *man)    in virtio_gpu_init_mem_type() argument
    217   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in virtio_gpu_init_mem_type()
    218   man->available_caching = TTM_PL_MASK_CACHING;    in virtio_gpu_init_mem_type()
    219   man->default_caching = TTM_PL_FLAG_CACHED;    in virtio_gpu_init_mem_type()
    222   man->func = &virtio_gpu_bo_manager_func;    in virtio_gpu_init_mem_type()
    [all …]

/drivers/gpu/drm/qxl/

D | qxl_ttm.c
    160   struct ttm_mem_type_manager *man)    in qxl_init_mem_type() argument
    165   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in qxl_init_mem_type()
    166   man->available_caching = TTM_PL_MASK_CACHING;    in qxl_init_mem_type()
    167   man->default_caching = TTM_PL_FLAG_CACHED;    in qxl_init_mem_type()
    172   man->func = &ttm_bo_manager_func;    in qxl_init_mem_type()
    173   man->gpu_offset = 0;    in qxl_init_mem_type()
    174   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in qxl_init_mem_type()
    176   man->available_caching = TTM_PL_MASK_CACHING;    in qxl_init_mem_type()
    177   man->default_caching = TTM_PL_FLAG_CACHED;    in qxl_init_mem_type()
    219   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in qxl_ttm_io_mem_reserve() local
    [all …]

D | qxl_object.c
    147   struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];    in qxl_bo_kmap_atomic_page() local
    159   (void) ttm_mem_io_lock(man, false);    in qxl_bo_kmap_atomic_page()
    161   ttm_mem_io_unlock(man);    in qxl_bo_kmap_atomic_page()
    189   struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];    in qxl_bo_kunmap_atomic_page() local
    201   (void) ttm_mem_io_lock(man, false);    in qxl_bo_kunmap_atomic_page()
    203   ttm_mem_io_unlock(man);    in qxl_bo_kunmap_atomic_page()

/drivers/gpu/drm/radeon/

D | radeon_ttm.c
    128   struct ttm_mem_type_manager *man)    in radeon_init_mem_type() argument
    137   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in radeon_init_mem_type()
    138   man->available_caching = TTM_PL_MASK_CACHING;    in radeon_init_mem_type()
    139   man->default_caching = TTM_PL_FLAG_CACHED;    in radeon_init_mem_type()
    142   man->func = &ttm_bo_manager_func;    in radeon_init_mem_type()
    143   man->gpu_offset = rdev->mc.gtt_start;    in radeon_init_mem_type()
    144   man->available_caching = TTM_PL_MASK_CACHING;    in radeon_init_mem_type()
    145   man->default_caching = TTM_PL_FLAG_CACHED;    in radeon_init_mem_type()
    146   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA;    in radeon_init_mem_type()
    155   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in radeon_init_mem_type()
    [all …]

/drivers/staging/vboxvideo/

D | vbox_ttm.c
    113   struct ttm_mem_type_manager *man)    in vbox_bo_init_mem_type() argument
    117   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in vbox_bo_init_mem_type()
    118   man->available_caching = TTM_PL_MASK_CACHING;    in vbox_bo_init_mem_type()
    119   man->default_caching = TTM_PL_FLAG_CACHED;    in vbox_bo_init_mem_type()
    122   man->func = &ttm_bo_manager_func;    in vbox_bo_init_mem_type()
    123   man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE;    in vbox_bo_init_mem_type()
    124   man->available_caching = TTM_PL_FLAG_UNCACHED | TTM_PL_FLAG_WC;    in vbox_bo_init_mem_type()
    125   man->default_caching = TTM_PL_FLAG_WC;    in vbox_bo_init_mem_type()
    156   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in vbox_ttm_io_mem_reserve() local
    164   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in vbox_ttm_io_mem_reserve()

/drivers/gpu/drm/mgag200/

D | mgag200_ttm.c
    115   struct ttm_mem_type_manager *man)    in mgag200_bo_init_mem_type() argument
    119   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in mgag200_bo_init_mem_type()
    120   man->available_caching = TTM_PL_MASK_CACHING;    in mgag200_bo_init_mem_type()
    121   man->default_caching = TTM_PL_FLAG_CACHED;    in mgag200_bo_init_mem_type()
    124   man->func = &ttm_bo_manager_func;    in mgag200_bo_init_mem_type()
    125   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in mgag200_bo_init_mem_type()
    127   man->available_caching = TTM_PL_FLAG_UNCACHED |    in mgag200_bo_init_mem_type()
    129   man->default_caching = TTM_PL_FLAG_WC;    in mgag200_bo_init_mem_type()
    161   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in mgag200_ttm_io_mem_reserve() local
    169   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in mgag200_ttm_io_mem_reserve()

/drivers/gpu/drm/cirrus/

D | cirrus_ttm.c
    115   struct ttm_mem_type_manager *man)    in cirrus_bo_init_mem_type() argument
    119   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in cirrus_bo_init_mem_type()
    120   man->available_caching = TTM_PL_MASK_CACHING;    in cirrus_bo_init_mem_type()
    121   man->default_caching = TTM_PL_FLAG_CACHED;    in cirrus_bo_init_mem_type()
    124   man->func = &ttm_bo_manager_func;    in cirrus_bo_init_mem_type()
    125   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in cirrus_bo_init_mem_type()
    127   man->available_caching = TTM_PL_FLAG_UNCACHED |    in cirrus_bo_init_mem_type()
    129   man->default_caching = TTM_PL_FLAG_WC;    in cirrus_bo_init_mem_type()
    161   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in cirrus_ttm_io_mem_reserve() local
    169   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in cirrus_ttm_io_mem_reserve()

/drivers/gpu/drm/ast/

D | ast_ttm.c
    115   struct ttm_mem_type_manager *man)    in ast_bo_init_mem_type() argument
    119   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in ast_bo_init_mem_type()
    120   man->available_caching = TTM_PL_MASK_CACHING;    in ast_bo_init_mem_type()
    121   man->default_caching = TTM_PL_FLAG_CACHED;    in ast_bo_init_mem_type()
    124   man->func = &ttm_bo_manager_func;    in ast_bo_init_mem_type()
    125   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in ast_bo_init_mem_type()
    127   man->available_caching = TTM_PL_FLAG_UNCACHED |    in ast_bo_init_mem_type()
    129   man->default_caching = TTM_PL_FLAG_WC;    in ast_bo_init_mem_type()
    161   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in ast_ttm_io_mem_reserve() local
    169   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in ast_ttm_io_mem_reserve()

/drivers/atm/

D | firestream.c
    455   unsigned int man = -1; /* hush gcc */    in make_rate() local
    482   man = 511;    in make_rate()
    489   man = rate;    in make_rate()
    492   while (!(man & (1<<31))) {    in make_rate()
    494   man = man<<1;    in make_rate()
    501   man = man<<1;    in make_rate()
    502   man &= 0xffffffffU; /* a nop on 32-bit systems */    in make_rate()
    515   man = man>>(32-9);    in make_rate()
    520   if (man & (~0U>>9)) {    in make_rate()
    521   man = (man>>(32-9)) + 1;    in make_rate()
    [all …]

D | ambassador.c
    892   unsigned int man = -1; // hush gcc    in make_rate() local
    917   man = 511;    in make_rate()
    924   man = rate;    in make_rate()
    927   while (!(man & (1<<31))) {    in make_rate()
    929   man = man<<1;    in make_rate()
    935   man = man<<1;    in make_rate()
    936   man &= 0xffffffffU; // a nop on 32-bit systems    in make_rate()
    948   man = man>>(32-9);    in make_rate()
    953   if (man & (~0U>>9)) {    in make_rate()
    954   man = (man>>(32-9)) + 1;    in make_rate()
    [all …]

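Both make_rate() copies (firestream.c and ambassador.c carry the same routine)
encode an ATM cell rate as a 5-bit exponent plus a 9-bit mantissa with an
implicit leading one, i.e. rate ~= 2^exp * (1 + man/512). A self-contained
round-down sketch of that encoding; the helper name and the (exp << 9) | man
packing are illustrative, and the drivers' rounding-mode selection and
overflow clamping are omitted:

    #include <stdint.h>

    /* Illustrative helper: pack a cell rate into exponent/mantissa form,
     * rounding down. */
    static uint16_t encode_atm_rate(uint32_t rate)
    {
        unsigned int exp = 31;
        uint32_t man = rate;

        if (rate == 0)
            return 0;

        /* Normalise until the leading 1 of the rate sits in bit 31. */
        while (!(man & (1U << 31))) {
            exp--;
            man <<= 1;
        }

        /* Drop the implicit leading 1, keep the next 9 bits as mantissa. */
        man <<= 1;
        man >>= 32 - 9;

        return (exp << 9) | man;    /* e.g. rate 3 -> exp 1, man 256 */
    }
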
/drivers/gpu/drm/hisilicon/hibmc/

D | hibmc_ttm.c
    96    struct ttm_mem_type_manager *man)    in hibmc_bo_init_mem_type() argument
    100   man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in hibmc_bo_init_mem_type()
    101   man->available_caching = TTM_PL_MASK_CACHING;    in hibmc_bo_init_mem_type()
    102   man->default_caching = TTM_PL_FLAG_CACHED;    in hibmc_bo_init_mem_type()
    105   man->func = &ttm_bo_manager_func;    in hibmc_bo_init_mem_type()
    106   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in hibmc_bo_init_mem_type()
    108   man->available_caching = TTM_PL_FLAG_UNCACHED |    in hibmc_bo_init_mem_type()
    110   man->default_caching = TTM_PL_FLAG_WC;    in hibmc_bo_init_mem_type()
    168   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in hibmc_ttm_io_mem_reserve() local
    176   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in hibmc_ttm_io_mem_reserve()

/drivers/gpu/drm/bochs/

D | bochs_mm.c
    91    struct ttm_mem_type_manager *man)    in bochs_bo_init_mem_type() argument
    95    man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;    in bochs_bo_init_mem_type()
    96    man->available_caching = TTM_PL_MASK_CACHING;    in bochs_bo_init_mem_type()
    97    man->default_caching = TTM_PL_FLAG_CACHED;    in bochs_bo_init_mem_type()
    100   man->func = &ttm_bo_manager_func;    in bochs_bo_init_mem_type()
    101   man->flags = TTM_MEMTYPE_FLAG_FIXED |    in bochs_bo_init_mem_type()
    103   man->available_caching = TTM_PL_FLAG_UNCACHED |    in bochs_bo_init_mem_type()
    105   man->default_caching = TTM_PL_FLAG_WC;    in bochs_bo_init_mem_type()
    138   struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in bochs_ttm_io_mem_reserve() local
    146   if (!(man->flags & TTM_MEMTYPE_FLAG_MAPPABLE))    in bochs_ttm_io_mem_reserve()

/drivers/media/i2c/smiapp/

D | smiapp-regs.c
    29    uint64_t man;    in float_to_u32_mul_1000000() local
    58    man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL;    in float_to_u32_mul_1000000()
    61    man >>= -exp;    in float_to_u32_mul_1000000()
    63    man <<= exp;    in float_to_u32_mul_1000000()
    65    man >>= 23; /* Remove mantissa bias */    in float_to_u32_mul_1000000()
    67    return man & 0xffffffff;    in float_to_u32_mul_1000000()

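float_to_u32_mul_1000000() in smiapp-regs.c decodes a raw IEEE-754
single-precision bit pattern with integer arithmetic and returns the value
scaled by 10^6. A simplified, self-contained sketch of the arithmetic visible
in the hits; the helper name differs, and the driver's handling of the sign
bit, NaN/Inf, subnormals and out-of-range exponents is left out:

    #include <stdint.h>

    static uint32_t float_bits_to_u32_mul_1000000(uint32_t phloat)
    {
        int32_t exp;
        uint64_t man;

        if ((phloat & 0x7fffffff) == 0)
            return 0;    /* +/- zero */

        /* Unbiased exponent of the IEEE-754 single-precision value. */
        exp = (int32_t)((phloat >> 23) & 0xff) - 127;

        /* 24-bit mantissa with the implicit leading 1, scaled by 10^6. */
        man = ((phloat & 0x7fffff) | 0x800000) * 1000000ULL;

        if (exp < 0)
            man >>= -exp;
        else
            man <<= exp;    /* very large exponents would overflow here */

        man >>= 23;    /* remove the 2^23 mantissa scaling */

        return man & 0xffffffff;
    }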