
Lines matching refs:man in drivers/gpu/drm/ttm/ttm_bo.c (references to the per-memory-type manager, struct ttm_mem_type_manager *man)

72 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_mem_type_debug() local
75 pr_err("    has_type: %d\n", man->has_type); in ttm_mem_type_debug()
76 pr_err("    use_type: %d\n", man->use_type); in ttm_mem_type_debug()
77 pr_err("    flags: 0x%08X\n", man->flags); in ttm_mem_type_debug()
78 pr_err("    gpu_offset: 0x%08llX\n", man->gpu_offset); in ttm_mem_type_debug()
79 pr_err("    size: %llu\n", man->size); in ttm_mem_type_debug()
80 pr_err("    available_caching: 0x%08X\n", man->available_caching); in ttm_mem_type_debug()
81 pr_err("    default_caching: 0x%08X\n", man->default_caching); in ttm_mem_type_debug()
83 (*man->func->debug)(man, &p); in ttm_mem_type_debug()
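
Note on line 83: the debug hook is the per-manager callback from struct ttm_mem_type_manager_func, and in this TTM generation it receives a struct drm_printer. A minimal sketch of such a hook, not taken from the file above; the function name is invented.

#include <drm/drm_print.h>
#include <drm/ttm/ttm_bo_driver.h>

/* Hypothetical debug callback matching the call at line 83; prints the
 * manager's size through the drm_printer handed in by TTM. */
static void example_man_debug(struct ttm_mem_type_manager *man,
			      struct drm_printer *printer)
{
	drm_printf(printer, "    managed size: %llu pages\n", man->size);
}
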
166 struct ttm_mem_type_manager *man; in ttm_bo_add_to_lru() local
174 man = &bdev->man[bo->mem.mem_type]; in ttm_bo_add_to_lru()
175 list_add_tail(&bo->lru, &man->lru[bo->priority]); in ttm_bo_add_to_lru()
280 struct ttm_mem_type_manager *old_man = &bdev->man[bo->mem.mem_type]; in ttm_bo_handle_move_mem()
281 struct ttm_mem_type_manager *new_man = &bdev->man[mem->mem_type]; in ttm_bo_handle_move_mem()
361 bdev->man[bo->mem.mem_type].gpu_offset; in ttm_bo_handle_move_mem()
369 new_man = &bdev->man[bo->mem.mem_type]; in ttm_bo_handle_move_mem()
643 struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type]; in ttm_bo_release() local
646 ttm_mem_io_lock(man, false); in ttm_bo_release()
648 ttm_mem_io_unlock(man); in ttm_bo_release()
739 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_mem_evict_first() local
746 list_for_each_entry(bo, &man->lru[i], lru) { in ttm_mem_evict_first()
793 struct ttm_mem_type_manager *man = &bo->bdev->man[mem->mem_type]; in ttm_bo_mem_put() local
796 (*man->func->put_node)(man, mem); in ttm_bo_mem_put()
804 struct ttm_mem_type_manager *man, in ttm_bo_add_move_fence() argument
810 spin_lock(&man->move_lock); in ttm_bo_add_move_fence()
811 fence = dma_fence_get(man->move); in ttm_bo_add_move_fence()
812 spin_unlock(&man->move_lock); in ttm_bo_add_move_fence()
840 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_bo_mem_force_space() local
844 ret = (*man->func->get_node)(man, bo, place, mem); in ttm_bo_mem_force_space()
855 return ttm_bo_add_move_fence(bo, man, mem); in ttm_bo_mem_force_space()
858 static uint32_t ttm_bo_select_caching(struct ttm_mem_type_manager *man, in ttm_bo_select_caching() argument
871 else if ((man->default_caching & caching) != 0) in ttm_bo_select_caching()
872 result |= man->default_caching; in ttm_bo_select_caching()
883 static bool ttm_bo_mt_compatible(struct ttm_mem_type_manager *man, in ttm_bo_mt_compatible() argument
893 if ((place->flags & man->available_caching) == 0) in ttm_bo_mt_compatible()
896 cur_flags |= (place->flags & man->available_caching); in ttm_bo_mt_compatible()
917 struct ttm_mem_type_manager *man; in ttm_bo_mem_space() local
936 man = &bdev->man[mem_type]; in ttm_bo_mem_space()
937 if (!man->has_type || !man->use_type) in ttm_bo_mem_space()
940 type_ok = ttm_bo_mt_compatible(man, mem_type, place, in ttm_bo_mem_space()
947 cur_flags = ttm_bo_select_caching(man, bo->mem.placement, in ttm_bo_mem_space()
959 ret = (*man->func->get_node)(man, bo, place, mem); in ttm_bo_mem_space()
964 ret = ttm_bo_add_move_fence(bo, man, mem); in ttm_bo_mem_space()
966 (*man->func->put_node)(man, mem); in ttm_bo_mem_space()
985 man = &bdev->man[mem_type]; in ttm_bo_mem_space()
986 if (!man->has_type || !man->use_type) in ttm_bo_mem_space()
988 if (!ttm_bo_mt_compatible(man, mem_type, place, &cur_flags)) in ttm_bo_mem_space()
992 cur_flags = ttm_bo_select_caching(man, bo->mem.placement, in ttm_bo_mem_space()
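
For context on the checks above: ttm_bo_mem_space() walks a driver-supplied struct ttm_placement, rejects a memory type when place->flags shares no bit with man->available_caching (line 893), and otherwise derives the final caching bits with ttm_bo_select_caching() (lines 947 and 992). A sketch of what such a driver-side placement typically looks like; the array and variable names are illustrative only.

#include <linux/kernel.h>
#include <drm/ttm/ttm_placement.h>

/* Illustrative placement list: prefer write-combined VRAM, fall back to
 * cached GTT.  These flags are what ttm_bo_mt_compatible() matches
 * against man->available_caching. */
static const struct ttm_place example_places[] = {
	{ .fpfn = 0, .lpfn = 0, .flags = TTM_PL_FLAG_VRAM | TTM_PL_FLAG_WC },
	{ .fpfn = 0, .lpfn = 0, .flags = TTM_PL_FLAG_TT | TTM_PL_FLAG_CACHED },
};

static const struct ttm_placement example_placement = {
	.num_placement = ARRAY_SIZE(example_places),
	.placement = example_places,
	.num_busy_placement = ARRAY_SIZE(example_places),
	.busy_placement = example_places,
};
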
1338 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_bo_force_list_clean() local
1350 while (!list_empty(&man->lru[i])) { in ttm_bo_force_list_clean()
1360 spin_lock(&man->move_lock); in ttm_bo_force_list_clean()
1361 fence = dma_fence_get(man->move); in ttm_bo_force_list_clean()
1362 spin_unlock(&man->move_lock); in ttm_bo_force_list_clean()
1376 struct ttm_mem_type_manager *man; in ttm_bo_clean_mm() local
1383 man = &bdev->man[mem_type]; in ttm_bo_clean_mm()
1385 if (!man->has_type) { in ttm_bo_clean_mm()
1391 man->use_type = false; in ttm_bo_clean_mm()
1392 man->has_type = false; in ttm_bo_clean_mm()
1402 ret = (*man->func->takedown)(man); in ttm_bo_clean_mm()
1405 dma_fence_put(man->move); in ttm_bo_clean_mm()
1406 man->move = NULL; in ttm_bo_clean_mm()
1414 struct ttm_mem_type_manager *man = &bdev->man[mem_type]; in ttm_bo_evict_mm() local
1421 if (!man->has_type) { in ttm_bo_evict_mm()
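
ttm_bo_clean_mm() and ttm_bo_evict_mm() above are the two driver-facing ways to empty a memory type: eviction alone (typically at suspend) versus full manager takedown (at unload). A rough driver-side sketch; the function names and the choice of TTM_PL_VRAM are purely illustrative.

#include <linux/printk.h>
#include <drm/ttm/ttm_bo_api.h>
#include <drm/ttm/ttm_placement.h>

/* Suspend path: move every buffer out of VRAM but keep the manager alive. */
static int example_suspend_vram(struct ttm_bo_device *bdev)
{
	return ttm_bo_evict_mm(bdev, TTM_PL_VRAM);
}

/* Unload path: clears use_type/has_type and runs func->takedown(). */
static void example_fini_vram(struct ttm_bo_device *bdev)
{
	if (ttm_bo_clean_mm(bdev, TTM_PL_VRAM))
		pr_warn("example: VRAM manager takedown failed\n");
}
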
1434 struct ttm_mem_type_manager *man; in ttm_bo_init_mm() local
1438 man = &bdev->man[type]; in ttm_bo_init_mm()
1439 BUG_ON(man->has_type); in ttm_bo_init_mm()
1440 man->io_reserve_fastpath = true; in ttm_bo_init_mm()
1441 man->use_io_reserve_lru = false; in ttm_bo_init_mm()
1442 mutex_init(&man->io_reserve_mutex); in ttm_bo_init_mm()
1443 spin_lock_init(&man->move_lock); in ttm_bo_init_mm()
1444 INIT_LIST_HEAD(&man->io_reserve_lru); in ttm_bo_init_mm()
1446 ret = bdev->driver->init_mem_type(bdev, type, man); in ttm_bo_init_mm()
1449 man->bdev = bdev; in ttm_bo_init_mm()
1452 ret = (*man->func->init)(man, p_size); in ttm_bo_init_mm()
1456 man->has_type = true; in ttm_bo_init_mm()
1457 man->use_type = true; in ttm_bo_init_mm()
1458 man->size = p_size; in ttm_bo_init_mm()
1461 INIT_LIST_HEAD(&man->lru[i]); in ttm_bo_init_mm()
1462 man->move = NULL; in ttm_bo_init_mm()
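
ttm_bo_init_mm() above is called once per memory type by the driver; the init_mem_type() hook it invokes at line 1446 is where the fields consulted throughout this listing (func, flags, available_caching, default_caching, gpu_offset) get filled in. A condensed sketch of that pairing, modeled on drivers of this TTM generation; the function names, offsets and sizes are placeholders.

#include <drm/ttm/ttm_bo_api.h>
#include <drm/ttm/ttm_bo_driver.h>
#include <drm/ttm/ttm_placement.h>

/* Driver callback reached from the init_mem_type() call at line 1446;
 * everything device-specific here is illustrative. */
static int example_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
				 struct ttm_mem_type_manager *man)
{
	switch (type) {
	case TTM_PL_SYSTEM:
		man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;
		man->available_caching = TTM_PL_MASK_CACHING;
		man->default_caching = TTM_PL_FLAG_CACHED;
		break;
	case TTM_PL_VRAM:
		man->func = &ttm_bo_manager_func;	/* stock range manager */
		man->flags = TTM_MEMTYPE_FLAG_FIXED | TTM_MEMTYPE_FLAG_MAPPABLE;
		man->available_caching = TTM_PL_FLAG_UNCACHED | TTM_PL_FLAG_WC;
		man->default_caching = TTM_PL_FLAG_WC;
		man->gpu_offset = 0;			/* device-specific base */
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

/* Called once at device init; size is in pages. */
static int example_init_vram(struct ttm_bo_device *bdev, unsigned long vram_pages)
{
	return ttm_bo_init_mm(bdev, TTM_PL_VRAM, vram_pages);
}
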
1536 struct ttm_mem_type_manager *man; in ttm_bo_device_release() local
1540 man = &bdev->man[i]; in ttm_bo_device_release()
1541 if (man->has_type) { in ttm_bo_device_release()
1542 man->use_type = false; in ttm_bo_device_release()
1548 man->has_type = false; in ttm_bo_device_release()
1566 if (list_empty(&bdev->man[0].lru[0])) in ttm_bo_device_release()
1587 memset(bdev->man, 0, sizeof(bdev->man)); in ttm_bo_device_init()
1620 struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type]; in ttm_mem_reg_is_pci() local
1622 if (!(man->flags & TTM_MEMTYPE_FLAG_FIXED)) { in ttm_mem_reg_is_pci()
1626 if (man->flags & TTM_MEMTYPE_FLAG_CMA) in ttm_mem_reg_is_pci()
1646 struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type]; in ttm_bo_unmap_virtual() local
1648 ttm_mem_io_lock(man, false); in ttm_bo_unmap_virtual()
1650 ttm_mem_io_unlock(man); in ttm_bo_unmap_virtual()