Lines Matching refs:pool

79 struct ttm_pool *pool; in ttm_pool_pre_populated() local
87 pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL); in ttm_pool_pre_populated()
88 KUNIT_ASSERT_NOT_NULL(test, pool); in ttm_pool_pre_populated()
90 ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, true, false); in ttm_pool_pre_populated()
92 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_pre_populated()
95 ttm_pool_free(pool, tt); in ttm_pool_pre_populated()
98 return pool; in ttm_pool_pre_populated()
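
The references at lines 79-98 come from a helper that hands back a pool whose free lists already hold pages. A minimal sketch of how those calls fit together is below; it assumes the surrounding test fixture supplies the struct device (read as devs->dev in the listing), a prepared struct ttm_tt and a struct ttm_operation_ctx, none of which appear in the matched lines.

#include <kunit/test.h>
#include <linux/list.h>
#include <linux/numa.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_pool.h>
#include <drm/ttm/ttm_tt.h>

/*
 * Sketch of the pre-population pattern matched above.  The dev, tt and ctx
 * arguments stand in for the fixture objects (devs->dev, tt, simple_ctx)
 * that the real test builds elsewhere in the file.
 */
static struct ttm_pool *pool_pre_populated_sketch(struct kunit *test,
						  struct device *dev,
						  struct ttm_tt *tt,
						  struct ttm_operation_ctx *ctx)
{
	struct ttm_pool *pool;
	int err;

	/* Test-managed allocation: KUnit frees it when the case ends. */
	pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
	KUNIT_ASSERT_NOT_NULL(test, pool);

	/* No NUMA affinity, coherent DMA allocations, no DMA32 restriction. */
	ttm_pool_init(pool, dev, NUMA_NO_NODE, true, false);

	/* Allocate and free straight away so the backing pages end up on the
	 * pool's free lists instead of going back to the page allocator. */
	err = ttm_pool_alloc(pool, tt, ctx);
	KUNIT_ASSERT_EQ(test, err, 0);
	ttm_pool_free(pool, tt);

	return pool;
}
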
141 struct ttm_pool *pool; in ttm_pool_alloc_basic() local
151 pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL); in ttm_pool_alloc_basic()
152 KUNIT_ASSERT_NOT_NULL(test, pool); in ttm_pool_alloc_basic()
154 ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, params->use_dma_alloc, in ttm_pool_alloc_basic()
157 KUNIT_ASSERT_PTR_EQ(test, pool->dev, devs->dev); in ttm_pool_alloc_basic()
158 KUNIT_ASSERT_EQ(test, pool->nid, NUMA_NO_NODE); in ttm_pool_alloc_basic()
159 KUNIT_ASSERT_EQ(test, pool->use_dma_alloc, params->use_dma_alloc); in ttm_pool_alloc_basic()
161 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_alloc_basic()
191 ttm_pool_free(pool, tt); in ttm_pool_alloc_basic()
193 ttm_pool_fini(pool); in ttm_pool_alloc_basic()
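
Lines 141-193 exercise the basic allocation path. The distinctive part is that after ttm_pool_init() the test asserts the pool recorded the device, NUMA node and use_dma_alloc flag it was given. A hedged sketch of that shape, reusing the headers from the first sketch; use_dma_alloc stands in for the parameterised params->use_dma_alloc value:

/* Sketch of the basic-alloc pattern: init, check the recorded settings,
 * allocate, then tear everything down. */
static void pool_alloc_basic_sketch(struct kunit *test, struct device *dev,
				    struct ttm_tt *tt,
				    struct ttm_operation_ctx *ctx,
				    bool use_dma_alloc)
{
	struct ttm_pool *pool;
	int err;

	pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
	KUNIT_ASSERT_NOT_NULL(test, pool);

	ttm_pool_init(pool, dev, NUMA_NO_NODE, use_dma_alloc, false);

	/* ttm_pool_init() should have copied the arguments verbatim. */
	KUNIT_ASSERT_PTR_EQ(test, pool->dev, dev);
	KUNIT_ASSERT_EQ(test, pool->nid, NUMA_NO_NODE);
	KUNIT_ASSERT_EQ(test, pool->use_dma_alloc, use_dma_alloc);

	err = ttm_pool_alloc(pool, tt, ctx);
	KUNIT_ASSERT_EQ(test, err, 0);

	ttm_pool_free(pool, tt);
	ttm_pool_fini(pool);
}
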
202 struct ttm_pool *pool; in ttm_pool_alloc_basic_dma_addr() local
219 pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL); in ttm_pool_alloc_basic_dma_addr()
220 KUNIT_ASSERT_NOT_NULL(test, pool); in ttm_pool_alloc_basic_dma_addr()
222 ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, true, false); in ttm_pool_alloc_basic_dma_addr()
224 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_alloc_basic_dma_addr()
234 ttm_pool_free(pool, tt); in ttm_pool_alloc_basic_dma_addr()
236 ttm_pool_fini(pool); in ttm_pool_alloc_basic_dma_addr()
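
Lines 202-236 repeat the same plumbing with use_dma_alloc forced on. The matched lines do not show what the test actually inspects, but the function name suggests it is the DMA addresses that ttm_pool_alloc() fills into the tt. A sketch under that assumption; tt->dma_address and tt->num_pages are real struct ttm_tt members, while the exact entries checked are guessed rather than taken from the listing:

/* Sketch only: allocate from a DMA-backed pool and look at the addresses
 * written into tt->dma_address.  Which entries to check is an assumption,
 * since those lines are not part of the match above. */
static void pool_alloc_dma_addr_sketch(struct kunit *test, struct device *dev,
				       struct ttm_tt *tt,
				       struct ttm_operation_ctx *ctx)
{
	struct ttm_pool *pool;
	int err;

	pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
	KUNIT_ASSERT_NOT_NULL(test, pool);

	/* use_dma_alloc = true makes ttm_pool_alloc() populate dma_address. */
	ttm_pool_init(pool, dev, NUMA_NO_NODE, true, false);

	err = ttm_pool_alloc(pool, tt, ctx);
	KUNIT_ASSERT_EQ(test, err, 0);

	/* First and last pages should have been given DMA addresses. */
	KUNIT_ASSERT_NE(test, tt->dma_address[0], 0);
	KUNIT_ASSERT_NE(test, tt->dma_address[tt->num_pages - 1], 0);

	ttm_pool_free(pool, tt);
	ttm_pool_fini(pool);
}
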
242 struct ttm_pool *pool; in ttm_pool_alloc_order_caching_match() local
249 pool = ttm_pool_pre_populated(test, size, caching); in ttm_pool_alloc_order_caching_match()
251 pt = &pool->caching[caching].orders[order]; in ttm_pool_alloc_order_caching_match()
257 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_alloc_order_caching_match()
262 ttm_pool_free(pool, tt); in ttm_pool_alloc_order_caching_match()
264 ttm_pool_fini(pool); in ttm_pool_alloc_order_caching_match()
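
Lines 242-264 are the first test to use the pre-populated pool: they compute the ttm_pool_type bucket matching both the tt's caching mode and the allocation order, then allocate and expect that bucket to be drained. A sketch of the indexing; the caching[].orders[] lookup is straight from the listing, while the list_empty() checks assume the bucket keeps its free pages on pt->pages:

/* Sketch: pick the bucket matching (caching, order) and watch it drain when
 * a matching allocation is made against a pre-populated pool. */
static void pool_order_caching_match_sketch(struct kunit *test,
					    struct ttm_pool *pool,
					    struct ttm_tt *tt,
					    struct ttm_operation_ctx *ctx,
					    enum ttm_caching caching,
					    unsigned int order)
{
	struct ttm_pool_type *pt;
	int err;

	/* Bucket that matches both the tt's caching mode and the order. */
	pt = &pool->caching[caching].orders[order];
	KUNIT_ASSERT_FALSE(test, list_empty(&pt->pages));

	/* A matching allocation should be served from that bucket. */
	err = ttm_pool_alloc(pool, tt, ctx);
	KUNIT_ASSERT_EQ(test, err, 0);
	KUNIT_ASSERT_TRUE(test, list_empty(&pt->pages));

	ttm_pool_free(pool, tt);
	ttm_pool_fini(pool);
}
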
270 struct ttm_pool *pool; in ttm_pool_alloc_caching_mismatch() local
278 pool = ttm_pool_pre_populated(test, size, pool_caching); in ttm_pool_alloc_caching_mismatch()
280 pt_pool = &pool->caching[pool_caching].orders[order]; in ttm_pool_alloc_caching_mismatch()
281 pt_tt = &pool->caching[tt_caching].orders[order]; in ttm_pool_alloc_caching_mismatch()
289 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_alloc_caching_mismatch()
292 ttm_pool_free(pool, tt); in ttm_pool_alloc_caching_mismatch()
298 ttm_pool_fini(pool); in ttm_pool_alloc_caching_mismatch()
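
Lines 270-298 cover the opposite case: the pool was pre-populated with one caching mode but the tt asks for another, so the pre-filled bucket must be left alone and the bucket matching the tt's caching mode used instead. (The order-mismatch test at lines 304-332 has the same two-bucket shape, with the order rather than the caching mode differing.) A sketch under the same pt->pages assumption as above:

/* Sketch: the bucket pre-filled for pool_caching must stay populated, while
 * the allocation and the later free are served for the tt's own caching. */
static void pool_caching_mismatch_sketch(struct kunit *test,
					 struct ttm_pool *pool,
					 struct ttm_tt *tt,
					 struct ttm_operation_ctx *ctx,
					 enum ttm_caching pool_caching,
					 enum ttm_caching tt_caching,
					 unsigned int order)
{
	struct ttm_pool_type *pt_pool, *pt_tt;
	int err;

	pt_pool = &pool->caching[pool_caching].orders[order];
	pt_tt = &pool->caching[tt_caching].orders[order];

	/* Only the pre-populated bucket holds pages at this point. */
	KUNIT_ASSERT_FALSE(test, list_empty(&pt_pool->pages));
	KUNIT_ASSERT_TRUE(test, list_empty(&pt_tt->pages));

	err = ttm_pool_alloc(pool, tt, ctx);
	KUNIT_ASSERT_EQ(test, err, 0);

	/* The mismatching bucket was not touched by the allocation. */
	KUNIT_ASSERT_FALSE(test, list_empty(&pt_pool->pages));

	/* Freeing returns the pages to the bucket matching the tt's caching. */
	ttm_pool_free(pool, tt);
	KUNIT_ASSERT_FALSE(test, list_empty(&pt_tt->pages));

	ttm_pool_fini(pool);
}
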
304 struct ttm_pool *pool; in ttm_pool_alloc_order_mismatch() local
312 pool = ttm_pool_pre_populated(test, fst_size, caching); in ttm_pool_alloc_order_mismatch()
314 pt_pool = &pool->caching[caching].orders[order]; in ttm_pool_alloc_order_mismatch()
315 pt_tt = &pool->caching[caching].orders[0]; in ttm_pool_alloc_order_mismatch()
323 err = ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_alloc_order_mismatch()
326 ttm_pool_free(pool, tt); in ttm_pool_alloc_order_mismatch()
332 ttm_pool_fini(pool); in ttm_pool_alloc_order_mismatch()
340 struct ttm_pool *pool; in ttm_pool_free_dma_alloc() local
349 pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL); in ttm_pool_free_dma_alloc()
350 KUNIT_ASSERT_NOT_NULL(test, pool); in ttm_pool_free_dma_alloc()
352 ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, true, false); in ttm_pool_free_dma_alloc()
353 ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_free_dma_alloc()
355 pt = &pool->caching[caching].orders[order]; in ttm_pool_free_dma_alloc()
358 ttm_pool_free(pool, tt); in ttm_pool_free_dma_alloc()
363 ttm_pool_fini(pool); in ttm_pool_free_dma_alloc()
371 struct ttm_pool *pool; in ttm_pool_free_no_dma_alloc() local
380 pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL); in ttm_pool_free_no_dma_alloc()
381 KUNIT_ASSERT_NOT_NULL(test, pool); in ttm_pool_free_no_dma_alloc()
383 ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, false, false); in ttm_pool_free_no_dma_alloc()
384 ttm_pool_alloc(pool, tt, &simple_ctx); in ttm_pool_free_no_dma_alloc()
386 pt = &pool->caching[caching].orders[order]; in ttm_pool_free_no_dma_alloc()
389 ttm_pool_free(pool, tt); in ttm_pool_free_no_dma_alloc()
394 ttm_pool_fini(pool); in ttm_pool_free_no_dma_alloc()
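
Lines 340-363 and 371-394 are near-identical free-path tests: the only difference visible in the matches is the use_dma_alloc argument to ttm_pool_init() (true in the first, false in the second). Both allocate from a fresh pool, locate the bucket for the tt's caching mode and order, free, and expect that bucket to have received the pages before ttm_pool_fini() releases them. One parameterised sketch covering both, under the same assumptions as the earlier ones:

/* Sketch of both free-path tests; use_dma_alloc selects between the DMA and
 * plain page-allocator backends, the rest of the flow is identical. */
static void pool_free_sketch(struct kunit *test, struct device *dev,
			     struct ttm_tt *tt,
			     struct ttm_operation_ctx *ctx,
			     enum ttm_caching caching, unsigned int order,
			     bool use_dma_alloc)
{
	struct ttm_pool_type *pt;
	struct ttm_pool *pool;

	pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
	KUNIT_ASSERT_NOT_NULL(test, pool);

	ttm_pool_init(pool, dev, NUMA_NO_NODE, use_dma_alloc, false);
	ttm_pool_alloc(pool, tt, ctx);

	/* Freshly initialised pools start with empty buckets. */
	pt = &pool->caching[caching].orders[order];
	KUNIT_ASSERT_TRUE(test, list_empty(&pt->pages));

	/* Freeing hands the pages to the matching bucket rather than
	 * returning them straight to the system allocator. */
	ttm_pool_free(pool, tt);
	KUNIT_ASSERT_FALSE(test, list_empty(&pt->pages));

	ttm_pool_fini(pool);
}
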
399 struct ttm_pool *pool; in ttm_pool_fini_basic() local
405 pool = ttm_pool_pre_populated(test, size, caching); in ttm_pool_fini_basic()
406 pt = &pool->caching[caching].orders[order]; in ttm_pool_fini_basic()
410 ttm_pool_fini(pool); in ttm_pool_fini_basic()
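
Finally, lines 399-410 check that ttm_pool_fini() cleans up a pre-populated pool: the matching bucket is non-empty going in, and fini() is responsible for giving every page back. A short sketch, again assuming the bucket's free list is pt->pages:

/* Sketch: ttm_pool_fini() must release whatever the pre-populated bucket
 * still holds; the sketch only observes that the bucket had pages. */
static void pool_fini_sketch(struct kunit *test, struct ttm_pool *pool,
			     enum ttm_caching caching, unsigned int order)
{
	struct ttm_pool_type *pt = &pool->caching[caching].orders[order];

	KUNIT_ASSERT_FALSE(test, list_empty(&pt->pages));

	/* Drains all buckets and returns their pages to the system. */
	ttm_pool_fini(pool);
}
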