/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/ |
D | mali_kbase_mem_pool.c |
    31 #define pool_dbg(pool, format, ...) \ argument
    32 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, \
    33 (pool->next_pool) ? "kctx" : "kbdev", \
    34 kbase_mem_pool_size(pool), \
    35 kbase_mem_pool_max_size(pool), \
    41 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
    43 ssize_t max_size = kbase_mem_pool_max_size(pool); in kbase_mem_pool_capacity()
    44 ssize_t cur_size = kbase_mem_pool_size(pool); in kbase_mem_pool_capacity()
    49 static bool kbase_mem_pool_is_full(struct kbase_mem_pool *pool) in kbase_mem_pool_is_full() argument
    51 return kbase_mem_pool_size(pool) >= kbase_mem_pool_max_size(pool); in kbase_mem_pool_is_full()
    [all …]
|
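The kbase_mem_pool_capacity() hits above compute the pool's spare room as max_size minus cur_size in signed arithmetic so the result can be clamped instead of wrapping. A minimal standalone sketch of that clamp pattern, using a hypothetical pool struct rather than the driver's:

    #include <stddef.h>
    #include <sys/types.h>   /* ssize_t */

    struct demo_pool {           /* hypothetical stand-in for struct kbase_mem_pool */
        size_t cur_size;         /* pages currently cached in the pool */
        size_t max_size;         /* configured upper bound */
    };

    /* Spare capacity, clamped to zero when the pool is already at or over its limit. */
    static size_t demo_pool_capacity(const struct demo_pool *pool)
    {
        ssize_t spare = (ssize_t)pool->max_size - (ssize_t)pool->cur_size;

        return (spare > 0) ? (size_t)spare : 0;
    }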
D | mali_kbase_mem.h |
    811 int kbase_mem_pool_init(struct kbase_mem_pool *pool,
    825 void kbase_mem_pool_term(struct kbase_mem_pool *pool);
    842 struct page *kbase_mem_pool_alloc(struct kbase_mem_pool *pool);
    855 struct page *kbase_mem_pool_alloc_locked(struct kbase_mem_pool *pool);
    872 void kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *page,
    886 void kbase_mem_pool_free_locked(struct kbase_mem_pool *pool, struct page *p,
    911 int kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_4k_pages,
    951 int kbase_mem_pool_alloc_pages_locked(struct kbase_mem_pool *pool,
    966 void kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages,
    981 void kbase_mem_pool_free_pages_locked(struct kbase_mem_pool *pool,
    [all …]
|
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/ |
D | mali_kbase_mem_pool.c |
    32 #define pool_dbg(pool, format, ...) … argument
    33 …dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, (pool->next_pool) ? "kctx" : "kbdev", …
    34 kbase_mem_pool_size(pool), kbase_mem_pool_max_size(pool), ##__VA_ARGS__)
    39 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
    41 ssize_t max_size = kbase_mem_pool_max_size(pool); in kbase_mem_pool_capacity()
    42 ssize_t cur_size = kbase_mem_pool_size(pool); in kbase_mem_pool_capacity()
    47 static bool kbase_mem_pool_is_full(struct kbase_mem_pool *pool) in kbase_mem_pool_is_full() argument
    49 return kbase_mem_pool_size(pool) >= kbase_mem_pool_max_size(pool); in kbase_mem_pool_is_full()
    52 static bool kbase_mem_pool_is_empty(struct kbase_mem_pool *pool) in kbase_mem_pool_is_empty() argument
    54 return kbase_mem_pool_size(pool) == 0; in kbase_mem_pool_is_empty()
    [all …]
|
D | mali_kbase_mem.h |
    725 int kbase_mem_pool_init(struct kbase_mem_pool *pool, const struct kbase_mem_pool_config *config, un…
    735 void kbase_mem_pool_term(struct kbase_mem_pool *pool);
    752 struct page *kbase_mem_pool_alloc(struct kbase_mem_pool *pool);
    765 struct page *kbase_mem_pool_alloc_locked(struct kbase_mem_pool *pool);
    782 void kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *page, bool dirty);
    795 void kbase_mem_pool_free_locked(struct kbase_mem_pool *pool, struct page *p, bool dirty);
    819 int kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagged_addr …
    859 int kbase_mem_pool_alloc_pages_locked(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagge…
    873 void kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pa…
    888 void kbase_mem_pool_free_pages_locked(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_a…
    [all …]
|
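Both copies of mali_kbase_mem.h expose the same single-page entry points. A hedged sketch of how the visible alloc/free pair is meant to be used; the pool is assumed to be initialized elsewhere because kbase_mem_pool_init()'s full parameter list is truncated in these hits, and the mali_kbase headers are assumed to be on the include path:

    /* Sketch only: 'pool' is assumed to have been set up by the driver already. */
    #include <linux/errno.h>

    static int demo_use_pool(struct kbase_mem_pool *pool)
    {
        struct page *page;

        page = kbase_mem_pool_alloc(pool);       /* take one small page from the pool */
        if (!page)
            return -ENOMEM;                      /* no page could be obtained */

        /* ... map and use the page ... */

        kbase_mem_pool_free(pool, page, false);  /* give it back; 'dirty' is false here */
        return 0;
    }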
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/ |
D | mali_kbase_mem_pool.c |
    27 #define pool_dbg(pool, format, ...) \ argument
    28 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, \
    29 (pool->next_pool) ? "kctx" : "kbdev", \
    30 kbase_mem_pool_size(pool), \
    31 kbase_mem_pool_max_size(pool), \
    37 static inline void kbase_mem_pool_lock(struct kbase_mem_pool *pool) in kbase_mem_pool_lock() argument
    39 spin_lock(&pool->pool_lock); in kbase_mem_pool_lock()
    42 static inline void kbase_mem_pool_unlock(struct kbase_mem_pool *pool) in kbase_mem_pool_unlock() argument
    44 spin_unlock(&pool->pool_lock); in kbase_mem_pool_unlock()
    47 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
    [all …]
|
D | mali_kbase_mem_pool_debugfs.c |
    27 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_size_get() local
    29 *val = kbase_mem_pool_size(pool); in kbase_mem_pool_debugfs_size_get()
    36 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_size_set() local
    38 kbase_mem_pool_trim(pool, val); in kbase_mem_pool_debugfs_size_set()
    50 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_max_size_get() local
    52 *val = kbase_mem_pool_max_size(pool); in kbase_mem_pool_debugfs_max_size_get()
    59 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_max_size_set() local
    61 kbase_mem_pool_set_max_size(pool, val); in kbase_mem_pool_debugfs_max_size_set()
    72 struct kbase_mem_pool *pool) in kbase_mem_pool_debugfs_init() argument
    75 pool, &kbase_mem_pool_debugfs_size_fops); in kbase_mem_pool_debugfs_init()
    [all …]
|
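The get/set callbacks above follow the kernel's "simple attribute" debugfs shape. A generic sketch of how such a pair is usually wired up with DEFINE_SIMPLE_ATTRIBUTE and debugfs_create_file; the demo_* names are illustrative, not the fops names this driver uses:

    #include <linux/debugfs.h>
    #include <linux/fs.h>
    #include <linux/types.h>

    /* Hypothetical object exposed through debugfs. */
    struct demo_pool_stats {
        u64 size;
    };

    static int demo_size_get(void *data, u64 *val)
    {
        struct demo_pool_stats *s = data;

        *val = s->size;                 /* report the current value to userspace */
        return 0;
    }

    static int demo_size_set(void *data, u64 val)
    {
        struct demo_pool_stats *s = data;

        s->size = val;                  /* accept a new value written from userspace */
        return 0;
    }

    /* Generates demo_size_fops, printing/parsing the value as "%llu\n". */
    DEFINE_SIMPLE_ATTRIBUTE(demo_size_fops, demo_size_get, demo_size_set, "%llu\n");

    static void demo_debugfs_init(struct dentry *parent, struct demo_pool_stats *s)
    {
        debugfs_create_file("demo_size", 0644, parent, s, &demo_size_fops);
    }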
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/ |
D | mali_kbase_mem_pool.c |
    25 #define pool_dbg(pool, format, ...) … argument
    26 …dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, (pool->next_pool) ? "kctx" : "kbdev", …
    27 kbase_mem_pool_size(pool), kbase_mem_pool_max_size(pool), ##__VA_ARGS__)
    32 static inline void kbase_mem_pool_lock(struct kbase_mem_pool *pool) in kbase_mem_pool_lock() argument
    34 spin_lock(&pool->pool_lock); in kbase_mem_pool_lock()
    37 static inline void kbase_mem_pool_unlock(struct kbase_mem_pool *pool) in kbase_mem_pool_unlock() argument
    39 spin_unlock(&pool->pool_lock); in kbase_mem_pool_unlock()
    42 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
    44 ssize_t max_size = kbase_mem_pool_max_size(pool); in kbase_mem_pool_capacity()
    45 ssize_t cur_size = kbase_mem_pool_size(pool); in kbase_mem_pool_capacity()
    [all …]
|
D | mali_kbase_mem_pool_debugfs.c |
    25 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_size_get() local
    27 *val = kbase_mem_pool_size(pool); in kbase_mem_pool_debugfs_size_get()
    34 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_size_set() local
    36 kbase_mem_pool_trim(pool, val); in kbase_mem_pool_debugfs_size_set()
    46 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_max_size_get() local
    48 *val = kbase_mem_pool_max_size(pool); in kbase_mem_pool_debugfs_max_size_get()
    55 struct kbase_mem_pool *pool = (struct kbase_mem_pool *)data; in kbase_mem_pool_debugfs_max_size_set() local
    57 kbase_mem_pool_set_max_size(pool, val); in kbase_mem_pool_debugfs_max_size_set()
    65 void kbase_mem_pool_debugfs_init(struct dentry *parent, struct kbase_mem_pool *pool) in kbase_mem_pool_debugfs_init() argument
    67 …debugfs_create_file("mem_pool_size", S_IRUGO | S_IWUSR, parent, pool, &kbase_mem_pool_debugfs_size… in kbase_mem_pool_debugfs_init()
    [all …]
|
/device/soc/rockchip/rk3588/kernel/drivers/dma-buf/heaps/ |
D | page_pool.c |
    22 struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool) in dmabuf_page_pool_alloc_pages() argument
    26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
    29 static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool, in dmabuf_page_pool_free_pages() argument
    32 __free_pages(page, pool->order); in dmabuf_page_pool_free_pages()
    35 static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_add() argument
    44 mutex_lock(&pool->mutex); in dmabuf_page_pool_add()
    45 list_add_tail(&page->lru, &pool->items[index]); in dmabuf_page_pool_add()
    46 pool->count[index]++; in dmabuf_page_pool_add()
    48 1 << pool->order); in dmabuf_page_pool_add()
    49 mutex_unlock(&pool->mutex); in dmabuf_page_pool_add()
    [all …]
|
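Only the "add" half of the per-order page list shows up in the hits. A hedged sketch of what the matching fetch side of this kind of pool generally looks like; the struct below is a trimmed, hypothetical stand-in, not the file's actual remove path (which is elided above):

    #include <linux/list.h>
    #include <linux/mm.h>
    #include <linux/mutex.h>
    #include <linux/vmstat.h>

    #define DEMO_POOL_TYPES 2   /* one list per page category; the value is illustrative */

    /* Hypothetical, cut-down pool; the real struct dmabuf_page_pool has more fields. */
    struct demo_page_pool {
        struct mutex mutex;
        struct list_head items[DEMO_POOL_TYPES];
        int count[DEMO_POOL_TYPES];
        unsigned int order;
    };

    static struct page *demo_page_pool_remove(struct demo_page_pool *pool, int index)
    {
        struct page *page;

        mutex_lock(&pool->mutex);
        page = list_first_entry_or_null(&pool->items[index], struct page, lru);
        if (page) {
            pool->count[index]--;
            list_del(&page->lru);
            /* keep the reclaimable-page accounting in step with the add path */
            mod_node_page_state(page_pgdat(page), NR_KERNEL_MISC_RECLAIMABLE,
                                -(1 << pool->order));
        }
        mutex_unlock(&pool->mutex);

        return page;
    }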
/device/soc/rockchip/common/vendor/drivers/dma-buf/heaps/ |
D | page_pool.c |
    21 static inline struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool) in dmabuf_page_pool_alloc_pages() argument
    26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
    29 static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_free_pages() argument
    31 __free_pages(page, pool->order); in dmabuf_page_pool_free_pages()
    34 static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_add() argument
    44 mutex_lock(&pool->mutex); in dmabuf_page_pool_add()
    45 list_add_tail(&page->lru, &pool->items[index]); in dmabuf_page_pool_add()
    46 pool->count[index]++; in dmabuf_page_pool_add()
    47 mutex_unlock(&pool->mutex); in dmabuf_page_pool_add()
    48 mod_node_page_state(page_pgdat(page), NR_KERNEL_MISC_RECLAIMABLE, 1 << pool->order); in dmabuf_page_pool_add()
    [all …]
|
/device/board/isoftstone/yangfan/kernel/src/driv/dma-buf/ |
D | page_pool.c |
    22 struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool) in dmabuf_page_pool_alloc_pages() argument
    26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
    29 static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool, in dmabuf_page_pool_free_pages() argument
    32 __free_pages(page, pool->order); in dmabuf_page_pool_free_pages()
    35 static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_add() argument
    44 mutex_lock(&pool->mutex); in dmabuf_page_pool_add()
    45 list_add_tail(&page->lru, &pool->items[index]); in dmabuf_page_pool_add()
    46 pool->count[index]++; in dmabuf_page_pool_add()
    47 mutex_unlock(&pool->mutex); in dmabuf_page_pool_add()
    49 1 << pool->order); in dmabuf_page_pool_add()
    [all …]
|
/device/board/unionman/unionpi_tiger/kernel/drivers/media/drivers/common/canvas/ |
D | canvas_mgr.c |
    43 static inline void cavas_pool_lock(struct canvas_pool *pool) in cavas_pool_lock() argument
    47 spin_lock_irqsave(&pool->lock, pool->flags); in cavas_pool_lock()
    51 static inline void cavas_pool_unlock(struct canvas_pool *pool) in cavas_pool_unlock() argument
    53 spin_unlock_irqrestore(&pool->lock, pool->flags); in cavas_pool_unlock()
    58 canvas_pool_map_alloc_canvas_in(struct canvas_pool *pool, const char *owner) in canvas_pool_map_alloc_canvas_in() argument
    62 int start_index = pool->next_alloced_index; in canvas_pool_map_alloc_canvas_in()
    66 i = find_next_zero_bit(pool->canvas_map, in canvas_pool_map_alloc_canvas_in()
    67 pool->canvas_max, in canvas_pool_map_alloc_canvas_in()
    77 cavas_pool_lock(pool); in canvas_pool_map_alloc_canvas_in()
    78 if (!test_and_set_bit(i, pool->canvas_map)) { in canvas_pool_map_alloc_canvas_in()
    [all …]
|
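canvas_pool_map_alloc_canvas_in() is a bitmap index allocator: scan for a clear bit starting just past the previous allocation, wrap around once, then claim the slot atomically. A condensed sketch of that pattern; the locking and owner bookkeeping of the real driver are omitted and the demo_* names are invented:

    #include <linux/bitmap.h>
    #include <linux/bitops.h>

    /* Hypothetical cut-down pool: just the bitmap and the rotating start index. */
    struct demo_canvas_pool {
        unsigned long *canvas_map;
        unsigned int canvas_max;
        unsigned int next_alloced_index;
    };

    /* Returns a claimed index, or -1 if every slot is in use. */
    static int demo_canvas_alloc_index(struct demo_canvas_pool *pool)
    {
        unsigned int start = pool->next_alloced_index;
        unsigned int i;

        /* First pass: search from the rotating start point to the end of the map. */
        i = find_next_zero_bit(pool->canvas_map, pool->canvas_max, start);
        if (i >= pool->canvas_max) {
            /* Second pass: wrap around and search from the beginning. */
            i = find_next_zero_bit(pool->canvas_map, pool->canvas_max, 0);
            if (i >= pool->canvas_max)
                return -1;
        }

        /* test_and_set_bit() claims the slot atomically against concurrent callers. */
        if (test_and_set_bit(i, pool->canvas_map))
            return -1;   /* lost a race; a fuller version would retry */

        pool->next_alloced_index = i + 1;
        return i;
    }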
/device/soc/amlogic/a311d/soc/amlogic/canvas/ |
D | canvas_mgr.c |
    41 static inline void cavas_pool_lock(struct canvas_pool *pool) in cavas_pool_lock() argument
    45 spin_lock_irqsave(&pool->lock, pool->flags); in cavas_pool_lock()
    49 static inline void cavas_pool_unlock(struct canvas_pool *pool) in cavas_pool_unlock() argument
    51 spin_unlock_irqrestore(&pool->lock, pool->flags); in cavas_pool_unlock()
    56 canvas_pool_map_alloc_canvas_in(struct canvas_pool *pool, const char *owner) in canvas_pool_map_alloc_canvas_in() argument
    60 int start_index = pool->next_alloced_index; in canvas_pool_map_alloc_canvas_in()
    64 i = find_next_zero_bit(pool->canvas_map, in canvas_pool_map_alloc_canvas_in()
    65 pool->canvas_max, in canvas_pool_map_alloc_canvas_in()
    75 cavas_pool_lock(pool); in canvas_pool_map_alloc_canvas_in()
    76 if (!test_and_set_bit(i, pool->canvas_map)) { in canvas_pool_map_alloc_canvas_in()
    [all …]
|
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/tests/kutf/ |
D | kutf_mem.c |
    41 int kutf_mempool_init(struct kutf_mempool *pool) in kutf_mempool_init() argument
    43 if (!pool) { in kutf_mempool_init()
    48 INIT_LIST_HEAD(&pool->head); in kutf_mempool_init()
    49 mutex_init(&pool->lock); in kutf_mempool_init()
    55 void kutf_mempool_destroy(struct kutf_mempool *pool) in kutf_mempool_destroy() argument
    60 if (!pool) { in kutf_mempool_destroy()
    65 mutex_lock(&pool->lock); in kutf_mempool_destroy()
    66 list_for_each_safe(remove, tmp, &pool->head) { in kutf_mempool_destroy()
    73 mutex_unlock(&pool->lock); in kutf_mempool_destroy()
    78 void *kutf_mempool_alloc(struct kutf_mempool *pool, size_t size) in kutf_mempool_alloc() argument
    [all …]
|
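kutf_mempool is an arena-style pool: every allocation is linked onto pool->head under the mutex, and kutf_mempool_destroy() releases everything in one pass. A hedged sketch of the same pattern; any field or struct name not visible in the hits above is illustrative:

    #include <linux/list.h>
    #include <linux/mutex.h>
    #include <linux/slab.h>

    /* Hypothetical header kept in front of each user allocation. */
    struct demo_alloc_entry {
        struct list_head node;
        /* user data follows */
    };

    struct demo_mempool {
        struct list_head head;
        struct mutex lock;
    };

    static void *demo_mempool_alloc(struct demo_mempool *pool, size_t size)
    {
        struct demo_alloc_entry *entry;

        entry = kmalloc(sizeof(*entry) + size, GFP_KERNEL);
        if (!entry)
            return NULL;

        mutex_lock(&pool->lock);
        list_add(&entry->node, &pool->head);   /* remember it so destroy can free it */
        mutex_unlock(&pool->lock);

        return entry + 1;                      /* hand back the bytes after the header */
    }

    static void demo_mempool_destroy(struct demo_mempool *pool)
    {
        struct list_head *pos, *tmp;

        mutex_lock(&pool->lock);
        list_for_each_safe(pos, tmp, &pool->head) {
            struct demo_alloc_entry *entry =
                list_entry(pos, struct demo_alloc_entry, node);

            list_del(&entry->node);
            kfree(entry);
        }
        mutex_unlock(&pool->lock);
    }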
/device/soc/hisilicon/hi3861v100/sdk_liteos/platform/os/Huawei_LiteOS/kernel/include/ |
D | los_memory.h |
    107 extern UINT32 LOS_MemTotalUsedGet(VOID *pool);
    130 extern UINT32 LOS_MemFreeBlksGet(VOID *pool);
    153 extern UINT32 LOS_MemUsedBlksGet(VOID *pool);
    178 extern UINT32 LOS_MemTaskIdGet(const VOID *pool);
    202 extern UINT32 LOS_MemLastUsedGet(VOID *pool);
    228 extern UINT32 LOS_MemIntegrityCheck(VOID *pool);
    282 extern UINT32 LOS_MemPoolSizeGet(const VOID *pool);
    350 extern UINT32 LOS_MemNodeSizeCheck(VOID *pool, const VOID *ptr, UINT32 *totalSize, UINT32 *availSiz…
    423 extern UINT32 LOS_MemInfoGet(VOID *pool, LOS_MEM_STATUS *status);
    446 extern UINT32 LOS_MemGetMaxFreeBlkSize(VOID *pool);
    [all …]
|
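A short usage sketch for the LOS_Mem* query API listed above, calling only the prototypes visible in the hits. The pool is taken as a parameter rather than naming a specific system pool, and treating a zero return from LOS_MemIntegrityCheck() as success is an assumption about the LiteOS convention:

    #include <stdio.h>
    #include "los_memory.h"

    /* Print a few health/usage figures for an already-initialized LiteOS memory pool. */
    static void demo_dump_pool_stats(VOID *pool)
    {
        if (LOS_MemIntegrityCheck(pool) != 0) {   /* assumption: 0 (LOS_OK) means the pool is intact */
            printf("pool %p failed integrity check\n", pool);
            return;
        }

        printf("pool size    : %u\n", (unsigned int)LOS_MemPoolSizeGet(pool));
        printf("bytes in use : %u\n", (unsigned int)LOS_MemTotalUsedGet(pool));
        printf("used blocks  : %u\n", (unsigned int)LOS_MemUsedBlksGet(pool));
        printf("free blocks  : %u\n", (unsigned int)LOS_MemFreeBlksGet(pool));
        printf("largest free : %u\n", (unsigned int)LOS_MemGetMaxFreeBlkSize(pool));
    }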
/device/soc/telink/b91/b91_ble_sdk/common/buf_pool1/ |
D | mempool.c |
    21 mem_pool_t *mempool_init(mem_pool_t *pool, void *mem, int itemsize, int itemcount) in mempool_init() argument
    23 if (!pool || !mem) { in mempool_init()
    27 pool->free_list = (mem_block_t *)mem; in mempool_init()
    36 return pool; in mempool_init()
    44 void *mempool_alloc(mem_pool_t *pool) in mempool_alloc() argument
    46 if (!pool->free_list) { in mempool_alloc()
    50 mem_block_t *tmp = pool->free_list; in mempool_alloc()
    51 pool->free_list = tmp->next_block; in mempool_alloc()
    55 void mempool_free(mem_pool_t *pool, void *p) in mempool_free() argument
    58 tmp->next_block = pool->free_list; in mempool_free()
    [all …]
|
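mempool.c above is a classic fixed-size-block free list: init threads every block onto free_list, alloc pops the head, free pushes the block back. A self-contained sketch of the same technique with generic names (not the SDK's types):

    #include <stddef.h>

    /* Each free block stores the link to the next free block in its own first bytes. */
    typedef struct demo_block {
        struct demo_block *next;
    } demo_block_t;

    typedef struct {
        demo_block_t *free_list;
        int item_size;          /* must be >= sizeof(demo_block_t) */
    } demo_pool_t;

    static void demo_pool_init(demo_pool_t *pool, void *mem, int item_size, int item_count)
    {
        char *base = mem;
        int i;

        pool->item_size = item_size;
        pool->free_list = NULL;
        for (i = 0; i < item_count; i++) {
            demo_block_t *blk = (demo_block_t *)(base + (size_t)i * item_size);

            blk->next = pool->free_list;    /* push each block onto the free list */
            pool->free_list = blk;
        }
    }

    static void *demo_pool_alloc(demo_pool_t *pool)
    {
        demo_block_t *blk = pool->free_list;

        if (!blk)
            return NULL;                    /* pool exhausted */
        pool->free_list = blk->next;        /* pop the head block */
        return blk;
    }

    static void demo_pool_free(demo_pool_t *pool, void *p)
    {
        demo_block_t *blk = p;

        blk->next = pool->free_list;        /* push the block back */
        pool->free_list = blk;
    }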
D | zb_buffer.c |
    39 zb_buf_t *p = g_mPool.head = &g_mPool.pool[0]; in tl_zbBufferInit()
    64 if ((((u32)(g_mPool.head) < (u32)(&g_mPool.pool[0])) && (g_mPool.head != NULL)) || in my_zb_buf_get()
    65 ((u32)(g_mPool.head) > (u32)(&g_mPool.pool[ZB_BUF_POOL_SIZE - 1]))) { in my_zb_buf_get()
    177 if ((((u32)(g_mPool.head) < (u32)(&g_mPool.pool[0])) && (g_mPool.head != NULL)) || in my_zb_buf_free()
    178 ((u32)(g_mPool.head) > (u32)(&g_mPool.pool[ZB_BUF_POOL_SIZE - 1]))) { in my_zb_buf_free()
    185 if ((((u32)(g_mPool.head) < (u32)(&g_mPool.pool[0])) && (g_mPool.head != NULL)) || in my_zb_buf_free()
    186 ((u32)(g_mPool.head) > (u32)(&g_mPool.pool[ZB_BUF_POOL_SIZE - 1]))) { in my_zb_buf_free()
    216 …if (((u32)p >= (u32)(&g_mPool.pool[0])) && ((u32)p < (u32)((&g_mPool.pool[ZB_BUF_POOL_SIZE - 1])) … in is_zb_buf()
    237 if (g_mPool.pool[i].hdr.used) { in zb_buf_debug_start()
    238 g_zbBufDBG[cnt].handle = g_mPool.pool[i].hdr.handle; in zb_buf_debug_start()
    [all …]
|
/device/soc/rockchip/rk3588/hardware/mpp/include/ |
D | mpp_mem_pool.h |
    31 #define mpp_mem_pool_deinit(pool) mpp_mem_pool_deinit_f(__FUNCTION__, pool); argument
    33 #define mpp_mem_pool_get(pool) mpp_mem_pool_get_f(__FUNCTION__, pool) argument
    34 #define mpp_mem_pool_put(pool, p) mpp_mem_pool_put_f(__FUNCTION__, pool, p) argument
    37 void mpp_mem_pool_deinit_f(const char *caller, MppMemPool pool);
    39 void *mpp_mem_pool_get_f(const char *caller, MppMemPool pool);
    40 void mpp_mem_pool_put_f(const char *caller, MppMemPool pool, void *p);
|
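The mpp_mem_pool macros wrap each *_f function so that __FUNCTION__ is injected at every call site, letting the implementation record which caller took or returned a buffer. A tiny generic sketch of that caller-tracking wrapper pattern; the demo names and the logging are illustrative:

    #include <stdio.h>
    #include <stdlib.h>

    /* The implementation takes the caller's name explicitly... */
    static void *demo_buf_get_f(const char *caller, size_t size)
    {
        void *p = malloc(size);

        printf("demo_buf_get: %zu bytes for %s -> %p\n", size, caller, p);
        return p;
    }

    static void demo_buf_put_f(const char *caller, void *p)
    {
        printf("demo_buf_put: %p from %s\n", p, caller);
        free(p);
    }

    /* ...and the public macros inject it automatically at every call site. */
    #define demo_buf_get(size)  demo_buf_get_f(__FUNCTION__, (size))
    #define demo_buf_put(p)     demo_buf_put_f(__FUNCTION__, (p))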
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/tests/kutf/ |
D | kutf_mem.c |
    37 int kutf_mempool_init(struct kutf_mempool *pool) in kutf_mempool_init() argument
    39 if (!pool) { in kutf_mempool_init()
    44 INIT_LIST_HEAD(&pool->head); in kutf_mempool_init()
    50 void kutf_mempool_destroy(struct kutf_mempool *pool) in kutf_mempool_destroy() argument
    55 if (!pool) { in kutf_mempool_destroy()
    60 list_for_each_safe(remove, tmp, &pool->head) { in kutf_mempool_destroy()
    70 void *kutf_mempool_alloc(struct kutf_mempool *pool, size_t size) in kutf_mempool_alloc() argument
    74 if (!pool) { in kutf_mempool_alloc()
    86 list_add(&ret->node, &pool->head); in kutf_mempool_alloc()
|
/device/soc/hisilicon/hi3861v100/sdk_liteos/third_party/lwip_sack/include/lwip/priv/ |
D | tcpip_priv.h |
    58 #define API_VAR_ALLOC(type, pool, name, errorval) do { \ argument
    59 name = (type *)memp_malloc(pool); \
    64 #define API_VAR_ALLOC_POOL(type, pool, name, errorval) do { \ argument
    65 name = (type *)LWIP_MEMPOOL_ALLOC(pool); \
    70 #define API_VAR_FREE(pool, name) memp_free(pool, name) argument
    71 #define API_VAR_FREE_POOL(pool, name) LWIP_MEMPOOL_FREE(pool, name) argument
    84 #define API_VAR_ALLOC(type, pool, name, errorval) argument
    85 #define API_VAR_ALLOC_POOL(type, pool, name, errorval) argument
    86 #define API_VAR_FREE(pool, name) argument
    87 #define API_VAR_FREE_POOL(pool, name) argument
|
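These lwip macros let one function body work whether its message variable lives on the stack or is taken from a memp pool. A hedged usage sketch: demo_msg_t and MEMP_DEMO_MSG are invented, and API_VAR_DECLARE/API_VAR_REF are assumed from the same lwip macro family even though they do not appear in the hits above; building it requires an lwip tree and its lwipopts.h:

    #include "lwip/priv/tcpip_priv.h"

    typedef struct {
        int opcode;
    } demo_msg_t;

    static int demo_send_message(void)
    {
        API_VAR_DECLARE(demo_msg_t, msg);                    /* plain variable or pointer, per config */

        API_VAR_ALLOC(demo_msg_t, MEMP_DEMO_MSG, msg, -1);   /* pool allocation (or no-op); -1 on failure */

        API_VAR_REF(msg).opcode = 1;                         /* field access works the same either way */

        /* ... hand the message to the tcpip thread ... */

        API_VAR_FREE(MEMP_DEMO_MSG, msg);                    /* return it to the pool (or no-op) */
        return 0;
    }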
/device/soc/hisilicon/hi3861v100/sdk_liteos/platform/os/Huawei_LiteOS/kernel/base/include/ |
D | los_memory_pri.h |
    97 #define OS_MEM_HEAD(pool, size) OS_DLNK_HEAD(OS_MEM_HEAD_ADDR(pool), size) argument
    98 #define OS_MEM_HEAD_ADDR(pool) ((VOID *)((UINT32)(UINTPTR)(pool) + sizeof(LosMemPoolInfo))) argument
    100 #define OS_MEM_FIRST_NODE(pool) ((LosMemDynNode *)((UINT8 *)OS_MEM_HEAD_ADDR(pool) + OS_DLNK_HEAD_S… argument
    101 #define OS_MEM_END_NODE(pool, size) ((LosMemDynNode *)(((UINT8 *)(pool) + (size)) - OS_MEM_NODE_HEA… argument
    172 typedef UINT32 (*OsMemAllocCheckFunc)(VOID *pool);
    175 typedef VOID (*OsMemFreeCheckFunc)(const VOID *pool, const LosMemDynNode *node);
    221 extern VOID OsMemFreeNodeCheck(const VOID *pool, const LosMemDynNode *node);
|
/device/soc/hisilicon/hi3516dv300/sdk_linux/drv/mpp/component/hdmi/src/mkp/ |
D | drv_hdmi_event.c |
    35 static hi_s32 event_type_counter(hdmi_event_pool *pool, hdmi_event event, hi_bool write) in event_type_counter() argument
    39 write ? pool->run_cnt.hpd_wr_cnt++ : pool->run_cnt.hpd_rd_cnt++; in event_type_counter()
    42 write ? pool->run_cnt.unhpd_wr_cnt++ : pool->run_cnt.unhpd_rd_cnt++; in event_type_counter()
    45 write ? pool->run_cnt.edid_fail_wr_cnt++ : pool->run_cnt.edid_fail_rd_cnt++; in event_type_counter()
    48 write ? pool->run_cnt.rsen_con_wr_cnt++ : pool->run_cnt.rsen_con_rd_cnt++; in event_type_counter()
    51 write ? pool->run_cnt.rsen_dis_wr_cnt++ : pool->run_cnt.rsen_dis_rd_cnt++; in event_type_counter()
    73 for (i = 0, tmp_pool = &evt_info->pool[0]; i < HDMI_EVENT_POOL_CNT; i++, tmp_pool++) { in event_mach_id()
    122 (hi_void)memset_s(evt_info->pool, sizeof(evt_info->pool), 0, sizeof(evt_info->pool)); in drv_hdmi_event_deinit()
    149 for (i = 0, tmp_pool = &evt_info->pool[0]; i < HDMI_EVENT_POOL_CNT; i++, tmp_pool++) { in drv_hdmi_event_pool_malloc()
    157 for (i = 0, tmp_pool = &evt_info->pool[0]; i < HDMI_EVENT_POOL_CNT; i++, tmp_pool++) { in drv_hdmi_event_pool_malloc()
    [all …]
|
/device/board/unionman/unionpi_tiger/kernel/drivers/media/drivers/amvdec_ports/ |
D | aml_vcodec_vfq.h |
    32 struct vframe_s **pool; member
    46 static inline void vfq_init(struct vfq_s *q, u32 size, struct vframe_s **pool) in vfq_init() argument
    50 q->pool = pool; in vfq_init()
    65 q->pool[wp] = vf; in vfq_push()
    86 vf = q->pool[rp]; in vfq_pop()
    98 return (vfq_empty(q)) ? NULL : q->pool[q->rp]; in vfq_peek()
|
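aml_vcodec_vfq.h is a small ring queue over a caller-supplied pointer array: push writes at wp, pop reads at rp, and both indices wrap. A generic sketch of the same structure using plain modulo wrap-around; the driver's exact index and barrier handling is not reproduced here:

    #include <stddef.h>

    /* Generic ring queue over a caller-supplied array of pointers. */
    struct demo_vfq {
        void **pool;          /* backing array, provided by the caller */
        unsigned int size;    /* number of slots in pool */
        unsigned int rp;      /* read position */
        unsigned int wp;      /* write position */
    };

    static void demo_vfq_init(struct demo_vfq *q, unsigned int size, void **pool)
    {
        q->pool = pool;
        q->size = size;
        q->rp = 0;
        q->wp = 0;
    }

    static int demo_vfq_empty(const struct demo_vfq *q)
    {
        return q->rp == q->wp;
    }

    /* One slot is kept unused so "full" and "empty" stay distinguishable. */
    static int demo_vfq_push(struct demo_vfq *q, void *item)
    {
        unsigned int next = (q->wp + 1) % q->size;

        if (next == q->rp)
            return -1;                /* queue full */
        q->pool[q->wp] = item;
        q->wp = next;
        return 0;
    }

    static void *demo_vfq_pop(struct demo_vfq *q)
    {
        void *item;

        if (demo_vfq_empty(q))
            return NULL;
        item = q->pool[q->rp];
        q->rp = (q->rp + 1) % q->size;
        return item;
    }

    static void *demo_vfq_peek(struct demo_vfq *q)
    {
        return demo_vfq_empty(q) ? NULL : q->pool[q->rp];
    }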
/device/soc/allwinner/t507/patches/kernel/linux-5.10/ |
D | kernel.patch |
    402 + bool "DMA-BUF heaps page-pool library"
    404 + Choose this option to enable the DMA-BUF heaps page-pool library.
    860 + * Based on the ION page pool code
    1065 + * DMA BUF page pool system
    1069 + * Based on the ION page pool code
    1084 +struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool)
    1088 + return alloc_pages(pool->gfp_mask, pool->order);
    1091 +static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool,
    1094 + __free_pages(page, pool->order);
    1097 +static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page)
    [all …]
|
/device/board/isoftstone/yangfan/kernel/src/driv/net/rockchip_wlan/rkwifi/bcmdhd/include/ |
D | bcm_mpool_pub.h |
    300 void* bcm_mp_alloc(bcm_mp_pool_h pool);
    314 int bcm_mp_free(bcm_mp_pool_h pool, void *objp);
    328 void bcm_mp_stats(bcm_mp_pool_h pool, bcm_mp_stats_t *stats);
    342 int bcm_mp_dump(bcm_mp_pool_h pool, struct bcmstrbuf *b);
|