
Searched refs:lru_list (Results 1 – 7 of 7) sorted by relevance

/drivers/gpu/drm/i915/gvt/
sched_policy.c
54 struct list_head lru_list; member
108 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
113 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
122 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
186 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in find_busy_vgpu()
228 list_del_init(&vgpu_data->lru_list); in tbs_sched_func()
229 list_add_tail(&vgpu_data->lru_list, in tbs_sched_func()
321 INIT_LIST_HEAD(&data->lru_list); in tbs_sched_init_vgpu()
347 if (!list_empty(&vgpu_data->lru_list)) in tbs_sched_start_schedule()
355 list_add(&vgpu_data->lru_list, &sched_data->lru_runq_head); in tbs_sched_start_schedule()
[all …]
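
The sched_policy.c hits above all touch one pattern: each vGPU's scheduling data carries an lru_list node strung on a per-GPU run-queue head, the scheduler walks that list with container_of() to find a runnable vGPU, and the vGPU just given a timeslice is rotated to the tail. A minimal sketch of that rotation, with hypothetical names (vgpu_node, has_work, pick_next_vgpu) rather than the driver's own, might look like this:

#include <linux/list.h>
#include <linux/types.h>

struct vgpu_node {                      /* stands in for vgpu_sched_data */
        struct list_head lru_list;      /* linked into the LRU run queue */
        bool has_work;
};

static LIST_HEAD(lru_runq_head);        /* least recently scheduled first */

static struct vgpu_node *pick_next_vgpu(void)
{
        struct list_head *pos;
        struct vgpu_node *node;

        /* scan from the LRU end for the first vGPU with pending work */
        list_for_each(pos, &lru_runq_head) {
                node = container_of(pos, struct vgpu_node, lru_list);
                if (!node->has_work)
                        continue;
                /* give it the timeslice and rotate it to the MRU tail */
                list_del_init(&node->lru_list);
                list_add_tail(&node->lru_list, &lru_runq_head);
                return node;
        }
        return NULL;
}
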
gtt.h
162 struct list_head lru_list; member
gtt.c
1920 INIT_LIST_HEAD(&mm->ppgtt_mm.lru_list); in intel_vgpu_create_ppgtt_mm()
1939 list_add_tail(&mm->ppgtt_mm.lru_list, &gvt->gtt.ppgtt_mm_lru_list_head); in intel_vgpu_create_ppgtt_mm()
2001 list_del(&mm->ppgtt_mm.lru_list); in _intel_vgpu_mm_release()
2048 list_move_tail(&mm->ppgtt_mm.lru_list, in intel_vgpu_pin_mm()
2064 mm = container_of(pos, struct intel_vgpu_mm, ppgtt_mm.lru_list); in reclaim_one_ppgtt_mm()
2069 list_del_init(&mm->ppgtt_mm.lru_list); in reclaim_one_ppgtt_mm()
2821 list_del_init(&mm->ppgtt_mm.lru_list); in intel_vgpu_invalidate_ppgtt()
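
The gtt.c hits show the same list_head API driving a reclaim LRU for shadow page tables: a freshly created ppgtt_mm goes on the tail of a global list, pinning moves it back to the tail, and reclaim takes its victim from the head. A simplified sketch of that lifecycle, assuming illustrative names (ppgtt_mm_obj, ppgtt_mm_reclaim_one) rather than the actual GVT structures:

#include <linux/list.h>

struct ppgtt_mm_obj {                   /* stands in for the ppgtt_mm part of intel_vgpu_mm */
        struct list_head lru_list;
};

static LIST_HEAD(ppgtt_mm_lru_list_head);

static void ppgtt_mm_create(struct ppgtt_mm_obj *mm)
{
        INIT_LIST_HEAD(&mm->lru_list);
        /* newest entries live at the tail */
        list_add_tail(&mm->lru_list, &ppgtt_mm_lru_list_head);
}

static void ppgtt_mm_pin(struct ppgtt_mm_obj *mm)
{
        /* touching an entry makes it most recently used again */
        list_move_tail(&mm->lru_list, &ppgtt_mm_lru_list_head);
}

static struct ppgtt_mm_obj *ppgtt_mm_reclaim_one(void)
{
        struct ppgtt_mm_obj *mm;

        if (list_empty(&ppgtt_mm_lru_list_head))
                return NULL;
        /* the head of the list is the least recently used entry */
        mm = list_first_entry(&ppgtt_mm_lru_list_head,
                              struct ppgtt_mm_obj, lru_list);
        list_del_init(&mm->lru_list);
        return mm;
}
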
/drivers/md/
dm-bufio.c
140 struct list_head lru_list; member
512 list_add(&b->lru_list, &c->lru[dirty]); in __link_buffer()
530 list_del(&b->lru_list); in __unlink_buffer()
549 list_move(&b->lru_list, &c->lru[dirty]); in __relink_lru()
804 list_for_each_entry_reverse(b, &c->lru[LIST_CLEAN], lru_list) { in __get_unclaimed_buffer()
816 list_for_each_entry_reverse(b, &c->lru[LIST_DIRTY], lru_list) { in __get_unclaimed_buffer()
904 struct dm_buffer, lru_list); in __alloc_buffer_wait_no_callback()
905 list_del(&b->lru_list); in __alloc_buffer_wait_no_callback()
942 list_add(&b->lru_list, &c->reserved_buffers); in __free_buffer_wake()
954 list_for_each_entry_safe_reverse(b, tmp, &c->lru[LIST_DIRTY], lru_list) { in __write_dirty_buffers_async()
[all …]
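
dm-bufio keeps each buffer on one of two LRU lists indexed by dirty state, inserts new buffers at the head, moves buffers between the lists when their state changes, and scans the clean list in reverse so the oldest unclaimed buffer is reclaimed first. The following is a rough, self-invented sketch of that two-list scheme (my_buffer, my_lru and the helpers are not dm-bufio's names):

#include <linux/list.h>

enum { MY_LIST_CLEAN, MY_LIST_DIRTY, MY_LIST_COUNT };

struct my_buffer {
        struct list_head lru_list;
        int hold_count;                 /* non-zero while someone holds the buffer */
};

static struct list_head my_lru[MY_LIST_COUNT];

static void my_lru_init(void)
{
        int i;

        for (i = 0; i < MY_LIST_COUNT; i++)
                INIT_LIST_HEAD(&my_lru[i]);
}

static void my_link_buffer(struct my_buffer *b, int dirty)
{
        /* new buffers go to the head, so the tail holds the oldest ones */
        list_add(&b->lru_list, &my_lru[dirty]);
}

static void my_relink_buffer(struct my_buffer *b, int dirty)
{
        /* e.g. moved from the dirty list to the clean list after writeback */
        list_move(&b->lru_list, &my_lru[dirty]);
}

static struct my_buffer *my_get_unclaimed_buffer(void)
{
        struct my_buffer *b;

        /* walk backwards so the oldest clean buffer is considered first */
        list_for_each_entry_reverse(b, &my_lru[MY_LIST_CLEAN], lru_list) {
                if (b->hold_count)
                        continue;
                list_del(&b->lru_list);
                return b;
        }
        return NULL;
}
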
/drivers/infiniband/hw/hfi1/
mmu_rb.c
103 INIT_LIST_HEAD(&h->lru_list); in hfi1_mmu_rb_register()
177 list_add_tail(&mnode->list, &handler->lru_list); in hfi1_mmu_rb_insert()
193 list_move_tail(&node->list, &handler->lru_list); in hfi1_mmu_rb_get_first()
276 list_for_each_entry_safe(rbnode, ptr, &handler->lru_list, list) { in hfi1_mmu_rb_evict()
mmu_rb.h
83 struct list_head lru_list; member
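
The hfi1 mmu_rb hits show a classic cache LRU: nodes are appended to the handler's lru_list on insert, promoted to the tail on every lookup hit, and evicted starting from the head with the _safe iterator because entries are deleted mid-walk. A hedged sketch with made-up names (cache_handler, cache_node):

#include <linux/list.h>
#include <linux/types.h>

struct cache_node {
        struct list_head list;          /* this node's position on the LRU */
        bool in_use;
};

struct cache_handler {
        struct list_head lru_list;      /* head: coldest, tail: hottest */
};

static void cache_insert(struct cache_handler *h, struct cache_node *n)
{
        list_add_tail(&n->list, &h->lru_list);
}

static void cache_touch(struct cache_handler *h, struct cache_node *n)
{
        /* a lookup hit promotes the node to most recently used */
        list_move_tail(&n->list, &h->lru_list);
}

static void cache_evict(struct cache_handler *h, int nr)
{
        struct cache_node *n, *tmp;

        /* entries are removed while walking, so the _safe variant is needed */
        list_for_each_entry_safe(n, tmp, &h->lru_list, list) {
                if (n->in_use)
                        continue;
                list_del(&n->list);
                if (--nr <= 0)
                        break;
        }
}
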
/drivers/gpu/drm/vmwgfx/
vmwgfx_resource.c
707 struct list_head *lru_list = &dev_priv->res_lru[res->func->res_type]; in vmw_resource_validate() local
724 if (list_empty(lru_list) || !res->func->may_evict) { in vmw_resource_validate()
733 (list_first_entry(lru_list, struct vmw_resource, in vmw_resource_validate()
743 list_add_tail(&evict_res->lru_head, lru_list); in vmw_resource_validate()
917 struct list_head *lru_list = &dev_priv->res_lru[type]; in vmw_resource_evict_type() local
926 if (list_empty(lru_list)) in vmw_resource_evict_type()
930 list_first_entry(lru_list, struct vmw_resource, in vmw_resource_evict_type()
939 list_add_tail(&evict_res->lru_head, lru_list); in vmw_resource_evict_type()
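
vmwgfx keeps one LRU list per resource type in dev_priv->res_lru[] and, when validation runs out of space, evicts list_first_entry() victims; the list_add_tail() hits suggest a victim that cannot be evicted immediately is put back at the tail for a later retry. A speculative sketch of that per-type eviction step, using invented names (my_resource, my_evict_one, my_make_room) rather than the vmwgfx API:

#include <linux/errno.h>
#include <linux/list.h>

enum { MY_RES_SURFACE, MY_RES_CONTEXT, MY_RES_MAX };

struct my_resource {
        struct list_head lru_head;
        int type;
};

/* one LRU list per resource type, mirroring the res_lru[] array above */
static struct list_head my_res_lru[MY_RES_MAX];

static int my_evict_one(struct my_resource *res)
{
        return 0;                       /* stub: pretend eviction always succeeds */
}

static int my_make_room(int type)
{
        struct list_head *lru_list = &my_res_lru[type];
        struct my_resource *evict_res;

        if (list_empty(lru_list))
                return -ENOMEM;         /* nothing left to evict */

        /* the head of the list is the least recently validated resource */
        evict_res = list_first_entry(lru_list, struct my_resource, lru_head);
        list_del_init(&evict_res->lru_head);

        if (my_evict_one(evict_res) != 0) {
                /* could not evict right now; put it back at the tail to retry */
                list_add_tail(&evict_res->lru_head, lru_list);
                return -EBUSY;
        }
        return 0;
}
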