Searched refs:work (Results 1 – 19 of 19) sorted by relevance

/mm/
vmpressure.c
70 static struct vmpressure *work_to_vmpressure(struct work_struct *work) in work_to_vmpressure() argument
72 return container_of(work, struct vmpressure, work); in work_to_vmpressure()
181 static void vmpressure_work_fn(struct work_struct *work) in vmpressure_work_fn() argument
183 struct vmpressure *vmpr = work_to_vmpressure(work); in vmpressure_work_fn()
278 schedule_work(&vmpr->work); in vmpressure()
454 INIT_WORK(&vmpr->work, vmpressure_work_fn); in vmpressure_init()
470 flush_work(&vmpr->work); in vmpressure_cleanup()
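The vmpressure.c hits above are the baseline workqueue pattern in mm/: a work_struct embedded in a larger object, set up once with INIT_WORK(), kicked from the fast path with schedule_work(), recovered in the handler with container_of(), and drained with flush_work() at teardown. Below is a minimal, self-contained sketch of that shape; the struct and function names are invented for illustration and are not taken from vmpressure.c.

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <linux/printk.h>

/* Hypothetical object embedding its deferred work, in the style of struct vmpressure. */
struct pressure_state {
	spinlock_t lock;
	unsigned long events;		/* accumulated by the fast path */
	struct work_struct work;	/* deferred processing */
};

/* Recover the containing object from the work_struct handed to the handler. */
static struct pressure_state *work_to_state(struct work_struct *work)
{
	return container_of(work, struct pressure_state, work);
}

static void pressure_work_fn(struct work_struct *work)
{
	struct pressure_state *ps = work_to_state(work);
	unsigned long events;

	spin_lock(&ps->lock);
	events = ps->events;
	ps->events = 0;
	spin_unlock(&ps->lock);

	pr_debug("processing %lu deferred events\n", events);
}

void pressure_state_init(struct pressure_state *ps)
{
	spin_lock_init(&ps->lock);
	ps->events = 0;
	INIT_WORK(&ps->work, pressure_work_fn);
}

/* Fast path: record the event and defer the heavy lifting to the system workqueue. */
void pressure_report(struct pressure_state *ps)
{
	spin_lock(&ps->lock);
	ps->events++;
	spin_unlock(&ps->lock);
	schedule_work(&ps->work);
}

/* Teardown: wait for any in-flight handler before the object goes away. */
void pressure_state_cleanup(struct pressure_state *ps)
{
	flush_work(&ps->work);
}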
z3fold.c
117 struct work_struct work; member
166 struct work_struct work; member
328 INIT_WORK(&zhdr->work, compact_page_work); in init_z3fold_page()
457 queue_work(pool->release_wq, &pool->work); in __release_z3fold_page()
492 struct z3fold_pool *pool = container_of(w, struct z3fold_pool, work); in free_pages_work()
504 cancel_work_sync(&zhdr->work); in free_pages_work()
649 work); in compact_page_work()
802 INIT_WORK(&pool->work, free_pages_work); in z3fold_create_pool()
924 cancel_work_sync(&zhdr->work); in z3fold_alloc()
1071 queue_work_on(zhdr->cpu, pool->compact_wq, &zhdr->work); in z3fold_free()
[all …]
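The z3fold.c matches show a second flavour: the pool owns dedicated workqueues, each page header remembers the CPU it was last used on and queues compaction there with queue_work_on(), and cancel_work_sync() makes sure nothing is pending or running before an object is freed. A hedged sketch of that shape, with all names invented:

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/slab.h>
#include <linux/smp.h>
#include <linux/printk.h>

/* Hypothetical pool that owns a dedicated workqueue for page compaction. */
struct toy_pool {
	struct workqueue_struct *compact_wq;
};

/* Hypothetical per-page header that remembers which CPU last touched it. */
struct toy_page {
	struct toy_pool *pool;
	int cpu;
	struct work_struct work;
};

static void toy_compact_fn(struct work_struct *w)
{
	struct toy_page *page = container_of(w, struct toy_page, work);

	pr_debug("compacting page queued on cpu %d\n", page->cpu);
}

struct toy_pool *toy_pool_create(void)
{
	struct toy_pool *pool = kzalloc(sizeof(*pool), GFP_KERNEL);

	if (!pool)
		return NULL;
	/* A workqueue owned by the pool rather than the system-wide one. */
	pool->compact_wq = create_singlethread_workqueue("toy_compact");
	if (!pool->compact_wq) {
		kfree(pool);
		return NULL;
	}
	return pool;
}

void toy_page_init(struct toy_pool *pool, struct toy_page *page)
{
	page->pool = pool;
	/* Record the current CPU as a locality hint; a stale value is harmless. */
	page->cpu = raw_smp_processor_id();
	INIT_WORK(&page->work, toy_compact_fn);
}

/* Queue compaction on the CPU that last touched the page, for locality. */
void toy_page_kick_compaction(struct toy_page *page)
{
	queue_work_on(page->cpu, page->pool->compact_wq, &page->work);
}

/* No compaction may be pending or running once the page is freed. */
void toy_page_free(struct toy_page *page)
{
	cancel_work_sync(&page->work);
	kfree(page);
}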
slab_common.c
45 static void slab_caches_to_rcu_destroy_workfn(struct work_struct *work);
569 static void slab_caches_to_rcu_destroy_workfn(struct work_struct *work) in slab_caches_to_rcu_destroy_workfn() argument
710 static void kmemcg_workfn(struct work_struct *work) in kmemcg_workfn() argument
712 struct kmem_cache *s = container_of(work, struct kmem_cache, in kmemcg_workfn()
713 memcg_params.work); in kmemcg_workfn()
736 INIT_WORK(&s->memcg_params.work, kmemcg_workfn); in kmemcg_rcufn()
737 queue_work(memcg_kmem_cache_wq, &s->memcg_params.work); in kmemcg_rcufn()
756 INIT_WORK(&s->memcg_params.work, kmemcg_workfn); in kmemcg_cache_shutdown()
757 queue_work(memcg_kmem_cache_wq, &s->memcg_params.work); in kmemcg_cache_shutdown()
zswap.c
126 struct work_struct work; member
625 static void __zswap_pool_release(struct work_struct *work) in __zswap_pool_release() argument
627 struct zswap_pool *pool = container_of(work, typeof(*pool), work); in __zswap_pool_release()
650 INIT_WORK(&pool->work, __zswap_pool_release); in __zswap_pool_empty()
651 schedule_work(&pool->work); in __zswap_pool_empty()
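In zswap.c the work item is queued not from a hot path but from the point where the pool's last reference is dropped, which may happen in a context that cannot sleep; the actual teardown then runs later in process context. A small sketch of the same idea using a kref, with hypothetical names:

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct toy_cache {
	struct kref ref;
	struct work_struct release_work;
	/* ... resources that need a sleepable context to free ... */
};

static void toy_cache_release_workfn(struct work_struct *work)
{
	struct toy_cache *cache = container_of(work, typeof(*cache),
					       release_work);

	/* Runs in process context: safe to sleep while releasing resources. */
	kfree(cache);
}

/* kref release callback: may be invoked from a context that cannot sleep. */
static void toy_cache_empty(struct kref *ref)
{
	struct toy_cache *cache = container_of(ref, struct toy_cache, ref);

	INIT_WORK(&cache->release_work, toy_cache_release_workfn);
	schedule_work(&cache->release_work);
}

struct toy_cache *toy_cache_create(void)
{
	struct toy_cache *cache = kzalloc(sizeof(*cache), GFP_KERNEL);

	if (cache)
		kref_init(&cache->ref);
	return cache;
}

void toy_cache_put(struct toy_cache *cache)
{
	kref_put(&cache->ref, toy_cache_empty);
}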
swap.c
726 struct work_struct *work = &per_cpu(lru_add_drain_work, cpu); in lru_add_drain_all() local
734 INIT_WORK(work, lru_add_drain_per_cpu); in lru_add_drain_all()
735 queue_work_on(cpu, mm_percpu_wq, work); in lru_add_drain_all()
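swap.c's lru_add_drain_all() is the per-CPU fan-out: a statically allocated work item for every CPU, each queued on its own CPU with queue_work_on() and then flushed so the caller knows all CPUs have drained their local state. The excerpt shows only the queueing side; the sketch below fills in the flush loop and uses a stand-in workqueue rather than mm_percpu_wq, so treat every name outside the workqueue API as an assumption.

#include <linux/workqueue.h>
#include <linux/percpu.h>
#include <linux/cpu.h>
#include <linux/cpumask.h>
#include <linux/init.h>
#include <linux/errno.h>

static DEFINE_PER_CPU(struct work_struct, toy_drain_work);
static DEFINE_PER_CPU(unsigned long, toy_pending);
static struct workqueue_struct *toy_wq;		/* stand-in for mm_percpu_wq */

/* Runs on the CPU it was queued on and drains that CPU's local state. */
static void toy_drain_per_cpu(struct work_struct *dummy)
{
	this_cpu_write(toy_pending, 0);
}

/* Drain every online CPU and wait until all of them have finished. */
void toy_drain_all(void)
{
	int cpu;

	get_online_cpus();	/* API of this kernel era; newer kernels use cpus_read_lock() */
	for_each_online_cpu(cpu) {
		struct work_struct *work = &per_cpu(toy_drain_work, cpu);

		INIT_WORK(work, toy_drain_per_cpu);
		queue_work_on(cpu, toy_wq, work);
	}
	for_each_online_cpu(cpu)
		flush_work(&per_cpu(toy_drain_work, cpu));
	put_online_cpus();
}

/* The backing workqueue must exist before toy_drain_all() is called. */
static int __init toy_drain_init(void)
{
	toy_wq = alloc_workqueue("toy_percpu_wq", WQ_MEM_RECLAIM, 0);
	return toy_wq ? 0 : -ENOMEM;
}
subsys_initcall(toy_drain_init);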
memory-failure.c
1436 struct work_struct work; member
1469 schedule_work_on(smp_processor_id(), &mf_cpu->work); in memory_failure_queue()
1478 static void memory_failure_work_func(struct work_struct *work) in memory_failure_work_func() argument
1508 INIT_WORK(&mf_cpu->work, memory_failure_work_func); in memory_failure_init()
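memory-failure.c defers with schedule_work_on(smp_processor_id(), ...): the event is recorded in a per-CPU structure from a context that cannot do the real handling, and the handler runs later on the same CPU. An illustrative sketch under that assumption, with invented names and a plain counter standing in for the real per-CPU queue:

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/percpu.h>
#include <linux/spinlock.h>
#include <linux/smp.h>
#include <linux/cpumask.h>
#include <linux/init.h>
#include <linux/printk.h>

/* Per-CPU record of deferred events, loosely like struct memory_failure_cpu. */
struct toy_event_cpu {
	spinlock_t lock;
	unsigned long nr_events;
	struct work_struct work;
};

static DEFINE_PER_CPU(struct toy_event_cpu, toy_event_cpu);

static void toy_event_work_func(struct work_struct *work)
{
	struct toy_event_cpu *ec = container_of(work, struct toy_event_cpu, work);
	unsigned long flags, nr;

	spin_lock_irqsave(&ec->lock, flags);
	nr = ec->nr_events;
	ec->nr_events = 0;
	spin_unlock_irqrestore(&ec->lock, flags);

	pr_debug("handling %lu deferred events in process context\n", nr);
}

/* Called from a context that cannot handle the event directly. */
void toy_event_queue(void)
{
	struct toy_event_cpu *ec = &get_cpu_var(toy_event_cpu);
	unsigned long flags;

	spin_lock_irqsave(&ec->lock, flags);
	ec->nr_events++;
	spin_unlock_irqrestore(&ec->lock, flags);
	/* Run the handler later, on this same CPU. */
	schedule_work_on(smp_processor_id(), &ec->work);
	put_cpu_var(toy_event_cpu);
}

static int __init toy_event_init(void)
{
	int cpu;

	for_each_possible_cpu(cpu) {
		struct toy_event_cpu *ec = &per_cpu(toy_event_cpu, cpu);

		spin_lock_init(&ec->lock);
		ec->nr_events = 0;
		INIT_WORK(&ec->work, toy_event_work_func);
	}
	return 0;
}
core_initcall(toy_event_init);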
memcontrol.c
2152 struct work_struct work; member
2284 drain_local_stock(&stock->work); in drain_all_stock()
2286 schedule_work_on(cpu, &stock->work); in drain_all_stock()
2353 static void high_work_func(struct work_struct *work) in high_work_func() argument
2357 memcg = container_of(work, struct mem_cgroup, high_work); in high_work_func()
2804 struct work_struct work; member
2810 container_of(w, struct memcg_kmem_cache_create_work, work); in memcg_kmem_cache_create_func()
2837 INIT_WORK(&cw->work, memcg_kmem_cache_create_func); in memcg_schedule_kmem_cache_create()
2839 queue_work(memcg_kmem_cache_wq, &cw->work); in memcg_schedule_kmem_cache_create()
4549 static void memcg_event_remove(struct work_struct *work) in memcg_event_remove() argument
[all …]
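The memcontrol.c hits add the fire-and-forget variant: the request and its work_struct are allocated together on the fly, queued on a dedicated workqueue, and freed by the handler itself once it has run. A hedged sketch of that shape; the names and GFP flags here are illustrative, not lifted from memcontrol.c.

#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/slab.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/printk.h>

static struct workqueue_struct *toy_create_wq;	/* stand-in for memcg_kmem_cache_wq */

/* One-shot request carrying its own work item, freed by the handler. */
struct toy_create_work {
	int arg;			/* whatever the request needs */
	struct work_struct work;
};

static void toy_create_func(struct work_struct *w)
{
	struct toy_create_work *cw = container_of(w, struct toy_create_work, work);

	pr_debug("performing deferred creation for arg %d\n", cw->arg);

	kfree(cw);			/* the handler owns and frees the request */
}

/* May be called from contexts that must not sleep, hence GFP_NOWAIT. */
int toy_schedule_create(int arg)
{
	struct toy_create_work *cw;

	cw = kmalloc(sizeof(*cw), GFP_NOWAIT | __GFP_NOWARN);
	if (!cw)
		return -ENOMEM;

	cw->arg = arg;
	INIT_WORK(&cw->work, toy_create_func);
	queue_work(toy_create_wq, &cw->work);
	return 0;
}

static int __init toy_create_wq_init(void)
{
	toy_create_wq = alloc_workqueue("toy_create", 0, 1);
	return toy_create_wq ? 0 : -ENOMEM;
}
subsys_initcall(toy_create_wq_init);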
backing-dev.c
480 static void cgwb_release_workfn(struct work_struct *work) in cgwb_release_workfn() argument
482 struct bdi_writeback *wb = container_of(work, struct bdi_writeback, in cgwb_release_workfn()
slab.h
91 struct work_struct work; member
page_alloc.c
104 struct work_struct work; member
2867 static void drain_local_pages_wq(struct work_struct *work) in drain_local_pages_wq() argument
2871 drain = container_of(work, struct pcpu_drain, work); in drain_local_pages_wq()
2955 INIT_WORK(&drain->work, drain_local_pages_wq); in drain_all_pages()
2956 queue_work_on(cpu, mm_percpu_wq, &drain->work); in drain_all_pages()
2959 flush_work(&per_cpu_ptr(&pcpu_drain, cpu)->work); in drain_all_pages()
slab.c
514 if (reap_work->work.func == NULL) { in start_cpu_timer()
1083 per_cpu(slab_reap_work, cpu).work.func = NULL; in slab_offline_cpu()
3984 struct delayed_work *work = to_delayed_work(w); in cache_reap() local
4032 schedule_delayed_work_on(smp_processor_id(), work, in cache_reap()
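slab.c's cache_reap() is the delayed-work variant: a per-CPU struct delayed_work whose handler converts the plain work pointer back with to_delayed_work() and re-arms itself on the same CPU with schedule_delayed_work_on(), giving a periodic per-CPU job in process context; the 'work.func == NULL' test above is how start_cpu_timer() avoids initialising twice. A sketch along those lines, with an arbitrary period and invented names:

#include <linux/workqueue.h>
#include <linux/percpu.h>
#include <linux/jiffies.h>
#include <linux/timer.h>
#include <linux/smp.h>

#define TOY_REAP_INTERVAL	(4 * HZ)	/* arbitrary period for this sketch */

static DEFINE_PER_CPU(struct delayed_work, toy_reap_work);

static void toy_reap(struct work_struct *w)
{
	/* The handler receives the embedded work_struct; convert it back. */
	struct delayed_work *dwork = to_delayed_work(w);

	/* ... reap this CPU's caches ... */

	/* Re-arm on the same CPU so the reaper keeps running periodically. */
	schedule_delayed_work_on(smp_processor_id(), dwork,
				 round_jiffies_relative(TOY_REAP_INTERVAL));
}

/* Called once per CPU as it comes online. */
void toy_start_reaper(int cpu)
{
	struct delayed_work *dwork = &per_cpu(toy_reap_work, cpu);

	/* Initialise only once, mirroring the 'work.func == NULL' check above. */
	if (dwork->work.func == NULL) {
		INIT_DELAYED_WORK(dwork, toy_reap);
		schedule_delayed_work_on(cpu, dwork,
					 round_jiffies_relative(TOY_REAP_INTERVAL));
	}
}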
zsmalloc.c
2188 static void async_free_zspage(struct work_struct *work) in async_free_zspage() argument
2196 struct zs_pool *pool = container_of(work, struct zs_pool, in async_free_zspage()
percpu.c
187 static void pcpu_balance_workfn(struct work_struct *work);
1832 static void pcpu_balance_workfn(struct work_struct *work) in pcpu_balance_workfn() argument
kmemleak.c
1862 static void kmemleak_do_cleanup(struct work_struct *work) in kmemleak_do_cleanup() argument
swapfile.c
492 static void swap_discard_work(struct work_struct *work) in swap_discard_work() argument
496 si = container_of(work, struct swap_info_struct, discard_work); in swap_discard_work()
vmstat.c
1756 static void refresh_vm_stats(struct work_struct *work) in refresh_vm_stats() argument
Kconfig
397 benefit but it will work automatically for all applications.
slub.c
5702 static void sysfs_slab_remove_workfn(struct work_struct *work) in sysfs_slab_remove_workfn() argument
5705 container_of(work, struct kmem_cache, kobj_remove_work); in sysfs_slab_remove_workfn()
hugetlb.c
1334 static void free_hpage_workfn(struct work_struct *work) in free_hpage_workfn() argument