/kernel/linux/linux-5.10/mm/swap_slots.c
    149  mutex_init(&cache->alloc_lock);  in alloc_swap_slot_cache()
    177  mutex_lock(&cache->alloc_lock);  in drain_slots_cache_cpu()
    185  mutex_unlock(&cache->alloc_lock);  in drain_slots_cache_cpu()
    331  mutex_lock(&cache->alloc_lock);  in get_swap_page()
    342  mutex_unlock(&cache->alloc_lock);  in get_swap_page()

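The pattern behind these hits is a per-cache mutex taken around slot handout. Below is a minimal sketch of that init/lock/unlock shape; the field names follow the "protects slots, nr, cur" comment listed further down for swap_slots.h, but the struct, helper names, and bookkeeping are illustrative, not the real swap_slots.c logic.

    #include <linux/mutex.h>
    #include <linux/swap.h>

    /* illustrative stand-in for the swap-slots cache */
    struct slots_cache {
            struct mutex    alloc_lock;     /* protects slots, nr, cur */
            swp_entry_t     *slots;
            int             nr;
            int             cur;
    };

    static void slots_cache_init(struct slots_cache *cache)
    {
            mutex_init(&cache->alloc_lock);         /* cf. alloc_swap_slot_cache() */
    }

    static swp_entry_t slots_cache_get(struct slots_cache *cache)
    {
            swp_entry_t entry = { 0 };

            mutex_lock(&cache->alloc_lock);         /* cf. get_swap_page() */
            if (cache->nr > 0) {
                    entry = cache->slots[cache->cur++];
                    cache->nr--;
            }
            mutex_unlock(&cache->alloc_lock);
            return entry;
    }
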
/kernel/linux/linux-5.10/drivers/uio/uio_dmem_genirq.c
     40  struct mutex alloc_lock;  (member)
     52  mutex_lock(&priv->alloc_lock);  in uio_dmem_genirq_open()
     68  mutex_unlock(&priv->alloc_lock);  in uio_dmem_genirq_open()
     85  mutex_lock(&priv->alloc_lock);  in uio_dmem_genirq_release()
    101  mutex_unlock(&priv->alloc_lock);  in uio_dmem_genirq_release()
    193  mutex_init(&priv->alloc_lock);  in uio_dmem_genirq_probe()

/kernel/linux/linux-5.10/drivers/misc/hpilo.c
    551  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_close()
    553  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_close()
    598  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_open()
    601  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_open()
    607  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_open()
    609  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_open()
    655  spin_lock(&hw->alloc_lock);  in ilo_isr()
    660  spin_unlock(&hw->alloc_lock);  in ilo_isr()
    680  spin_unlock(&hw->alloc_lock);  in ilo_isr()
    802  spin_lock_init(&ilo_hw->alloc_lock);  in ilo_probe()

/kernel/linux/linux-5.10/drivers/misc/hpilo.h
     58  spinlock_t alloc_lock;  (member)

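hpilo takes alloc_lock with spin_lock_irqsave() in ilo_open()/ilo_close() (process context) but with plain spin_lock() in ilo_isr(), where the handler already runs with local interrupts disabled. A hedged sketch of that split; the struct, bitmap field, and helper names are stand-ins for illustration, not the driver's real layout.

    #include <linux/spinlock.h>
    #include <linux/interrupt.h>
    #include <linux/bitops.h>

    struct demo_hw {
            spinlock_t      alloc_lock;
            unsigned long   ccb_alloc_map;  /* hypothetical allocation state */
    };

    /* process context: must save/restore the local interrupt state */
    static void demo_claim_ccb(struct demo_hw *hw, int slot)
    {
            unsigned long flags;

            spin_lock_irqsave(&hw->alloc_lock, flags);
            set_bit(slot, &hw->ccb_alloc_map);
            spin_unlock_irqrestore(&hw->alloc_lock, flags);
    }

    /* hard-IRQ context: interrupts are already off, a plain lock is enough */
    static irqreturn_t demo_isr(int irq, void *data)
    {
            struct demo_hw *hw = data;

            spin_lock(&hw->alloc_lock);
            /* walk ccb_alloc_map, wake waiters, etc. */
            spin_unlock(&hw->alloc_lock);
            return IRQ_HANDLED;
    }
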
/kernel/linux/linux-5.10/init/init_task.c
    131  .alloc_lock = __SPIN_LOCK_UNLOCKED(init_task.alloc_lock),
    165  &init_task.alloc_lock),

/kernel/linux/linux-5.10/include/linux/sched/task.h
    171  spin_lock(&p->alloc_lock);  in task_lock()
    176  spin_unlock(&p->alloc_lock);  in task_unlock()

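The two hits in task.h are the task_lock()/task_unlock() helpers themselves; going by the listed lines, they reduce to thin wrappers around the task's alloc_lock (the spinlock_t member listed under include/linux/sched.h below).

    #include <linux/sched.h>

    /* reconstruction from the two hits above (include/linux/sched/task.h) */
    static inline void task_lock(struct task_struct *p)
    {
            spin_lock(&p->alloc_lock);
    }

    static inline void task_unlock(struct task_struct *p)
    {
            spin_unlock(&p->alloc_lock);
    }
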
/kernel/linux/linux-5.10/include/linux/swap_slots.h
     15  struct mutex alloc_lock;  /* protects slots, nr, cur */  (member)

/kernel/linux/linux-5.10/include/linux/sched.h
   1142  spinlock_t alloc_lock;  (member)

/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/pio.c
    742  spin_lock_init(&sc->alloc_lock);  in sc_alloc()
    929  spin_lock_irq(&sc->alloc_lock);  in sc_disable()
    973  spin_unlock_irq(&sc->alloc_lock);  in sc_disable()
   1303  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_enable()
   1374  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_enable()
   1429  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_stop()
   1433  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_stop()
   1462  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_buffer_alloc()
   1464  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_buffer_alloc()
   1473  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_buffer_alloc()
    [all …]

/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/pio.h
    115  spinlock_t alloc_lock ____cacheline_aligned_in_smp;

/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/cmd.c
    122  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_alloc_index()
    126  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_alloc_index()
    133  lockdep_assert_held(&cmd->alloc_lock);  in cmd_free_index()
    147  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_ent_put()
    158  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_ent_put()
    933  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_work_handler()
    935  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_work_handler()
   1660  spin_lock_irqsave(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
   1673  spin_unlock_irqrestore(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
   1682  spin_unlock_irqrestore(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
    [all …]

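Note the lockdep_assert_held() at line 133: cmd_free_index() documents that its caller already holds alloc_lock rather than taking it again. A minimal sketch of that convention, with an invented struct, bitmap field, and helper names standing in for the mlx5 internals.

    #include <linux/spinlock.h>
    #include <linux/lockdep.h>
    #include <linux/bitops.h>

    struct demo_cmd {
            spinlock_t      alloc_lock;
            unsigned long   free_map;       /* hypothetical free-slot bitmap */
    };

    /* caller must hold alloc_lock; lockdep verifies that in debug builds */
    static void demo_free_index(struct demo_cmd *cmd, int idx)
    {
            lockdep_assert_held(&cmd->alloc_lock);
            set_bit(idx, &cmd->free_map);
    }

    static void demo_put_index(struct demo_cmd *cmd, int idx)
    {
            unsigned long flags;

            spin_lock_irqsave(&cmd->alloc_lock, flags);
            demo_free_index(cmd, idx);
            spin_unlock_irqrestore(&cmd->alloc_lock, flags);
    }
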
/kernel/linux/linux-5.10/tools/lib/lockdep/preload.c
    135  static struct lock_lookup *alloc_lock(void)  in alloc_lock()  (function)
    188  l = alloc_lock();  in __get_lock()

/kernel/linux/linux-5.10/arch/powerpc/kvm/book3s_64_vio.c
    206  mutex_lock(&stt->alloc_lock);  in kvm_spapr_get_tce_page()
    214  mutex_unlock(&stt->alloc_lock);  in kvm_spapr_get_tce_page()
    308  mutex_init(&stt->alloc_lock);  in kvm_vm_ioctl_create_spapr_tce()

/kernel/linux/linux-5.10/security/yama/yama_lsm.c
     80  assert_spin_locked(&target->alloc_lock);  /* for target->comm */  in report_access()

/kernel/linux/linux-5.10/arch/powerpc/include/asm/kvm_host.h
    205  struct mutex alloc_lock;  (member)

/kernel/linux/linux-5.10/include/linux/mlx5/driver.h
    284  spinlock_t alloc_lock;  (member)

/kernel/linux/linux-5.10/kernel/fork.c
   2030  spin_lock_init(&p->alloc_lock);  in copy_process()
   2079  seqcount_spinlock_init(&p->mems_allowed_seq, &p->alloc_lock);  in copy_process()

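copy_process() both initializes the new task's alloc_lock and registers it as the write-side lock of the mems_allowed_seq seqcount, so lockdep can check that writers hold alloc_lock (i.e. task_lock()). A hedged sketch of that association, using stand-in names rather than the real task_struct.

    #include <linux/spinlock.h>
    #include <linux/seqlock.h>

    struct demo_task {
            spinlock_t              alloc_lock;
            seqcount_spinlock_t     mems_allowed_seq;
            unsigned long           mems_allowed;   /* hypothetical protected field */
    };

    static void demo_task_init(struct demo_task *t)
    {
            spin_lock_init(&t->alloc_lock);
            /* tie the seqcount to alloc_lock, as copy_process() does */
            seqcount_spinlock_init(&t->mems_allowed_seq, &t->alloc_lock);
    }

    static void demo_update_mems(struct demo_task *t, unsigned long mems)
    {
            spin_lock(&t->alloc_lock);              /* task_lock() in the real code */
            write_seqcount_begin(&t->mems_allowed_seq);
            t->mems_allowed = mems;
            write_seqcount_end(&t->mems_allowed_seq);
            spin_unlock(&t->alloc_lock);
    }
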
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
    321  spin_lock(&res_alloc->alloc_lock);  in mlx4_grant_resource()
    374  spin_unlock(&res_alloc->alloc_lock);  in mlx4_grant_resource()
    390  spin_lock(&res_alloc->alloc_lock);  in mlx4_release_resource()
    419  spin_unlock(&res_alloc->alloc_lock);  in mlx4_release_resource()
    551  spin_lock_init(&res_alloc->alloc_lock);  in mlx4_init_resource_tracker()

/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/mlx4.h
    541  spinlock_t alloc_lock;  /* protect quotas */  (member)

/kernel/linux/linux-5.10/Documentation/admin-guide/cgroup-v1/cgroups.rst
    533  - while holding the task's alloc_lock (via task_lock())