/drivers/gpu/drm/vmwgfx/ |
D | ttm_lock.c |
  45  void ttm_lock_init(struct ttm_lock *lock)  in ttm_lock_init() argument
  47  spin_lock_init(&lock->lock);  in ttm_lock_init()
  48  init_waitqueue_head(&lock->queue);  in ttm_lock_init()
  49  lock->rw = 0;  in ttm_lock_init()
  50  lock->flags = 0;  in ttm_lock_init()
  53  void ttm_read_unlock(struct ttm_lock *lock)  in ttm_read_unlock() argument
  55  spin_lock(&lock->lock);  in ttm_read_unlock()
  56  if (--lock->rw == 0)  in ttm_read_unlock()
  57  wake_up_all(&lock->queue);  in ttm_read_unlock()
  58  spin_unlock(&lock->lock);  in ttm_read_unlock()
  [all …]
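The ttm_lock hits above show a read/write lock built by hand from a spinlock-protected counter and a waitqueue: readers bump lock->rw, and the last one out wakes sleepers. A minimal sketch of that construction under the usual convention (rw > 0 readers, rw == -1 writer); the demo_* names are illustrative, not the vmwgfx API:

#include <linux/spinlock.h>
#include <linux/wait.h>

struct demo_rwlock {
	spinlock_t lock;		/* protects rw */
	wait_queue_head_t queue;	/* tasks waiting for rw to change */
	int rw;				/* >0: readers, 0: free, -1: writer */
};

static void demo_rwlock_init(struct demo_rwlock *l)
{
	spin_lock_init(&l->lock);
	init_waitqueue_head(&l->queue);
	l->rw = 0;
}

/* Used as wait_event() conditions: try once under the spinlock. */
static bool __demo_read_trylock(struct demo_rwlock *l)
{
	bool locked = false;

	spin_lock(&l->lock);
	if (l->rw >= 0) {		/* no writer active */
		l->rw++;
		locked = true;
	}
	spin_unlock(&l->lock);
	return locked;
}

static bool __demo_write_trylock(struct demo_rwlock *l)
{
	bool locked = false;

	spin_lock(&l->lock);
	if (l->rw == 0) {		/* no readers, no writer */
		l->rw = -1;
		locked = true;
	}
	spin_unlock(&l->lock);
	return locked;
}

static void demo_read_lock(struct demo_rwlock *l)
{
	wait_event(l->queue, __demo_read_trylock(l));
}

static void demo_read_unlock(struct demo_rwlock *l)
{
	spin_lock(&l->lock);
	if (--l->rw == 0)		/* last reader lets a writer in */
		wake_up_all(&l->queue);
	spin_unlock(&l->lock);
}

static void demo_write_lock(struct demo_rwlock *l)
{
	wait_event(l->queue, __demo_write_trylock(l));
}

static void demo_write_unlock(struct demo_rwlock *l)
{
	spin_lock(&l->lock);
	l->rw = 0;
	wake_up_all(&l->queue);		/* readers and writers may retry */
	spin_unlock(&l->lock);
}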
|
D | ttm_lock.h |
  71  spinlock_t lock;  member
  83  extern void ttm_lock_init(struct ttm_lock *lock);
  92  extern void ttm_read_unlock(struct ttm_lock *lock);
  104  extern int ttm_read_lock(struct ttm_lock *lock, bool interruptible);
  121  extern int ttm_read_trylock(struct ttm_lock *lock, bool interruptible);
  130  extern void ttm_write_unlock(struct ttm_lock *lock);
  142  extern int ttm_write_lock(struct ttm_lock *lock, bool interruptible);
  151  extern void ttm_lock_downgrade(struct ttm_lock *lock);
  160  extern void ttm_suspend_lock(struct ttm_lock *lock);
  169  extern void ttm_suspend_unlock(struct ttm_lock *lock);
  [all …]
|
/drivers/md/persistent-data/ |
D | dm-block-manager.c |
  44  spinlock_t lock;  member
  60  static unsigned __find_holder(struct block_lock *lock,  in __find_holder() argument
  66  if (lock->holders[i] == task)  in __find_holder()
  74  static void __add_holder(struct block_lock *lock, struct task_struct *task)  in __add_holder() argument
  76  unsigned h = __find_holder(lock, NULL);  in __add_holder()
  82  lock->holders[h] = task;  in __add_holder()
  85  t = lock->traces + h;  in __add_holder()
  91  static void __del_holder(struct block_lock *lock, struct task_struct *task)  in __del_holder() argument
  93  unsigned h = __find_holder(lock, task);  in __del_holder()
  94  lock->holders[h] = NULL;  in __del_holder()
  [all …]
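dm-block-manager's block_lock pairs the lock itself with bookkeeping about who holds it, so conflicting or recursive acquisitions can be reported (the real code also saves stack traces). A small sketch of that holder-tracking idea, with hypothetical demo_* names and a fixed slot count:

#include <linux/sched.h>
#include <linux/spinlock.h>

#define DEMO_MAX_HOLDERS 4

struct demo_block_lock {
	spinlock_t lock;			/* protects holders[] */
	struct task_struct *holders[DEMO_MAX_HOLDERS];
};

/* Find the slot for @task, or a free slot when @task is NULL. */
static unsigned int demo_find_holder(struct demo_block_lock *lock,
				     struct task_struct *task)
{
	unsigned int i;

	for (i = 0; i < DEMO_MAX_HOLDERS; i++)
		if (lock->holders[i] == task)
			break;
	return i;
}

static void demo_add_holder(struct demo_block_lock *lock)
{
	unsigned int h;

	spin_lock(&lock->lock);
	h = demo_find_holder(lock, NULL);	/* first free slot */
	if (h < DEMO_MAX_HOLDERS)
		lock->holders[h] = current;
	spin_unlock(&lock->lock);
}

static void demo_del_holder(struct demo_block_lock *lock)
{
	unsigned int h;

	spin_lock(&lock->lock);
	h = demo_find_holder(lock, current);
	if (h < DEMO_MAX_HOLDERS)
		lock->holders[h] = NULL;
	spin_unlock(&lock->lock);
}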
|
/drivers/gpu/drm/ |
D | drm_lock.c |
  63  volatile unsigned int *lock = &lock_data->hw_lock->lock;  in drm_lock_take() local
  67  old = *lock;  in drm_lock_take()
  75  prev = cmpxchg(lock, old, new);  in drm_lock_take()
  112  volatile unsigned int *lock = &lock_data->hw_lock->lock;  in drm_lock_transfer() local
  116  old = *lock;  in drm_lock_transfer()
  118  prev = cmpxchg(lock, old, new);  in drm_lock_transfer()
  127  volatile unsigned int *lock = &lock_data->hw_lock->lock;  in drm_legacy_lock_free() local
  139  old = *lock;  in drm_legacy_lock_free()
  141  prev = cmpxchg(lock, old, new);  in drm_legacy_lock_free()
  168  struct drm_lock *lock = data;  in drm_legacy_lock() local
  [all …]
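The drm_lock.c hits all revolve around one idiom: the legacy DRM hardware lock is a single word shared with userspace, and ownership changes are published with a cmpxchg() retry loop. A sketch of that take path; DEMO_LOCK_HELD and demo_lock_take() are illustrative stand-ins, not the _DRM_LOCK_* machinery itself:

#include <linux/atomic.h>
#include <linux/types.h>

#define DEMO_LOCK_HELD	0x80000000U	/* high bit marks "held", low bits carry the context */

static bool demo_lock_take(volatile unsigned int *lock, unsigned int context)
{
	unsigned int old, new, prev;

	do {
		old = *lock;			/* sample the current owner word */
		if (old & DEMO_LOCK_HELD)
			return false;		/* somebody else holds it */
		new = context | DEMO_LOCK_HELD;	/* claim it for this context */
		prev = cmpxchg(lock, old, new);
	} while (prev != old);			/* lost a race: re-sample and retry */

	return true;
}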
|
D | drm_modeset_lock.c |
  230  struct drm_modeset_lock *lock;  in drm_modeset_drop_locks() local
  232  lock = list_first_entry(&ctx->locked,  in drm_modeset_drop_locks()
  235  drm_modeset_unlock(lock);  in drm_modeset_drop_locks()
  240  static inline int modeset_lock(struct drm_modeset_lock *lock,  in modeset_lock() argument
  251  if (!ww_mutex_trylock(&lock->mutex))  in modeset_lock()
  256  ret = ww_mutex_lock_slow_interruptible(&lock->mutex, &ctx->ww_ctx);  in modeset_lock()
  258  ret = ww_mutex_lock_interruptible(&lock->mutex, &ctx->ww_ctx);  in modeset_lock()
  260  ww_mutex_lock_slow(&lock->mutex, &ctx->ww_ctx);  in modeset_lock()
  263  ret = ww_mutex_lock(&lock->mutex, &ctx->ww_ctx);  in modeset_lock()
  266  WARN_ON(!list_empty(&lock->head));  in modeset_lock()
  [all …]
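modeset_lock() is built on wound/wait mutexes: ww_mutex_lock() may fail with -EDEADLK, in which case the caller drops everything it holds, sleeps on the contended lock with the _slow variant, and retries. A sketch of that acquire/backoff dance for two locks, using hypothetical demo_* names rather than the DRM modeset helpers:

#include <linux/kernel.h>
#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(demo_ww_class);

static int demo_lock_pair(struct ww_mutex *a, struct ww_mutex *b)
{
	struct ww_acquire_ctx ctx;
	int ret;

	ww_acquire_init(&ctx, &demo_ww_class);

	ret = ww_mutex_lock(a, &ctx);	/* first lock of the context cannot deadlock */
	if (ret)
		goto out_fini;

	while ((ret = ww_mutex_lock(b, &ctx)) == -EDEADLK) {
		/*
		 * An older context won: drop what we hold, sleep until the
		 * contended lock is free, keep it, and retry the other one.
		 */
		ww_mutex_unlock(a);
		ww_mutex_lock_slow(b, &ctx);
		swap(a, b);
	}
	if (ret) {
		ww_mutex_unlock(a);
		goto out_fini;
	}

	ww_acquire_done(&ctx);
	/* ... touch state protected by both locks ... */
	ww_mutex_unlock(a);
	ww_mutex_unlock(b);
	ret = 0;

out_fini:
	ww_acquire_fini(&ctx);
	return ret;
}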
|
D | drm_debugfs_crc.c |
  152  spin_lock_irq(&crc->lock);  in crc_control_write()
  155  spin_unlock_irq(&crc->lock);  in crc_control_write()
  163  spin_unlock_irq(&crc->lock);  in crc_control_write()
  180  assert_spin_locked(&crc->lock);  in crtc_crc_data_count()
  230  spin_lock_irq(&crc->lock);  in crtc_crc_open()
  238  spin_unlock_irq(&crc->lock);  in crtc_crc_open()
  252  spin_lock_irq(&crc->lock);  in crtc_crc_open()
  254  spin_unlock_irq(&crc->lock);  in crtc_crc_open()
  264  spin_lock_irq(&crc->lock);  in crtc_crc_release()
  266  spin_unlock_irq(&crc->lock);  in crtc_crc_release()
  [all …]
|
/drivers/acpi/acpica/ |
D | utlock.c |
  28  acpi_status acpi_ut_create_rw_lock(struct acpi_rw_lock *lock)  in acpi_ut_create_rw_lock() argument
  32  lock->num_readers = 0;  in acpi_ut_create_rw_lock()
  33  status = acpi_os_create_mutex(&lock->reader_mutex);  in acpi_ut_create_rw_lock()
  38  status = acpi_os_create_mutex(&lock->writer_mutex);  in acpi_ut_create_rw_lock()
  42  void acpi_ut_delete_rw_lock(struct acpi_rw_lock *lock)  in acpi_ut_delete_rw_lock() argument
  45  acpi_os_delete_mutex(lock->reader_mutex);  in acpi_ut_delete_rw_lock()
  46  acpi_os_delete_mutex(lock->writer_mutex);  in acpi_ut_delete_rw_lock()
  48  lock->num_readers = 0;  in acpi_ut_delete_rw_lock()
  49  lock->reader_mutex = NULL;  in acpi_ut_delete_rw_lock()
  50  lock->writer_mutex = NULL;  in acpi_ut_delete_rw_lock()
  [all …]
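acpi_rw_lock is the classic two-primitive readers/writer construction: one mutex serializes the reader count, and the first reader in (last reader out) takes (releases) the writer-exclusion primitive on behalf of the whole group. A sketch of that scheme; a binary semaphore stands in for the writer lock because the releasing task may differ from the acquiring one, and the demo_* names are illustrative:

#include <linux/mutex.h>
#include <linux/semaphore.h>
#include <linux/types.h>

struct demo_rw_lock {
	struct mutex reader_mutex;	/* serializes num_readers updates */
	struct semaphore writer_sem;	/* binary: held by a writer or by the reader group */
	u32 num_readers;
};

static void demo_rw_lock_init(struct demo_rw_lock *l)
{
	mutex_init(&l->reader_mutex);
	sema_init(&l->writer_sem, 1);
	l->num_readers = 0;
}

static void demo_rw_read_lock(struct demo_rw_lock *l)
{
	mutex_lock(&l->reader_mutex);
	if (++l->num_readers == 1)	/* first reader shuts writers out */
		down(&l->writer_sem);
	mutex_unlock(&l->reader_mutex);
}

static void demo_rw_read_unlock(struct demo_rw_lock *l)
{
	mutex_lock(&l->reader_mutex);
	if (--l->num_readers == 0)	/* last reader lets writers back in */
		up(&l->writer_sem);
	mutex_unlock(&l->reader_mutex);
}

static void demo_rw_write_lock(struct demo_rw_lock *l)
{
	down(&l->writer_sem);		/* excludes readers and other writers */
}

static void demo_rw_write_unlock(struct demo_rw_lock *l)
{
	up(&l->writer_sem);
}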
|
/drivers/clk/mmp/ |
D | clk-apbc.c |
  32  spinlock_t *lock;  member
  45  if (apbc->lock)  in clk_apbc_prepare()
  46  spin_lock_irqsave(apbc->lock, flags);  in clk_apbc_prepare()
  54  if (apbc->lock)  in clk_apbc_prepare()
  55  spin_unlock_irqrestore(apbc->lock, flags);  in clk_apbc_prepare()
  59  if (apbc->lock)  in clk_apbc_prepare()
  60  spin_lock_irqsave(apbc->lock, flags);  in clk_apbc_prepare()
  66  if (apbc->lock)  in clk_apbc_prepare()
  67  spin_unlock_irqrestore(apbc->lock, flags);  in clk_apbc_prepare()
  72  if (apbc->lock)  in clk_apbc_prepare()
  [all …]
|
D | clk-gate.c |
  34  if (gate->lock)  in mmp_clk_gate_enable()
  35  spin_lock_irqsave(gate->lock, flags);  in mmp_clk_gate_enable()
  42  if (gate->lock)  in mmp_clk_gate_enable()
  43  spin_unlock_irqrestore(gate->lock, flags);  in mmp_clk_gate_enable()
  60  if (gate->lock)  in mmp_clk_gate_disable()
  61  spin_lock_irqsave(gate->lock, flags);  in mmp_clk_gate_disable()
  68  if (gate->lock)  in mmp_clk_gate_disable()
  69  spin_unlock_irqrestore(gate->lock, flags);  in mmp_clk_gate_disable()
  78  if (gate->lock)  in mmp_clk_gate_is_enabled()
  79  spin_lock_irqsave(gate->lock, flags);  in mmp_clk_gate_is_enabled()
  [all …]
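The mmp clock entries here (clk-apbc, clk-gate, clk-apmu, and berlin2-div below) all repeat one pattern: the spinlock is optional (a pointer that may be NULL), because the control register may or may not be shared with other clocks. A sketch of that guarded read-modify-write, with hypothetical demo_* names:

#include <linux/io.h>
#include <linux/spinlock.h>

struct demo_gate {
	void __iomem *reg;	/* enable register, possibly shared */
	u32 mask;		/* bit(s) owned by this gate */
	spinlock_t *lock;	/* NULL when the register has a single user */
};

static void demo_gate_enable(struct demo_gate *gate)
{
	unsigned long flags = 0;
	u32 val;

	if (gate->lock)
		spin_lock_irqsave(gate->lock, flags);

	/* Read-modify-write must not race with other users of the register. */
	val = readl(gate->reg);
	val |= gate->mask;
	writel(val, gate->reg);

	if (gate->lock)
		spin_unlock_irqrestore(gate->lock, flags);
}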
|
D | clk-apmu.c |
  26  spinlock_t *lock;  member
  35  if (apmu->lock)  in clk_apmu_enable()
  36  spin_lock_irqsave(apmu->lock, flags);  in clk_apmu_enable()
  41  if (apmu->lock)  in clk_apmu_enable()
  42  spin_unlock_irqrestore(apmu->lock, flags);  in clk_apmu_enable()
  53  if (apmu->lock)  in clk_apmu_disable()
  54  spin_lock_irqsave(apmu->lock, flags);  in clk_apmu_disable()
  59  if (apmu->lock)  in clk_apmu_disable()
  60  spin_unlock_irqrestore(apmu->lock, flags);  in clk_apmu_disable()
  69  void __iomem *base, u32 enable_mask, spinlock_t *lock)  in mmp_clk_register_apmu() argument
  [all …]
|
/drivers/clk/berlin/ |
D | berlin2-div.c |
  58  spinlock_t *lock;  member
  71  if (div->lock)  in berlin2_div_is_enabled()
  72  spin_lock(div->lock);  in berlin2_div_is_enabled()
  77  if (div->lock)  in berlin2_div_is_enabled()
  78  spin_unlock(div->lock);  in berlin2_div_is_enabled()
  89  if (div->lock)  in berlin2_div_enable()
  90  spin_lock(div->lock);  in berlin2_div_enable()
  96  if (div->lock)  in berlin2_div_enable()
  97  spin_unlock(div->lock);  in berlin2_div_enable()
  108  if (div->lock)  in berlin2_div_disable()
  [all …]
|
/drivers/mmc/host/ |
D | mmc_hsq.c |
  34  spin_lock_irqsave(&hsq->lock, flags);  in mmc_hsq_pump_requests()
  38  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_pump_requests()
  44  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_pump_requests()
  52  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_pump_requests()
  116  spin_lock_irqsave(&hsq->lock, flags);  in mmc_hsq_post_request()
  131  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_post_request()
  135  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_post_request()
  158  spin_lock_irqsave(&hsq->lock, flags);  in mmc_hsq_finalize_request()
  161  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_finalize_request()
  170  spin_unlock_irqrestore(&hsq->lock, flags);  in mmc_hsq_finalize_request()
  [all …]
|
/drivers/gpu/drm/via/ |
D | via_video.c |
  41  XVMCLOCKPTR(dev_priv->sarea_priv, i)->lock = 0;  in via_init_futex()
  52  volatile int *lock;  in via_release_futex() local
  58  lock = (volatile int *)XVMCLOCKPTR(dev_priv->sarea_priv, i);  in via_release_futex()
  59  if ((_DRM_LOCKING_CONTEXT(*lock) == context)) {  in via_release_futex()
  60  if (_DRM_LOCK_IS_HELD(*lock)  in via_release_futex()
  61  && (*lock & _DRM_LOCK_CONT)) {  in via_release_futex()
  64  *lock = 0;  in via_release_futex()
  72  volatile int *lock;  in via_decoder_futex() local
  79  if (fx->lock >= VIA_NR_XVMC_LOCKS)  in via_decoder_futex()
  82  lock = (volatile int *)XVMCLOCKPTR(sAPriv, fx->lock);  in via_decoder_futex()
  [all …]
|
/drivers/base/power/ |
D | runtime.c |
  104  spin_lock_irqsave(&dev->power.lock, flags);  in rpm_get_accounted_time()
  109  spin_unlock_irqrestore(&dev->power.lock, flags);  in rpm_get_accounted_time()
  226  spin_lock_irq(&dev->power.lock);  in pm_runtime_set_memalloc_noio()
  229  spin_unlock_irq(&dev->power.lock);  in pm_runtime_set_memalloc_noio()
  364  __releases(&dev->power.lock) __acquires(&dev->power.lock)  in __rpm_callback()
  370  spin_unlock(&dev->power.lock);  in __rpm_callback()
  372  spin_unlock_irq(&dev->power.lock);  in __rpm_callback()
  397  spin_lock(&dev->power.lock);  in __rpm_callback()
  417  spin_lock_irq(&dev->power.lock);  in __rpm_callback()
  487  spin_unlock(&dev->power.lock);  in rpm_idle()
  [all …]
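The __rpm_callback() hits show two related conventions: the spinlock is dropped around a callback that may sleep, and the sparse annotations __releases()/__acquires() document that the function returns with the lock in the same state it was given. A sketch of that shape, with hypothetical demo_* names rather than the real PM-runtime code:

#include <linux/spinlock.h>

struct demo_dev {
	spinlock_t lock;
	int (*callback)(struct demo_dev *dev);
};

/* Called with dev->lock held and irqs disabled; returns the same way. */
static int demo_run_callback(struct demo_dev *dev)
	__releases(&dev->lock) __acquires(&dev->lock)
{
	int ret;

	spin_unlock_irq(&dev->lock);	/* the callback may sleep */
	ret = dev->callback(dev);
	spin_lock_irq(&dev->lock);

	return ret;
}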
|
/drivers/usb/gadget/function/ |
D | u_ether_configfs.h |
  35  mutex_lock(&opts->lock); \
  37  mutex_unlock(&opts->lock); \
  48  mutex_lock(&opts->lock); \
  50  mutex_unlock(&opts->lock); \
  55  mutex_unlock(&opts->lock); \
  70  mutex_lock(&opts->lock); \
  72  mutex_unlock(&opts->lock); \
  83  mutex_lock(&opts->lock); \
  85  mutex_unlock(&opts->lock); \
  90  mutex_unlock(&opts->lock); \
  [all …]
|
/drivers/acpi/ |
D | acpi_dbg.c |
  51  struct mutex lock;  member
  136  mutex_lock(&acpi_aml_io.lock);  in acpi_aml_running()
  138  mutex_unlock(&acpi_aml_io.lock);  in acpi_aml_running()
  146  mutex_lock(&acpi_aml_io.lock);  in acpi_aml_busy()
  148  mutex_unlock(&acpi_aml_io.lock);  in acpi_aml_busy()
  160  mutex_lock(&acpi_aml_io.lock);  in acpi_aml_used()
  162  mutex_unlock(&acpi_aml_io.lock);  in acpi_aml_used()
  170  mutex_lock(&acpi_aml_io.lock);  in acpi_aml_kern_readable()
  173  mutex_unlock(&acpi_aml_io.lock);  in acpi_aml_kern_readable()
  181  mutex_lock(&acpi_aml_io.lock);  in acpi_aml_kern_writable()
  [all …]
|
/drivers/net/wireless/st/cw1200/ |
D | queue.c |
  101  spin_lock_bh(&stats->lock);  in __cw1200_queue_gc()
  105  spin_unlock_bh(&stats->lock);  in __cw1200_queue_gc()
  135  spin_lock_bh(&queue->lock);  in cw1200_queue_gc()
  137  spin_unlock_bh(&queue->lock);  in cw1200_queue_gc()
  150  spin_lock_init(&stats->lock);  in cw1200_queue_stats_init()
  177  spin_lock_init(&queue->lock);  in cw1200_queue_init()
  206  spin_lock_bh(&queue->lock);  in cw1200_queue_clear()
  218  spin_lock_bh(&stats->lock);  in cw1200_queue_clear()
  224  spin_unlock_bh(&stats->lock);  in cw1200_queue_clear()
  229  spin_unlock_bh(&queue->lock);  in cw1200_queue_clear()
  [all …]
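The cw1200 queue locks are taken with the _bh variants because the same data is touched from softirq (bottom-half) context; process-context users disable BHs while holding the lock. A short sketch, with hypothetical demo_* names:

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_queue {
	spinlock_t lock;		/* taken from process and BH context */
	struct list_head queue;
	size_t num_queued;
};

static void demo_queue_clear(struct demo_queue *q)
{
	spin_lock_bh(&q->lock);		/* keep the BH path off this data */
	INIT_LIST_HEAD(&q->queue);
	q->num_queued = 0;
	spin_unlock_bh(&q->lock);
}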
|
/drivers/usb/gadget/legacy/ |
D | inode.c |
  116  spinlock_t lock;  member
  175  spin_lock_init (&dev->lock);  in dev_new()
  192  struct mutex lock;  member
  296  if (!mutex_trylock(&epdata->lock))  in get_ready_ep()
  300  mutex_unlock(&epdata->lock);  in get_ready_ep()
  308  val = mutex_lock_interruptible(&epdata->lock);  in get_ready_ep()
  326  mutex_unlock(&epdata->lock);  in get_ready_ep()
  336  spin_lock_irq (&epdata->dev->lock);  in ep_io()
  347  spin_unlock_irq (&epdata->dev->lock);  in ep_io()
  352  spin_lock_irq (&epdata->dev->lock);  in ep_io()
  [all …]
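get_ready_ep() illustrates nonblocking-aware mutex use: O_NONBLOCK callers must not sleep, so they use mutex_trylock() and bail out with -EAGAIN, while blocking callers sleep interruptibly so a signal can abort the wait. A sketch of that split, with hypothetical demo_* names:

#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/mutex.h>

struct demo_ep_data {
	struct mutex lock;
	/* ... endpoint state ... */
};

static int demo_get_ready_ep(struct file *file, struct demo_ep_data *epdata)
{
	if (file->f_flags & O_NONBLOCK) {
		if (!mutex_trylock(&epdata->lock))
			return -EAGAIN;		/* would have had to sleep */
		return 0;
	}

	/* Blocking path: a pending signal aborts the wait with -EINTR. */
	return mutex_lock_interruptible(&epdata->lock);
}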
|
/drivers/base/ |
D | map.c |
  26  int (*lock)(dev_t, void *);  member
  29  struct mutex *lock;  member
  34  int (*lock)(dev_t, void *), void *data)  in kobj_map()
  51  p->lock = lock;  in kobj_map()
  56  mutex_lock(domain->lock);  in kobj_map()
  64  mutex_unlock(domain->lock);  in kobj_map()
  78  mutex_lock(domain->lock);  in kobj_unmap()
  91  mutex_unlock(domain->lock);  in kobj_unmap()
  102  mutex_lock(domain->lock);  in kobj_lookup()
  119  if (p->lock && p->lock(dev, data) < 0) {  in kobj_lookup()
  [all …]
|
/drivers/pci/endpoint/ |
D | pci-epc-core.c |
  156  mutex_lock(&epc->lock);  in pci_epc_get_features()
  158  mutex_unlock(&epc->lock);  in pci_epc_get_features()
  175  mutex_lock(&epc->lock);  in pci_epc_stop()
  177  mutex_unlock(&epc->lock);  in pci_epc_stop()
  197  mutex_lock(&epc->lock);  in pci_epc_start()
  199  mutex_unlock(&epc->lock);  in pci_epc_start()
  225  mutex_lock(&epc->lock);  in pci_epc_raise_irq()
  227  mutex_unlock(&epc->lock);  in pci_epc_raise_irq()
  250  mutex_lock(&epc->lock);  in pci_epc_get_msi()
  252  mutex_unlock(&epc->lock);  in pci_epc_get_msi()
  [all …]
|
/drivers/gpu/drm/i915/display/ |
D | intel_frontbuffer.c |
  83  spin_lock(&i915->fb_tracking.lock);  in frontbuffer_flush()
  85  spin_unlock(&i915->fb_tracking.lock);  in frontbuffer_flush()
  111  spin_lock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip_prepare()
  115  spin_unlock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip_prepare()
  131  spin_lock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip_complete()
  135  spin_unlock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip_complete()
  155  spin_lock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip()
  158  spin_unlock(&i915->fb_tracking.lock);  in intel_frontbuffer_flip()
  170  spin_lock(&i915->fb_tracking.lock);  in __intel_fb_invalidate()
  173  spin_unlock(&i915->fb_tracking.lock);  in __intel_fb_invalidate()
  [all …]
|
/drivers/md/ |
D | dm-user.c |
  101  struct mutex lock;  member
  156  struct mutex lock;  member
  196  lockdep_assert_held(&t->lock);  in is_user_space_thread_present()
  207  mutex_lock(&t->lock);  in process_delayed_work()
  213  mutex_unlock(&t->lock);  in process_delayed_work()
  225  mutex_unlock(&t->lock);  in process_delayed_work()
  581  lockdep_assert_held(&t->lock);  in msg_get_map()
  594  lockdep_assert_held(&t->lock);  in msg_get_to_user()
  615  mutex_unlock(&t->lock);  in msg_get_to_user()
  617  mutex_lock(&t->lock);  in msg_get_to_user()
  [all …]
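dm-user documents its locking contract with lockdep_assert_held(): helpers that rely on the caller holding t->lock assert it, so a misuse shows up as a lockdep splat instead of a silent race. A small sketch of that convention, with hypothetical demo_* names:

#include <linux/list.h>
#include <linux/lockdep.h>
#include <linux/mutex.h>

struct demo_target {
	struct mutex lock;
	struct list_head to_user;	/* protected by lock */
};

/* Caller must hold t->lock; checked when lockdep is enabled. */
static bool demo_queue_empty(struct demo_target *t)
{
	lockdep_assert_held(&t->lock);
	return list_empty(&t->to_user);
}

static bool demo_peek(struct demo_target *t)
{
	bool empty;

	mutex_lock(&t->lock);
	empty = demo_queue_empty(t);
	mutex_unlock(&t->lock);
	return empty;
}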
|
/drivers/usb/serial/ |
D | cypress_m8.c |
  92  spinlock_t lock; /* private lock */  member
  362  spin_lock_irqsave(&priv->lock, flags);  in cypress_serial_control()
  365  spin_unlock_irqrestore(&priv->lock, flags);  in cypress_serial_control()
  400  spin_lock_irqsave(&priv->lock, flags);  in cypress_serial_control()
  405  spin_unlock_irqrestore(&priv->lock, flags);  in cypress_serial_control()
  408  spin_lock_irqsave(&priv->lock, flags);  in cypress_serial_control()
  410  spin_unlock_irqrestore(&priv->lock, flags);  in cypress_serial_control()
  422  spin_lock_irqsave(&priv->lock, flags);  in cypress_set_dead()
  424  spin_unlock_irqrestore(&priv->lock, flags);  in cypress_set_dead()
  428  spin_unlock_irqrestore(&priv->lock, flags);  in cypress_set_dead()
  [all …]
|
/drivers/tty/hvc/ |
D | hvcs.c |
  250  spinlock_t lock;  member
  343  spin_lock_irqsave(&hvcsd->lock, flags);  in hvcs_partner_vtys_show()
  345  spin_unlock_irqrestore(&hvcsd->lock, flags);  in hvcs_partner_vtys_show()
  357  spin_lock_irqsave(&hvcsd->lock, flags);  in hvcs_partner_clcs_show()
  359  spin_unlock_irqrestore(&hvcsd->lock, flags);  in hvcs_partner_clcs_show()
  382  spin_lock_irqsave(&hvcsd->lock, flags);  in hvcs_current_vty_show()
  384  spin_unlock_irqrestore(&hvcsd->lock, flags);  in hvcs_current_vty_show()
  402  spin_lock_irqsave(&hvcsd->lock, flags);  in hvcs_vterm_state_store()
  405  spin_unlock_irqrestore(&hvcsd->lock, flags);  in hvcs_vterm_state_store()
  412  spin_unlock_irqrestore(&hvcsd->lock, flags);  in hvcs_vterm_state_store()
  [all …]
|
/drivers/net/ethernet/ti/ |
D | davinci_cpdma.c |
  101  spinlock_t lock;  member
  114  spinlock_t lock;  member
  381  spin_lock_irqsave(&chan->lock, flags);  in cpdma_chan_on()
  383  spin_unlock_irqrestore(&chan->lock, flags);  in cpdma_chan_on()
  387  spin_unlock_irqrestore(&chan->lock, flags);  in cpdma_chan_on()
  398  spin_unlock_irqrestore(&chan->lock, flags);  in cpdma_chan_on()
  522  spin_lock_init(&ctlr->lock);  in cpdma_ctlr_create()
  541  spin_lock_irqsave(&ctlr->lock, flags);  in cpdma_ctlr_start()
  543  spin_unlock_irqrestore(&ctlr->lock, flags);  in cpdma_ctlr_start()
  591  spin_unlock_irqrestore(&ctlr->lock, flags);  in cpdma_ctlr_start()
  [all …]
|