/drivers/input/joystick/iforce/
D | iforce-ff.c |
    182  struct ff_effect *old,  in need_condition_modifier() argument
    195  ret |= old->u.condition[i].right_saturation != new->u.condition[i].right_saturation  in need_condition_modifier()
    196  || old->u.condition[i].left_saturation != new->u.condition[i].left_saturation  in need_condition_modifier()
    197  || old->u.condition[i].right_coeff != new->u.condition[i].right_coeff  in need_condition_modifier()
    198  || old->u.condition[i].left_coeff != new->u.condition[i].left_coeff  in need_condition_modifier()
    199  || old->u.condition[i].deadband != new->u.condition[i].deadband  in need_condition_modifier()
    200  || old->u.condition[i].center != new->u.condition[i].center;  in need_condition_modifier()
    210  struct ff_effect *old,  in need_magnitude_modifier() argument
    219  return old->u.constant.level != effect->u.constant.level;  in need_magnitude_modifier()
    226  static int need_envelope_modifier(struct iforce *iforce, struct ff_effect *old,  in need_envelope_modifier() argument
    [all …]
/drivers/gpu/drm/
D | drm_lock.c |
     62  unsigned int old, new, prev;  in drm_lock_take() local
     67  old = *lock;  in drm_lock_take()
     68  if (old & _DRM_LOCK_HELD)  in drm_lock_take()
     69  new = old | _DRM_LOCK_CONT;  in drm_lock_take()
     75  prev = cmpxchg(lock, old, new);  in drm_lock_take()
     76  } while (prev != old);  in drm_lock_take()
     79  if (_DRM_LOCKING_CONTEXT(old) == context) {  in drm_lock_take()
     80  if (old & _DRM_LOCK_HELD) {  in drm_lock_take()
    111  unsigned int old, new, prev;  in drm_lock_transfer() local
    116  old = *lock;  in drm_lock_transfer()
    [all …]
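The drm_lock_take() hits above show the classic compare-and-swap retry loop: read the lock word, compute the desired new value from it, and loop until cmpxchg() confirms the word was not changed in between. A minimal userspace sketch of the same pattern with C11 atomics (the flag bits and helper name are illustrative, not the DRM ones):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define LOCK_HELD 0x80000000u   /* illustrative flag bits, not _DRM_LOCK_HELD/_CONT */
    #define LOCK_CONT 0x40000000u

    /* Take the lock for `context`, or flag contention if someone else holds it.
     * Returns true only if this call became the holder. */
    static bool lock_take(_Atomic unsigned int *lock, unsigned int context)
    {
        unsigned int old, new;

        do {
            old = atomic_load(lock);
            if (old & LOCK_HELD)
                new = old | LOCK_CONT;       /* already held: just mark contention */
            else
                new = LOCK_HELD | context;   /* free: claim it for this context */
            /* retry until the swap happens against the exact value we read */
        } while (!atomic_compare_exchange_weak(lock, &old, new));

        return !(old & LOCK_HELD);           /* we acquired it iff it was free before */
    }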
/drivers/hid/usbhid/
D | hid-pidff.c |
    262  struct ff_envelope *old)  in pidff_needs_set_envelope() argument
    264  return envelope->attack_level != old->attack_level ||  in pidff_needs_set_envelope()
    265  envelope->fade_level != old->fade_level ||  in pidff_needs_set_envelope()
    266  envelope->attack_length != old->attack_length ||  in pidff_needs_set_envelope()
    267  envelope->fade_length != old->fade_length;  in pidff_needs_set_envelope()
    289  struct ff_effect *old)  in pidff_needs_set_constant() argument
    291  return effect->u.constant.level != old->u.constant.level;  in pidff_needs_set_constant()
    324  struct ff_effect *old)  in pidff_needs_set_effect() argument
    326  return effect->replay.length != old->replay.length ||  in pidff_needs_set_effect()
    327  effect->trigger.interval != old->trigger.interval ||  in pidff_needs_set_effect()
    [all …]
D | usbkbd.c |
     83  unsigned char old[8];  member
    122  if (kbd->old[i] > 3 && memscan(kbd->new + 2, kbd->old[i], 6) == kbd->new + 8) {  in usb_kbd_irq()
    123  if (usb_kbd_keycode[kbd->old[i]])  in usb_kbd_irq()
    124  input_report_key(kbd->dev, usb_kbd_keycode[kbd->old[i]], 0);  in usb_kbd_irq()
    128  kbd->old[i]);  in usb_kbd_irq()
    131  if (kbd->new[i] > 3 && memscan(kbd->old + 2, kbd->new[i], 6) == kbd->old + 8) {  in usb_kbd_irq()
    143  memcpy(kbd->old, kbd->new, 8);  in usb_kbd_irq()
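usb_kbd_irq() keeps the previous boot-protocol report in kbd->old and diffs it against the freshly received one: a keycode found only in the old report is released, one found only in the new report is pressed, and the new report then replaces the old. A rough userspace sketch of that diff, with memchr() standing in for the kernel's memscan() and the usual boot-keyboard layout (keycodes in bytes 2..7, values 0..3 reserved):

    #include <stdio.h>
    #include <string.h>

    /* Stand-in for input_report_key(): just print the transition. */
    static void report_key(unsigned char keycode, int pressed)
    {
        printf("key %u %s\n", keycode, pressed ? "down" : "up");
    }

    /* Diff two 8-byte boot-keyboard reports and remember the new one. */
    static void kbd_diff(unsigned char old[8], const unsigned char new[8])
    {
        for (int i = 2; i < 8; i++) {
            /* was down before, absent from the new report: release */
            if (old[i] > 3 && !memchr(new + 2, old[i], 6))
                report_key(old[i], 0);
            /* present in the new report, absent before: press */
            if (new[i] > 3 && !memchr(old + 2, new[i], 6))
                report_key(new[i], 1);
        }
        memcpy(old, new, 8);   /* the new report becomes next time's reference */
    }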
/drivers/dma-buf/
D | dma-resv.c |
    184  struct dma_resv_list *old, *new;  in dma_resv_reserve_fences() local
    189  old = dma_resv_fences_list(obj);  in dma_resv_reserve_fences()
    190  if (old && old->max_fences) {  in dma_resv_reserve_fences()
    191  if ((old->num_fences + num_fences) <= old->max_fences)  in dma_resv_reserve_fences()
    193  max = max(old->num_fences + num_fences, old->max_fences * 2);  in dma_resv_reserve_fences()
    208  for (i = 0, j = 0, k = max; i < (old ? old->num_fences : 0); ++i) {  in dma_resv_reserve_fences()
    212  dma_resv_list_entry(old, i, obj, &fence, &usage);  in dma_resv_reserve_fences()
    230  if (!old)  in dma_resv_reserve_fences()
    241  kfree_rcu(old, rcu);  in dma_resv_reserve_fences()
    284  struct dma_fence *old;  in dma_resv_add_fence() local
    [all …]
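dma_resv_reserve_fences() grows the fence list by allocating a larger array, copying the surviving entries over from old, publishing the new list, and only then releasing old via kfree_rcu() so concurrent readers stay safe. Stripped of the RCU and fence bookkeeping, the grow-and-copy shape is roughly the following (plain malloc/free and made-up names, so no reader protection here):

    #include <stdlib.h>
    #include <string.h>

    struct entry_list {
        unsigned int num_entries;
        unsigned int max_entries;
        void *entries[];               /* flexible array of payload pointers */
    };

    /* Ensure room for `extra` more entries; returns the list to use from now
     * on (possibly a fresh allocation), or NULL on allocation failure. */
    static struct entry_list *list_reserve(struct entry_list *old, unsigned int extra)
    {
        struct entry_list *new;
        unsigned int max;

        if (old && old->num_entries + extra <= old->max_entries)
            return old;                            /* enough room already */

        /* grow to what is needed, but at least double the old capacity */
        max = old ? old->num_entries + extra : extra;
        if (old && old->max_entries * 2 > max)
            max = old->max_entries * 2;

        new = malloc(sizeof(*new) + max * sizeof(new->entries[0]));
        if (!new)
            return NULL;

        new->max_entries = max;
        new->num_entries = old ? old->num_entries : 0;
        if (old)
            memcpy(new->entries, old->entries,
                   old->num_entries * sizeof(old->entries[0]));

        free(old);   /* the driver defers this with kfree_rcu() instead */
        return new;
    }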
/drivers/mtd/
D | mtd_blkdevs.c |
    411  int del_mtd_blktrans_dev(struct mtd_blktrans_dev *old)  in del_mtd_blktrans_dev() argument
    417  if (old->disk_attributes)  in del_mtd_blktrans_dev()
    418  sysfs_remove_group(&disk_to_dev(old->disk)->kobj,  in del_mtd_blktrans_dev()
    419  old->disk_attributes);  in del_mtd_blktrans_dev()
    422  del_gendisk(old->disk);  in del_mtd_blktrans_dev()
    425  spin_lock_irqsave(&old->queue_lock, flags);  in del_mtd_blktrans_dev()
    426  old->rq->queuedata = NULL;  in del_mtd_blktrans_dev()
    427  spin_unlock_irqrestore(&old->queue_lock, flags);  in del_mtd_blktrans_dev()
    430  blk_mq_freeze_queue(old->rq);  in del_mtd_blktrans_dev()
    431  blk_mq_quiesce_queue(old->rq);  in del_mtd_blktrans_dev()
    [all …]
/drivers/gpu/drm/i915/display/
D | intel_fifo_underrun.c |
    115  bool enable, bool old)  in i9xx_set_fifo_underrun_reporting() argument
    129  if (old && intel_de_read(dev_priv, reg) & PIPE_FIFO_UNDERRUN_STATUS)  in i9xx_set_fifo_underrun_reporting()
    168  bool old)  in ivb_set_fifo_underrun_reporting() argument
    182  if (old &&  in ivb_set_fifo_underrun_reporting()
    257  bool enable, bool old)  in cpt_set_fifo_underrun_reporting() argument
    272  if (old && intel_de_read(dev_priv, SERR_INT) &  in cpt_set_fifo_underrun_reporting()
    286  bool old;  in __intel_set_cpu_fifo_underrun_reporting() local
    290  old = !crtc->cpu_fifo_underrun_disabled;  in __intel_set_cpu_fifo_underrun_reporting()
    294  i9xx_set_fifo_underrun_reporting(dev, pipe, enable, old);  in __intel_set_cpu_fifo_underrun_reporting()
    298  ivb_set_fifo_underrun_reporting(dev, pipe, enable, old);  in __intel_set_cpu_fifo_underrun_reporting()
    [all …]
/drivers/media/common/
D | cx2341x.c |
    1025  const struct cx2341x_mpeg_params *old,  in cx2341x_update() argument
    1040  if (!old ||  in cx2341x_update()
    1041  CMP_FIELD(old, new, is_50hz)) {  in cx2341x_update()
    1048  if (!old ||  in cx2341x_update()
    1049  CMP_FIELD(old, new, width) ||  in cx2341x_update()
    1050  CMP_FIELD(old, new, height) ||  in cx2341x_update()
    1051  CMP_FIELD(old, new, video_encoding)) {  in cx2341x_update()
    1064  if (!old ||  in cx2341x_update()
    1065  CMP_FIELD(old, new, stream_type)) {  in cx2341x_update()
    1071  if (!old ||  in cx2341x_update()
    [all …]
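cx2341x_update() only re-sends an encoder command when the relevant field differs between the old and new parameter sets, and the "!old ||" guard forces everything to be sent on the very first configuration when there is no previous state. CMP_FIELD is essentially a field-changed test; a hedged sketch of the idiom with a made-up parameter struct:

    #include <stdio.h>

    struct enc_params {
        unsigned int width;
        unsigned int height;
        unsigned int stream_type;
    };

    /* True when `field` differs between the old and new parameter sets. */
    #define CMP_FIELD(old, new, field) ((old)->field != (new)->field)

    static void apply_params(const struct enc_params *old, const struct enc_params *new)
    {
        /* With no old state (first call), every command group is sent. */
        if (!old || CMP_FIELD(old, new, width) || CMP_FIELD(old, new, height))
            printf("send resolution command: %ux%u\n", new->width, new->height);

        if (!old || CMP_FIELD(old, new, stream_type))
            printf("send stream type command: %u\n", new->stream_type);
    }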
/drivers/firewire/
D | core-iso.c |
    207  int try, new, old = allocate ? BANDWIDTH_AVAILABLE_INITIAL : 0;  in manage_bandwidth() local
    216  new = allocate ? old - bandwidth : old + bandwidth;  in manage_bandwidth()
    220  data[0] = cpu_to_be32(old);  in manage_bandwidth()
    231  if (be32_to_cpup(data) == old)  in manage_bandwidth()
    234  old = be32_to_cpup(data);  in manage_bandwidth()
    245  __be32 bit, all, old;  in manage_channel() local
    249  old = all = allocate ? cpu_to_be32(~0) : 0;  in manage_channel()
    258  if ((old & bit) != (all & bit))  in manage_channel()
    261  data[0] = old;  in manage_channel()
    262  data[1] = old ^ bit;  in manage_channel()
    [all …]
/drivers/nvmem/
D | qfprom.c |
    140  const struct qfprom_touched_values *old)  in qfprom_disable_fuse_blowing() argument
    144  writel(old->timer_val, priv->qfpconf + QFPROM_BLOW_TIMER_OFFSET);  in qfprom_disable_fuse_blowing()
    145  writel(old->accel_val, priv->qfpconf + QFPROM_ACCEL_OFFSET);  in qfprom_disable_fuse_blowing()
    164  ret = clk_set_rate(priv->secclk, old->clk_rate);  in qfprom_disable_fuse_blowing()
    185  struct qfprom_touched_values *old)  in qfprom_enable_fuse_blowing() argument
    196  old->clk_rate = clk_get_rate(priv->secclk);  in qfprom_enable_fuse_blowing()
    227  old->timer_val = readl(priv->qfpconf + QFPROM_BLOW_TIMER_OFFSET);  in qfprom_enable_fuse_blowing()
    228  old->accel_val = readl(priv->qfpconf + QFPROM_ACCEL_OFFSET);  in qfprom_enable_fuse_blowing()
    239  clk_set_rate(priv->secclk, old->clk_rate);  in qfprom_enable_fuse_blowing()
    260  struct qfprom_touched_values old;  in qfprom_reg_write() local
    [all …]
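The qfprom driver snapshots every register and clock setting it is about to disturb into a qfprom_touched_values before enabling fuse blowing, and qfprom_disable_fuse_blowing() later writes those saved values straight back. Reduced to ordinary variables (the "registers" below are placeholders, not the driver's MMIO readl/writel), the save/restore idiom looks like:

    /* Placeholder registers standing in for the MMIO block. */
    static unsigned int blow_timer_reg;
    static unsigned int accel_reg;

    struct touched_values {
        unsigned int timer_val;
        unsigned int accel_val;
    };

    /* Save the current settings into *old, then program the special values. */
    static void enable_special_mode(struct touched_values *old)
    {
        old->timer_val = blow_timer_reg;
        old->accel_val = accel_reg;
        blow_timer_reg = 0x1234;   /* illustrative settings only */
        accel_reg = 0x5678;
    }

    /* Put everything back exactly as it was before enable_special_mode(). */
    static void disable_special_mode(const struct touched_values *old)
    {
        blow_timer_reg = old->timer_val;
        accel_reg = old->accel_val;
    }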
/drivers/gpu/drm/msm/disp/dpu1/
D | dpu_core_perf.c |
    307  struct dpu_core_perf_params *new, *old;  in dpu_core_perf_crtc_update() local
    328  old = &dpu_crtc->cur_perf;  in dpu_core_perf_crtc_update()
    339  if ((params_changed && ((new->bw_ctl > old->bw_ctl) ||  in dpu_core_perf_crtc_update()
    340  (new->max_per_pipe_ib > old->max_per_pipe_ib))) ||  in dpu_core_perf_crtc_update()
    341  (!params_changed && ((new->bw_ctl < old->bw_ctl) ||  in dpu_core_perf_crtc_update()
    342  (new->max_per_pipe_ib < old->max_per_pipe_ib)))) {  in dpu_core_perf_crtc_update()
    345  new->bw_ctl, old->bw_ctl);  in dpu_core_perf_crtc_update()
    346  old->bw_ctl = new->bw_ctl;  in dpu_core_perf_crtc_update()
    347  old->max_per_pipe_ib = new->max_per_pipe_ib;  in dpu_core_perf_crtc_update()
    351  if ((params_changed && new->core_clk_rate > old->core_clk_rate) ||  in dpu_core_perf_crtc_update()
    [all …]
/drivers/net/wireless/ath/ath10k/
D | wow.c |
     81  const struct cfg80211_pkt_pattern *old)  in ath10k_wow_convert_8023_to_80211() argument
     88  int total_len = old->pkt_offset + old->pattern_len;  in ath10k_wow_convert_8023_to_80211()
    105  memcpy(hdr_8023_pattern + old->pkt_offset,  in ath10k_wow_convert_8023_to_80211()
    106  old->pattern, ETH_HLEN - old->pkt_offset);  in ath10k_wow_convert_8023_to_80211()
    107  memcpy(hdr_8023_bit_mask + old->pkt_offset,  in ath10k_wow_convert_8023_to_80211()
    108  old->mask, ETH_HLEN - old->pkt_offset);  in ath10k_wow_convert_8023_to_80211()
    127  if (old->pkt_offset < ETH_ALEN)  in ath10k_wow_convert_8023_to_80211()
    128  new->pkt_offset = old->pkt_offset +  in ath10k_wow_convert_8023_to_80211()
    130  else if (old->pkt_offset < offsetof(struct ethhdr, h_proto))  in ath10k_wow_convert_8023_to_80211()
    131  new->pkt_offset = old->pkt_offset +  in ath10k_wow_convert_8023_to_80211()
    [all …]
/drivers/scsi/sym53c8xx_2/
D | sym_fw.c |
    350  u32 opcode, new, old, tmp1, tmp2;  in sym_fw_bind_script() local
    501  old = *cur;  in sym_fw_bind_script()
    503  switch (old & RELOC_MASK) {  in sym_fw_bind_script()
    505  new = (old & ~RELOC_MASK) + np->mmio_ba;  in sym_fw_bind_script()
    508  new = (old & ~RELOC_MASK) + np->scripta_ba;  in sym_fw_bind_script()
    511  new = (old & ~RELOC_MASK) + np->scriptb_ba;  in sym_fw_bind_script()
    514  new = (old & ~RELOC_MASK) + np->hcb_ba;  in sym_fw_bind_script()
    522  if (old == 0) {  in sym_fw_bind_script()
    523  new = old;  in sym_fw_bind_script()
    530  "weird relocation %x\n", old);  in sym_fw_bind_script()
/drivers/gpu/drm/i915/
D | i915_hwmon.h |
     18  void i915_hwmon_power_max_disable(struct drm_i915_private *i915, bool *old);
     19  void i915_hwmon_power_max_restore(struct drm_i915_private *i915, bool old);
     23  static inline void i915_hwmon_power_max_disable(struct drm_i915_private *i915, bool *old) { };  in i915_hwmon_power_max_disable() argument
     24  static inline void i915_hwmon_power_max_restore(struct drm_i915_private *i915, bool old) { };  in i915_hwmon_power_max_restore() argument
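The i915_hwmon declarations show a disable/restore pair: the disable call hands the previous state back through a bool *old out-parameter, and the restore call consumes that value later. A tiny sketch of the calling convention with generic names (not the i915 functions):

    #include <stdbool.h>

    static bool limit_enabled = true;

    /* Turn the limit off, remembering in *old whether it was on. */
    static void limit_disable(bool *old)
    {
        *old = limit_enabled;
        limit_enabled = false;
    }

    /* Return the limit to the state captured by limit_disable(). */
    static void limit_restore(bool old)
    {
        limit_enabled = old;
    }

    /* Typical use: limit_disable(&old); ...critical work...; limit_restore(old); */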
/drivers/gpu/drm/arm/display/komeda/
D | komeda_crtc.c |
    248  struct drm_crtc_state *old)  in komeda_crtc_do_flush() argument
    264  komeda_pipeline_update(master, old->state);  in komeda_crtc_do_flush()
    267  komeda_pipeline_update(slave, old->state);  in komeda_crtc_do_flush()
    281  struct drm_crtc_state *old = drm_atomic_get_old_crtc_state(state,  in komeda_crtc_atomic_enable() local
    287  komeda_crtc_do_flush(crtc, old);  in komeda_crtc_atomic_enable()
    329  struct drm_crtc_state *old = drm_atomic_get_old_crtc_state(state,  in komeda_crtc_atomic_disable() local
    332  struct komeda_crtc_state *old_st = to_kcrtc_st(old);  in komeda_crtc_atomic_disable()
    343  komeda_pipeline_disable(slave, old->state);  in komeda_crtc_atomic_disable()
    346  needs_phase2 = komeda_pipeline_disable(master, old->state);  in komeda_crtc_atomic_disable()
    373  komeda_pipeline_disable(kcrtc->master, old->state);  in komeda_crtc_atomic_disable()
    [all …]
/drivers/s390/scsi/
D | zfcp_scsi.c |
    556  struct fsf_qtcb_bottom_port *old)  in zfcp_scsi_adjust_fc_host_stats() argument
    559  data->seconds_since_last_reset - old->seconds_since_last_reset;  in zfcp_scsi_adjust_fc_host_stats()
    560  fc_stats->tx_frames = data->tx_frames - old->tx_frames;  in zfcp_scsi_adjust_fc_host_stats()
    561  fc_stats->tx_words = data->tx_words - old->tx_words;  in zfcp_scsi_adjust_fc_host_stats()
    562  fc_stats->rx_frames = data->rx_frames - old->rx_frames;  in zfcp_scsi_adjust_fc_host_stats()
    563  fc_stats->rx_words = data->rx_words - old->rx_words;  in zfcp_scsi_adjust_fc_host_stats()
    564  fc_stats->lip_count = data->lip - old->lip;  in zfcp_scsi_adjust_fc_host_stats()
    565  fc_stats->nos_count = data->nos - old->nos;  in zfcp_scsi_adjust_fc_host_stats()
    566  fc_stats->error_frames = data->error_frames - old->error_frames;  in zfcp_scsi_adjust_fc_host_stats()
    567  fc_stats->dumped_frames = data->dumped_frames - old->dumped_frames;  in zfcp_scsi_adjust_fc_host_stats()
    [all …]
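zfcp_scsi_adjust_fc_host_stats() reports each counter relative to a saved baseline: the value exposed is the current hardware counter minus the one recorded at the last statistics reset. A minimal sketch of that baseline subtraction (field names invented, only a few counters shown):

    #include <stdint.h>

    struct hw_counters {
        uint64_t tx_frames;
        uint64_t rx_frames;
        uint64_t error_frames;
    };

    /* Fill *stats with how much each counter has grown since *old was captured. */
    static void adjust_stats(struct hw_counters *stats,
                             const struct hw_counters *data,
                             const struct hw_counters *old)
    {
        stats->tx_frames = data->tx_frames - old->tx_frames;
        stats->rx_frames = data->rx_frames - old->rx_frames;
        stats->error_frames = data->error_frames - old->error_frames;
    }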
/drivers/xen/events/
D | events_fifo.c |
    229  event_word_t new, old, w;  in clear_masked_cond() local
    240  old = w & ~(1 << EVTCHN_FIFO_BUSY);  in clear_masked_cond()
    241  new = old & ~(1 << EVTCHN_FIFO_MASKED);  in clear_masked_cond()
    242  w = sync_cmpxchg(word, old, new);  in clear_masked_cond()
    243  } while (w != old);  in clear_masked_cond()
    262  event_word_t new, old, w;  in clear_linked() local
    267  old = w;  in clear_linked()
    270  } while ((w = sync_cmpxchg(word, old, new)) != old);  in clear_linked()
/drivers/net/ethernet/sfc/
D | tc_conntrack.c |
    283  struct efx_tc_ct_entry *conn, *old;  in efx_tc_ct_replace() local
    298  old = rhashtable_lookup_get_insert_fast(&efx->tc->ct_ht,  in efx_tc_ct_replace()
    301  if (IS_ERR(old)) {  in efx_tc_ct_replace()
    302  rc = PTR_ERR(old);  in efx_tc_ct_replace()
    304  } else if (old) {  in efx_tc_ct_replace()
    361  if (!old)  in efx_tc_ct_replace()
    475  struct efx_tc_ct_zone *ct_zone, *old;  in efx_tc_ct_register_zone() local
    482  old = rhashtable_lookup_get_insert_fast(&efx->tc->ct_zone_ht,  in efx_tc_ct_register_zone()
    485  if (old) {  in efx_tc_ct_register_zone()
    488  if (IS_ERR(old)) /* oh dear, it's actually an error */  in efx_tc_ct_register_zone()
    [all …]
D | tc.c |
    126  struct efx_tc_mac_pedit_action *ped, *old;  in efx_tc_flower_get_mac() local
    133  old = rhashtable_lookup_get_insert_fast(&efx->tc->mac_ht,  in efx_tc_flower_get_mac()
    136  if (old) {  in efx_tc_flower_get_mac()
    139  if (IS_ERR(old)) /* oh dear, it's actually an error */  in efx_tc_flower_get_mac()
    140  return ERR_CAST(old);  in efx_tc_flower_get_mac()
    141  if (!refcount_inc_not_zero(&old->ref))  in efx_tc_flower_get_mac()
    144  return old;  in efx_tc_flower_get_mac()
    501  struct efx_tc_encap_match *encap, *old, *pseudo = NULL;  in efx_tc_flower_record_encap_match() local
    599  old = rhashtable_lookup_get_insert_fast(&efx->tc->encap_match_ht,  in efx_tc_flower_record_encap_match()
    602  if (old) {  in efx_tc_flower_record_encap_match()
    [all …]
/drivers/net/wireless/ath/ath11k/
D | wow.c |
    152  const struct cfg80211_pkt_pattern *old)  in ath11k_wow_convert_8023_to_80211() argument
    159  int total_len = old->pkt_offset + old->pattern_len;  in ath11k_wow_convert_8023_to_80211()
    176  memcpy(hdr_8023_pattern + old->pkt_offset,  in ath11k_wow_convert_8023_to_80211()
    177  old->pattern, ETH_HLEN - old->pkt_offset);  in ath11k_wow_convert_8023_to_80211()
    178  memcpy(hdr_8023_bit_mask + old->pkt_offset,  in ath11k_wow_convert_8023_to_80211()
    179  old->mask, ETH_HLEN - old->pkt_offset);  in ath11k_wow_convert_8023_to_80211()
    198  if (old->pkt_offset < ETH_ALEN)  in ath11k_wow_convert_8023_to_80211()
    199  new->pkt_offset = old->pkt_offset +  in ath11k_wow_convert_8023_to_80211()
    201  else if (old->pkt_offset < offsetof(struct ethhdr, h_proto))  in ath11k_wow_convert_8023_to_80211()
    202  new->pkt_offset = old->pkt_offset +  in ath11k_wow_convert_8023_to_80211()
    [all …]
/drivers/md/bcache/
D | io.c |
     97  unsigned int old = count;  in bch_count_io_errors() local
    105  count = atomic_cmpxchg(&ca->io_count, old, new);  in bch_count_io_errors()
    107  if (count == old) {  in bch_count_io_errors()
    112  old = errors;  in bch_count_io_errors()
    115  old, new);  in bch_count_io_errors()
    116  } while (old != errors);  in bch_count_io_errors()
/drivers/hwmon/
D | bt1-pvt.c |
    119  u32 old;  in pvt_update() local
    121  old = readl_relaxed(reg);  in pvt_update()
    122  writel((old & ~mask) | (data & mask), reg);  in pvt_update()
    124  return old & mask;  in pvt_update()
    136  u32 old;  in pvt_set_mode() local
    140  old = pvt_update(pvt->regs + PVT_CTRL, PVT_CTRL_EN, 0);  in pvt_set_mode()
    142  mode | old);  in pvt_set_mode()
    154  u32 old;  in pvt_set_trim() local
    158  old = pvt_update(pvt->regs + PVT_CTRL, PVT_CTRL_EN, 0);  in pvt_set_trim()
    160  trim | old);  in pvt_set_trim()
    [all …]
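pvt_update() is a masked read-modify-write helper: it reads the register, replaces only the bits selected by the mask, and returns the previous value of those bits, which is what lets pvt_set_mode() and pvt_set_trim() temporarily clear the enable bit and later OR the saved state back in. A sketch of the helper over a plain variable rather than an MMIO register (bit layout invented):

    #include <stdint.h>

    #define CTRL_EN   0x1u   /* illustrative enable bit */
    #define CTRL_MODE 0xeu   /* illustrative mode field, bits 1..3 */

    /* Replace the masked bits of *reg with `data`, returning their old value. */
    static uint32_t reg_update(uint32_t *reg, uint32_t mask, uint32_t data)
    {
        uint32_t old = *reg;

        *reg = (old & ~mask) | (data & mask);
        return old & mask;
    }

    static uint32_t ctrl;

    static void set_mode(uint32_t mode_bits)
    {
        /* Pause the block by clearing the enable bit, remembering its state... */
        uint32_t old = reg_update(&ctrl, CTRL_EN, 0);

        reg_update(&ctrl, CTRL_MODE, mode_bits);   /* reprogram the mode field */

        /* ...then put the enable bit back exactly as it was. */
        reg_update(&ctrl, CTRL_EN, old);
    }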
/drivers/tty/vt/
D | consolemap.c |
    543  struct uni_pagedict *new, *old = *vc->uni_pagedict_loc;  in con_allocate_new() local
    552  if (old)  in con_allocate_new()
    553  old->refcount--;  in con_allocate_new()
    561  struct uni_pagedict *old = *vc->uni_pagedict_loc;  in con_do_clear_unimap() local
    563  if (!old || old->refcount > 1)  in con_do_clear_unimap()
    566  old->sum = 0;  in con_do_clear_unimap()
    567  con_release_unimap(old);  in con_do_clear_unimap()
    582  struct uni_pagedict *old)  in con_unshare_unimap() argument
    601  u16 **dir = old->uni_pgdir[d];  in con_unshare_unimap()
    625  old->refcount++;  in con_unshare_unimap()
    [all …]
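The consolemap hits outline a reference-counted, copy-on-write dictionary: con_do_clear_unimap() only clears in place when the console holds the sole reference (refcount <= 1), and con_unshare_unimap() otherwise builds a private copy while adjusting the reference counts. The refcount decision, reduced to a toy structure with made-up names:

    #include <stdlib.h>
    #include <string.h>

    struct dict {
        int refcount;
        int table[256];
    };

    /* Return a dict the caller may modify freely: either the one passed in
     * (sole owner) or a private copy, dropping our reference on the shared one. */
    static struct dict *dict_unshare(struct dict *old)
    {
        struct dict *new;

        if (old->refcount <= 1)
            return old;                 /* nobody else is using it */

        new = malloc(sizeof(*new));
        if (!new)
            return NULL;

        memcpy(new->table, old->table, sizeof(new->table));
        new->refcount = 1;
        old->refcount--;                /* we no longer reference the shared copy */
        return new;
    }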
/drivers/gpu/host1x/hw/
D | syncpt_hw.c |
     56  u32 old, live;  in syncpt_load() local
     60  old = host1x_syncpt_read_min(sp);  in syncpt_load()
     62  } while ((u32)atomic_cmpxchg(&sp->min_val, old, live) != old);  in syncpt_load()
/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_fence.c |
    188  struct dma_fence *old;  in amdgpu_fence_emit() local
    191  old = dma_fence_get_rcu_safe(ptr);  in amdgpu_fence_emit()
    194  if (old) {  in amdgpu_fence_emit()
    195  r = dma_fence_wait(old, false);  in amdgpu_fence_emit()
    196  dma_fence_put(old);  in amdgpu_fence_emit()
    710  struct dma_fence *old, **ptr;  in amdgpu_fence_driver_clear_job_fences() local
    714  old = rcu_dereference_protected(*ptr, 1);  in amdgpu_fence_driver_clear_job_fences()
    715  if (old && old->ops == &amdgpu_job_fence_ops) {  in amdgpu_fence_driver_clear_job_fences()
    722  job = container_of(old, struct amdgpu_job, hw_fence);  in amdgpu_fence_driver_clear_job_fences()
    723  if (!job->base.s_fence && !dma_fence_is_signaled(old))  in amdgpu_fence_driver_clear_job_fences()
    [all …]