/drivers/gpu/drm/i915/gt/ |
D | intel_timeline.c |
      37  struct intel_timeline *tl =  in __timeline_retire() local
      38  container_of(active, typeof(*tl), active);  in __timeline_retire()
      40  i915_vma_unpin(tl->hwsp_ggtt);  in __timeline_retire()
      41  intel_timeline_put(tl);  in __timeline_retire()
      46  struct intel_timeline *tl =  in __timeline_active() local
      47  container_of(active, typeof(*tl), active);  in __timeline_active()
      49  __i915_vma_pin(tl->hwsp_ggtt);  in __timeline_active()
      50  intel_timeline_get(tl);  in __timeline_active()
     168  struct intel_timeline *tl;  in intel_timeline_create_from_engine() local
     170  tl = __intel_timeline_create(engine->gt, hwsp, offset);  in intel_timeline_create_from_engine()
     [all …]
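The __timeline_retire()/__timeline_active() pair above is the classic embedded-member callback pattern: the callback receives only a pointer to the embedded i915_active member, container_of() recovers the enclosing timeline, and activation takes a reference that the matching retire drops. A minimal sketch of that pattern, assuming hypothetical my_timeline/my_active types in place of the i915 ones:

    #include <linux/kernel.h>       /* container_of() */
    #include <linux/kref.h>
    #include <linux/slab.h>

    struct my_active {                      /* stand-in for struct i915_active */
            int state;
    };

    struct my_timeline {
            struct kref kref;
            struct my_active active;        /* embedded, not a pointer */
    };

    static void my_timeline_free(struct kref *kref)
    {
            kfree(container_of(kref, struct my_timeline, kref));
    }

    /* activation callback: pin resources and hold a reference */
    static void my_timeline_active(struct my_active *active)
    {
            struct my_timeline *tl = container_of(active, struct my_timeline, active);

            kref_get(&tl->kref);
    }

    /* retire callback: drop exactly what the activation took */
    static void my_timeline_retire(struct my_active *active)
    {
            struct my_timeline *tl = container_of(active, struct my_timeline, active);

            kref_put(&tl->kref, my_timeline_free);
    }

Embedding the member (rather than pointing at it) is what makes container_of() work: the offset of .active inside the struct is a compile-time constant.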
|
D | selftest_timeline.c |
      25  static struct page *hwsp_page(struct intel_timeline *tl)  in hwsp_page() argument
      27  struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;  in hwsp_page()
      33  static unsigned long hwsp_cacheline(struct intel_timeline *tl)  in hwsp_cacheline() argument
      35  unsigned long address = (unsigned long)page_address(hwsp_page(tl));  in hwsp_cacheline()
      37  return (address + offset_in_page(tl->hwsp_offset)) / TIMELINE_SEQNO_BYTES;  in hwsp_cacheline()
      40  static int selftest_tl_pin(struct intel_timeline *tl)  in selftest_tl_pin() argument
      47  err = i915_gem_object_lock(tl->hwsp_ggtt->obj, &ww);  in selftest_tl_pin()
      49  err = intel_timeline_pin(tl, &ww);  in selftest_tl_pin()
      77  struct intel_timeline *tl)  in __mock_hwsp_record() argument
      79  tl = xchg(&state->history[idx], tl);  in __mock_hwsp_record()
     [all …]
|
D | intel_gt_requests.c |
      17  static bool retire_requests(struct intel_timeline *tl)  in retire_requests() argument
      21  list_for_each_entry_safe(rq, rn, &tl->requests, link)  in retire_requests()
      26  return !i915_active_fence_isset(&tl->last_request);  in retire_requests()
      64  struct intel_timeline *tl = xchg(&engine->retire, NULL);  in engine_retire() local
      67  struct intel_timeline *next = xchg(&tl->retire, NULL);  in engine_retire()
      77  if (mutex_trylock(&tl->mutex)) {  in engine_retire()
      78  retire_requests(tl);  in engine_retire()
      79  mutex_unlock(&tl->mutex);  in engine_retire()
      81  intel_timeline_put(tl);  in engine_retire()
      84  tl = ptr_mask_bits(next, 1);  in engine_retire()
     [all …]
|
D | intel_timeline.h |
      45  static inline int __intel_timeline_sync_set(struct intel_timeline *tl,  in __intel_timeline_sync_set() argument
      48  return i915_syncmap_set(&tl->sync, context, seqno);  in __intel_timeline_sync_set()
      51  static inline int intel_timeline_sync_set(struct intel_timeline *tl,  in intel_timeline_sync_set() argument
      54  return __intel_timeline_sync_set(tl, fence->context, fence->seqno);  in intel_timeline_sync_set()
      57  static inline bool __intel_timeline_sync_is_later(struct intel_timeline *tl,  in __intel_timeline_sync_is_later() argument
      60  return i915_syncmap_is_later(&tl->sync, context, seqno);  in __intel_timeline_sync_is_later()
      63  static inline bool intel_timeline_sync_is_later(struct intel_timeline *tl,  in intel_timeline_sync_is_later() argument
      66  return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno);  in intel_timeline_sync_is_later()
      69  void __intel_timeline_pin(struct intel_timeline *tl);
      70  int intel_timeline_pin(struct intel_timeline *tl, struct i915_gem_ww_ctx *ww);
     [all …]
|
D | selftest_context.c |
      17  struct intel_timeline *tl = i915_request_timeline(rq);  in request_sync() local
      21  intel_timeline_get(tl);  in request_sync()
      35  lockdep_unpin_lock(&tl->mutex, rq->cookie);  in request_sync()
      36  mutex_unlock(&tl->mutex);  in request_sync()
      39  intel_timeline_put(tl);  in request_sync()
      46  struct intel_timeline *tl = ce->timeline;  in context_sync() local
      49  mutex_lock(&tl->mutex);  in context_sync()
      54  if (list_empty(&tl->requests))  in context_sync()
      57  rq = list_last_entry(&tl->requests, typeof(*rq), link);  in context_sync()
      68  mutex_unlock(&tl->mutex);  in context_sync()
|
D | intel_context.h |
     193  struct intel_timeline *tl = ce->timeline;  in intel_context_timeline_lock() local
     196  err = mutex_lock_interruptible(&tl->mutex);  in intel_context_timeline_lock()
     200  return tl;  in intel_context_timeline_lock()
     203  static inline void intel_context_timeline_unlock(struct intel_timeline *tl)  in intel_context_timeline_unlock() argument
     204  __releases(&tl->mutex)  in intel_context_timeline_unlock()
     206  mutex_unlock(&tl->mutex);  in intel_context_timeline_unlock()
|
D | mock_engine.c |
      16  static int mock_timeline_pin(struct intel_timeline *tl)  in mock_timeline_pin() argument
      20  if (WARN_ON(!i915_gem_object_trylock(tl->hwsp_ggtt->obj)))  in mock_timeline_pin()
      23  err = intel_timeline_pin_map(tl);  in mock_timeline_pin()
      24  i915_gem_object_unlock(tl->hwsp_ggtt->obj);  in mock_timeline_pin()
      28  atomic_inc(&tl->pin_count);  in mock_timeline_pin()
      32  static void mock_timeline_unpin(struct intel_timeline *tl)  in mock_timeline_unpin() argument
      34  GEM_BUG_ON(!atomic_read(&tl->pin_count));  in mock_timeline_unpin()
      35  atomic_dec(&tl->pin_count);  in mock_timeline_unpin()
|
D | intel_engine_cs.c |
    1384  struct intel_timeline *tl;  in get_timeline() local
    1395  tl = rcu_dereference(rq->timeline);  in get_timeline()
    1396  if (!kref_get_unless_zero(&tl->kref))  in get_timeline()
    1397  tl = NULL;  in get_timeline()
    1400  return tl;  in get_timeline()
    1408  struct intel_timeline *tl = get_timeline(rq);  in print_ring() local
    1413  tl ? tl->hwsp_offset : 0,  in print_ring()
    1418  if (tl)  in print_ring()
    1419  intel_timeline_put(tl);  in print_ring()
    1690  struct intel_timeline *tl = get_timeline(rq);  in engine_dump_request() local
     [all …]
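get_timeline() shows the RCU weak-reference lookup idiom: dereference under rcu_read_lock(), then try to promote to a real reference with kref_get_unless_zero(), and treat a zero refcount as "object already dying". A sketch of the same idiom with a hypothetical my_obj type:

    #include <linux/kref.h>
    #include <linux/rcupdate.h>

    struct my_obj {
            struct kref kref;
    };

    static struct my_obj *my_obj_lookup(struct my_obj __rcu **slot)
    {
            struct my_obj *obj;

            rcu_read_lock();
            obj = rcu_dereference(*slot);
            /*
             * The object may already be on its way out; keep it only if
             * the refcount had not yet hit zero when we looked.
             */
            if (obj && !kref_get_unless_zero(&obj->kref))
                    obj = NULL;
            rcu_read_unlock();

            return obj;     /* caller must kref_put() when done */
    }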
|
D | intel_ring.c |
     190  struct intel_timeline *tl,  in wait_for_space() argument
     199  GEM_BUG_ON(list_empty(&tl->requests));  in wait_for_space()
     200  list_for_each_entry(target, &tl->requests, link) {  in wait_for_space()
     210  if (GEM_WARN_ON(&target->link == &tl->requests))  in wait_for_space()
|
D | intel_engine_pm.c |
     127  struct intel_timeline *tl,  in __queue_and_release_pm() argument
     146  if (!atomic_fetch_inc(&tl->active_count))  in __queue_and_release_pm()
     147  list_add_tail(&tl->link, &timelines->active_list);  in __queue_and_release_pm()
|
/drivers/net/ethernet/netronome/nfp/ |
D | nfp_net_debugdump.c |
      58  struct nfp_dump_tl tl;  member
      64  struct nfp_dump_tl tl;  member
      70  struct nfp_dump_tl tl;  member
      78  struct nfp_dump_tl tl;  member
      87  struct nfp_dump_tl tl;  member
      92  struct nfp_dump_tl tl;  member
     112  typedef int (*nfp_tlv_visit)(struct nfp_pf *pf, struct nfp_dump_tl *tl,
     120  struct nfp_dump_tl *tl;  in nfp_traverse_tlvs() local
     125  while (remaining >= sizeof(*tl)) {  in nfp_traverse_tlvs()
     126  tl = p;  in nfp_traverse_tlvs()
     [all …]
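nfp_traverse_tlvs() is the read side of a type-length-value walk: loop while the remaining byte count can still hold a header, validate the advertised length against what is left, visit, advance. A hedged, self-contained sketch with assumed field names and widths (the real struct nfp_dump_tl in nfp_net_debugdump.c uses big-endian fields and its own length semantics):

    #include <linux/errno.h>
    #include <linux/types.h>

    struct dump_tl {                /* hypothetical header layout */
            u32 type;
            u32 length;             /* payload bytes, excluding this header */
            u8 data[];
    };

    typedef int (*tlv_visit)(struct dump_tl *tl, void *param);

    static int traverse_tlvs(void *buf, u32 buf_len, void *param, tlv_visit cb)
    {
            void *p = buf;
            u32 remaining = buf_len;
            struct dump_tl *tl;
            int err;

            while (remaining >= sizeof(*tl)) {
                    tl = p;
                    /* written as a subtraction so the check cannot overflow */
                    if (tl->length > remaining - sizeof(*tl))
                            return -EINVAL;         /* truncated TLV */

                    err = cb(tl, param);
                    if (err)
                            return err;

                    p += sizeof(*tl) + tl->length;
                    remaining -= sizeof(*tl) + tl->length;
            }

            return 0;
    }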
|
/drivers/isdn/mISDN/ |
D | fsm.c |
      98  struct FsmTimer *ft = from_timer(ft, t, tl);  in FsmExpireTimer()
     114  timer_setup(&ft->tl, FsmExpireTimer, 0);  in mISDN_FsmInitTimer()
     126  del_timer(&ft->tl);  in mISDN_FsmDelTimer()
     141  if (timer_pending(&ft->tl)) {  in mISDN_FsmAddTimer()
     152  ft->tl.expires = jiffies + (millisec * HZ) / 1000;  in mISDN_FsmAddTimer()
     153  add_timer(&ft->tl);  in mISDN_FsmAddTimer()
     169  if (timer_pending(&ft->tl))  in mISDN_FsmRestartTimer()
     170  del_timer(&ft->tl);  in mISDN_FsmRestartTimer()
     173  ft->tl.expires = jiffies + (millisec * HZ) / 1000;  in mISDN_FsmRestartTimer()
     174  add_timer(&ft->tl);  in mISDN_FsmRestartTimer()
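The mISDN hits here and the s390 fsm.c hits below are all the same struct timer_list idiom: timer_setup() binds the callback once, from_timer() recovers the containing object inside the callback, and jiffies + (millisec * HZ) / 1000 is the hand-rolled equivalent of msecs_to_jiffies(). A minimal sketch with a hypothetical my_timer container; note that the del_timer()/add_timer() pair in mISDN_FsmRestartTimer() is what mod_timer() does in a single call:

    #include <linux/jiffies.h>
    #include <linux/printk.h>
    #include <linux/timer.h>

    struct my_timer {
            struct timer_list tl;   /* must be embedded for from_timer() */
            int event;
    };

    static void my_expire(struct timer_list *t)
    {
            /* recover the container from the timer_list member named "tl" */
            struct my_timer *mt = from_timer(mt, t, tl);

            pr_debug("my_timer: event %d fired\n", mt->event);
    }

    static void my_timer_init(struct my_timer *mt)
    {
            timer_setup(&mt->tl, my_expire, 0);     /* bind callback once */
    }

    static void my_timer_start(struct my_timer *mt, unsigned int millisec)
    {
            mt->tl.expires = jiffies + msecs_to_jiffies(millisec);
            add_timer(&mt->tl);
    }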
|
D | timerdev.c |
      39  struct timer_list tl;  member
      77  del_timer_sync(&timer->tl);  in mISDN_close()
     158  struct mISDNtimer *timer = from_timer(timer, t, tl);  in dev_expire_timer()
     183  timer_setup(&timer->tl, dev_expire_timer, 0);  in misdn_add_timer()
     189  timer->tl.expires = jiffies + ((HZ * (u_long)timeout) / 1000);  in misdn_add_timer()
     190  add_timer(&timer->tl);  in misdn_add_timer()
     207  del_timer_sync(&timer->tl);  in misdn_del_timer()
|
D | dsp_tones.c |
     462  struct dsp *dsp = from_timer(dsp, t, tone.tl);  in dsp_tone_timeout()
     481  tone->tl.expires = jiffies + (pat->seq[index] * HZ) / 8000;  in dsp_tone_timeout()
     482  add_timer(&tone->tl);  in dsp_tone_timeout()
     507  if (dsp->features.hfc_loops && timer_pending(&tonet->tl))  in dsp_tone()
     508  del_timer(&tonet->tl);  in dsp_tone()
     541  if (timer_pending(&tonet->tl))  in dsp_tone()
     542  del_timer(&tonet->tl);  in dsp_tone()
     543  tonet->tl.expires = jiffies + (pat->seq[0] * HZ) / 8000;  in dsp_tone()
     544  add_timer(&tonet->tl);  in dsp_tone()
|
/drivers/s390/net/ |
D | fsm.c |
     135  fsm_timer *this = from_timer(this, t, tl);  in fsm_expire_timer()
     151  timer_setup(&this->tl, fsm_expire_timer, 0);  in fsm_settimer()
     161  del_timer(&this->tl);  in fsm_deltimer()
     173  timer_setup(&this->tl, fsm_expire_timer, 0);  in fsm_addtimer()
     176  this->tl.expires = jiffies + (millisec * HZ) / 1000;  in fsm_addtimer()
     177  add_timer(&this->tl);  in fsm_addtimer()
     191  del_timer(&this->tl);  in fsm_modtimer()
     192  timer_setup(&this->tl, fsm_expire_timer, 0);  in fsm_modtimer()
     195  this->tl.expires = jiffies + (millisec * HZ) / 1000;  in fsm_modtimer()
     196  add_timer(&this->tl);  in fsm_modtimer()
|
/drivers/gpu/drm/i915/ |
D | i915_request.c |
     371  struct intel_timeline * const tl = i915_request_timeline(rq);  in i915_request_retire_upto() local
     378  tmp = list_first_entry(&tl->requests, typeof(*tmp), link);  in i915_request_retire_upto()
     775  static void retire_requests(struct intel_timeline *tl)  in retire_requests() argument
     779  list_for_each_entry_safe(rq, rn, &tl->requests, link)  in retire_requests()
     785  request_alloc_slow(struct intel_timeline *tl,  in request_alloc_slow() argument
     800  if (list_empty(&tl->requests))  in request_alloc_slow()
     804  rq = list_first_entry(&tl->requests, typeof(*rq), link);  in request_alloc_slow()
     813  rq = list_last_entry(&tl->requests, typeof(*rq), link);  in request_alloc_slow()
     817  retire_requests(tl);  in request_alloc_slow()
     840  struct intel_timeline *tl = ce->timeline;  in __i915_request_create() local
     [all …]
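This retire_requests() and the one in intel_gt_requests.c above both walk tl->requests with list_for_each_entry_safe(), because retiring a request unlinks it from the list mid-iteration. A sketch of that deletion-safe walk with hypothetical my_request types (the i915 versions retire conditionally rather than unconditionally freeing):

    #include <linux/list.h>
    #include <linux/slab.h>

    struct my_request {
            struct list_head link;
    };

    static void my_request_retire(struct my_request *rq)
    {
            list_del(&rq->link);    /* unlink from the timeline ... */
            kfree(rq);              /* ... then release it */
    }

    static void retire_all(struct list_head *requests)
    {
            struct my_request *rq, *rn;

            /*
             * The _safe variant caches the next node in rn before the
             * body runs, so my_request_retire() may free rq without
             * breaking the walk.
             */
            list_for_each_entry_safe(rq, rn, requests, link)
                    my_request_retire(rq);
    }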
|
/drivers/net/ethernet/qlogic/qed/ |
D | qed_vf.h |
      55  struct channel_tlv tl;  member
      62  struct channel_tlv tl;  member
      74  struct channel_tlv tl;  member
     123  struct channel_tlv tl;  member
     232  struct channel_tlv tl;  member
     345  struct channel_tlv tl;  member
     353  struct channel_tlv tl;  member
     359  struct channel_tlv tl;  member
     365  struct channel_tlv tl;  member
     377  struct channel_tlv tl;  member
     [all …]
|
/drivers/net/wireless/intel/iwlegacy/ |
D | 4965-rs.c |
     235  il4965_rs_tl_rm_old_stats(struct il_traffic_load *tl, u32 curr_time)  in il4965_rs_tl_rm_old_stats() argument
     240  while (tl->queue_count && tl->time_stamp < oldest_time) {  in il4965_rs_tl_rm_old_stats()
     241  tl->total -= tl->packet_count[tl->head];  in il4965_rs_tl_rm_old_stats()
     242  tl->packet_count[tl->head] = 0;  in il4965_rs_tl_rm_old_stats()
     243  tl->time_stamp += TID_QUEUE_CELL_SPACING;  in il4965_rs_tl_rm_old_stats()
     244  tl->queue_count--;  in il4965_rs_tl_rm_old_stats()
     245  tl->head++;  in il4965_rs_tl_rm_old_stats()
     246  if (tl->head >= TID_QUEUE_MAX_SIZE)  in il4965_rs_tl_rm_old_stats()
     247  tl->head = 0;  in il4965_rs_tl_rm_old_stats()
     261  struct il_traffic_load *tl = NULL;  in il4965_rs_tl_add_packet() local
     [all …]
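il4965_rs_tl_rm_old_stats() ages a sliding-window packet counter kept in a ring buffer: each cell covers TID_QUEUE_CELL_SPACING time units, and every cell that has fallen out of the window is subtracted from the running total as head advances and wraps. A compact sketch of that aging step with hypothetical window sizes; the near-identical loop appears again in iwlwifi's dvm/rs.c further down:

    #include <linux/types.h>

    #define CELL_SPACING    50      /* hypothetical: time units per cell */
    #define MAX_CELLS       20      /* hypothetical ring size */
    #define WINDOW          (CELL_SPACING * MAX_CELLS)

    struct traffic_load {
            u32 packet_count[MAX_CELLS];    /* per-cell packet counts */
            u32 total;                      /* running sum of live cells */
            u32 time_stamp;                 /* time of the head (oldest) cell */
            u8 queue_count;                 /* number of live cells */
            u8 head;                        /* index of the oldest cell */
    };

    static void tl_rm_old_stats(struct traffic_load *tl, u32 curr_time)
    {
            u32 oldest_time = curr_time - WINDOW;

            /* drop whole cells until the head cell is inside the window */
            while (tl->queue_count && tl->time_stamp < oldest_time) {
                    tl->total -= tl->packet_count[tl->head];
                    tl->packet_count[tl->head] = 0;
                    tl->time_stamp += CELL_SPACING;
                    tl->queue_count--;
                    if (++tl->head >= MAX_CELLS)
                            tl->head = 0;   /* wrap the ring */
            }
    }

The companion add-packet path (il4965_rs_tl_add_packet() above) is the fill side: it bumps packet_count at the newest cell and the running total, so the pair together maintain a fixed-horizon rate estimate without ever rescanning the array.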
|
/drivers/firmware/arm_scmi/ |
D | voltage.c |
     117  struct scmi_xfer *td, *tl;  in scmi_voltage_descriptors_get() local
     129  sizeof(__le64), 0, &tl);  in scmi_voltage_descriptors_get()
     132  resp_levels = tl->rx.buf;  in scmi_voltage_descriptors_get()
     152  cmd = tl->tx.buf;  in scmi_voltage_descriptors_get()
     160  ret = ph->xops->do_xfer(ph, tl);  in scmi_voltage_descriptors_get()
     198  ph->xops->reset_rx_to_maxsz(ph, tl);  in scmi_voltage_descriptors_get()
     210  ph->xops->xfer_put(ph, tl);  in scmi_voltage_descriptors_get()
|
/drivers/net/ethernet/broadcom/bnx2x/ |
D | bnx2x_vfpf.h |
      99  struct channel_tlv tl;  member
     105  struct channel_tlv tl;  member
     117  struct channel_tlv tl;  member
     213  struct channel_tlv tl;  member
     219  struct channel_tlv tl;  member
|
D | bnx2x_vfpf.c |
      32  struct channel_tlv *tl =  in bnx2x_add_tlv() local
      35  tl->type = type;  in bnx2x_add_tlv()
      36  tl->length = length;  in bnx2x_add_tlv()
      52  bnx2x_add_tlv(bp, &first_tlv->tl, 0, type, length);  in bnx2x_vfpf_prep()
      63  first_tlv->tl.type);  in bnx2x_vfpf_finalize()
     256  bnx2x_add_tlv(bp, req, req->first_tlv.tl.length,  in bnx2x_vfpf_acquire()
     266  req->first_tlv.tl.length + sizeof(struct channel_tlv),  in bnx2x_vfpf_acquire()
     413  bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,  in bnx2x_vfpf_release()
     464  bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,  in bnx2x_vfpf_init()
     515  bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,  in bnx2x_vfpf_close_vf()
     [all …]
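bnx2x_add_tlv() is the write-side complement of the nfp traversal above: it stamps a type/length header at a byte offset inside the request buffer, and the callers terminate each message by stamping CHANNEL_TLV_LIST_END immediately after the first TLV (hence the recurring req->first_tlv.tl.length offset). A hedged sketch; the field widths, the terminator value, and the assumption that length covers the header are all illustrative, not taken from the driver:

    #include <linux/types.h>

    struct channel_tlv {            /* hypothetical widths */
            u16 type;
            u16 length;             /* assumed to include this header */
    };

    #define TLV_LIST_END    0       /* hypothetical terminator type */

    static void add_tlv(void *msg, u16 offset, u16 type, u16 length)
    {
            struct channel_tlv *tl =
                    (struct channel_tlv *)((u8 *)msg + offset);

            tl->type = type;
            tl->length = length;
    }

    /* terminate a request whose first TLV sits at offset 0 */
    static void finish_request(void *msg, struct channel_tlv *first_tlv)
    {
            add_tlv(msg, first_tlv->length, TLV_LIST_END,
                    sizeof(struct channel_tlv));
    }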
|
/drivers/net/wireless/intel/iwlwifi/dvm/ |
D | rs.c |
     249  static void rs_tl_rm_old_stats(struct iwl_traffic_load *tl, u32 curr_time)  in rs_tl_rm_old_stats() argument
     254  while (tl->queue_count &&  in rs_tl_rm_old_stats()
     255  (tl->time_stamp < oldest_time)) {  in rs_tl_rm_old_stats()
     256  tl->total -= tl->packet_count[tl->head];  in rs_tl_rm_old_stats()
     257  tl->packet_count[tl->head] = 0;  in rs_tl_rm_old_stats()
     258  tl->time_stamp += TID_QUEUE_CELL_SPACING;  in rs_tl_rm_old_stats()
     259  tl->queue_count--;  in rs_tl_rm_old_stats()
     260  tl->head++;  in rs_tl_rm_old_stats()
     261  if (tl->head >= TID_QUEUE_MAX_SIZE)  in rs_tl_rm_old_stats()
     262  tl->head = 0;  in rs_tl_rm_old_stats()
     [all …]
|
/drivers/crypto/amlogic/ |
D | amlogic-gxl-core.c |
     128  if (mc->chanlist[i].tl)  in meson_free_chanlist()
     130  mc->chanlist[i].tl,  in meson_free_chanlist()
     163  mc->chanlist[i].tl = dma_alloc_coherent(mc->dev,  in meson_allocate_chanlist()
     167  if (!mc->chanlist[i].tl) {  in meson_allocate_chanlist()
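meson_allocate_chanlist() pairs dma_alloc_coherent() with the dma_free_coherent() call in meson_free_chanlist(): the allocation returns a CPU virtual address and writes the matching device-visible handle through its third argument, and size, CPU address, and handle must all be passed back on free. A minimal sketch with a hypothetical descriptor ring:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    struct desc_ring {              /* hypothetical per-channel ring */
            void *tl;               /* CPU address of the descriptors */
            dma_addr_t t_phy;       /* matching DMA (bus) address */
            size_t size;
    };

    static int ring_alloc(struct device *dev, struct desc_ring *r, size_t size)
    {
            r->size = size;
            r->tl = dma_alloc_coherent(dev, size, &r->t_phy, GFP_KERNEL);
            return r->tl ? 0 : -ENOMEM;
    }

    static void ring_free(struct device *dev, struct desc_ring *r)
    {
            if (r->tl)      /* mirrors the check in meson_free_chanlist() */
                    dma_free_coherent(dev, r->size, r->tl, r->t_phy);
    }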
|
/drivers/gpu/drm/i915/gem/ |
D | i915_gem_execbuffer.c |
    2286  struct intel_timeline *tl = ce->timeline;  in eb_throttle() local
    2303  list_for_each_entry(rq, &tl->requests, link) {  in eb_throttle()
    2311  if (&rq->link == &tl->requests)  in eb_throttle()
    2320  struct intel_timeline *tl;  in eb_pin_engine() local
    2346  tl = intel_context_timeline_lock(ce);  in eb_pin_engine()
    2347  if (IS_ERR(tl)) {  in eb_pin_engine()
    2349  return ERR_CAST(tl);  in eb_pin_engine()
    2355  intel_context_timeline_unlock(tl);  in eb_pin_engine()
    2364  struct intel_timeline *tl = ce->timeline;  in eb_unpin_engine() local
    2371  mutex_lock(&tl->mutex);  in eb_unpin_engine()
     [all …]
|
/drivers/input/keyboard/ |
D | mpr121_touchkey.c |
     176  unsigned char usl, lsl, tl, eleconf;  in mpr121_phys_init() local
     208  tl = (usl * 90) / 100;  in mpr121_phys_init()
     211  ret |= i2c_smbus_write_byte_data(client, AUTO_CONFIG_TL_ADDR, tl);  in mpr121_phys_init()
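The mpr121 hit derives the auto-configuration target level as 90% of the upper side limit in integer arithmetic before writing it to AUTO_CONFIG_TL_ADDR over SMBus. A worked illustration with an assumed usl value; the 65% lower-side ratio is an assumption drawn from the MPR121 application note, not from the excerpt:

    /* illustration only: the driver derives usl from the supply voltage */
    unsigned char usl = 200;                /* upper side limit (assumed) */
    unsigned char tl  = (usl * 90) / 100;   /* target level -> 180 */
    unsigned char lsl = (usl * 65) / 100;   /* lower side limit -> 130 (assumed ratio) */

The usual-arithmetic promotion to int keeps usl * 90 from overflowing before the division, so the result still fits the unsigned char register write.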
|