
Searched refs:e (Results 1 – 25 of 520) sorted by relevance


/drivers/isdn/hardware/eicon/
um_idi.c
30 static void cleanup_entity(divas_um_idi_entity_t *e);
34 static int process_idi_request(divas_um_idi_entity_t *e,
36 static int process_idi_rc(divas_um_idi_entity_t *e, byte rc);
37 static int process_idi_ind(divas_um_idi_entity_t *e, byte ind);
38 static int write_return_code(divas_um_idi_entity_t *e, byte rc);
173 divas_um_idi_entity_t *e; in cleanup_adapter() local
176 e = list_entry(tmp, divas_um_idi_entity_t, link); in cleanup_adapter()
178 cleanup_entity(e); in cleanup_adapter()
179 if (e->os_context) { in cleanup_adapter()
180 diva_os_wakeup_read(e->os_context); in cleanup_adapter()
[all …]
io.c
44 static void Request##N(ENTITY *e) \
45 { if (IoAdapters[N]) (*IoAdapters[N]->DIRequest)(IoAdapters[N], e); }
203 void request(PISDN_ADAPTER IoAdapter, ENTITY *e) in request() argument
211 if (!e->Req) in request()
213 IDI_SYNC_REQ *syncReq = (IDI_SYNC_REQ *)e; in request()
214 switch (e->Rc) in request()
335 pcm_req(IoAdapter, e); in request()
338 e->Ind = 0; in request()
343 pcm_req(IoAdapter, e); in request()
346 e->Ind = 0; in request()
[all …]
/drivers/net/ethernet/chelsio/cxgb4/
l2t.c
65 static inline unsigned int vlan_prio(const struct l2t_entry *e) in vlan_prio() argument
67 return e->vlan >> VLAN_PRIO_SHIFT; in vlan_prio()
70 static inline void l2t_hold(struct l2t_data *d, struct l2t_entry *e) in l2t_hold() argument
72 if (atomic_add_return(1, &e->refcnt) == 1) /* 0 -> 1 transition */ in l2t_hold()
118 static int addreq(const struct l2t_entry *e, const u32 *addr) in addreq() argument
120 if (e->v6) in addreq()
121 return (e->addr[0] ^ addr[0]) | (e->addr[1] ^ addr[1]) | in addreq()
122 (e->addr[2] ^ addr[2]) | (e->addr[3] ^ addr[3]); in addreq()
123 return e->addr[0] ^ addr[0]; in addreq()
126 static void neigh_replace(struct l2t_entry *e, struct neighbour *n) in neigh_replace() argument
[all …]
sched.c
48 struct sched_class *e; in t4_sched_class_fw_cmd() local
51 e = &s->tab[p->u.params.class]; in t4_sched_class_fw_cmd()
58 p->u.params.channel, e->idx, in t4_sched_class_fw_cmd()
115 struct sched_class *e, *end; in t4_sched_queue_lookup() local
121 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_queue_lookup()
125 if (e->state == SCHED_STATE_UNUSED) in t4_sched_queue_lookup()
128 list_for_each_entry(qe, &e->queue_list, list) { in t4_sched_queue_lookup()
130 found = e; in t4_sched_queue_lookup()
148 struct sched_class *e; in t4_sched_queue_unbind() local
162 e = t4_sched_queue_lookup(pi, qid, &index); in t4_sched_queue_unbind()
[all …]
/drivers/net/ethernet/chelsio/cxgb3/
l2t.c
63 static inline unsigned int vlan_prio(const struct l2t_entry *e) in vlan_prio() argument
65 return e->vlan >> 13; in vlan_prio()
74 static inline void neigh_replace(struct l2t_entry *e, struct neighbour *n) in neigh_replace() argument
77 if (e->neigh) in neigh_replace()
78 neigh_release(e->neigh); in neigh_replace()
79 e->neigh = n; in neigh_replace()
88 struct l2t_entry *e) in setup_l2e_send_pending() argument
101 OPCODE_TID(req) = htonl(MK_OPCODE_TID(CPL_L2T_WRITE_REQ, e->idx)); in setup_l2e_send_pending()
102 req->params = htonl(V_L2T_W_IDX(e->idx) | V_L2T_W_IFF(e->smt_idx) | in setup_l2e_send_pending()
103 V_L2T_W_VLAN(e->vlan & VLAN_VID_MASK) | in setup_l2e_send_pending()
[all …]
cxgb3_defs.h
77 union listen_entry *e; in lookup_stid() local
82 e = stid2entry(t, tid); in lookup_stid()
83 if ((void *)e->next >= (void *)t->tid_tab && in lookup_stid()
84 (void *)e->next < (void *)&t->atid_tab[t->natids]) in lookup_stid()
87 return &e->t3c_tid; in lookup_stid()
96 union active_open_entry *e; in lookup_atid() local
101 e = atid2entry(t, tid); in lookup_atid()
102 if ((void *)e->next >= (void *)t->tid_tab && in lookup_atid()
103 (void *)e->next < (void *)&t->atid_tab[t->natids]) in lookup_atid()
106 return &e->t3c_tid; in lookup_atid()
/drivers/md/
dm-cache-policy-smq.c
87 struct entry *e; in __get_entry() local
89 e = es->begin + block; in __get_entry()
90 BUG_ON(e >= es->end); in __get_entry()
92 return e; in __get_entry()
95 static unsigned to_index(struct entry_space *es, struct entry *e) in to_index() argument
97 BUG_ON(e < es->begin || e >= es->end); in to_index()
98 return e - es->begin; in to_index()
132 static struct entry *l_next(struct entry_space *es, struct entry *e) in l_next() argument
134 return to_entry(es, e->next); in l_next()
137 static struct entry *l_prev(struct entry_space *es, struct entry *e) in l_prev() argument
[all …]
dm-exception-store.h
66 struct dm_exception *e);
72 struct dm_exception *e, int valid,
147 static inline unsigned dm_consecutive_chunk_count(struct dm_exception *e) in dm_consecutive_chunk_count() argument
149 return e->new_chunk >> DM_CHUNK_NUMBER_BITS; in dm_consecutive_chunk_count()
152 static inline void dm_consecutive_chunk_count_inc(struct dm_exception *e) in dm_consecutive_chunk_count_inc() argument
154 e->new_chunk += (1ULL << DM_CHUNK_NUMBER_BITS); in dm_consecutive_chunk_count_inc()
156 BUG_ON(!dm_consecutive_chunk_count(e)); in dm_consecutive_chunk_count_inc()
159 static inline void dm_consecutive_chunk_count_dec(struct dm_exception *e) in dm_consecutive_chunk_count_dec() argument
161 BUG_ON(!dm_consecutive_chunk_count(e)); in dm_consecutive_chunk_count_dec()
163 e->new_chunk -= (1ULL << DM_CHUNK_NUMBER_BITS); in dm_consecutive_chunk_count_dec()
[all …]
dm-snap.c
195 struct dm_exception e; member
659 static void dm_remove_exception(struct dm_exception *e) in dm_remove_exception() argument
661 list_del(&e->hash_list); in dm_remove_exception()
672 struct dm_exception *e; in dm_lookup_exception() local
675 list_for_each_entry (e, slot, hash_list) in dm_lookup_exception()
676 if (chunk >= e->old_chunk && in dm_lookup_exception()
677 chunk <= e->old_chunk + dm_consecutive_chunk_count(e)) in dm_lookup_exception()
678 return e; in dm_lookup_exception()
685 struct dm_exception *e; in alloc_completed_exception() local
687 e = kmem_cache_alloc(exception_cache, gfp); in alloc_completed_exception()
[all …]
/drivers/mtd/ubi/
wl.c
140 struct ubi_wl_entry *e, struct rb_root *root);
142 struct ubi_wl_entry *e);
152 static void wl_tree_add(struct ubi_wl_entry *e, struct rb_root *root) in wl_tree_add() argument
163 if (e->ec < e1->ec) in wl_tree_add()
165 else if (e->ec > e1->ec) in wl_tree_add()
168 ubi_assert(e->pnum != e1->pnum); in wl_tree_add()
169 if (e->pnum < e1->pnum) in wl_tree_add()
176 rb_link_node(&e->u.rb, parent, p); in wl_tree_add()
177 rb_insert_color(&e->u.rb, root); in wl_tree_add()
188 static void wl_entry_destroy(struct ubi_device *ubi, struct ubi_wl_entry *e) in wl_entry_destroy() argument
[all …]
fastmap-wl.c
38 struct ubi_wl_entry *e, *victim = NULL; in find_anchor_wl_entry() local
41 ubi_rb_for_each_entry(p, e, root, u.rb) { in find_anchor_wl_entry()
42 if (e->pnum < UBI_FM_MAX_START && e->ec < max_ec) { in find_anchor_wl_entry()
43 victim = e; in find_anchor_wl_entry()
44 max_ec = e->ec; in find_anchor_wl_entry()
60 struct ubi_wl_entry *e; in return_unused_pool_pebs() local
63 e = ubi->lookuptbl[pool->pebs[i]]; in return_unused_pool_pebs()
64 wl_tree_add(e, &ubi->free); in return_unused_pool_pebs()
72 struct ubi_wl_entry *e; in anchor_pebs_avalible() local
74 ubi_rb_for_each_entry(p, e, root, u.rb) in anchor_pebs_avalible()
[all …]
/drivers/net/fddi/skfp/
ecm.c
106 smc->e.path_test = PT_PASSED ; in ecm_init()
107 smc->e.trace_prop = 0 ; in ecm_init()
108 smc->e.sb_flag = 0 ; in ecm_init()
110 smc->e.ecm_line_state = FALSE ; in ecm_init()
162 smc->e.DisconnectFlag = FALSE ; in ecm_fsm()
165 smc->e.DisconnectFlag = TRUE ; in ecm_fsm()
173 smc->e.path_test = PT_PASSED ; in ecm_fsm()
174 smc->e.ecm_line_state = FALSE ; in ecm_fsm()
181 && smc->e.path_test==PT_PASSED) { in ecm_fsm()
186 else if (cmd == EC_CONNECT && (smc->e.path_test==PT_PASSED) && in ecm_fsm()
[all …]
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.c
124 struct amdgpu_sync_entry *e; in amdgpu_sync_add_later() local
126 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
127 if (unlikely(e->fence->context != f->context)) in amdgpu_sync_add_later()
130 amdgpu_sync_keep_later(&e->fence, f); in amdgpu_sync_add_later()
146 struct amdgpu_sync_entry *e; in amdgpu_sync_fence() local
158 e = kmem_cache_alloc(amdgpu_sync_slab, GFP_KERNEL); in amdgpu_sync_fence()
159 if (!e) in amdgpu_sync_fence()
162 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
163 e->fence = dma_fence_get(f); in amdgpu_sync_fence()
239 struct amdgpu_sync_entry *e; in amdgpu_sync_peek_fence() local
[all …]
/drivers/ssb/
driver_chipcommon_sflash.c
90 const struct ssb_sflash_tbl_e *e; in ssb_sflash_init() local
107 for (e = ssb_sflash_sst_tbl; e->name; e++) { in ssb_sflash_init()
108 if (e->id == id2) in ssb_sflash_init()
115 for (e = ssb_sflash_st_tbl; e->name; e++) { in ssb_sflash_init()
116 if (e->id == id) in ssb_sflash_init()
121 if (!e->name) { in ssb_sflash_init()
132 for (e = ssb_sflash_at_tbl; e->name; e++) { in ssb_sflash_init()
133 if (e->id == id) in ssb_sflash_init()
136 if (!e->name) { in ssb_sflash_init()
149 sflash->blocksize = e->blocksize; in ssb_sflash_init()
[all …]
driver_chipcommon_pmu.c
77 const struct pmu0_plltab_entry *e; in pmu0_plltab_find_entry() local
81 e = &pmu0_plltab[i]; in pmu0_plltab_find_entry()
82 if (e->freq == crystalfreq) in pmu0_plltab_find_entry()
83 return e; in pmu0_plltab_find_entry()
94 const struct pmu0_plltab_entry *e = NULL; in ssb_pmu0_pllinit_r0() local
99 e = pmu0_plltab_find_entry(crystalfreq); in ssb_pmu0_pllinit_r0()
100 if (!e) in ssb_pmu0_pllinit_r0()
101 e = pmu0_plltab_find_entry(SSB_PMU0_DEFAULT_XTALFREQ); in ssb_pmu0_pllinit_r0()
102 BUG_ON(!e); in ssb_pmu0_pllinit_r0()
103 crystalfreq = e->freq; in ssb_pmu0_pllinit_r0()
[all …]
/drivers/bcma/
driver_chipcommon_sflash.c
93 const struct bcma_sflash_tbl_e *e; in bcma_sflash_init() local
110 for (e = bcma_sflash_sst_tbl; e->name; e++) { in bcma_sflash_init()
111 if (e->id == id2) in bcma_sflash_init()
118 for (e = bcma_sflash_st_tbl; e->name; e++) { in bcma_sflash_init()
119 if (e->id == id) in bcma_sflash_init()
124 if (!e->name) { in bcma_sflash_init()
134 for (e = bcma_sflash_at_tbl; e->name; e++) { in bcma_sflash_init()
135 if (e->id == id) in bcma_sflash_init()
138 if (!e->name) { in bcma_sflash_init()
149 sflash->blocksize = e->blocksize; in bcma_sflash_init()
[all …]
/drivers/gpu/drm/
drm_file.c
301 struct drm_pending_event *e, *et; in drm_events_release() local
307 list_for_each_entry_safe(e, et, &file_priv->pending_event_list, in drm_events_release()
309 list_del(&e->pending_link); in drm_events_release()
310 e->file_priv = NULL; in drm_events_release()
314 list_for_each_entry_safe(e, et, &file_priv->event_list, link) { in drm_events_release()
315 list_del(&e->link); in drm_events_release()
316 kfree(e); in drm_events_release()
492 struct drm_pending_event *e = NULL; in drm_read() local
496 e = list_first_entry(&file_priv->event_list, in drm_read()
498 file_priv->event_space += e->event->length; in drm_read()
[all …]
/drivers/gpu/drm/i915/gvt/
gtt.h
53 struct intel_gvt_gtt_entry *e,
59 struct intel_gvt_gtt_entry *e,
64 bool (*test_present)(struct intel_gvt_gtt_entry *e);
65 void (*clear_present)(struct intel_gvt_gtt_entry *e);
66 bool (*test_pse)(struct intel_gvt_gtt_entry *e);
67 void (*set_pfn)(struct intel_gvt_gtt_entry *e, unsigned long pfn);
68 unsigned long (*get_pfn)(struct intel_gvt_gtt_entry *e);
152 void *page_table, struct intel_gvt_gtt_entry *e,
157 void *page_table, struct intel_gvt_gtt_entry *e,
160 #define ggtt_get_guest_entry(mm, e, index) \ argument
[all …]
gtt.c
135 #define gtt_init_entry(e, t, p, v) do { \ argument
136 (e)->type = t; \
137 (e)->pdev = p; \
138 memcpy(&(e)->val64, &v, sizeof(v)); \
263 struct intel_gvt_gtt_entry *e, in gtt_get_entry64() argument
276 &e->val64, 8); in gtt_get_entry64()
280 e->val64 = read_pte64(vgpu->gvt->dev_priv, index); in gtt_get_entry64()
282 e->val64 = *((u64 *)pt + index); in gtt_get_entry64()
288 struct intel_gvt_gtt_entry *e, in gtt_set_entry64() argument
301 &e->val64, 8); in gtt_set_entry64()
[all …]
/drivers/net/wireless/mediatek/mt7601u/
dma.c
20 struct mt7601u_dma_buf_rx *e, gfp_t gfp);
136 mt7601u_rx_process_entry(struct mt7601u_dev *dev, struct mt7601u_dma_buf_rx *e) in mt7601u_rx_process_entry() argument
138 u32 seg_len, data_len = e->urb->actual_length; in mt7601u_rx_process_entry()
139 u8 *data = page_address(e->p); in mt7601u_rx_process_entry()
151 mt7601u_rx_process_seg(dev, data, seg_len, new_p ? e->p : NULL); in mt7601u_rx_process_entry()
163 __free_pages(e->p, MT_RX_ORDER); in mt7601u_rx_process_entry()
165 e->p = new_p; in mt7601u_rx_process_entry()
181 buf = &q->e[q->start]; in mt7601u_rx_get_pending_entry()
213 if (WARN_ONCE(q->e[q->end].urb != urb, "RX urb mismatch")) in mt7601u_complete_rx()
226 struct mt7601u_dma_buf_rx *e; in mt7601u_rx_tasklet() local
[all …]
/drivers/net/wireless/broadcom/b43legacy/
debugfs.c
354 struct b43legacy_dfsentry *e = dev->dfsentry; in b43legacy_remove_dynamic_debug() local
358 debugfs_remove(e->dyn_debug_dentries[i]); in b43legacy_remove_dynamic_debug()
363 struct b43legacy_dfsentry *e = dev->dfsentry; in b43legacy_add_dynamic_debug() local
367 e->dyn_debug[id] = (initstate); \ in b43legacy_add_dynamic_debug()
368 d = debugfs_create_bool(name, 0600, e->subdir, \ in b43legacy_add_dynamic_debug()
369 &(e->dyn_debug[id])); \ in b43legacy_add_dynamic_debug()
371 e->dyn_debug_dentries[id] = d; \ in b43legacy_add_dynamic_debug()
385 struct b43legacy_dfsentry *e; in b43legacy_debugfs_add_device() local
390 e = kzalloc(sizeof(*e), GFP_KERNEL); in b43legacy_debugfs_add_device()
391 if (!e) { in b43legacy_debugfs_add_device()
[all …]
/drivers/firewire/
core-cdev.c
390 struct bus_reset_event *e; in queue_bus_reset_event() local
392 e = kzalloc(sizeof(*e), GFP_KERNEL); in queue_bus_reset_event()
393 if (e == NULL) in queue_bus_reset_event()
396 fill_bus_reset_event(&e->reset, client); in queue_bus_reset_event()
398 queue_event(client, &e->event, in queue_bus_reset_event()
399 &e->reset, sizeof(e->reset), NULL, 0); in queue_bus_reset_event()
551 struct outbound_transaction_event *e = data; in complete_transaction() local
552 struct fw_cdev_event_response *rsp = &e->response; in complete_transaction()
553 struct client *client = e->client; in complete_transaction()
562 idr_remove(&client->resource_idr, e->r.resource.handle); in complete_transaction()
[all …]
/drivers/edac/
edac_mc.c
1056 struct edac_raw_error_desc *e) in edac_raw_mc_handle_error() argument
1059 int pos[EDAC_MAX_LAYERS] = { e->top_layer, e->mid_layer, e->low_layer }; in edac_raw_mc_handle_error()
1065 e->page_frame_number, e->offset_in_page, in edac_raw_mc_handle_error()
1066 e->grain, e->syndrome); in edac_raw_mc_handle_error()
1067 edac_ce_error(mci, e->error_count, pos, e->msg, e->location, e->label, in edac_raw_mc_handle_error()
1068 detail, e->other_detail, e->enable_per_layer_report, in edac_raw_mc_handle_error()
1069 e->page_frame_number, e->offset_in_page, e->grain); in edac_raw_mc_handle_error()
1073 e->page_frame_number, e->offset_in_page, e->grain); in edac_raw_mc_handle_error()
1075 edac_ue_error(mci, e->error_count, pos, e->msg, e->location, e->label, in edac_raw_mc_handle_error()
1076 detail, e->other_detail, e->enable_per_layer_report); in edac_raw_mc_handle_error()
[all …]
ghes_edac.c
170 struct edac_raw_error_desc *e; in ghes_edac_report_mem_error() local
185 e = &mci->error_desc; in ghes_edac_report_mem_error()
188 memset(e, 0, sizeof (*e)); in ghes_edac_report_mem_error()
189 e->error_count = 1; in ghes_edac_report_mem_error()
190 e->grain = 1; in ghes_edac_report_mem_error()
191 strcpy(e->label, "unknown label"); in ghes_edac_report_mem_error()
192 e->msg = pvt->msg; in ghes_edac_report_mem_error()
193 e->other_detail = pvt->other_detail; in ghes_edac_report_mem_error()
194 e->top_layer = -1; in ghes_edac_report_mem_error()
195 e->mid_layer = -1; in ghes_edac_report_mem_error()
[all …]
/drivers/net/wireless/broadcom/b43/
debugfs.c
661 struct b43_dfsentry *e = dev->dfsentry; in b43_remove_dynamic_debug() local
665 debugfs_remove(e->dyn_debug_dentries[i]); in b43_remove_dynamic_debug()
670 struct b43_dfsentry *e = dev->dfsentry; in b43_add_dynamic_debug() local
674 e->dyn_debug[id] = (initstate); \ in b43_add_dynamic_debug()
675 d = debugfs_create_bool(name, 0600, e->subdir, \ in b43_add_dynamic_debug()
676 &(e->dyn_debug[id])); \ in b43_add_dynamic_debug()
678 e->dyn_debug_dentries[id] = d; \ in b43_add_dynamic_debug()
696 struct b43_dfsentry *e; in b43_debugfs_add_device() local
701 e = kzalloc(sizeof(*e), GFP_KERNEL); in b43_debugfs_add_device()
702 if (!e) { in b43_debugfs_add_device()
[all …]
