
Searched refs:pe (Results 1 – 25 of 57) sorted by relevance


/drivers/net/ethernet/marvell/
mvpp2.c
1018 static int mvpp2_prs_hw_write(struct mvpp2 *priv, struct mvpp2_prs_entry *pe) in mvpp2_prs_hw_write() argument
1022 if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1) in mvpp2_prs_hw_write()
1026 pe->tcam.word[MVPP2_PRS_TCAM_INV_WORD] &= ~MVPP2_PRS_TCAM_INV_MASK; in mvpp2_prs_hw_write()
1029 mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index); in mvpp2_prs_hw_write()
1031 mvpp2_write(priv, MVPP2_PRS_TCAM_DATA_REG(i), pe->tcam.word[i]); in mvpp2_prs_hw_write()
1034 mvpp2_write(priv, MVPP2_PRS_SRAM_IDX_REG, pe->index); in mvpp2_prs_hw_write()
1036 mvpp2_write(priv, MVPP2_PRS_SRAM_DATA_REG(i), pe->sram.word[i]); in mvpp2_prs_hw_write()
1042 static int mvpp2_prs_hw_read(struct mvpp2 *priv, struct mvpp2_prs_entry *pe) in mvpp2_prs_hw_read() argument
1046 if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1) in mvpp2_prs_hw_read()
1050 mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index); in mvpp2_prs_hw_read()
[all …]
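
The mvpp2 hits above all come from the parser TCAM/SRAM write path: validate the entry index, select the row through an index register, then stream the data words out through data registers. A minimal user-space sketch of that pattern follows; the register offsets, word counts, struct layout and reg_write() are invented stand-ins, not the driver's definitions.

#include <stdint.h>
#include <stdio.h>

#define TCAM_SRAM_SIZE 256   /* assumed table depth, for the bounds check */
#define TCAM_WORDS 6
#define SRAM_WORDS 4

struct prs_entry {
        int index;
        uint32_t tcam[TCAM_WORDS];
        uint32_t sram[SRAM_WORDS];
};

/* stand-in for the driver's register accessor (mvpp2_write) */
static void reg_write(uint32_t reg, uint32_t val)
{
        printf("write reg %#x = %#x\n", reg, val);
}

static int prs_hw_write(const struct prs_entry *pe)
{
        int i;

        if (pe->index > TCAM_SRAM_SIZE - 1)
                return -1;                        /* reject out-of-range entries */

        reg_write(0x1100, pe->index);             /* select TCAM row (offset invented) */
        for (i = 0; i < TCAM_WORDS; i++)
                reg_write(0x1104 + 4 * i, pe->tcam[i]);

        reg_write(0x1200, pe->index);             /* select matching SRAM row */
        for (i = 0; i < SRAM_WORDS; i++)
                reg_write(0x1204 + 4 * i, pe->sram[i]);
        return 0;
}

int main(void)
{
        struct prs_entry pe = { .index = 3 };
        return prs_hw_write(&pe);
}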
/drivers/clk/st/
clkgen-fsyn.c
40 unsigned long pe; member
46 { .mdiv = 0x1f, .pe = 0x0, .sdiv = 0x7, .nsdiv = 0 }, /* 312.5 Khz */
47 { .mdiv = 0x17, .pe = 0x25ed, .sdiv = 0x1, .nsdiv = 0 }, /* 27 MHz */
48 { .mdiv = 0x1a, .pe = 0x7b36, .sdiv = 0x2, .nsdiv = 1 }, /* 36.87 MHz */
49 { .mdiv = 0x13, .pe = 0x0, .sdiv = 0x2, .nsdiv = 1 }, /* 48 MHz */
50 { .mdiv = 0x11, .pe = 0x1c72, .sdiv = 0x1, .nsdiv = 1 }, /* 108 MHz */
54 { .mdiv = 0x1f, .pe = 0x0, .sdiv = 0x7, .nsdiv = 0 }, /* 625 Khz */
55 { .mdiv = 0x13, .pe = 0x777c, .sdiv = 0x4, .nsdiv = 1 }, /* 25.175 MHz */
56 { .mdiv = 0x19, .pe = 0x4d35, .sdiv = 0x2, .nsdiv = 0 }, /* 25.200 MHz */
57 { .mdiv = 0x11, .pe = 0x1c72, .sdiv = 0x4, .nsdiv = 1 }, /* 27.000 MHz */
[all …]
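
The clkgen-fsyn hits are rows of a rate-to-divider parameter table. Below is a small sketch of how such a table can be searched by target rate, reusing three rows from the excerpt; the struct layout, the rate association and the interpretation of the pe field are inferred for illustration, not taken from the driver.

#include <stdio.h>

struct fsyn_params {
        unsigned long rate;
        unsigned mdiv;
        unsigned pe;     /* fine adjustment word; exact meaning not shown in the excerpt */
        unsigned sdiv;
        unsigned nsdiv;
};

static const struct fsyn_params table[] = {
        {  27000000, 0x17, 0x25ed, 0x1, 0 },
        {  48000000, 0x13, 0x0,    0x2, 1 },
        { 108000000, 0x11, 0x1c72, 0x1, 1 },
};

static const struct fsyn_params *lookup_rate(unsigned long rate)
{
        for (unsigned i = 0; i < sizeof(table) / sizeof(table[0]); i++)
                if (table[i].rate == rate)
                        return &table[i];
        return NULL;                              /* no canned parameters for this rate */
}

int main(void)
{
        const struct fsyn_params *p = lookup_rate(27000000);
        if (p)
                printf("mdiv=%#x pe=%#x sdiv=%#x nsdiv=%u\n",
                       p->mdiv, p->pe, p->sdiv, p->nsdiv);
        return 0;
}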
/drivers/misc/cxl/
trace.h
66 __field(u16, pe)
72 __entry->pe = ctx->pe;
78 __entry->pe
91 __field(u16, pe)
101 __entry->pe = ctx->pe;
112 __entry->pe,
132 __field(u16, pe)
141 __entry->pe = ctx->pe;
150 __entry->pe,
165 __field(u16, pe)
[all …]
fault.c
120 pr_devel("CXL interrupt: Segment fault pe: %i ea: %#llx\n", ctx->pe, ea); in cxl_handle_segment_miss()
165 pr_devel("Page fault successfully handled for pe: %i!\n", ctx->pe); in cxl_handle_page_fault()
186 __func__, ctx->pe); in get_mem_context()
206 __func__, pid_nr(old_pid), ctx->pe); in get_mem_context()
233 __func__, ctx->pe, pid_nr(old_pid), in get_mem_context()
259 cxl_p2n_read(ctx->afu, CXL_PSL_PEHandle_An) != ctx->pe) { in cxl_handle_fault()
274 "DSISR: %#llx DAR: %#llx\n", ctx->pe, dsisr, dar); in cxl_handle_fault()
282 __func__, ctx->pe, pid_nr(ctx->pid)); in cxl_handle_fault()
287 ctx->pe, pid_nr(ctx->pid)); in cxl_handle_fault()
file.c
95 pr_devel("afu_open pe: %i\n", ctx->pe); in __afu_open()
124 __func__, ctx->pe); in afu_release()
156 pr_devel("%s: pe: %i\n", __func__, ctx->pe); in afu_ioctl_start_work()
231 pr_devel("%s: pe: %i\n", __func__, ctx->pe); in afu_ioctl_process_element()
233 if (copy_to_user(upe, &ctx->pe, sizeof(__u32))) in afu_ioctl_process_element()
310 pr_devel("afu_poll wait done pe: %i\n", ctx->pe); in afu_poll()
322 pr_devel("afu_poll pe: %i returning %#x\n", ctx->pe, mask); in afu_poll()
380 event.header.process_element = ctx->pe; in afu_read()
native.c
345 *(ctx->afu->sw_command_status) = cpu_to_be64(cmd | 0 | ctx->pe); in do_process_element_cmd()
347 cxl_p1n_write(ctx->afu, CXL_PSL_LLCMD_An, cmd | ctx->pe); in do_process_element_cmd()
366 (cmd | (cmd >> 16) | ctx->pe)) in do_process_element_cmd()
388 pr_devel("%s Adding pe: %i started\n", __func__, ctx->pe); in add_process_element()
391 pr_devel("%s Adding pe: %i finished\n", __func__, ctx->pe); in add_process_element()
405 pr_devel("%s Terminate pe: %i started\n", __func__, ctx->pe); in terminate_process_element()
414 pr_devel("%s Terminate pe: %i finished\n", __func__, ctx->pe); in terminate_process_element()
424 pr_devel("%s Remove pe: %i started\n", __func__, ctx->pe); in remove_process_element()
435 pr_devel("%s Remove pe: %i finished\n", __func__, ctx->pe); in remove_process_element()
449 (ctx->afu->pp_offset + ctx->afu->pp_size * ctx->pe); in cxl_assign_psn_space()
irq.c
106 pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar); in cxl_irq()
119 pr_devel("Scheduling segment miss handling for later pe: %i\n", ctx->pe); in cxl_irq()
140 pr_devel("Scheduling page fault handling for later pe: %i\n", ctx->pe); in cxl_irq()
162 ctx->pe, irq_info->afu_err); in cxl_irq()
240 ctx->pe, irq, hwirq); in cxl_irq_afu()
246 afu_irq, ctx->pe, irq, hwirq); in cxl_irq_afu()
460 ctx->pe, j); in afu_allocate_irqs()
/drivers/vfio/
vfio_spapr_eeh.c
37 struct eeh_pe *pe; in vfio_spapr_iommu_eeh_ioctl() local
50 pe = eeh_iommu_group_to_pe(group); in vfio_spapr_iommu_eeh_ioctl()
51 if (!pe) in vfio_spapr_iommu_eeh_ioctl()
62 ret = eeh_pe_set_option(pe, EEH_OPT_DISABLE); in vfio_spapr_iommu_eeh_ioctl()
65 ret = eeh_pe_set_option(pe, EEH_OPT_ENABLE); in vfio_spapr_iommu_eeh_ioctl()
68 ret = eeh_pe_set_option(pe, EEH_OPT_THAW_MMIO); in vfio_spapr_iommu_eeh_ioctl()
71 ret = eeh_pe_set_option(pe, EEH_OPT_THAW_DMA); in vfio_spapr_iommu_eeh_ioctl()
74 ret = eeh_pe_get_state(pe); in vfio_spapr_iommu_eeh_ioctl()
77 ret = eeh_pe_reset(pe, EEH_RESET_DEACTIVATE); in vfio_spapr_iommu_eeh_ioctl()
80 ret = eeh_pe_reset(pe, EEH_RESET_HOT); in vfio_spapr_iommu_eeh_ioctl()
[all …]
/drivers/tty/vt/
selection.c
171 int i, ps, pe, multiplier; in __set_selection() local
192 pe = ye * vc->vc_size_row + (xe << 1); in __set_selection()
206 if (ps > pe) /* make sel_start <= sel_end */ in __set_selection()
209 ps = pe; in __set_selection()
210 pe = tmp; in __set_selection()
227 new_sel_end = pe; in __set_selection()
240 spc = isspace(sel_pos(pe)); in __set_selection()
241 for (new_sel_end = pe; ; pe += 2) in __set_selection()
243 if ((spc && !isspace(sel_pos(pe))) || in __set_selection()
244 (!spc && !inword(sel_pos(pe)))) in __set_selection()
[all …]
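
The selection hits compute byte offsets into the screen buffer and normalise them so the start never exceeds the end. A minimal sketch of just that normalisation, assuming two bytes per character cell; the variable names mirror the excerpt, but nothing here is the console code itself.

#include <stdio.h>

int main(void)
{
        int vc_size_row = 160;                /* 80 columns * 2 bytes per cell (assumed) */
        int xs = 10, ys = 5, xe = 3, ye = 2;  /* selection given "backwards" */

        int ps = ys * vc_size_row + (xs << 1);
        int pe = ye * vc_size_row + (xe << 1);

        if (ps > pe) {                        /* make sel_start <= sel_end */
                int tmp = ps;
                ps = pe;
                pe = tmp;
        }
        printf("sel_start=%d sel_end=%d\n", ps, pe);
        return 0;
}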
/drivers/net/wireless/ath/ath9k/
dfs.c
204 struct pulse_event *pe) in ath9k_postprocess_radar_event() argument
268 pe->width = dur_to_usecs(sc->sc_ah, dur); in ath9k_postprocess_radar_event()
269 pe->rssi = rssi; in ath9k_postprocess_radar_event()
276 ath9k_dfs_process_radar_pulse(struct ath_softc *sc, struct pulse_event *pe) in ath9k_dfs_process_radar_pulse() argument
282 if (!pd->add_pulse(pd, pe)) in ath9k_dfs_process_radar_pulse()
297 struct pulse_event pe; in ath9k_dfs_process_phyerr() local
333 pe.freq = ah->curchan->channel; in ath9k_dfs_process_phyerr()
334 pe.ts = mactime; in ath9k_dfs_process_phyerr()
335 if (!ath9k_postprocess_radar_event(sc, &ard, &pe)) in ath9k_dfs_process_phyerr()
338 if (pe.width > MIN_CHIRP_PULSE_WIDTH && in ath9k_dfs_process_phyerr()
[all …]
/drivers/md/
dm-snap.c
701 struct dm_snap_pending_exception *pe = mempool_alloc(s->pending_pool, in alloc_pending_exception() local
705 pe->snap = s; in alloc_pending_exception()
707 return pe; in alloc_pending_exception()
710 static void free_pending_exception(struct dm_snap_pending_exception *pe) in free_pending_exception() argument
712 struct dm_snapshot *s = pe->snap; in free_pending_exception()
714 mempool_free(pe, s->pending_pool); in free_pending_exception()
1519 struct dm_snap_pending_exception *pe = context; in pending_complete() local
1521 struct dm_snapshot *s = pe->snap; in pending_complete()
1542 *e = pe->e; in pending_complete()
1552 __check_for_conflicting_io(s, pe->e.old_chunk); in pending_complete()
[all …]
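
The dm-snap hits pair an allocation that records a back-pointer to its owning snapshot with a free path that recovers the owner from the object. A plain-malloc sketch of that pairing; mempool_alloc/mempool_free are replaced by malloc/free, and the struct contents are trimmed to the pattern.

#include <stdlib.h>

struct snapshot { int id; /* the real driver keeps the mempool here */ };

struct pending_exception {
        struct snapshot *snap;   /* back-pointer set at allocation time */
};

static struct pending_exception *alloc_pending(struct snapshot *s)
{
        struct pending_exception *pe = malloc(sizeof(*pe));
        if (pe)
                pe->snap = s;
        return pe;
}

static void free_pending(struct pending_exception *pe)
{
        struct snapshot *s = pe->snap;   /* recover the owner from the object */
        (void)s;                         /* real code frees into s->pending_pool */
        free(pe);
}

int main(void)
{
        struct snapshot snap = { .id = 1 };
        struct pending_exception *pe = alloc_pending(&snap);
        if (pe)
                free_pending(pe);
        return 0;
}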
dm-cache-policy-cleaner.c
172 static int wb_map(struct dm_cache_policy *pe, dm_oblock_t oblock, in wb_map() argument
177 struct policy *p = to_policy(pe); in wb_map()
201 static int wb_lookup(struct dm_cache_policy *pe, dm_oblock_t oblock, dm_cblock_t *cblock) in wb_lookup() argument
204 struct policy *p = to_policy(pe); in wb_lookup()
224 static void __set_clear_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock, bool set) in __set_clear_dirty() argument
226 struct policy *p = to_policy(pe); in __set_clear_dirty()
247 static void wb_set_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock) in wb_set_dirty() argument
249 struct policy *p = to_policy(pe); in wb_set_dirty()
253 __set_clear_dirty(pe, oblock, true); in wb_set_dirty()
257 static void wb_clear_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock) in wb_clear_dirty() argument
[all …]
/drivers/staging/speakup/
selection.c
56 int i, ps, pe; in speakup_set_selection() local
64 pe = spk_ye * vc->vc_size_row + (spk_xe << 1); in speakup_set_selection()
66 if (ps > pe) { in speakup_set_selection()
70 ps = pe; in speakup_set_selection()
71 pe = tmp; in speakup_set_selection()
83 new_sel_end = pe; in speakup_set_selection()
89 for (pe = new_sel_end + 2; ; pe += 2) in speakup_set_selection()
90 if (!ishardspace(sel_pos(pe)) || in speakup_set_selection()
91 atedge(pe, vc->vc_size_row)) in speakup_set_selection()
93 if (ishardspace(sel_pos(pe))) in speakup_set_selection()
[all …]
/drivers/gpu/drm/radeon/
si_dma.c
71 uint64_t pe, uint64_t src, in si_dma_vm_copy_pages() argument
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
86 pe += bytes; in si_dma_vm_copy_pages()
107 uint64_t pe, in si_dma_vm_write_pages() argument
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
122 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
123 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in si_dma_vm_write_pages()
154 uint64_t pe, in si_dma_vm_set_pages() argument
173 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in si_dma_vm_set_pages()
[all …]
ni_dma.c
317 uint64_t pe, uint64_t src, in cayman_dma_vm_copy_pages() argument
329 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
334 pe += ndw * 4; in cayman_dma_vm_copy_pages()
355 uint64_t pe, in cayman_dma_vm_write_pages() argument
370 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
371 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_write_pages()
372 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cayman_dma_vm_write_pages()
403 uint64_t pe, in cayman_dma_vm_set_pages() argument
422 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cayman_dma_vm_set_pages()
[all …]
radeon_trace.h
83 TP_PROTO(uint64_t pe, uint64_t addr, unsigned count,
85 TP_ARGS(pe, addr, count, incr, flags),
87 __field(u64, pe)
95 __entry->pe = pe;
102 __entry->pe, __entry->addr, __entry->incr,
cik_sdma.c
800 uint64_t pe, uint64_t src, in cik_sdma_vm_copy_pages() argument
814 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
815 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pages()
817 pe += bytes; in cik_sdma_vm_copy_pages()
838 uint64_t pe, in cik_sdma_vm_write_pages() argument
853 ib->ptr[ib->length_dw++] = pe; in cik_sdma_vm_write_pages()
854 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_write_pages()
856 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
887 uint64_t pe, in cik_sdma_vm_set_pages() argument
906 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cik_sdma_vm_set_pages()
[all …]
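
The radeon DMA hits above repeatedly emit a 64-bit page-table address into a 32-bit command stream as a low dword followed by a high dword. A compact sketch of that split; the ib layout and the helper definitions are illustrative stand-ins, not the driver's.

#include <stdint.h>
#include <stdio.h>

struct ib { uint32_t ptr[16]; unsigned length_dw; };

static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
        struct ib ib = { .length_dw = 0 };
        uint64_t pe = 0x0000001234567000ULL;        /* destination page-table address */

        ib.ptr[ib.length_dw++] = lower_32_bits(pe);
        ib.ptr[ib.length_dw++] = upper_32_bits(pe); /* some engines mask this, e.g. & 0xff */

        printf("dw0=%#x dw1=%#x\n", ib.ptr[0], ib.ptr[1]);
        return 0;
}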
/drivers/isdn/hardware/eicon/
divasproc.c
359 struct proc_dir_entry *de, *pe; in create_adapter_proc() local
367 pe = proc_create_data(info_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
369 if (!pe) in create_adapter_proc()
371 a->proc_info = (void *) pe; in create_adapter_proc()
373 pe = proc_create_data(grp_opt_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
375 if (pe) in create_adapter_proc()
376 a->proc_grp_opt = (void *) pe; in create_adapter_proc()
377 pe = proc_create_data(d_l1_down_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
379 if (pe) in create_adapter_proc()
380 a->proc_d_l1_down = (void *) pe; in create_adapter_proc()
/drivers/scsi/libsas/
sas_dump.c
47 void sas_dprint_porte(int phyid, enum port_event pe) in sas_dprint_porte() argument
49 SAS_DPRINTK("phy%d: port event: %s\n", phyid, sas_porte_str[pe]); in sas_dprint_porte()
51 void sas_dprint_phye(int phyid, enum phy_event pe) in sas_dprint_phye() argument
53 SAS_DPRINTK("phy%d: phy event: %s\n", phyid, sas_phye_str[pe]); in sas_dprint_phye()
sas_dump.h
27 void sas_dprint_porte(int phyid, enum port_event pe);
28 void sas_dprint_phye(int phyid, enum phy_event pe);
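
The libsas dump helpers print an event name by indexing a string table with the enum value. A tiny sketch of that pattern with placeholder event names; the real sas_porte_str/sas_phye_str tables are not reproduced here.

#include <stdio.h>

enum port_event { PORT_EVENT_LINK_UP, PORT_EVENT_LINK_DOWN, PORT_EVENT_BROADCAST };

static const char *port_event_str[] = {
        [PORT_EVENT_LINK_UP]   = "LINK_UP",
        [PORT_EVENT_LINK_DOWN] = "LINK_DOWN",
        [PORT_EVENT_BROADCAST] = "BROADCAST",
};

static void dprint_porte(int phyid, enum port_event pe)
{
        printf("phy%d: port event: %s\n", phyid, port_event_str[pe]);
}

int main(void)
{
        dprint_porte(2, PORT_EVENT_LINK_DOWN);
        return 0;
}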
/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
dp.c
172 info->pe = nvbios_rd08(bios, data + 0x03); in nvbios_dpcfg_parse()
180 info->pe = nvbios_rd08(bios, data + 0x02); in nvbios_dpcfg_parse()
192 nvbios_dpcfg_match(struct nvkm_bios *bios, u16 outp, u8 pc, u8 vs, u8 pe, in nvbios_dpcfg_match() argument
201 idx = (pc * 10) + vsoff[vs] + pe; in nvbios_dpcfg_match()
208 nvbios_rd08(bios, data + 0x01) == pe) in nvbios_dpcfg_match()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_trace.h
194 TP_PROTO(uint64_t pe, uint64_t addr, unsigned count,
196 TP_ARGS(pe, addr, count, incr, flags),
198 __field(u64, pe)
206 __entry->pe = pe;
213 __entry->pe, __entry->addr, __entry->incr,
/drivers/atm/
firestream.c
777 struct FS_BPENTRY *pe; in process_incoming() local
793 pe = bus_to_virt (qe->p0); in process_incoming()
795 pe->flags, pe->next, pe->bsa, pe->aal_bufsize, in process_incoming()
796 pe->skb, pe->fp); in process_incoming()
811 skb = pe->skb; in process_incoming()
812 pe->fp->n--; in process_incoming()
815 if (FS_DEBUG_QUEUE & fs_debug) my_hd (bus_to_virt (pe->bsa), 0x20); in process_incoming()
823 fs_dprintk (FS_DEBUG_ALLOC, "Free rec-d: %p\n", pe); in process_incoming()
824 kfree (pe); in process_incoming()
832 pe = bus_to_virt (qe->p0); in process_incoming()
[all …]
/drivers/net/wireless/ath/
dfs_pri_detector.c
119 static void pool_put_pulse_elem(struct pulse_elem *pe) in pool_put_pulse_elem() argument
122 list_add(&pe->head, &pulse_pool); in pool_put_pulse_elem()
150 struct pulse_elem *pe = NULL; in pool_get_pulse_elem() local
153 pe = list_first_entry(&pulse_pool, struct pulse_elem, head); in pool_get_pulse_elem()
154 list_del(&pe->head); in pool_get_pulse_elem()
158 return pe; in pool_get_pulse_elem()
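
dfs_pri_detector keeps released pulse elements on a pool list and takes from that list before allocating new ones. A rough user-space analogue of that free-list pattern, with a singly linked list standing in for the kernel's list_head machinery and malloc as the fallback allocator.

#include <stdlib.h>

struct pulse_elem {
        struct pulse_elem *next;
        unsigned long ts;
};

static struct pulse_elem *pulse_pool;            /* head of the free list */

static void pool_put_pulse_elem(struct pulse_elem *pe)
{
        pe->next = pulse_pool;                   /* push back for later reuse */
        pulse_pool = pe;
}

static struct pulse_elem *pool_get_pulse_elem(void)
{
        struct pulse_elem *pe = pulse_pool;
        if (pe)
                pulse_pool = pe->next;           /* pop the first free entry */
        else
                pe = malloc(sizeof(*pe));        /* pool empty: allocate fresh */
        return pe;
}

int main(void)
{
        struct pulse_elem *pe = pool_get_pulse_elem();
        pool_put_pulse_elem(pe);
        return pool_get_pulse_elem() == pe ? 0 : 1; /* reuse check */
}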
/drivers/gpu/drm/nouveau/nvkm/engine/disp/
sorgf119.c
69 int ln, int vs, int pe, int pc) in gf119_sor_dp_drv_ctl() argument
86 addr = nvbios_dpcfg_match(bios, addr, pc, vs, pe, in gf119_sor_dp_drv_ctl()
97 nvkm_wr32(device, 0x61c120 + loff, data[1] | (ocfg.pe << shift)); in gf119_sor_dp_drv_ctl()
