/drivers/net/ethernet/marvell/ |
D | mvpp2.c |
 1403  static int mvpp2_prs_hw_write(struct mvpp2 *priv, struct mvpp2_prs_entry *pe)  in mvpp2_prs_hw_write() argument
 1407      if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1)  in mvpp2_prs_hw_write()
 1411      pe->tcam.word[MVPP2_PRS_TCAM_INV_WORD] &= ~MVPP2_PRS_TCAM_INV_MASK;  in mvpp2_prs_hw_write()
 1414      mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index);  in mvpp2_prs_hw_write()
 1416      mvpp2_write(priv, MVPP2_PRS_TCAM_DATA_REG(i), pe->tcam.word[i]);  in mvpp2_prs_hw_write()
 1419      mvpp2_write(priv, MVPP2_PRS_SRAM_IDX_REG, pe->index);  in mvpp2_prs_hw_write()
 1421      mvpp2_write(priv, MVPP2_PRS_SRAM_DATA_REG(i), pe->sram.word[i]);  in mvpp2_prs_hw_write()
 1427  static int mvpp2_prs_hw_read(struct mvpp2 *priv, struct mvpp2_prs_entry *pe)  in mvpp2_prs_hw_read() argument
 1431      if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1)  in mvpp2_prs_hw_read()
 1435      mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index);  in mvpp2_prs_hw_read()
[all …]
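The mvpp2 matches follow one pattern: bounds-check the parser entry index, select it via an index register, then burst-write the entry's data words. A minimal compilable sketch of that shape, where the table depth, word count, and a shadow array are assumptions standing in for the MVPP2_PRS_* constants and mvpp2_write():

    #include <stdint.h>

    #define PRS_TCAM_SRAM_SIZE 256   /* assumed table depth, illustration only */
    #define PRS_TCAM_WORDS     6     /* assumed words per TCAM entry */

    struct prs_entry {
        unsigned int index;
        uint32_t tcam_word[PRS_TCAM_WORDS];
    };

    /* Shadow array standing in for the device's indexed data registers. */
    static uint32_t tcam_shadow[PRS_TCAM_SRAM_SIZE][PRS_TCAM_WORDS];

    /* Same shape as mvpp2_prs_hw_write(): reject an out-of-range index,
     * then write every data word of the selected entry. */
    static int prs_hw_write(const struct prs_entry *pe)
    {
        int i;

        if (pe->index > PRS_TCAM_SRAM_SIZE - 1)
            return -1;                   /* -EINVAL in the real driver */
        for (i = 0; i < PRS_TCAM_WORDS; i++)
            tcam_shadow[pe->index][i] = pe->tcam_word[i];
        return 0;
    }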
|
/drivers/vfio/ |
D | vfio_spapr_eeh.c |
   37  struct eeh_pe *pe;  in vfio_spapr_iommu_eeh_ioctl() local
   50  pe = eeh_iommu_group_to_pe(group);  in vfio_spapr_iommu_eeh_ioctl()
   51  if (!pe)  in vfio_spapr_iommu_eeh_ioctl()
   62  ret = eeh_pe_set_option(pe, EEH_OPT_DISABLE);  in vfio_spapr_iommu_eeh_ioctl()
   65  ret = eeh_pe_set_option(pe, EEH_OPT_ENABLE);  in vfio_spapr_iommu_eeh_ioctl()
   68  ret = eeh_pe_set_option(pe, EEH_OPT_THAW_MMIO);  in vfio_spapr_iommu_eeh_ioctl()
   71  ret = eeh_pe_set_option(pe, EEH_OPT_THAW_DMA);  in vfio_spapr_iommu_eeh_ioctl()
   74  ret = eeh_pe_get_state(pe);  in vfio_spapr_iommu_eeh_ioctl()
   77  ret = eeh_pe_reset(pe, EEH_RESET_DEACTIVATE);  in vfio_spapr_iommu_eeh_ioctl()
   80  ret = eeh_pe_reset(pe, EEH_RESET_HOT);  in vfio_spapr_iommu_eeh_ioctl()
[all …]
|
/drivers/tty/vt/ |
D | selection.c |
  170  int i, ps, pe, multiplier;  in __set_selection() local
  191  pe = ye * vc->vc_size_row + (xe << 1);  in __set_selection()
  205  if (ps > pe) /* make sel_start <= sel_end */  in __set_selection()
  208      ps = pe;  in __set_selection()
  209      pe = tmp;  in __set_selection()
  226  new_sel_end = pe;  in __set_selection()
  239  spc = isspace(sel_pos(pe));  in __set_selection()
  240  for (new_sel_end = pe; ; pe += 2)  in __set_selection()
  242      if ((spc && !isspace(sel_pos(pe))) ||  in __set_selection()
  243          (!spc && !inword(sel_pos(pe))))  in __set_selection()
[all …]
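Here ps and pe are byte offsets into the text-mode video buffer (two bytes per character cell), and the driver swaps them so the selection start never exceeds the end. A self-contained sketch of just that arithmetic, using assumed coordinates rather than the real vc_data state:

    #include <stdio.h>

    /* Each character cell occupies 2 bytes in the VGA text buffer. */
    static int cell_offset(int row, int col, int size_row)
    {
        return row * size_row + (col << 1);
    }

    int main(void)
    {
        int size_row = 160;                     /* 80 columns * 2 bytes, assumed */
        int ps = cell_offset(10, 5, size_row);  /* selection start */
        int pe = cell_offset(3, 70, size_row);  /* selection end */

        if (ps > pe) {                          /* make sel_start <= sel_end */
            int tmp = ps;
            ps = pe;
            pe = tmp;
        }
        printf("start=%d end=%d\n", ps, pe);
        return 0;
    }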
|
/drivers/net/wireless/ath/ath9k/ |
D | dfs.c |
  204  struct pulse_event *pe)  in ath9k_postprocess_radar_event() argument
  268  pe->width = dur_to_usecs(sc->sc_ah, dur);  in ath9k_postprocess_radar_event()
  269  pe->rssi = rssi;  in ath9k_postprocess_radar_event()
  276  ath9k_dfs_process_radar_pulse(struct ath_softc *sc, struct pulse_event *pe)  in ath9k_dfs_process_radar_pulse()
  282  if (!pd->add_pulse(pd, pe))  in ath9k_dfs_process_radar_pulse()
  297  struct pulse_event pe;  in ath9k_dfs_process_phyerr() local
  333  pe.freq = ah->curchan->channel;  in ath9k_dfs_process_phyerr()
  334  pe.ts = mactime;  in ath9k_dfs_process_phyerr()
  335  if (!ath9k_postprocess_radar_event(sc, &ard, &pe))  in ath9k_dfs_process_phyerr()
  338  if (pe.width > MIN_CHIRP_PULSE_WIDTH &&  in ath9k_dfs_process_phyerr()
[all …]
|
/drivers/misc/cxl/ |
D | trace.h |
   75  __field(u16, pe)
   81  __entry->pe = ctx->pe;
   87  __entry->pe
  100  __field(u16, pe)
  110  __entry->pe = ctx->pe;
  121  __entry->pe,
  141  __field(u16, pe)
  150  __entry->pe = ctx->pe;
  159  __entry->pe,
  174  __field(u16, pe)
[all …]
|
D | irq.c |
   46  pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar);  in cxl_irq_psl9()
   49  pr_devel("CXL interrupt: Scheduling translation fault handling for later (pe: %i)\n", ctx->pe);  in cxl_irq_psl9()
   68  ctx->pe, irq_info->afu_err);  in cxl_irq_psl9()
   97  pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar);  in cxl_irq_psl8()
  110  pr_devel("Scheduling segment miss handling for later pe: %i\n", ctx->pe);  in cxl_irq_psl8()
  131  pr_devel("Scheduling page fault handling for later pe: %i\n", ctx->pe);  in cxl_irq_psl8()
  154  ctx->pe, irq_info->afu_err);  in cxl_irq_psl8()
  205  ctx->pe, irq, hwirq);  in cxl_irq_afu()
  211  afu_irq, ctx->pe, irq, hwirq);  in cxl_irq_afu()
  342  ctx->pe, j);  in afu_allocate_irqs()
|
D | fault.c |
  121  pr_devel("CXL interrupt: Segment fault pe: %i ea: %#llx\n", ctx->pe, ea);  in cxl_handle_segment_miss()
  193  pr_devel("Page fault successfully handled for pe: %i!\n", ctx->pe);  in cxl_handle_page_fault()
  254  cxl_p2n_read(ctx->afu, CXL_PSL_PEHandle_An) != ctx->pe) {  in cxl_handle_fault()
  271  "DSISR: %#llx DAR: %#llx\n", ctx->pe, dsisr, dar);  in cxl_handle_fault()
  278  __func__, ctx->pe, pid_nr(ctx->pid));  in cxl_handle_fault()
  283  ctx->pe, pid_nr(ctx->pid));  in cxl_handle_fault()
|
D | context.c |
  103  ctx->pe = i;  in cxl_context_init()
  106  ctx->external_pe = ctx->pe;  in cxl_context_init()
  137  __func__, ctx->pe, vmf->address, offset);  in cxl_mmap_fault()
  222  ctx->psn_phys, ctx->pe, ctx->master);  in cxl_context_iomap()
  346  idr_remove(&ctx->afu->contexts_idr, ctx->pe);  in cxl_context_free()
|
D | file.c |
   96  pr_devel("afu_open pe: %i\n", ctx->pe);  in __afu_open()
  125  __func__, ctx->pe);  in afu_release()
  157  pr_devel("%s: pe: %i\n", __func__, ctx->pe);  in afu_ioctl_start_work()
  256  pr_devel("%s: pe: %i\n", __func__, ctx->pe);  in afu_ioctl_process_element()
  346  pr_devel("afu_poll wait done pe: %i\n", ctx->pe);  in afu_poll()
  357  pr_devel("afu_poll pe: %i returning %#x\n", ctx->pe, mask);  in afu_poll()
  445  event.header.process_element = ctx->pe;  in afu_read()
|
/drivers/md/ |
D | dm-snap.c |
  701  struct dm_snap_pending_exception *pe = mempool_alloc(s->pending_pool,  in alloc_pending_exception() local
  705  pe->snap = s;  in alloc_pending_exception()
  707  return pe;  in alloc_pending_exception()
  710  static void free_pending_exception(struct dm_snap_pending_exception *pe)  in free_pending_exception() argument
  712  struct dm_snapshot *s = pe->snap;  in free_pending_exception()
  714  mempool_free(pe, s->pending_pool);  in free_pending_exception()
 1518  struct dm_snap_pending_exception *pe = context;  in pending_complete() local
 1520  struct dm_snapshot *s = pe->snap;  in pending_complete()
 1541  *e = pe->e;  in pending_complete()
 1551  __check_for_conflicting_io(s, pe->e.old_chunk);  in pending_complete()
[all …]
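alloc_pending_exception() and free_pending_exception() are a matched pair around a mempool: the exception keeps a back-pointer to its snapshot so the free path can locate the owning pool. A rough userspace analogue with malloc/free standing in for mempool_alloc/mempool_free (the real mempool_alloc with its GFP flags can block rather than fail):

    #include <stdlib.h>

    struct snapshot;                  /* stands in for struct dm_snapshot */

    struct pending_exception {
        struct snapshot *snap;        /* back-pointer used by the free path */
    };

    static struct pending_exception *alloc_pending_exception(struct snapshot *s)
    {
        struct pending_exception *pe = malloc(sizeof(*pe)); /* mempool_alloc() in the driver */
        if (pe)
            pe->snap = s;             /* remember which snapshot owns us */
        return pe;
    }

    static void free_pending_exception(struct pending_exception *pe)
    {
        /* The driver returns pe to pe->snap->pending_pool; here, plain free(). */
        free(pe);
    }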
|
/drivers/staging/speakup/ |
D | selection.c |
   57  int i, ps, pe;  in speakup_set_selection() local
   65  pe = spk_ye * vc->vc_size_row + (spk_xe << 1);  in speakup_set_selection()
   67  if (ps > pe) {  in speakup_set_selection()
   71      ps = pe;  in speakup_set_selection()
   72      pe = tmp;  in speakup_set_selection()
   84  new_sel_end = pe;  in speakup_set_selection()
   90  for (pe = new_sel_end + 2; ; pe += 2)  in speakup_set_selection()
   91      if (!ishardspace(sel_pos(pe)) ||  in speakup_set_selection()
   92          atedge(pe, vc->vc_size_row))  in speakup_set_selection()
   94  if (ishardspace(sel_pos(pe)))  in speakup_set_selection()
[all …]
|
/drivers/gpu/drm/radeon/ |
D | si_dma.c |
   71  uint64_t pe, uint64_t src,  in si_dma_vm_copy_pages() argument
   81  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
   83  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
   86  pe += bytes;  in si_dma_vm_copy_pages()
  107  uint64_t pe,  in si_dma_vm_write_pages() argument
  121  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
  122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
  123  for (; ndw > 0; ndw -= 2, --count, pe += 8) {  in si_dma_vm_write_pages()
  154  uint64_t pe,  in si_dma_vm_set_pages() argument
  173  ib->ptr[ib->length_dw++] = pe; /* dst addr */  in si_dma_vm_set_pages()
[all …]
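In this entry and the ni_dma.c/cik_sdma.c entries below, `pe` is a 64-bit page-entry GPU address streamed into 32-bit dwords of an indirect buffer; SI-class hardware masks the upper half to 0xff (40-bit addressing), while CIK takes the full upper word. A compilable sketch of that packing, with a plain array as a stand-in for the IB:

    #include <stdint.h>

    static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
    static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

    /* Pack a destination page-entry address into a command stream the way
     * si_dma_vm_write_pages() does; `ib` stands in for ib->ptr. */
    static unsigned pack_pe_addr(uint32_t *ib, unsigned dw, uint64_t pe)
    {
        ib[dw++] = lower_32_bits(pe);
        ib[dw++] = upper_32_bits(pe) & 0xff;  /* SI: only 40 address bits */
        return dw;
    }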
|
D | ni_dma.c |
  317  uint64_t pe, uint64_t src,  in cayman_dma_vm_copy_pages() argument
  329  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
  331  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
  334  pe += ndw * 4;  in cayman_dma_vm_copy_pages()
  355  uint64_t pe,  in cayman_dma_vm_write_pages() argument
  370  ib->ptr[ib->length_dw++] = pe;  in cayman_dma_vm_write_pages()
  371  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_write_pages()
  372  for (; ndw > 0; ndw -= 2, --count, pe += 8) {  in cayman_dma_vm_write_pages()
  403  uint64_t pe,  in cayman_dma_vm_set_pages() argument
  422  ib->ptr[ib->length_dw++] = pe; /* dst addr */  in cayman_dma_vm_set_pages()
[all …]
|
D | radeon_trace.h |
   84  TP_PROTO(uint64_t pe, uint64_t addr, unsigned count,
   86  TP_ARGS(pe, addr, count, incr, flags),
   88  __field(u64, pe)
   96  __entry->pe = pe;
  103  __entry->pe, __entry->addr, __entry->incr,
|
D | cik_sdma.c |
  805  uint64_t pe, uint64_t src,  in cik_sdma_vm_copy_pages() argument
  819  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pages()
  820  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_copy_pages()
  822  pe += bytes;  in cik_sdma_vm_copy_pages()
  843  uint64_t pe,  in cik_sdma_vm_write_pages() argument
  858  ib->ptr[ib->length_dw++] = pe;  in cik_sdma_vm_write_pages()
  859  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_write_pages()
  861  for (; ndw > 0; ndw -= 2, --count, pe += 8) {  in cik_sdma_vm_write_pages()
  892  uint64_t pe,  in cik_sdma_vm_set_pages() argument
  911  ib->ptr[ib->length_dw++] = pe; /* dst addr */  in cik_sdma_vm_set_pages()
[all …]
|
/drivers/clk/st/ |
D | clkgen-fsyn.c |
   40  unsigned long pe;  member
   63  struct clkgen_field pe[QUADFS_MAX_CHAN];  member
  106  .pe = { CLKGEN_FIELD(0x304, 0x7fff, 0),
  130  .pe = { CLKGEN_FIELD(0x2b4, 0x7fff, 0),
  468  u32 pe;  member
  498  CLKGEN_WRITE(fs, pe[fs->chan], fs->pe);  in quadfs_fsynth_program_rate()
  582  res = (P20 * (32 + fs->mdiv) + 32 * fs->pe) * s * ns;  in clk_fs660c32_dig_get_rate()
  607  fs_tmp.pe = (unsigned long)*p;  in clk_fs660c32_get_pe()
  617  fs->pe = (unsigned long)*p;  in clk_fs660c32_get_pe()
  666  if (fs->pe > 2)  in clk_fs660c32_dig_get_params()
[all …]
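Here `pe` is the 15-bit fine-tuning word of the FS660C32 fractional synthesizer (CLKGEN_FIELD(..., 0x7fff, 0)), folded into the divider term at line 582. A worked evaluation of just that expression, with every input value assumed for illustration rather than taken from the driver:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        const uint64_t P20 = (uint64_t)1 << 20;  /* assumed 2^20 fixed-point scale */
        uint32_t mdiv = 5, pe = 1000;            /* hypothetical register fields */
        uint32_t s = 2, ns = 8;                  /* hypothetical divider settings */

        /* The divider term from clk_fs660c32_dig_get_rate(), line 582. */
        uint64_t res = (P20 * (32 + mdiv) + 32 * pe) * s * ns;
        printf("divider term = %llu\n", (unsigned long long)res);
        return 0;
    }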
|
/drivers/scsi/libsas/ |
D | sas_dump.c |
   47  void sas_dprint_porte(int phyid, enum port_event pe)  in sas_dprint_porte() argument
   49  SAS_DPRINTK("phy%d: port event: %s\n", phyid, sas_porte_str[pe]);  in sas_dprint_porte()
   51  void sas_dprint_phye(int phyid, enum phy_event pe)  in sas_dprint_phye() argument
   53  SAS_DPRINTK("phy%d: phy event: %s\n", phyid, sas_phye_str[pe]);  in sas_dprint_phye()
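These dump helpers just index a string table by the enum value. A standalone version of the lookup pattern (the event names are an abridged, assumed subset, not the full libsas set):

    #include <stdio.h>

    enum port_event { PORTE_BYTES_DMAED, PORTE_BROADCAST_RCVD, PORTE_NUM };

    /* Table indexed directly by the enum, mirroring sas_porte_str[]. */
    static const char *porte_str[PORTE_NUM] = {
        [PORTE_BYTES_DMAED]    = "PORTE_BYTES_DMAED",
        [PORTE_BROADCAST_RCVD] = "PORTE_BROADCAST_RCVD",
    };

    static void dprint_porte(int phyid, enum port_event pe)
    {
        printf("phy%d: port event: %s\n", phyid, porte_str[pe]);
    }

    int main(void)
    {
        dprint_porte(0, PORTE_BROADCAST_RCVD);
        return 0;
    }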
|
D | sas_dump.h |
   27  void sas_dprint_porte(int phyid, enum port_event pe);
   28  void sas_dprint_phye(int phyid, enum phy_event pe);
|
/drivers/isdn/hardware/eicon/ |
D | divasproc.c |
  359  struct proc_dir_entry *de, *pe;  in create_adapter_proc() local
  367  pe = proc_create_data(info_proc_name, S_IRUGO | S_IWUSR, de,  in create_adapter_proc()
  369  if (!pe)  in create_adapter_proc()
  371  a->proc_info = (void *) pe;  in create_adapter_proc()
  373  pe = proc_create_data(grp_opt_proc_name, S_IRUGO | S_IWUSR, de,  in create_adapter_proc()
  375  if (pe)  in create_adapter_proc()
  376  a->proc_grp_opt = (void *) pe;  in create_adapter_proc()
  377  pe = proc_create_data(d_l1_down_proc_name, S_IRUGO | S_IWUSR, de,  in create_adapter_proc()
  379  if (pe)  in create_adapter_proc()
  380  a->proc_d_l1_down = (void *) pe;  in create_adapter_proc()
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_trace.h |
  352  TP_PROTO(uint64_t pe, uint64_t addr, unsigned count,
  354  TP_ARGS(pe, addr, count, incr, flags),
  356  __field(u64, pe)
  364  __entry->pe = pe;
  371  __entry->pe, __entry->addr, __entry->incr,
  376  TP_PROTO(uint64_t pe, uint64_t src, unsigned count),
  377  TP_ARGS(pe, src, count),
  379  __field(u64, pe)
  385  __entry->pe = pe;
  390  __entry->pe, __entry->src, __entry->count)
|
D | si_dma.c |
  346  uint64_t pe, uint64_t src,  in si_dma_vm_copy_pte() argument
  353  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pte()
  355  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pte()
  370  static void si_dma_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,  in si_dma_vm_write_pte() argument
  377  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_write_pte()
  378  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in si_dma_vm_write_pte()
  399  uint64_t pe,  in si_dma_vm_set_pte_pde() argument
  418  ib->ptr[ib->length_dw++] = pe; /* dst addr */  in si_dma_vm_set_pte_pde()
  419  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_set_pte_pde()
  426  pe += ndw * 4;  in si_dma_vm_set_pte_pde()
|
/drivers/gpu/drm/nouveau/nvkm/subdev/bios/ |
D | dp.c |
  175  info->pe = nvbios_rd08(bios, data + 0x03);  in nvbios_dpcfg_parse()
  183  info->pe = nvbios_rd08(bios, data + 0x02);  in nvbios_dpcfg_parse()
  188  info->pe = nvbios_rd08(bios, data + 0x01);  in nvbios_dpcfg_parse()
  200  nvbios_dpcfg_match(struct nvkm_bios *bios, u16 outp, u8 pc, u8 vs, u8 pe,  in nvbios_dpcfg_match() argument
  209  idx = (pc * 10) + vsoff[vs] + pe;  in nvbios_dpcfg_match()
  219  nvbios_rd08(bios, data + 0x01) == pe)  in nvbios_dpcfg_match()
|
/drivers/atm/ |
D | firestream.c |
  781  struct FS_BPENTRY *pe;  in process_incoming() local
  797  pe = bus_to_virt (qe->p0);  in process_incoming()
  799  pe->flags, pe->next, pe->bsa, pe->aal_bufsize,  in process_incoming()
  800  pe->skb, pe->fp);  in process_incoming()
  815  skb = pe->skb;  in process_incoming()
  816  pe->fp->n--;  in process_incoming()
  819  if (FS_DEBUG_QUEUE & fs_debug) my_hd (bus_to_virt (pe->bsa), 0x20);  in process_incoming()
  827  fs_dprintk (FS_DEBUG_ALLOC, "Free rec-d: %p\n", pe);  in process_incoming()
  828  kfree (pe);  in process_incoming()
  836  pe = bus_to_virt (qe->p0);  in process_incoming()
[all …]
|
/drivers/net/wireless/ath/ |
D | dfs_pri_detector.c |
  119  static void pool_put_pulse_elem(struct pulse_elem *pe)  in pool_put_pulse_elem() argument
  122  list_add(&pe->head, &pulse_pool);  in pool_put_pulse_elem()
  150  struct pulse_elem *pe = NULL;  in pool_get_pulse_elem() local
  153  pe = list_first_entry(&pulse_pool, struct pulse_elem, head);  in pool_get_pulse_elem()
  154  list_del(&pe->head);  in pool_get_pulse_elem()
  158  return pe;  in pool_get_pulse_elem()
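pool_put_pulse_elem()/pool_get_pulse_elem() implement a simple free list: put prepends to a global list, get pops the first entry or returns NULL when the pool is empty. A self-contained sketch with a singly linked list replacing the kernel list_head (the driver's spinlock around the pool is omitted here):

    #include <stdlib.h>

    struct pulse_elem {
        struct pulse_elem *next;   /* stands in for struct list_head head */
        unsigned long ts;
    };

    static struct pulse_elem *pulse_pool;  /* global free list, as in the driver */

    static void pool_put(struct pulse_elem *pe)
    {
        pe->next = pulse_pool;     /* list_add(): push onto the front */
        pulse_pool = pe;
    }

    static struct pulse_elem *pool_get(void)
    {
        struct pulse_elem *pe = pulse_pool;  /* list_first_entry() + list_del() */
        if (pe)
            pulse_pool = pe->next;
        return pe;                 /* NULL when the pool is empty */
    }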
|
/drivers/gpu/drm/nouveau/include/nvkm/subdev/bios/ |
D | dp.h |
   22  u8 pe;  member
   30  nvbios_dpcfg_match(struct nvkm_bios *, u16 outp, u8 pc, u8 vs, u8 pe,
|