/drivers/net/ethernet/marvell/

D | mvpp2.c
    989  static int mvpp2_prs_hw_write(struct mvpp2 *priv, struct mvpp2_prs_entry *pe) in mvpp2_prs_hw_write() argument
    993  if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1) in mvpp2_prs_hw_write()
    997  pe->tcam.word[MVPP2_PRS_TCAM_INV_WORD] &= ~MVPP2_PRS_TCAM_INV_MASK; in mvpp2_prs_hw_write()
    1000 mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index); in mvpp2_prs_hw_write()
    1002 mvpp2_write(priv, MVPP2_PRS_TCAM_DATA_REG(i), pe->tcam.word[i]); in mvpp2_prs_hw_write()
    1005 mvpp2_write(priv, MVPP2_PRS_SRAM_IDX_REG, pe->index); in mvpp2_prs_hw_write()
    1007 mvpp2_write(priv, MVPP2_PRS_SRAM_DATA_REG(i), pe->sram.word[i]); in mvpp2_prs_hw_write()
    1013 static int mvpp2_prs_hw_read(struct mvpp2 *priv, struct mvpp2_prs_entry *pe) in mvpp2_prs_hw_read() argument
    1017 if (pe->index > MVPP2_PRS_TCAM_SRAM_SIZE - 1) in mvpp2_prs_hw_read()
    1021 mvpp2_write(priv, MVPP2_PRS_TCAM_IDX_REG, pe->index); in mvpp2_prs_hw_read()
    [all …]

/drivers/clk/st/

D | clkgen-fsyn.c
    39   unsigned long pe; member
    45   { .mdiv = 0x1f, .pe = 0x0, .sdiv = 0x7, .nsdiv = 0 }, /* 312.5 Khz */
    46   { .mdiv = 0x17, .pe = 0x25ed, .sdiv = 0x1, .nsdiv = 0 }, /* 27 MHz */
    47   { .mdiv = 0x1a, .pe = 0x7b36, .sdiv = 0x2, .nsdiv = 1 }, /* 36.87 MHz */
    48   { .mdiv = 0x13, .pe = 0x0, .sdiv = 0x2, .nsdiv = 1 }, /* 48 MHz */
    49   { .mdiv = 0x11, .pe = 0x1c72, .sdiv = 0x1, .nsdiv = 1 }, /* 108 MHz */
    53   { .mdiv = 0x1f, .pe = 0x0, .sdiv = 0x7, .nsdiv = 0 }, /* 625 Khz */
    54   { .mdiv = 0x13, .pe = 0x777c, .sdiv = 0x4, .nsdiv = 1 }, /* 25.175 MHz */
    55   { .mdiv = 0x19, .pe = 0x4d35, .sdiv = 0x2, .nsdiv = 0 }, /* 25.200 MHz */
    56   { .mdiv = 0x11, .pe = 0x1c72, .sdiv = 0x4, .nsdiv = 1 }, /* 27.000 MHz */
    [all …]

/drivers/tty/vt/

D | selection.c
    164  int i, ps, pe, multiplier; in set_selection() local
    185  pe = ye * vc->vc_size_row + (xe << 1); in set_selection()
    199  if (ps > pe) /* make sel_start <= sel_end */ in set_selection()
    202  ps = pe; in set_selection()
    203  pe = tmp; in set_selection()
    220  new_sel_end = pe; in set_selection()
    233  spc = isspace(sel_pos(pe)); in set_selection()
    234  for (new_sel_end = pe; ; pe += 2) in set_selection()
    236  if ((spc && !isspace(sel_pos(pe))) || in set_selection()
    237  (!spc && !inword(sel_pos(pe)))) in set_selection()
    [all …]

/drivers/vfio/

D | vfio_spapr_eeh.c
    37   struct eeh_pe *pe; in vfio_spapr_iommu_eeh_ioctl() local
    50   pe = eeh_iommu_group_to_pe(group); in vfio_spapr_iommu_eeh_ioctl()
    51   if (!pe) in vfio_spapr_iommu_eeh_ioctl()
    62   ret = eeh_pe_set_option(pe, EEH_OPT_DISABLE); in vfio_spapr_iommu_eeh_ioctl()
    65   ret = eeh_pe_set_option(pe, EEH_OPT_ENABLE); in vfio_spapr_iommu_eeh_ioctl()
    68   ret = eeh_pe_set_option(pe, EEH_OPT_THAW_MMIO); in vfio_spapr_iommu_eeh_ioctl()
    71   ret = eeh_pe_set_option(pe, EEH_OPT_THAW_DMA); in vfio_spapr_iommu_eeh_ioctl()
    74   ret = eeh_pe_get_state(pe); in vfio_spapr_iommu_eeh_ioctl()
    77   ret = eeh_pe_reset(pe, EEH_RESET_DEACTIVATE); in vfio_spapr_iommu_eeh_ioctl()
    80   ret = eeh_pe_reset(pe, EEH_RESET_HOT); in vfio_spapr_iommu_eeh_ioctl()
    [all …]

/drivers/md/

D | dm-snap.c
    679  struct dm_snap_pending_exception *pe = mempool_alloc(s->pending_pool, in alloc_pending_exception() local
    683  pe->snap = s; in alloc_pending_exception()
    685  return pe; in alloc_pending_exception()
    688  static void free_pending_exception(struct dm_snap_pending_exception *pe) in free_pending_exception() argument
    690  struct dm_snapshot *s = pe->snap; in free_pending_exception()
    692  mempool_free(pe, s->pending_pool); in free_pending_exception()
    1433 struct dm_snap_pending_exception *pe = context; in pending_complete() local
    1435 struct dm_snapshot *s = pe->snap; in pending_complete()
    1456 *e = pe->e; in pending_complete()
    1466 __check_for_conflicting_io(s, pe->e.old_chunk); in pending_complete()
    [all …]

D | dm-cache-policy-cleaner.c
    172  static int wb_map(struct dm_cache_policy *pe, dm_oblock_t oblock, in wb_map() argument
    176  struct policy *p = to_policy(pe); in wb_map()
    200  static int wb_lookup(struct dm_cache_policy *pe, dm_oblock_t oblock, dm_cblock_t *cblock) in wb_lookup() argument
    203  struct policy *p = to_policy(pe); in wb_lookup()
    223  static void __set_clear_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock, bool set) in __set_clear_dirty() argument
    225  struct policy *p = to_policy(pe); in __set_clear_dirty()
    246  static void wb_set_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock) in wb_set_dirty() argument
    248  struct policy *p = to_policy(pe); in wb_set_dirty()
    252  __set_clear_dirty(pe, oblock, true); in wb_set_dirty()
    256  static void wb_clear_dirty(struct dm_cache_policy *pe, dm_oblock_t oblock) in wb_clear_dirty() argument
    [all …]

/drivers/net/wireless/ath/ath9k/

D | dfs.c
    53   struct pulse_event *pe) in ath9k_postprocess_radar_event() argument
    123  pe->width = dur_to_usecs(sc->sc_ah, dur); in ath9k_postprocess_radar_event()
    124  pe->rssi = rssi; in ath9k_postprocess_radar_event()
    141  struct pulse_event pe; in ath9k_dfs_process_phyerr() local
    177  pe.freq = ah->curchan->channel; in ath9k_dfs_process_phyerr()
    178  pe.ts = mactime; in ath9k_dfs_process_phyerr()
    179  if (ath9k_postprocess_radar_event(sc, &ard, &pe)) { in ath9k_dfs_process_phyerr()
    184  pe.freq, pe.ts, pe.width, pe.rssi, in ath9k_dfs_process_phyerr()
    185  pe.ts - sc->dfs_prev_pulse_ts); in ath9k_dfs_process_phyerr()
    186  sc->dfs_prev_pulse_ts = pe.ts; in ath9k_dfs_process_phyerr()
    [all …]

/drivers/staging/speakup/

D | selection.c
    56   int i, ps, pe; in speakup_set_selection() local
    64   pe = spk_ye * vc->vc_size_row + (spk_xe << 1); in speakup_set_selection()
    66   if (ps > pe) { in speakup_set_selection()
    70   ps = pe; in speakup_set_selection()
    71   pe = tmp; in speakup_set_selection()
    83   new_sel_end = pe; in speakup_set_selection()
    89   for (pe = new_sel_end + 2; ; pe += 2) in speakup_set_selection()
    90   if (!ishardspace(sel_pos(pe)) || in speakup_set_selection()
    91   atedge(pe, vc->vc_size_row)) in speakup_set_selection()
    93   if (ishardspace(sel_pos(pe))) in speakup_set_selection()
    [all …]

/drivers/gpu/drm/radeon/

D | si_dma.c
    71   uint64_t pe, uint64_t src, in si_dma_vm_copy_pages() argument
    81   ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
    83   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
    86   pe += bytes; in si_dma_vm_copy_pages()
    107  uint64_t pe, in si_dma_vm_write_pages() argument
    121  ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
    122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
    123  for (; ndw > 0; ndw -= 2, --count, pe += 8) { in si_dma_vm_write_pages()
    155  uint64_t pe, in si_dma_vm_set_pages() argument
    174  ib->ptr[ib->length_dw++] = pe; /* dst addr */ in si_dma_vm_set_pages()
    [all …]

D | ni_dma.c
    316  uint64_t pe, uint64_t src, in cayman_dma_vm_copy_pages() argument
    328  ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
    330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
    333  pe += ndw * 4; in cayman_dma_vm_copy_pages()
    354  uint64_t pe, in cayman_dma_vm_write_pages() argument
    369  ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
    370  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_write_pages()
    371  for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cayman_dma_vm_write_pages()
    403  uint64_t pe, in cayman_dma_vm_set_pages() argument
    422  ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cayman_dma_vm_set_pages()
    [all …]

D | radeon_trace.h
    84   TP_PROTO(uint64_t pe, uint64_t addr, unsigned count,
    86   TP_ARGS(pe, addr, count, incr, flags),
    88   __field(u64, pe)
    96   __entry->pe = pe;
    103  __entry->pe, __entry->addr, __entry->incr,

D | cik_sdma.c
    775  uint64_t pe, uint64_t src, in cik_sdma_vm_copy_pages() argument
    789  ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
    790  ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pages()
    792  pe += bytes; in cik_sdma_vm_copy_pages()
    813  uint64_t pe, in cik_sdma_vm_write_pages() argument
    828  ib->ptr[ib->length_dw++] = pe; in cik_sdma_vm_write_pages()
    829  ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_write_pages()
    831  for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
    863  uint64_t pe, in cik_sdma_vm_set_pages() argument
    882  ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cik_sdma_vm_set_pages()
    [all …]

/drivers/isdn/hardware/eicon/

D | divasproc.c
    359  struct proc_dir_entry *de, *pe; in create_adapter_proc() local
    367  pe = proc_create_data(info_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
    369  if (!pe) in create_adapter_proc()
    371  a->proc_info = (void *) pe; in create_adapter_proc()
    373  pe = proc_create_data(grp_opt_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
    375  if (pe) in create_adapter_proc()
    376  a->proc_grp_opt = (void *) pe; in create_adapter_proc()
    377  pe = proc_create_data(d_l1_down_proc_name, S_IRUGO | S_IWUSR, de, in create_adapter_proc()
    379  if (pe) in create_adapter_proc()
    380  a->proc_d_l1_down = (void *) pe; in create_adapter_proc()

/drivers/scsi/libsas/

D | sas_dump.c
    47   void sas_dprint_porte(int phyid, enum port_event pe) in sas_dprint_porte() argument
    49   SAS_DPRINTK("phy%d: port event: %s\n", phyid, sas_porte_str[pe]); in sas_dprint_porte()
    51   void sas_dprint_phye(int phyid, enum phy_event pe) in sas_dprint_phye() argument
    53   SAS_DPRINTK("phy%d: phy event: %s\n", phyid, sas_phye_str[pe]); in sas_dprint_phye()

D | sas_dump.h
    27   void sas_dprint_porte(int phyid, enum port_event pe);
    28   void sas_dprint_phye(int phyid, enum phy_event pe);

/drivers/gpu/drm/nouveau/core/subdev/bios/

D | dp.c
    170  info->pe = nv_ro08(bios, data + 0x03); in nvbios_dpcfg_parse()
    177  info->pe = nv_ro08(bios, data + 0x02); in nvbios_dpcfg_parse()
    189  nvbios_dpcfg_match(struct nouveau_bios *bios, u16 outp, u8 pc, u8 vs, u8 pe, in nvbios_dpcfg_match() argument
    198  idx = (pc * 10) + vsoff[vs] + pe; in nvbios_dpcfg_match()
    203  nv_ro08(bios, data + 0x01) == pe) in nvbios_dpcfg_match()

/drivers/misc/cxl/

D | file.c
    83   pr_devel("afu_open pe: %i\n", ctx->pe); in __afu_open()
    113  __func__, ctx->pe); in afu_release()
    141  pr_devel("%s: pe: %i\n", __func__, ctx->pe); in afu_ioctl_start_work()
    200  pr_devel("%s: pe: %i\n", __func__, ctx->pe); in afu_ioctl_process_element()
    202  if (copy_to_user(upe, &ctx->pe, sizeof(__u32))) in afu_ioctl_process_element()
    251  pr_devel("afu_poll wait done pe: %i\n", ctx->pe); in afu_poll()
    263  pr_devel("afu_poll pe: %i returning %#x\n", ctx->pe, mask); in afu_poll()
    313  event.header.process_element = ctx->pe; in afu_read()

D | native.c
    286  *(ctx->afu->sw_command_status) = cpu_to_be64(cmd | 0 | ctx->pe); in do_process_element_cmd()
    288  cxl_p1n_write(ctx->afu, CXL_PSL_LLCMD_An, cmd | ctx->pe); in do_process_element_cmd()
    300  (cmd | (cmd >> 16) | ctx->pe)) in do_process_element_cmd()
    320  pr_devel("%s Adding pe: %i started\n", __func__, ctx->pe); in add_process_element()
    323  pr_devel("%s Adding pe: %i finished\n", __func__, ctx->pe); in add_process_element()
    337  pr_devel("%s Terminate pe: %i started\n", __func__, ctx->pe); in terminate_process_element()
    341  pr_devel("%s Terminate pe: %i finished\n", __func__, ctx->pe); in terminate_process_element()
    351  pr_devel("%s Remove pe: %i started\n", __func__, ctx->pe); in remove_process_element()
    355  pr_devel("%s Remove pe: %i finished\n", __func__, ctx->pe); in remove_process_element()
    369  (ctx->afu->pp_offset + ctx->afu->pp_size * ctx->pe); in assign_psn_space()

D | irq.c
    110  pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar); in cxl_irq()
    123  pr_devel("Scheduling segment miss handling for later pe: %i\n", ctx->pe); in cxl_irq()
    144  pr_devel("Scheduling page fault handling for later pe: %i\n", ctx->pe); in cxl_irq()
    166  ctx->pe, irq_info.afu_err); in cxl_irq()
    225  ctx->pe, irq, hwirq); in cxl_irq_afu()
    230  afu_irq, ctx->pe, irq, hwirq); in cxl_irq_afu()

D | context.c
    97   ctx->pe = i; in cxl_context_init()
    127  ctx->psn_phys, ctx->pe , ctx->master); in cxl_context_iomap()
    194  idr_remove(&ctx->afu->contexts_idr, ctx->pe); in cxl_context_free()

D | fault.c
    118  pr_devel("CXL interrupt: Segment fault pe: %i ea: %#llx\n", ctx->pe, ea); in cxl_handle_segment_miss()
    156  pr_devel("Page fault successfully handled for pe: %i!\n", ctx->pe); in cxl_handle_page_fault()
    171  cxl_p2n_read(ctx->afu, CXL_PSL_PEHandle_An) != ctx->pe) { in cxl_handle_fault()
    180  "DSISR: %#llx DAR: %#llx\n", ctx->pe, dsisr, dar); in cxl_handle_fault()

/drivers/net/wireless/ath/

D | dfs_pri_detector.c
    116  static void pool_put_pulse_elem(struct pulse_elem *pe) in pool_put_pulse_elem() argument
    119  list_add(&pe->head, &pulse_pool); in pool_put_pulse_elem()
    147  struct pulse_elem *pe = NULL; in pool_get_pulse_elem() local
    150  pe = list_first_entry(&pulse_pool, struct pulse_elem, head); in pool_get_pulse_elem()
    151  list_del(&pe->head); in pool_get_pulse_elem()
    155  return pe; in pool_get_pulse_elem()

/drivers/atm/

D | firestream.c
    777  struct FS_BPENTRY *pe; in process_incoming() local
    793  pe = bus_to_virt (qe->p0); in process_incoming()
    795  pe->flags, pe->next, pe->bsa, pe->aal_bufsize, in process_incoming()
    796  pe->skb, pe->fp); in process_incoming()
    811  skb = pe->skb; in process_incoming()
    812  pe->fp->n--; in process_incoming()
    815  if (FS_DEBUG_QUEUE & fs_debug) my_hd (bus_to_virt (pe->bsa), 0x20); in process_incoming()
    823  fs_dprintk (FS_DEBUG_ALLOC, "Free rec-d: %p\n", pe); in process_incoming()
    824  kfree (pe); in process_incoming()
    832  pe = bus_to_virt (qe->p0); in process_incoming()
    [all …]

/drivers/gpu/drm/nouveau/core/engine/disp/

D | sornvd0.c
    83   nvd0_sor_dp_drv_ctl(struct nvkm_output_dp *outp, int ln, int vs, int pe, int pc) in nvd0_sor_dp_drv_ctl() argument
    100  addr = nvbios_dpcfg_match(bios, addr, pc, vs, pe, in nvd0_sor_dp_drv_ctl()
    111  nv_wr32(priv, 0x61c120 + loff, data[1] | (ocfg.pe << shift)); in nvd0_sor_dp_drv_ctl()

D | sornv94.c
    105  nv94_sor_dp_drv_ctl(struct nvkm_output_dp *outp, int ln, int vs, int pe, int pc) in nv94_sor_dp_drv_ctl() argument
    122  addr = nvbios_dpcfg_match(bios, addr, 0, vs, pe, in nv94_sor_dp_drv_ctl()
    133  nv_wr32(priv, 0x61c120 + loff, data[1] | (ocfg.pe << shift)); in nv94_sor_dp_drv_ctl()