/drivers/gpu/drm/nouveau/nvkm/engine/disp/
  gt215.c
      38  const u32 soff = ior->id * 0x800;  in gt215_sor_hda_eld() (local)
      42  nvkm_wr32(device, 0x61c440 + soff, (i << 8) | data[i]);  in gt215_sor_hda_eld()
      44  nvkm_wr32(device, 0x61c440 + soff, (i << 8));  in gt215_sor_hda_eld()
      45  nvkm_mask(device, 0x61c448 + soff, 0x80000002, 0x80000002);  in gt215_sor_hda_eld()
      71  const u32 soff = nv50_ior_base(sor);  in gt215_sor_dp_audio() (local)
      75  nvkm_mask(device, 0x61c1e0 + soff, mask, data);  in gt215_sor_dp_audio()
      77  if (!(nvkm_rd32(device, 0x61c1e0 + soff) & 0x80000000))  in gt215_sor_dp_audio()
     100  const u32 soff = nv50_ior_base(ior);  in gt215_sor_hdmi_infoframe_vsi() (local)
     104  nvkm_mask(device, 0x61c53c + soff, 0x00010001, 0x00010000);  in gt215_sor_hdmi_infoframe_vsi()
     108  nvkm_wr32(device, 0x61c544 + soff, vsi.header);  in gt215_sor_hdmi_infoframe_vsi()
      [all …]
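In gt215.c above (and in the other nouveau SOR files that follow), `soff` is a per-output register offset (`ior->id * 0x800` here, `nv50_ior_base(sor)` elsewhere) added to a base MMIO address, so one routine can program whichever SOR instance it is handed. Below is a minimal user-space sketch of that addressing pattern and of the masked read-modify-write that `nvkm_mask()` performs in the listing; the register offsets, the ELD layout and the fake MMIO array are illustrative assumptions, not the hardware map:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical layout: each SOR owns a 0x800-byte register window. */
    #define SOR_STRIDE    0x800u
    #define SOR_ELD_DATA  0x440u      /* invented offsets, illustration only */
    #define SOR_ELD_CTRL  0x448u

    static uint32_t mmio[0x4000];     /* fake register file */

    static uint32_t rd32(uint32_t reg)               { return mmio[reg / 4]; }
    static void     wr32(uint32_t reg, uint32_t val) { mmio[reg / 4] = val; }

    /* Read-modify-write under a mask, like nvkm_mask() in the listing. */
    static void mask32(uint32_t reg, uint32_t mask, uint32_t val)
    {
            wr32(reg, (rd32(reg) & ~mask) | (val & mask));
    }

    /* Stream out ELD-style bytes: index in bits 15:8, data in bits 7:0,
     * then flag the block, mirroring the shape of the gt215 ELD loop. */
    static void sor_write_eld(unsigned int sor_id, const uint8_t *eld, int len)
    {
            const uint32_t soff = sor_id * SOR_STRIDE;

            for (int i = 0; i < len; i++)
                    wr32(SOR_ELD_DATA + soff, ((uint32_t)i << 8) | eld[i]);
            mask32(SOR_ELD_CTRL + soff, 0x80000002, 0x80000002);
    }

    int main(void)
    {
            const uint8_t eld[4] = { 0x10, 0x00, 0x08, 0x00 };

            sor_write_eld(1, eld, sizeof(eld));
            printf("SOR1 ctrl: 0x%08x\n", (unsigned)rd32(SOR_ELD_CTRL + SOR_STRIDE));
            return 0;
    }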
  g94.c
      57  const u32 soff = nv50_ior_base(sor);  in g94_sor_dp_audio_sym() (local)
      59  nvkm_mask(device, 0x61c1e8 + soff, 0x0000ffff, h);  in g94_sor_dp_audio_sym()
      60  nvkm_mask(device, 0x61c1ec + soff, 0x00ffffff, v);  in g94_sor_dp_audio_sym()
     105  const u32 soff = nv50_ior_base(sor);  in g94_sor_dp_power() (local)
     113  nvkm_mask(device, 0x61c034 + soff, 0x80000000, 0x80000000);  in g94_sor_dp_power()
     115  if (!(nvkm_rd32(device, 0x61c034 + soff) & 0x80000000))  in g94_sor_dp_power()
     124  const u32 soff = nv50_ior_base(sor);  in g94_sor_dp_links() (local)
     135  nvkm_mask(device, 0x614300 + soff, 0x000c0000, clksor);  in g94_sor_dp_links()
     156  const u32 soff = nv50_ior_base(sor);  in g94_sor_war_needed() (local)
     159  switch (nvkm_rd32(device, 0x614300 + soff) & 0x00030000) {  in g94_sor_war_needed()
      [all …]
  gm107.c
      35  const u32 soff = nv50_ior_base(sor);  in gm107_sor_dp_pattern() (local)
      50  nvkm_mask(device, 0x61c110 + soff, mask, data);  in gm107_sor_dp_pattern()
      52  nvkm_mask(device, 0x61c12c + soff, mask, data);  in gm107_sor_dp_pattern()
  gf119.c
      49  const u32 soff = 0x030 * ior->id + (head * 0x04);  in gf119_sor_hda_eld() (local)
      53  nvkm_wr32(device, 0x10ec00 + soff, (i << 8) | data[i]);  in gf119_sor_hda_eld()
      55  nvkm_wr32(device, 0x10ec00 + soff, (i << 8));  in gf119_sor_hda_eld()
      56  nvkm_mask(device, 0x10ec10 + soff, 0x80000002, 0x80000002);  in gf119_sor_hda_eld()
      63  const u32 soff = 0x030 * ior->id + (head * 0x04);  in gf119_sor_hda_hpd() (local)
      74  nvkm_mask(device, 0x10ec10 + soff, mask, data);  in gf119_sor_hda_hpd()
     154  const u32 soff = nv50_ior_base(sor);  in gf119_sor_dp_pattern() (local)
     167  nvkm_mask(device, 0x61c110 + soff, 0x1f1f1f1f, data);  in gf119_sor_dp_pattern()
     174  const u32 soff = nv50_ior_base(sor);  in gf119_sor_dp_links() (local)
     186  nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor);  in gf119_sor_dp_links()
      [all …]
  ga102.c
      35  const u32 soff = nv50_ior_base(sor);  in ga102_sor_dp_links() (local)
      60  nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor);  in ga102_sor_dp_links()
      64  nvkm_mask(device, 0x612300 + soff, 0x00030000, 0x00010000);  in ga102_sor_dp_links()
  tu102.c
      46  const u32 soff = nv50_ior_base(sor);  in tu102_sor_dp_links() (local)
      58  nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor);  in tu102_sor_dp_links()
      62  nvkm_mask(device, 0x612300 + soff, 0x00030000, 0x00010000);  in tu102_sor_dp_links()
  gm200.c
      74  const u32 soff = nv50_ior_base(ior);  in gm200_sor_hdmi_scdc() (local)
      77  nvkm_mask(device, 0x61c5bc + soff, 0x00000003, ctrl);  in gm200_sor_hdmi_scdc()
  nv50.c
     164  const u32 soff = nv50_ior_base(sor);  in nv50_sor_clock() (local)
     166  nvkm_mask(device, 0x614300 + soff, 0x00000707, (div << 8) | div);  in nv50_sor_clock()
     170  nv50_sor_power_wait(struct nvkm_device *device, u32 soff)  in nv50_sor_power_wait() (argument)
     173  if (!(nvkm_rd32(device, 0x61c004 + soff) & 0x80000000))  in nv50_sor_power_wait()
     182  const u32 soff = nv50_ior_base(sor);  in nv50_sor_power() (local)
     187  nv50_sor_power_wait(device, soff);  in nv50_sor_power()
     188  nvkm_mask(device, 0x61c004 + soff, field, state);  in nv50_sor_power()
     189  nv50_sor_power_wait(device, soff);  in nv50_sor_power()
     192  if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000))  in nv50_sor_power()
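In nv50.c, nv50_sor_power() calls nv50_sor_power_wait() on both sides of its masked write to 0x61c004 + soff, and the wait helper polls until bit 31 of that register reads back clear. The sketch below shows that wait-modify-wait idiom in isolation; the single fake register, the self-clearing "pending" model and the bounded retry loop (standing in for the kernel's nvkm_msec() timeout) are assumptions for illustration:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PWR_PENDING 0x80000000u   /* hypothetical "update pending" bit */
    #define PWR_ON      0x00000001u

    static uint32_t pwr_reg;          /* stands in for 0x61c004 + soff */
    static int      pending_polls;

    /* Crude hardware model: the pending bit self-clears after a few reads. */
    static uint32_t rd32(void)
    {
            if ((pwr_reg & PWR_PENDING) && ++pending_polls > 3) {
                    pwr_reg &= ~PWR_PENDING;
                    pending_polls = 0;
            }
            return pwr_reg;
    }

    static void mask32(uint32_t mask, uint32_t val)
    {
            pwr_reg = (pwr_reg & ~mask) | (val & mask);
    }

    /* Bounded poll until the pending bit clears. */
    static bool power_wait(void)
    {
            for (int i = 0; i < 1000; i++)
                    if (!(rd32() & PWR_PENDING))
                            return true;
            return false;
    }

    static void sor_power(bool on)
    {
            power_wait();                                /* prior update done?  */
            mask32(PWR_PENDING | PWR_ON,
                   PWR_PENDING | (on ? PWR_ON : 0));     /* request new state   */
            power_wait();                                /* wait for it to land */
    }

    int main(void)
    {
            sor_power(true);
            printf("power reg: 0x%08x\n", (unsigned)rd32());
            return 0;
    }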
  gv100.c
     342  const u32 soff = (chan->chid.ctrl - 1) * 0x04;  in gv100_disp_dmac_idle() (local)
     344  u32 stat = nvkm_rd32(device, 0x610664 + soff);  in gv100_disp_dmac_idle()
     557  const u32 soff = (chan->chid.ctrl - 1) * 0x04;  in gv100_disp_curs_idle() (local)
     559  u32 stat = nvkm_rd32(device, 0x610664 + soff);  in gv100_disp_curs_idle()
/drivers/dma/
  fsl-edma-common.c
     439  edma_write_tcdreg(fsl_chan, tcd->soff, soff);  in fsl_edma_set_tcd_regs()
     475  u16 attr, u16 soff, u32 nbytes, u32 slast, u16 citer,  in fsl_edma_fill_tcd() (argument)
     494  tcd->soff = cpu_to_le16(soff);  in fsl_edma_fill_tcd()
     577  u16 soff, doff, iter;  in fsl_edma_prep_dma_cyclic() (local)
     617  soff = fsl_chan->cfg.dst_addr_width;  in fsl_edma_prep_dma_cyclic()
     622  soff = fsl_chan->is_multi_fifo ? 4 : 0;  in fsl_edma_prep_dma_cyclic()
     628  soff = doff = 0;  in fsl_edma_prep_dma_cyclic()
     633  fsl_chan->attr, soff, nbytes, 0, iter,  in fsl_edma_prep_dma_cyclic()
     650  u16 soff, doff, iter;  in fsl_edma_prep_slave_sg() (local)
     681  soff = fsl_chan->cfg.dst_addr_width;  in fsl_edma_prep_slave_sg()
      [all …]
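In the eDMA listing above, `soff` and `doff` are the signed address increments a transfer-control descriptor (TCD) applies after each source read and destination write; the prep routines set the memory side to the configured access width and leave the peripheral-FIFO side at zero (with special values for multi-FIFO setups). A rough sketch of filling such a descriptor; the struct below keeps only the addressing fields and is not the hardware TCD layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified transfer-control descriptor: addressing fields only. */
    struct tcd {
            uint32_t saddr;    /* source address                         */
            int16_t  soff;     /* signed source increment per read       */
            uint32_t daddr;    /* destination address                    */
            int16_t  doff;     /* signed destination increment per write */
            uint32_t nbytes;   /* bytes moved per minor loop             */
            uint16_t iter;     /* major-loop iteration count             */
    };

    enum xfer_dir { MEM_TO_DEV, DEV_TO_MEM };

    /* The memory side walks through the buffer by the access width; the
     * peripheral FIFO register never moves, so its offset stays zero. */
    static void fill_tcd(struct tcd *tcd, enum xfer_dir dir,
                         uint32_t mem, uint32_t fifo, uint16_t width,
                         uint32_t nbytes, uint16_t iter)
    {
            if (dir == MEM_TO_DEV) {
                    tcd->saddr = mem;
                    tcd->soff  = (int16_t)width;
                    tcd->daddr = fifo;
                    tcd->doff  = 0;
            } else {
                    tcd->saddr = fifo;
                    tcd->soff  = 0;
                    tcd->daddr = mem;
                    tcd->doff  = (int16_t)width;
            }
            tcd->nbytes = nbytes;
            tcd->iter   = iter;
    }

    int main(void)
    {
            struct tcd tcd;

            fill_tcd(&tcd, MEM_TO_DEV, 0x80000000u, 0x40010000u, 4, 64, 16);
            printf("soff=%d doff=%d\n", tcd.soff, tcd.doff);
            return 0;
    }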
  mpc512x_dma.c
     140  u32 soff:16; /* Signed source address offset */  (member)
     635  tcd->soff = 32;  in mpc_dma_prep_memcpy()
     641  tcd->soff = 16;  in mpc_dma_prep_memcpy()
     646  tcd->soff = 4;  in mpc_dma_prep_memcpy()
     651  tcd->soff = 2;  in mpc_dma_prep_memcpy()
     656  tcd->soff = 1;  in mpc_dma_prep_memcpy()
     748  tcd->soff = 0;  in mpc_dma_prep_slave_sg()
     757  tcd->soff = mchan->swidth;  in mpc_dma_prep_slave_sg()
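mpc512x_dma.c programs `soff` with the transfer width for memcpy, stepping down a 32/16/4/2/1 ladder, presumably to the widest size that source, destination and length are all aligned to. A standalone sketch of that width selection:

    #include <stdint.h>
    #include <stdio.h>

    /* Return the widest transfer size (bytes) that src, dst and len all
     * support, mirroring the 32/16/4/2/1 ladder in the listing above. */
    static unsigned int pick_xfer_width(uint64_t src, uint64_t dst, uint64_t len)
    {
            static const unsigned int widths[] = { 32, 16, 4, 2, 1 };

            for (int i = 0; i < 5; i++)
                    if (!(src % widths[i]) && !(dst % widths[i]) &&
                        !(len % widths[i]))
                            return widths[i];
            return 1;   /* not reached: everything is 1-byte aligned */
    }

    int main(void)
    {
            /* tcd->soff/doff would then be set to the returned width */
            printf("%u\n", pick_xfer_width(0x1000, 0x2000, 256)); /* 32 */
            printf("%u\n", pick_xfer_width(0x1004, 0x2000, 256)); /*  4 */
            printf("%u\n", pick_xfer_width(0x1001, 0x2000, 256)); /*  1 */
            return 0;
    }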
  fsl-edma-common.h
      79  __le16 soff;  (member)
/drivers/pinctrl/qcom/
  pinctrl-lpass-lpi.h
      46  #define LPI_PINGROUP(id, soff, f1, f2, f3, f4) \  (argument)
      51  .slew_offset = soff, \
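LPI_PINGROUP() is a table-entry macro: each invocation expands to a pin-group initializer, and its `soff` argument lands in the group's .slew_offset. The sketch below shows the idiom with an invented struct layout and only two function arguments (the real macro takes four):

    #include <stdio.h>

    struct lpi_pingroup {
            int         id;
            int         slew_offset;   /* per-group slew-rate register offset */
            const char *funcs[2];
    };

    /* Table-entry macro in the spirit of LPI_PINGROUP(): the soff argument
     * becomes .slew_offset, the function tokens become selectable names. */
    #define PINGROUP(pin, soff, f1, f2) \
            { .id = (pin), .slew_offset = (soff), .funcs = { #f1, #f2 } }

    static const struct lpi_pingroup groups[] = {
            PINGROUP(0, 0x0, gpio, dmic1_clk),
            PINGROUP(1, 0x4, gpio, dmic1_data),
    };

    int main(void)
    {
            for (int i = 0; i < 2; i++)
                    printf("pin %d: slew_offset=0x%x, %s/%s\n",
                           groups[i].id, groups[i].slew_offset,
                           groups[i].funcs[0], groups[i].funcs[1]);
            return 0;
    }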
/drivers/net/ethernet/netronome/nfp/bpf/
  verifier.c
     105  unsigned int soff;  in nfp_bpf_map_update_value_ok() (local)
     107  soff = -(off + i) - 1;  in nfp_bpf_map_update_value_ok()
     108  stack_entry = &state->stack[soff / BPF_REG_SIZE];  in nfp_bpf_map_update_value_ok()
     109  if (stack_entry->slot_type[soff % BPF_REG_SIZE] == STACK_ZERO)  in nfp_bpf_map_update_value_ok()
     114  i, soff);  in nfp_bpf_map_update_value_ok()
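In the nfp verifier helper, `soff = -(off + i) - 1` turns a negative stack offset (-1, -2, ...) into a zero-based byte index, which is then split into a stack slot (soff / BPF_REG_SIZE) and a byte within that slot (soff % BPF_REG_SIZE). A standalone sketch of just that index arithmetic; BPF_REG_SIZE is 8, everything else here is illustrative:

    #include <stdio.h>

    #define BPF_REG_SIZE 8   /* one BPF stack slot is one 8-byte register */

    /* Map a negative stack offset (off + i) to the slot index and the
     * byte index inside that slot, as in the listing above. */
    static void stack_index(int off, int i, int *slot, int *byte)
    {
            unsigned int soff = -(off + i) - 1;   /* -1 -> 0, -2 -> 1, ... */

            *slot = soff / BPF_REG_SIZE;
            *byte = soff % BPF_REG_SIZE;
    }

    int main(void)
    {
            int slot, byte;

            for (int i = 0; i < 4; i++) {
                    stack_index(-16, i, &slot, &byte);   /* offsets -16 .. -13 */
                    printf("off=%d -> slot=%d byte=%d\n", -16 + i, slot, byte);
            }
            return 0;
    }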
/drivers/gpu/drm/nouveau/dispnv04/
  overlay.c
     128  int soff = NV_PCRTC0_SIZE * nv_crtc->index;  in nv10_update_plane() (local)
     151  nvif_mask(dev, NV_PCRTC_ENGINE_CTRL + soff, NV_CRTC_FSEL_OVERLAY, NV_CRTC_FSEL_OVERLAY);  in nv10_update_plane()
/drivers/block/aoe/
  aoecmd.c
    1015  int soff = 0;  in bvcpy() (local)
    1022  skb_copy_bits(skb, soff, p, bv.bv_len);  in bvcpy()
    1024  soff += bv.bv_len;  in bvcpy()
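bvcpy() copies the reply payload out of the skb into the request's bio segments one segment at a time, advancing the running source offset `soff` by each segment's length. A generic sketch of that copy-into-segments loop using plain buffers instead of skb/bio_vec:

    #include <stdio.h>
    #include <string.h>

    struct seg {
            void   *base;   /* destination fragment */
            size_t  len;    /* fragment length      */
    };

    /* Copy a flat source buffer into a list of destination segments,
     * advancing a source offset by each segment's length (like bvcpy()). */
    static void scatter_copy(const char *src, struct seg *segs, int nsegs)
    {
            size_t soff = 0;

            for (int i = 0; i < nsegs; i++) {
                    memcpy(segs[i].base, src + soff, segs[i].len);
                    soff += segs[i].len;
            }
    }

    int main(void)
    {
            char a[4], b[8];
            struct seg segs[] = { { a, sizeof(a) }, { b, sizeof(b) } };
            const char src[] = "0123456789ab";       /* 12 payload bytes */

            scatter_copy(src, segs, 2);
            printf("%.4s %.8s\n", a, b);             /* "0123 456789ab" */
            return 0;
    }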
/drivers/crypto/
  hifn_795x.c
    1327  unsigned int soff, doff;  in hifn_setup_dma() (local)
    1333  soff = src->offset;  in hifn_setup_dma()
    1336  hifn_setup_src_desc(dev, spage, soff, len, n - len == 0);  in hifn_setup_dma()
/drivers/net/ethernet/brocade/bna/
  bfa_ioc.c
    2190  bfa_nw_ioc_smem_read(struct bfa_ioc *ioc, void *tbuf, u32 soff, u32 sz)  in bfa_nw_ioc_smem_read() (argument)
    2196  pgnum = PSS_SMEM_PGNUM(ioc->ioc_regs.smem_pg0, soff);  in bfa_nw_ioc_smem_read()
    2197  loff = PSS_SMEM_PGOFF(soff);  in bfa_nw_ioc_smem_read()
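This bfa_ioc.c (and its near-duplicate under /drivers/scsi/bfa/ below) reads controller shared memory through a paging window: the flat offset `soff` is converted into a page number relative to smem_pg0 (PSS_SMEM_PGNUM) and an offset within that page (PSS_SMEM_PGOFF). A small sketch of that split, assuming a power-of-two page size; the page size and helper shapes below are illustrative, not the driver's macros:

    #include <stdint.h>
    #include <stdio.h>

    #define SMEM_PAGE_SIZE 0x8000u   /* assumed page size, power of two */

    /* Split a flat shared-memory offset into (page number, page offset),
     * in the spirit of PSS_SMEM_PGNUM()/PSS_SMEM_PGOFF() above. */
    static uint32_t smem_pgnum(uint32_t base_pg, uint32_t soff)
    {
            return base_pg + soff / SMEM_PAGE_SIZE;
    }

    static uint32_t smem_pgoff(uint32_t soff)
    {
            return soff & (SMEM_PAGE_SIZE - 1);
    }

    int main(void)
    {
            uint32_t soff = 0x0001c010;

            printf("pg=%u off=0x%x\n",
                   (unsigned)smem_pgnum(0, soff), (unsigned)smem_pgoff(soff));
            return 0;
    }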
/drivers/scsi/bfa/
  bfa_ioc.c
    2040  bfa_ioc_smem_read(struct bfa_ioc_s *ioc, void *tbuf, u32 soff, u32 sz)  in bfa_ioc_smem_read() (argument)
    2047  pgnum = PSS_SMEM_PGNUM(ioc->ioc_regs.smem_pg0, soff);  in bfa_ioc_smem_read()
    2048  loff = PSS_SMEM_PGOFF(soff);  in bfa_ioc_smem_read()
    2099  bfa_ioc_smem_clr(struct bfa_ioc_s *ioc, u32 soff, u32 sz)  in bfa_ioc_smem_clr() (argument)
    2104  pgnum = PSS_SMEM_PGNUM(ioc->ioc_regs.smem_pg0, soff);  in bfa_ioc_smem_clr()
    2105  loff = PSS_SMEM_PGOFF(soff);  in bfa_ioc_smem_clr()
/drivers/net/ethernet/sun/
  cassini.h
    2125  u8 soff, snext; /* if match succeeds, new offset and match */  (member)
  cassini.c
    1164  val |= CAS_BASE(HP_INSTR_RAM_MID_SOFF, inst->soff);  in cas_load_firmware()
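In the Cassini driver, `soff` is a byte-wide field of a header-parser instruction, and cas_load_firmware() packs it into an instruction-RAM word with CAS_BASE(), which appears to shift a value into a named register field. A generic shift-and-mask sketch of that packing; the SHIFT/MASK values are invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    /* Invented field layout: "SOFF" occupies bits 15:8 of a 32-bit word. */
    #define HP_MID_SOFF_SHIFT  8
    #define HP_MID_SOFF_MASK   0x0000ff00u

    /* Shift a field value into place and mask it, in the spirit of the
     * CAS_BASE(HP_INSTR_RAM_MID_SOFF, inst->soff) usage above. */
    static uint32_t field_base(uint32_t shift, uint32_t mask, uint32_t val)
    {
            return (val << shift) & mask;
    }

    int main(void)
    {
            uint32_t word = 0;

            word |= field_base(HP_MID_SOFF_SHIFT, HP_MID_SOFF_MASK, 0x2a);
            printf("0x%08x\n", (unsigned)word);   /* 0x00002a00 */
            return 0;
    }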