/drivers/staging/kpc2000/kpc_dma/
D | dma.c
      29  struct kpc_dma_device *eng = container_of(ws, struct kpc_dma_device, irq_work);  in ndd_irq_worker() local
      31  lock_engine(eng);  in ndd_irq_worker()
      33  if (GetEngineCompletePtr(eng) == 0)  in ndd_irq_worker()
      36  if (eng->desc_completed->MyDMAAddr == GetEngineCompletePtr(eng))  in ndd_irq_worker()
      39  cur = eng->desc_completed;  in ndd_irq_worker()
      42  dev_dbg(&eng->pldev->dev, "Handling completed descriptor %p (acd = %p)\n", cur, cur->acd);  in ndd_irq_worker()
      43  BUG_ON(cur == eng->desc_next); // Ordering failure.  in ndd_irq_worker()
      46  eng->accumulated_bytes = 0;  in ndd_irq_worker()
      47  eng->accumulated_flags = 0;  in ndd_irq_worker()
      50  eng->accumulated_bytes += cur->DescByteCount;  in ndd_irq_worker()
      [all …]

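These matches trace the completion half of the kpc_dma engine: the IRQ worker compares the descriptor list against the address the hardware reports as complete and accumulates byte counts while advancing desc_completed. A minimal userspace sketch of that walk, assuming a singly linked descriptor ring and mocking the complete-pointer register (the struct layout and setup here are illustrative, not the driver's):

#include <stdint.h>
#include <stdio.h>

struct desc {
    uint32_t DescByteCount;
    uint64_t MyDMAAddr;   /* bus address the engine reports when done */
    struct desc *Next;
};

struct engine {
    struct desc *desc_completed;  /* last descriptor known to be done */
    struct desc *desc_next;       /* next descriptor the HW will fetch */
    uint32_t accumulated_bytes;
};

static uint64_t hw_complete_ptr; /* mock of GetEngineCompletePtr() */

static void irq_worker(struct engine *eng)
{
    uint64_t done = hw_complete_ptr;

    if (done == 0 || eng->desc_completed->MyDMAAddr == done)
        return; /* nothing newly completed */

    do {
        struct desc *cur = eng->desc_completed->Next;

        eng->accumulated_bytes += cur->DescByteCount;
        eng->desc_completed = cur; /* retire it */
    } while (eng->desc_completed->MyDMAAddr != done);
}

int main(void)
{
    struct desc d[4] = {
        { .DescByteCount = 0,    .MyDMAAddr = 0x1000, .Next = &d[1] },
        { .DescByteCount = 512,  .MyDMAAddr = 0x1040, .Next = &d[2] },
        { .DescByteCount = 1024, .MyDMAAddr = 0x1080, .Next = &d[3] },
        { .DescByteCount = 256,  .MyDMAAddr = 0x10c0, .Next = &d[0] },
    };
    struct engine eng = { .desc_completed = &d[0], .desc_next = &d[3] };

    hw_complete_ptr = d[2].MyDMAAddr; /* pretend d[1] and d[2] finished */
    irq_worker(&eng);
    printf("completed up to %#llx, %u bytes\n",
           (unsigned long long)eng.desc_completed->MyDMAAddr,
           eng.accumulated_bytes);
    return 0;
}
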
D | kpc_dma_driver.h
     147  u32 GetEngineCapabilities(struct kpc_dma_device *eng)  in GetEngineCapabilities() argument
     149  return readl(eng->eng_regs + 0);  in GetEngineCapabilities()
     153  void WriteEngineControl(struct kpc_dma_device *eng, u32 value)  in WriteEngineControl() argument
     155  writel(value, eng->eng_regs + 1);  in WriteEngineControl()
     159  u32 GetEngineControl(struct kpc_dma_device *eng)  in GetEngineControl() argument
     161  return readl(eng->eng_regs + 1);  in GetEngineControl()
     165  void SetClearEngineControl(struct kpc_dma_device *eng, u32 set_bits, u32 clear_bits)  in SetClearEngineControl() argument
     167  u32 val = GetEngineControl(eng);  in SetClearEngineControl()
     171  WriteEngineControl(eng, val);  in SetClearEngineControl()
     175  void SetEngineNextPtr(struct kpc_dma_device *eng, struct kpc_dma_descriptor *desc)  in SetEngineNextPtr() argument
     [all …]

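These helpers are thin wrappers around a 32-bit register block: eng_regs is a u32 __iomem pointer, so eng_regs + 1 addresses the control register at byte offset 4, and SetClearEngineControl() is a classic read-modify-write. A sketch of the same pattern with MMIO mocked by a plain array (the set-before-clear ordering is an assumption; the listing only shows lines 167 and 171 of that helper):

#include <stdint.h>
#include <stdio.h>

struct engine {
    volatile uint32_t *eng_regs; /* [0] capabilities, [1] control */
};

static uint32_t get_engine_control(struct engine *eng)
{
    return eng->eng_regs[1]; /* readl(eng->eng_regs + 1) in the driver */
}

static void write_engine_control(struct engine *eng, uint32_t value)
{
    eng->eng_regs[1] = value; /* writel(value, eng->eng_regs + 1) */
}

static void set_clear_engine_control(struct engine *eng,
                                     uint32_t set_bits, uint32_t clear_bits)
{
    uint32_t val = get_engine_control(eng);

    val |= set_bits;    /* turn requested bits on */
    val &= ~clear_bits; /* force the others off */
    write_engine_control(eng, val);
}

int main(void)
{
    uint32_t regs[2] = { 0x0, 0x13 };
    struct engine eng = { .eng_regs = regs };

    set_clear_engine_control(&eng, 0x4, 0x1); /* set bit 2, clear bit 0 */
    printf("control = %#x\n", get_engine_control(&eng)); /* prints 0x16 */
    return 0;
}
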
D | fileops.c
     273  struct kpc_dma_device *eng = priv->ldev;  in kpc_dma_close() local
     275  lock_engine(eng);  in kpc_dma_close()
     277  stop_dma_engine(eng);  in kpc_dma_close()
     279  cur = eng->desc_completed->Next;  in kpc_dma_close()
     280  while (cur != eng->desc_next) {  in kpc_dma_close()
     281  dev_dbg(&eng->pldev->dev, "Aborting descriptor %p (acd = %p)\n", cur, cur->acd);  in kpc_dma_close()
     288  eng->desc_completed = cur;  in kpc_dma_close()
     293  start_dma_engine(eng);  in kpc_dma_close()
     295  unlock_engine(eng);  in kpc_dma_close()

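kpc_dma_close() is the teardown counterpart: under the engine lock it stops the engine, aborts every descriptor sitting between desc_completed and desc_next, and restarts. A skeleton of that shape with the hardware stop/start and per-descriptor cleanup stubbed out (locking omitted; names follow the listing):

#include <stdio.h>

struct desc {
    int id;
    struct desc *Next;
};

struct engine {
    struct desc *desc_completed; /* last retired descriptor */
    struct desc *desc_next;      /* first descriptor not yet queued */
};

static void stop_dma_engine(struct engine *eng)  { (void)eng; /* halt HW */ }
static void start_dma_engine(struct engine *eng) { (void)eng; /* resume HW */ }

static void close_engine(struct engine *eng)
{
    struct desc *cur;

    stop_dma_engine(eng);

    /* Everything after desc_completed but before desc_next is in flight. */
    cur = eng->desc_completed->Next;
    while (cur != eng->desc_next) {
        printf("aborting descriptor %d\n", cur->id);
        eng->desc_completed = cur; /* retire without completing */
        cur = cur->Next;
    }

    start_dma_engine(eng);
}

int main(void)
{
    struct desc d[4] = {
        { 0, &d[1] }, { 1, &d[2] }, { 2, &d[3] }, { 3, &d[0] },
    };
    struct engine eng = { .desc_completed = &d[0], .desc_next = &d[3] };

    close_engine(&eng); /* aborts descriptors 1 and 2 */
    return 0;
}
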
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
D | channv50.c
      88  nvkm_kmap(chan->eng);  in nv50_fifo_chan_engine_fini()
      89  nvkm_wo32(chan->eng, offset + 0x00, 0x00000000);  in nv50_fifo_chan_engine_fini()
      90  nvkm_wo32(chan->eng, offset + 0x04, 0x00000000);  in nv50_fifo_chan_engine_fini()
      91  nvkm_wo32(chan->eng, offset + 0x08, 0x00000000);  in nv50_fifo_chan_engine_fini()
      92  nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000);  in nv50_fifo_chan_engine_fini()
      93  nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);  in nv50_fifo_chan_engine_fini()
      94  nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);  in nv50_fifo_chan_engine_fini()
      95  nvkm_done(chan->eng);  in nv50_fifo_chan_engine_fini()
     116  nvkm_kmap(chan->eng);  in nv50_fifo_chan_engine_init()
     117  nvkm_wo32(chan->eng, offset + 0x00, 0x00190000);  in nv50_fifo_chan_engine_init()
     [all …]

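Both fini and init follow the nvkm_kmap()/nvkm_wo32()/nvkm_done() bracket: map the channel's engine context object, write a run of 32-bit words at the engine's offset (zeros to tear down, 0x00190000 and friends to set up), then flush. A userspace model with the gpuobj mocked as a byte buffer (offsets match the listing; the bracket functions are stand-ins, not the nvkm API):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct gpuobj {
    uint8_t data[256];
};

static void obj_kmap(struct gpuobj *obj) { (void)obj; /* map for CPU access */ }
static void obj_done(struct gpuobj *obj) { (void)obj; /* flush and unmap */ }

static void obj_wo32(struct gpuobj *obj, uint32_t offset, uint32_t value)
{
    memcpy(obj->data + offset, &value, sizeof(value)); /* nvkm_wo32() */
}

/* fini: clear the six context words for this engine's slot. */
static void chan_engine_fini(struct gpuobj *engctx, uint32_t offset)
{
    uint32_t o;

    obj_kmap(engctx);
    for (o = 0x00; o <= 0x14; o += 4)
        obj_wo32(engctx, offset + o, 0x00000000);
    obj_done(engctx);
}

int main(void)
{
    struct gpuobj ctx;
    uint32_t word;

    memset(ctx.data, 0xff, sizeof(ctx.data));
    chan_engine_fini(&ctx, 0x20);
    memcpy(&word, &ctx.data[0x20], sizeof(word));
    printf("word at 0x20 is now %#x\n", word); /* prints 0 */
    return 0;
}
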
D | chang84.c
     120  nvkm_kmap(chan->eng);  in g84_fifo_chan_engine_fini()
     121  nvkm_wo32(chan->eng, offset + 0x00, 0x00000000);  in g84_fifo_chan_engine_fini()
     122  nvkm_wo32(chan->eng, offset + 0x04, 0x00000000);  in g84_fifo_chan_engine_fini()
     123  nvkm_wo32(chan->eng, offset + 0x08, 0x00000000);  in g84_fifo_chan_engine_fini()
     124  nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000);  in g84_fifo_chan_engine_fini()
     125  nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);  in g84_fifo_chan_engine_fini()
     126  nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);  in g84_fifo_chan_engine_fini()
     127  nvkm_done(chan->eng);  in g84_fifo_chan_engine_fini()
     147  nvkm_kmap(chan->eng);  in g84_fifo_chan_engine_init()
     148  nvkm_wo32(chan->eng, offset + 0x00, 0x00190000);  in g84_fifo_chan_engine_init()
     [all …]

D | channv50.h
      14  struct nvkm_gpuobj *eng;  member

/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
D | gk104.c
      43  struct gk104_clk_info eng[16];  member
     290  struct gk104_clk_info *info = &clk->eng[idx];  in calc_clk()
     359  struct gk104_clk_info *info = &clk->eng[idx];  in gk104_clk_prog_0()
     388  struct gk104_clk_info *info = &clk->eng[idx];  in gk104_clk_prog_2()
     413  struct gk104_clk_info *info = &clk->eng[idx];  in gk104_clk_prog_3()
     424  struct gk104_clk_info *info = &clk->eng[idx];  in gk104_clk_prog_4_0()
     439  struct gk104_clk_info *info = &clk->eng[idx];  in gk104_clk_prog_4_1()
     466  for (j = 0; j < ARRAY_SIZE(clk->eng); j++) {  in gk104_clk_prog()
     469  if (!clk->eng[j].freq)  in gk104_clk_prog()
     482  memset(clk->eng, 0x00, sizeof(clk->eng));  in gk104_clk_tidy()

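The eng[16] array is a per-domain scratch table: calc_clk() fills a slot for each clock domain in the new state, each gk104_clk_prog_*() stage walks the table and skips untouched domains (freq == 0), and gk104_clk_tidy() memsets it clean. Reduced to its bookkeeping, the pattern looks like the sketch below (the info struct is trimmed to a frequency field; real entries carry divider and PLL settings):

#include <stdio.h>
#include <string.h>

#define NR_DOMAINS 16
#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

struct clk_info {
    unsigned int freq; /* target frequency in kHz; 0 = domain untouched */
};

struct clk {
    struct clk_info eng[NR_DOMAINS];
};

static void calc_clk(struct clk *clk, int idx, unsigned int khz)
{
    struct clk_info *info = &clk->eng[idx];

    info->freq = khz;
}

static void clk_prog(struct clk *clk)
{
    unsigned int j;

    for (j = 0; j < ARRAY_SIZE(clk->eng); j++) {
        if (!clk->eng[j].freq)
            continue; /* domain not part of this transition */
        printf("programming domain %u to %u kHz\n", j, clk->eng[j].freq);
    }
}

static void clk_tidy(struct clk *clk)
{
    memset(clk->eng, 0x00, sizeof(clk->eng)); /* ready for the next state */
}

int main(void)
{
    struct clk clk = { 0 };

    calc_clk(&clk, 0, 810000);  /* e.g. core domain */
    calc_clk(&clk, 7, 1620000);
    clk_prog(&clk);
    clk_tidy(&clk);
    return 0;
}
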
D | gt215.c
      36  struct gt215_clk_info eng[nv_clk_src_max];  member
     278  &clk->eng[dom]);  in calc_clk()
     289  struct gt215_clk_info *info = &clk->eng[nv_clk_src_host];  in calc_host()
     365  struct gt215_clk_info *info = &clk->eng[dom];  in prog_pll()
     409  struct gt215_clk_info *info = &clk->eng[dom];  in prog_clk()
     417  struct gt215_clk_info *info = &clk->eng[nv_clk_src_host];  in prog_host()
     445  struct gt215_clk_info *info = &clk->eng[dom];  in prog_core()
     462  struct gt215_clk_info *core = &clk->eng[nv_clk_src_core];  in gt215_clk_calc()
     477  &clk->eng[nv_clk_src_core_intm]);  in gt215_clk_calc()
     489  struct gt215_clk_info *core = &clk->eng[nv_clk_src_core];  in gt215_clk_prog()

D | gf100.c
      43  struct gf100_clk_info eng[16];  member
     276  struct gf100_clk_info *info = &clk->eng[idx];  in calc_clk()
     346  struct gf100_clk_info *info = &clk->eng[idx];  in gf100_clk_prog_0()
     368  struct gf100_clk_info *info = &clk->eng[idx];  in gf100_clk_prog_2()
     395  struct gf100_clk_info *info = &clk->eng[idx];  in gf100_clk_prog_3()
     410  struct gf100_clk_info *info = &clk->eng[idx];  in gf100_clk_prog_4()
     431  for (j = 0; j < ARRAY_SIZE(clk->eng); j++) {  in gf100_clk_prog()
     432  if (!clk->eng[j].freq)  in gf100_clk_prog()
     445  memset(clk->eng, 0x00, sizeof(clk->eng));  in gf100_clk_tidy()

/drivers/gpu/drm/amd/amdgpu/
D | gmc_v10_0.c
     202  const unsigned eng = 17;  in gmc_v10_0_flush_vm_hub() local
     218  hub->eng_distance * eng);  in gmc_v10_0_flush_vm_hub()
     228  WREG32_NO_KIQ(hub->vm_inv_eng0_req + hub->eng_distance * eng, inv_req);  in gmc_v10_0_flush_vm_hub()
     235  RREG32_NO_KIQ(hub->vm_inv_eng0_req + hub->eng_distance * eng);  in gmc_v10_0_flush_vm_hub()
     240  hub->eng_distance * eng);  in gmc_v10_0_flush_vm_hub()
     255  hub->eng_distance * eng, 0);  in gmc_v10_0_flush_vm_hub()
     292  const unsigned eng = 17;  in gmc_v10_0_flush_gpu_tlb() local
     294  u32 req = hub->vm_inv_eng0_req + hub->eng_distance * eng;  in gmc_v10_0_flush_gpu_tlb()
     295  u32 ack = hub->vm_inv_eng0_ack + hub->eng_distance * eng;  in gmc_v10_0_flush_gpu_tlb()
     428  unsigned eng = ring->vm_inv_eng;  in gmc_v10_0_emit_flush_gpu_tlb() local
     [all …]

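The recurring hub->vm_inv_eng0_req + hub->eng_distance * eng expression is how amdgpu addresses per-engine invalidation registers: each VM hub exposes one request/ack register pair per invalidation engine, spaced eng_distance registers apart. Engine 17 is the one this code uses for driver-initiated flushes, while rings carry their own vm_inv_eng. A sketch of just the address arithmetic (register addresses below are hypothetical; a real flush writes inv_req to req and then polls ack):

#include <stdint.h>
#include <stdio.h>

struct vmhub {
    uint32_t vm_inv_eng0_req; /* dword address of engine 0's REQ register */
    uint32_t vm_inv_eng0_ack; /* dword address of engine 0's ACK register */
    uint32_t eng_distance;    /* register stride between engines */
};

int main(void)
{
    const unsigned int eng = 17; /* engine used for CPU-driven flushes */
    struct vmhub hub = {
        .vm_inv_eng0_req = 0x0100, /* hypothetical addresses */
        .vm_inv_eng0_ack = 0x0120,
        .eng_distance = 1,
    };
    uint32_t req = hub.vm_inv_eng0_req + hub.eng_distance * eng;
    uint32_t ack = hub.vm_inv_eng0_ack + hub.eng_distance * eng;

    /* The driver would WREG32(req, inv_req) and poll RREG32(ack) here. */
    printf("req register at %#x, ack register at %#x\n", req, ack);
    return 0;
}
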
D | gmc_v9_0.c
     722  const unsigned eng = 17;  in gmc_v9_0_flush_gpu_tlb() local
     750  uint32_t req = hub->vm_inv_eng0_req + hub->eng_distance * eng;  in gmc_v9_0_flush_gpu_tlb()
     751  uint32_t ack = hub->vm_inv_eng0_ack + hub->eng_distance * eng;  in gmc_v9_0_flush_gpu_tlb()
     773  hub->eng_distance * eng);  in gmc_v9_0_flush_gpu_tlb()
     785  hub->eng_distance * eng, inv_req);  in gmc_v9_0_flush_gpu_tlb()
     794  hub->eng_distance * eng);  in gmc_v9_0_flush_gpu_tlb()
     798  hub->eng_distance * eng);  in gmc_v9_0_flush_gpu_tlb()
     815  hub->eng_distance * eng, 0);  in gmc_v9_0_flush_gpu_tlb()
     920  unsigned eng = ring->vm_inv_eng;  in gmc_v9_0_emit_flush_gpu_tlb() local
     934  hub->eng_distance * eng, 0x1, 0x1);  in gmc_v9_0_emit_flush_gpu_tlb()
     [all …]

D | mmsch_v1_0.h
      59  struct mmsch_vf_eng_init_header eng[2];  member

D | vcn_v2_5.c
    1184  header->eng[i].table_offset = header->total_size;  in vcn_v2_5_sriov_start()
    1185  header->eng[i].init_status = 0;  in vcn_v2_5_sriov_start()
    1186  header->eng[i].table_size = 0;  in vcn_v2_5_sriov_start()
    1297  header->eng[i].table_size = table_size;  in vcn_v2_5_sriov_start()

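These four lines sketch how the SR-IOV start code builds the MMSCH init table: each engine's header slot first records its table_offset (the running total_size) with a zero table_size, and the size is patched in once that engine's entries have been emitted. A standalone illustration under those assumptions (entry counts and the header's own size are made up; field names follow mmsch_v1_0.h):

#include <stdint.h>
#include <stdio.h>

struct eng_init_header {
    uint32_t table_offset; /* start of this engine's table, in dwords */
    uint32_t init_status;
    uint32_t table_size;   /* patched after the entries are emitted */
};

struct init_header {
    uint32_t total_size;   /* running size of the whole table, in dwords */
    struct eng_init_header eng[2];
};

/* Pretend to emit table entries for engine i, returning their size. */
static uint32_t emit_engine_table(int i)
{
    return i == 0 ? 24 : 18; /* hypothetical entry counts */
}

int main(void)
{
    struct init_header header = { .total_size = 4 /* the header itself */ };
    int i;

    for (i = 0; i < 2; i++) {
        uint32_t table_size;

        header.eng[i].table_offset = header.total_size;
        header.eng[i].init_status = 0;
        header.eng[i].table_size = 0;

        table_size = emit_engine_table(i);
        header.eng[i].table_size = table_size;
        header.total_size += table_size;
    }

    for (i = 0; i < 2; i++)
        printf("eng[%d]: offset %u, size %u\n", i,
               header.eng[i].table_offset, header.eng[i].table_size);
    return 0;
}
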
/drivers/net/ethernet/cavium/liquidio/
D | cn23xx_pf_regs.h
     552  #define CN23XX_DPI_DMA_ENG_ENB(eng) (CN23XX_DPI_DMA_ENG0_ENB + ((eng) * 8))  argument
     565  #define CN23XX_DPI_DMA_ENG_BUF(eng) \  argument
     566          (CN23XX_DPI_DMA_ENG0_BUF + ((eng) * 8))

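Both macros index a bank of per-engine DPI DMA registers with a stride of 8 bytes from the engine-0 address. They can be exercised standalone with hypothetical base addresses:

#include <stdio.h>

#define CN23XX_DPI_DMA_ENG0_ENB 0x0080ull /* hypothetical base */
#define CN23XX_DPI_DMA_ENG0_BUF 0x0880ull /* hypothetical base */

/* Verbatim from cn23xx_pf_regs.h: engine N's register is base + N * 8. */
#define CN23XX_DPI_DMA_ENG_ENB(eng) (CN23XX_DPI_DMA_ENG0_ENB + ((eng) * 8))
#define CN23XX_DPI_DMA_ENG_BUF(eng) \
        (CN23XX_DPI_DMA_ENG0_BUF + ((eng) * 8))

int main(void)
{
    int eng;

    for (eng = 0; eng < 4; eng++)
        printf("engine %d: ENB at %#llx, BUF at %#llx\n", eng,
               CN23XX_DPI_DMA_ENG_ENB(eng), CN23XX_DPI_DMA_ENG_BUF(eng));
    return 0;
}
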
/drivers/net/ethernet/qlogic/qed/
D | qed_dev_api.h
     327  u8 ppfid, enum qed_eng eng);
     337  int qed_llh_set_roce_affinity(struct qed_dev *cdev, enum qed_eng eng);

D | qed_dev.c
     642  enum qed_eng eng;  in qed_llh_set_engine_affin() local
     655  eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0;  in qed_llh_set_engine_affin()
     656  rc = qed_llh_set_roce_affinity(cdev, eng);  in qed_llh_set_engine_affin()
     666  eng);  in qed_llh_set_engine_affin()
     671  eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0;  in qed_llh_set_engine_affin()
     673  eng = QED_BOTH_ENG;  in qed_llh_set_engine_affin()
     676  rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng);  in qed_llh_set_engine_affin()
     687  eng);  in qed_llh_set_engine_affin()
     738  int qed_llh_set_ppfid_affinity(struct qed_dev *cdev, u8 ppfid, enum qed_eng eng)  in qed_llh_set_ppfid_affinity() argument
     756  switch (eng) {  in qed_llh_set_ppfid_affinity()
     [all …]

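The affinity code picks an engine per traffic class from firmware hints: fir_affin steers RoCE/FCoE toward QED_ENG1 or QED_ENG0, plain L2 can get QED_BOTH_ENG, and the choice is then applied per ppfid. A simplified sketch of that selection (the personality checks and error paths are reduced to one branch; only the enum values and the ternary come from the listing):

#include <stdbool.h>
#include <stdio.h>

enum qed_eng { QED_ENG0, QED_ENG1, QED_BOTH_ENG };

struct qed_dev {
    bool roce;              /* RDMA personality: pin to a single engine */
    bool fir_affin;         /* firmware affinity hint */
    unsigned int num_ppfids;
};

static int set_ppfid_affinity(struct qed_dev *cdev, unsigned int ppfid,
                              enum qed_eng eng)
{
    (void)cdev; /* a real implementation programs the LLH here */
    printf("ppfid %u -> eng %d\n", ppfid, eng);
    return 0;
}

static int set_engine_affin(struct qed_dev *cdev)
{
    enum qed_eng eng;
    unsigned int ppfid;
    int rc;

    if (cdev->roce)
        eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0;
    else
        eng = QED_BOTH_ENG; /* plain L2 may spread across both engines */

    for (ppfid = 0; ppfid < cdev->num_ppfids; ppfid++) {
        rc = set_ppfid_affinity(cdev, ppfid, eng);
        if (rc)
            return rc;
    }
    return 0;
}

int main(void)
{
    struct qed_dev cdev = { .roce = true, .fir_affin = true, .num_ppfids = 2 };

    return set_engine_affin(&cdev);
}
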
D | qed_rdma.c
    1987  enum qed_eng eng;  in qed_iwarp_set_engine_affin() local
    1996  eng = QED_BOTH_ENG;  in qed_iwarp_set_engine_affin()
    1998  eng = cdev->l2_affin_hint ? QED_ENG1 : QED_ENG0;  in qed_iwarp_set_engine_affin()
    2000  rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng);  in qed_iwarp_set_engine_affin()
    2010  eng);  in qed_iwarp_set_engine_affin()

D | qed_debug.c
    1116  fw_info.ver.num.eng) < 0)  in qed_dump_fw_ver_param()

D | qed_hsi.h
    2707  u8 eng;  member

/drivers/dma/idxd/
D | device.c
     653  struct idxd_engine *eng;  in idxd_engines_setup() local
     662  eng = &idxd->engines[i];  in idxd_engines_setup()
     663  group = eng->group;  in idxd_engines_setup()
     668  group->grpcfg.engines |= BIT(eng->id);  in idxd_engines_setup()

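idxd_engines_setup() folds engine assignments into group configuration: every engine attached to a group contributes BIT(eng->id) to that group's engine bitmask, which the driver then gathers into the device's group configuration. A trimmed model of that loop (structures are reduced to the fields the listing shows; BIT() is the usual 1UL << n):

#include <stdio.h>

#define BIT(n) (1UL << (n))
#define NUM_ENGINES 4

struct idxd_group {
    struct { unsigned long engines; } grpcfg; /* per-group engine bitmask */
};

struct idxd_engine {
    int id;
    struct idxd_group *group; /* NULL if unassigned */
};

int main(void)
{
    struct idxd_group group = { 0 };
    struct idxd_engine engines[NUM_ENGINES] = {
        { .id = 0, .group = &group },
        { .id = 1, .group = NULL },   /* unassigned engines are skipped */
        { .id = 2, .group = &group },
        { .id = 3, .group = &group },
    };
    int i;

    for (i = 0; i < NUM_ENGINES; i++) {
        struct idxd_engine *eng = &engines[i];

        if (!eng->group)
            continue;
        eng->group->grpcfg.engines |= BIT(eng->id);
    }

    printf("group engine mask: %#lx\n", group.grpcfg.engines); /* 0xd */
    return 0;
}
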