Searched refs:eng (Results 1 – 20 of 20) sorted by relevance

/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
channv50.c 97 nvkm_kmap(chan->eng); in nv50_fifo_chan_engine_fini()
98 nvkm_wo32(chan->eng, offset + 0x00, 0x00000000); in nv50_fifo_chan_engine_fini()
99 nvkm_wo32(chan->eng, offset + 0x04, 0x00000000); in nv50_fifo_chan_engine_fini()
100 nvkm_wo32(chan->eng, offset + 0x08, 0x00000000); in nv50_fifo_chan_engine_fini()
101 nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000); in nv50_fifo_chan_engine_fini()
102 nvkm_wo32(chan->eng, offset + 0x10, 0x00000000); in nv50_fifo_chan_engine_fini()
103 nvkm_wo32(chan->eng, offset + 0x14, 0x00000000); in nv50_fifo_chan_engine_fini()
104 nvkm_done(chan->eng); in nv50_fifo_chan_engine_fini()
125 nvkm_kmap(chan->eng); in nv50_fifo_chan_engine_init()
126 nvkm_wo32(chan->eng, offset + 0x00, 0x00190000); in nv50_fifo_chan_engine_init()
[all …]
chang84.c 100 nvkm_kmap(chan->eng); in g84_fifo_chan_engine_fini()
101 nvkm_wo32(chan->eng, offset + 0x00, 0x00000000); in g84_fifo_chan_engine_fini()
102 nvkm_wo32(chan->eng, offset + 0x04, 0x00000000); in g84_fifo_chan_engine_fini()
103 nvkm_wo32(chan->eng, offset + 0x08, 0x00000000); in g84_fifo_chan_engine_fini()
104 nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000); in g84_fifo_chan_engine_fini()
105 nvkm_wo32(chan->eng, offset + 0x10, 0x00000000); in g84_fifo_chan_engine_fini()
106 nvkm_wo32(chan->eng, offset + 0x14, 0x00000000); in g84_fifo_chan_engine_fini()
107 nvkm_done(chan->eng); in g84_fifo_chan_engine_fini()
127 nvkm_kmap(chan->eng); in g84_fifo_chan_engine_init()
128 nvkm_wo32(chan->eng, offset + 0x00, 0x00190000); in g84_fifo_chan_engine_init()
[all …]
channv50.h 14 struct nvkm_gpuobj *eng; member
/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
gk104.c 43 struct gk104_clk_info eng[16]; member
290 struct gk104_clk_info *info = &clk->eng[idx]; in calc_clk()
359 struct gk104_clk_info *info = &clk->eng[idx]; in gk104_clk_prog_0()
388 struct gk104_clk_info *info = &clk->eng[idx]; in gk104_clk_prog_2()
413 struct gk104_clk_info *info = &clk->eng[idx]; in gk104_clk_prog_3()
424 struct gk104_clk_info *info = &clk->eng[idx]; in gk104_clk_prog_4_0()
439 struct gk104_clk_info *info = &clk->eng[idx]; in gk104_clk_prog_4_1()
466 for (j = 0; j < ARRAY_SIZE(clk->eng); j++) { in gk104_clk_prog()
469 if (!clk->eng[j].freq) in gk104_clk_prog()
482 memset(clk->eng, 0x00, sizeof(clk->eng)); in gk104_clk_tidy()
gt215.c 36 struct gt215_clk_info eng[nv_clk_src_max]; member
278 &clk->eng[dom]); in calc_clk()
289 struct gt215_clk_info *info = &clk->eng[nv_clk_src_host]; in calc_host()
365 struct gt215_clk_info *info = &clk->eng[dom]; in prog_pll()
409 struct gt215_clk_info *info = &clk->eng[dom]; in prog_clk()
417 struct gt215_clk_info *info = &clk->eng[nv_clk_src_host]; in prog_host()
445 struct gt215_clk_info *info = &clk->eng[dom]; in prog_core()
462 struct gt215_clk_info *core = &clk->eng[nv_clk_src_core]; in gt215_clk_calc()
477 &clk->eng[nv_clk_src_core_intm]); in gt215_clk_calc()
489 struct gt215_clk_info *core = &clk->eng[nv_clk_src_core]; in gt215_clk_prog()
gf100.c 43 struct gf100_clk_info eng[16]; member
276 struct gf100_clk_info *info = &clk->eng[idx]; in calc_clk()
346 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_0()
368 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_2()
395 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_3()
410 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_4()
431 for (j = 0; j < ARRAY_SIZE(clk->eng); j++) { in gf100_clk_prog()
432 if (!clk->eng[j].freq) in gf100_clk_prog()
445 memset(clk->eng, 0x00, sizeof(clk->eng)); in gf100_clk_tidy()
/drivers/gpu/drm/amd/amdgpu/
gmc_v10_0.c 231 const unsigned eng = 17; in gmc_v10_0_flush_vm_hub() local
251 hub->eng_distance * eng, hub_ip); in gmc_v10_0_flush_vm_hub()
263 hub->eng_distance * eng, in gmc_v10_0_flush_vm_hub()
273 hub->eng_distance * eng, hub_ip); in gmc_v10_0_flush_vm_hub()
278 hub->eng_distance * eng, hub_ip); in gmc_v10_0_flush_vm_hub()
294 hub->eng_distance * eng, 0, hub_ip); in gmc_v10_0_flush_vm_hub()
333 const unsigned eng = 17; in gmc_v10_0_flush_gpu_tlb() local
335 u32 req = hub->vm_inv_eng0_req + hub->eng_distance * eng; in gmc_v10_0_flush_gpu_tlb()
336 u32 ack = hub->vm_inv_eng0_ack + hub->eng_distance * eng; in gmc_v10_0_flush_gpu_tlb()
471 unsigned eng = ring->vm_inv_eng; in gmc_v10_0_emit_flush_gpu_tlb() local
[all …]
gmc_v9_0.c 743 const unsigned eng = 17; in gmc_v9_0_flush_gpu_tlb() local
771 uint32_t req = hub->vm_inv_eng0_req + hub->eng_distance * eng; in gmc_v9_0_flush_gpu_tlb()
772 uint32_t ack = hub->vm_inv_eng0_ack + hub->eng_distance * eng; in gmc_v9_0_flush_gpu_tlb()
794 hub->eng_distance * eng); in gmc_v9_0_flush_gpu_tlb()
806 hub->eng_distance * eng, inv_req); in gmc_v9_0_flush_gpu_tlb()
816 hub->eng_distance * eng); in gmc_v9_0_flush_gpu_tlb()
820 hub->eng_distance * eng); in gmc_v9_0_flush_gpu_tlb()
837 hub->eng_distance * eng, 0); in gmc_v9_0_flush_gpu_tlb()
944 unsigned eng = ring->vm_inv_eng; in gmc_v9_0_emit_flush_gpu_tlb() local
958 hub->eng_distance * eng, 0x1, 0x1); in gmc_v9_0_emit_flush_gpu_tlb()
[all …]
mmsch_v1_0.h 57 struct mmsch_vf_eng_init_header eng[2]; member
vcn_v2_5.c 1184 header->eng[i].table_offset = header->total_size; in vcn_v2_5_sriov_start()
1185 header->eng[i].init_status = 0; in vcn_v2_5_sriov_start()
1186 header->eng[i].table_size = 0; in vcn_v2_5_sriov_start()
1297 header->eng[i].table_size = table_size; in vcn_v2_5_sriov_start()
/drivers/net/ethernet/cavium/liquidio/
cn23xx_pf_regs.h 552 #define CN23XX_DPI_DMA_ENG_ENB(eng) (CN23XX_DPI_DMA_ENG0_ENB + ((eng) * 8)) argument
565 #define CN23XX_DPI_DMA_ENG_BUF(eng) \ argument
566 	(CN23XX_DPI_DMA_ENG0_BUF + ((eng) * 8))
/drivers/net/ethernet/qlogic/qed/
qed_dev_api.h 361 u8 ppfid, enum qed_eng eng);
371 int qed_llh_set_roce_affinity(struct qed_dev *cdev, enum qed_eng eng);
qed_dev.c 643 enum qed_eng eng; in qed_llh_set_engine_affin() local
656 eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0; in qed_llh_set_engine_affin()
657 rc = qed_llh_set_roce_affinity(cdev, eng); in qed_llh_set_engine_affin()
667 eng); in qed_llh_set_engine_affin()
673 eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0; in qed_llh_set_engine_affin()
675 eng = QED_BOTH_ENG; in qed_llh_set_engine_affin()
678 rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng); in qed_llh_set_engine_affin()
689 eng); in qed_llh_set_engine_affin()
740 int qed_llh_set_ppfid_affinity(struct qed_dev *cdev, u8 ppfid, enum qed_eng eng) in qed_llh_set_ppfid_affinity() argument
758 switch (eng) { in qed_llh_set_ppfid_affinity()
[all …]
qed_rdma.c 1987 enum qed_eng eng; in qed_iwarp_set_engine_affin() local
1996 eng = QED_BOTH_ENG; in qed_iwarp_set_engine_affin()
1998 eng = cdev->l2_affin_hint ? QED_ENG1 : QED_ENG0; in qed_iwarp_set_engine_affin()
2000 rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng); in qed_iwarp_set_engine_affin()
2010 eng); in qed_iwarp_set_engine_affin()
qed_debug.c 1116 fw_info.ver.num.eng) < 0) in qed_dump_fw_ver_param()
qed_hsi.h 2708 u8 eng; member
/drivers/dma/idxd/
device.c 964 struct idxd_engine *eng; in idxd_engines_setup() local
973 eng = idxd->engines[i]; in idxd_engines_setup()
974 group = eng->group; in idxd_engines_setup()
979 group->grpcfg.engines |= BIT(eng->id); in idxd_engines_setup()
registers.h 499 u64 eng:8; member
perfmon.c 347 flt_eng = flt_cfg.eng; in perfmon_pmu_event_start()
/drivers/crypto/marvell/octeontx2/
otx2_cptpf_ucode.c 158 static int __write_ucode_base(struct otx2_cptpf_dev *cptpf, int eng, in __write_ucode_base() argument
162 CPT_AF_EXEX_UCODE_BASE(eng), in __write_ucode_base()