Lines Matching refs:io_base

465 val1 = readl_relaxed(qm->io_base + HPRE_DATA_RUSER_CFG); in hpre_config_pasid()
466 val2 = readl_relaxed(qm->io_base + HPRE_DATA_WUSER_CFG); in hpre_config_pasid()
474 writel_relaxed(val1, qm->io_base + HPRE_DATA_RUSER_CFG); in hpre_config_pasid()
475 writel_relaxed(val2, qm->io_base + HPRE_DATA_WUSER_CFG); in hpre_config_pasid()
518 qm->io_base + offset + HPRE_CORE_ENB); in hpre_set_cluster()
519 writel(0x1, qm->io_base + offset + HPRE_CORE_INI_CFG); in hpre_set_cluster()
520 ret = readl_relaxed_poll_timeout(qm->io_base + offset + in hpre_set_cluster()
545 val = readl(qm->io_base + QM_PEH_AXUSER_CFG); in disable_flr_of_bme()
548 writel(val, qm->io_base + QM_PEH_AXUSER_CFG); in disable_flr_of_bme()
549 writel(PEH_AXUSER_CFG_ENABLE, qm->io_base + QM_PEH_AXUSER_CFG_ENABLE); in disable_flr_of_bme()
561 val = readl_relaxed(qm->io_base + HPRE_PREFETCH_CFG); in hpre_open_sva_prefetch()
563 writel(val, qm->io_base + HPRE_PREFETCH_CFG); in hpre_open_sva_prefetch()
565 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_PREFETCH_CFG, in hpre_open_sva_prefetch()
581 val = readl_relaxed(qm->io_base + HPRE_PREFETCH_CFG); in hpre_close_sva_prefetch()
583 writel(val, qm->io_base + HPRE_PREFETCH_CFG); in hpre_close_sva_prefetch()
585 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_SVA_PREFTCH_DFX, in hpre_close_sva_prefetch()
600 val = readl(qm->io_base + HPRE_CLKGATE_CTL); in hpre_enable_clock_gate()
602 writel(val, qm->io_base + HPRE_CLKGATE_CTL); in hpre_enable_clock_gate()
604 val = readl(qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_enable_clock_gate()
606 writel(val, qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_enable_clock_gate()
608 val = readl(qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_enable_clock_gate()
610 writel(val, qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_enable_clock_gate()
612 val = readl_relaxed(qm->io_base + HPRE_CORE_SHB_CFG); in hpre_enable_clock_gate()
614 writel(val, qm->io_base + HPRE_CORE_SHB_CFG); in hpre_enable_clock_gate()
624 val = readl(qm->io_base + HPRE_CLKGATE_CTL); in hpre_disable_clock_gate()
626 writel(val, qm->io_base + HPRE_CLKGATE_CTL); in hpre_disable_clock_gate()
628 val = readl(qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_disable_clock_gate()
630 writel(val, qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_disable_clock_gate()
632 val = readl(qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_disable_clock_gate()
634 writel(val, qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_disable_clock_gate()
636 val = readl_relaxed(qm->io_base + HPRE_CORE_SHB_CFG); in hpre_disable_clock_gate()
638 writel(val, qm->io_base + HPRE_CORE_SHB_CFG); in hpre_disable_clock_gate()
650 writel(HPRE_QM_USR_CFG_MASK, qm->io_base + QM_ARUSER_M_CFG_ENABLE); in hpre_set_user_domain_and_cache()
651 writel(HPRE_QM_USR_CFG_MASK, qm->io_base + QM_AWUSER_M_CFG_ENABLE); in hpre_set_user_domain_and_cache()
652 writel_relaxed(HPRE_QM_AXI_CFG_MASK, qm->io_base + QM_AXI_M_CFG); in hpre_set_user_domain_and_cache()
655 val = readl_relaxed(qm->io_base + HPRE_QM_ABNML_INT_MASK); in hpre_set_user_domain_and_cache()
657 writel_relaxed(val, qm->io_base + HPRE_QM_ABNML_INT_MASK); in hpre_set_user_domain_and_cache()
661 qm->io_base + HPRE_TYPES_ENB); in hpre_set_user_domain_and_cache()
663 writel(HPRE_RSA_ENB, qm->io_base + HPRE_TYPES_ENB); in hpre_set_user_domain_and_cache()
665 writel(HPRE_QM_VFG_AX_MASK, qm->io_base + HPRE_VFG_AXCACHE); in hpre_set_user_domain_and_cache()
666 writel(0x0, qm->io_base + HPRE_BD_ENDIAN); in hpre_set_user_domain_and_cache()
667 writel(0x0, qm->io_base + HPRE_INT_MASK); in hpre_set_user_domain_and_cache()
668 writel(0x0, qm->io_base + HPRE_POISON_BYPASS); in hpre_set_user_domain_and_cache()
669 writel(0x0, qm->io_base + HPRE_COMM_CNT_CLR_CE); in hpre_set_user_domain_and_cache()
670 writel(0x0, qm->io_base + HPRE_ECC_BYPASS); in hpre_set_user_domain_and_cache()
672 writel(HPRE_BD_USR_MASK, qm->io_base + HPRE_BD_ARUSR_CFG); in hpre_set_user_domain_and_cache()
673 writel(HPRE_BD_USR_MASK, qm->io_base + HPRE_BD_AWUSR_CFG); in hpre_set_user_domain_and_cache()
674 writel(0x1, qm->io_base + HPRE_RDCHN_INI_CFG); in hpre_set_user_domain_and_cache()
675 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_RDCHN_INI_ST, val, in hpre_set_user_domain_and_cache()
715 writel(0x0, qm->io_base + offset + HPRE_CLUSTER_INQURY); in hpre_cnt_regs_clear()
719 writel(0x0, qm->io_base + HPRE_CTRL_CNT_CLR_CE); in hpre_cnt_regs_clear()
728 val1 = readl(qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_master_ooo_ctrl()
739 writel(val2, qm->io_base + HPRE_OOO_SHUTDOWN_SEL); in hpre_master_ooo_ctrl()
741 writel(val1, qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_master_ooo_ctrl()
752 writel(ce | nfe | HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_INT_MASK); in hpre_hw_error_disable()
765 writel(ce | nfe | HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_HAC_SOURCE_INT); in hpre_hw_error_enable()
768 writel(ce, qm->io_base + HPRE_RAS_CE_ENB); in hpre_hw_error_enable()
769 writel(nfe, qm->io_base + HPRE_RAS_NFE_ENB); in hpre_hw_error_enable()
770 writel(HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_RAS_FE_ENB); in hpre_hw_error_enable()
776 writel(HPRE_CORE_INT_ENABLE, qm->io_base + HPRE_INT_MASK); in hpre_hw_error_enable()
790 return readl(qm->io_base + HPRE_CTRL_CNT_CLR_CE) & in hpre_clear_enable_read()
802 tmp = (readl(qm->io_base + HPRE_CTRL_CNT_CLR_CE) & in hpre_clear_enable_write()
804 writel(tmp, qm->io_base + HPRE_CTRL_CNT_CLR_CE); in hpre_clear_enable_write()
816 return readl(qm->io_base + offset + HPRE_CLSTR_ADDR_INQRY_RSLT); in hpre_cluster_inqry_read()
826 writel(val, qm->io_base + offset + HPRE_CLUSTER_INQURY); in hpre_cluster_inqry_write()
988 regset->base = qm->io_base; in hpre_pf_comm_regs_debugfs_init()
1019 regset->base = qm->io_base + hpre_cluster_offsets[i]; in hpre_cluster_debugfs_init()
1195 void __iomem *io_base; in hpre_show_last_regs_init() local
1206 debug->last_words[i] = readl_relaxed(qm->io_base + in hpre_show_last_regs_init()
1210 io_base = qm->io_base + hpre_cluster_offsets[i]; in hpre_show_last_regs_init()
1214 io_base + hpre_cluster_dfx_regs[j].offset); in hpre_show_last_regs_init()
1238 void __iomem *io_base; in hpre_show_last_dfx_regs() local
1248 val = readl_relaxed(qm->io_base + hpre_com_dfx_regs[i].offset); in hpre_show_last_dfx_regs()
1256 io_base = qm->io_base + hpre_cluster_offsets[i]; in hpre_show_last_dfx_regs()
1258 val = readl_relaxed(io_base + in hpre_show_last_dfx_regs()
1283 return readl(qm->io_base + HPRE_INT_STATUS); in hpre_get_hw_err_status()
1290 writel(err_sts, qm->io_base + HPRE_HAC_SOURCE_INT); in hpre_clear_hw_err_status()
1292 writel(nfe, qm->io_base + HPRE_RAS_NFE_ENB); in hpre_clear_hw_err_status()
1299 value = readl(qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
1301 qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
1303 qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
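
Taken together, these matches show two recurring idioms around qm->io_base: a read-modify-write of a control register via readl()/writel() (as in hpre_enable_clock_gate()), and a write followed by readl_relaxed_poll_timeout() to wait for the hardware to report completion (as in hpre_set_user_domain_and_cache() polling HPRE_RDCHN_INI_ST). The sketch below illustrates both idioms only; the EXAMPLE_* offsets, bits, and timeouts are placeholders, not the driver's real HPRE_* values.

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/iopoll.h>

/*
 * Placeholder offsets, bits, and timeouts for illustration only; the real
 * driver uses HPRE_* constants whose values are not shown in this listing.
 */
#define EXAMPLE_CTL		0x100
#define EXAMPLE_STATUS		0x104
#define EXAMPLE_EN_BIT		BIT(0)
#define EXAMPLE_READY_BIT	BIT(0)
#define EXAMPLE_POLL_US		10
#define EXAMPLE_TIMEOUT_US	1000

/* Read-modify-write of a control register, as in hpre_enable_clock_gate(). */
static void example_enable_feature(void __iomem *io_base)
{
	u32 val;

	val = readl(io_base + EXAMPLE_CTL);
	val |= EXAMPLE_EN_BIT;
	writel(val, io_base + EXAMPLE_CTL);
}

/*
 * Kick the hardware, then poll a status register until it reports ready,
 * mirroring the writel() + readl_relaxed_poll_timeout() pairs in the
 * listing above. Returns 0 on success or -ETIMEDOUT on timeout.
 */
static int example_init_and_wait(void __iomem *io_base)
{
	u32 val;

	writel(0x1, io_base + EXAMPLE_CTL);

	return readl_relaxed_poll_timeout(io_base + EXAMPLE_STATUS, val,
					  val & EXAMPLE_READY_BIT,
					  EXAMPLE_POLL_US, EXAMPLE_TIMEOUT_US);
}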