Lines Matching refs: io_base

496 val1 = readl_relaxed(qm->io_base + HPRE_DATA_RUSER_CFG); in hpre_config_pasid()
497 val2 = readl_relaxed(qm->io_base + HPRE_DATA_WUSER_CFG); in hpre_config_pasid()
505 writel_relaxed(val1, qm->io_base + HPRE_DATA_RUSER_CFG); in hpre_config_pasid()
506 writel_relaxed(val2, qm->io_base + HPRE_DATA_WUSER_CFG); in hpre_config_pasid()
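The hpre_config_pasid() references above form a read-modify-write pair on the data RUSER/WUSER configuration registers using the relaxed MMIO accessors. A minimal sketch of that pattern follows; the PASID enable bit position (HPRE_PASID_EN_BIT) is not part of this listing and is assumed for illustration.

	/* Sketch only: toggle an assumed PASID enable bit in both data
	 * user-config registers (needs <linux/io.h> and <linux/bits.h>).
	 */
	static void hpre_config_pasid_sketch(struct hisi_qm *qm, bool enable)
	{
		u32 val1, val2;

		val1 = readl_relaxed(qm->io_base + HPRE_DATA_RUSER_CFG);
		val2 = readl_relaxed(qm->io_base + HPRE_DATA_WUSER_CFG);
		if (enable) {
			val1 |= BIT(HPRE_PASID_EN_BIT);	/* assumed bit name */
			val2 |= BIT(HPRE_PASID_EN_BIT);
		} else {
			val1 &= ~BIT(HPRE_PASID_EN_BIT);
			val2 &= ~BIT(HPRE_PASID_EN_BIT);
		}
		writel_relaxed(val1, qm->io_base + HPRE_DATA_RUSER_CFG);
		writel_relaxed(val2, qm->io_base + HPRE_DATA_WUSER_CFG);
	}
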
547 qm->io_base + offset + HPRE_CORE_ENB); in hpre_set_cluster()
548 writel(0x1, qm->io_base + offset + HPRE_CORE_INI_CFG); in hpre_set_cluster()
549 ret = readl_relaxed_poll_timeout(qm->io_base + offset + in hpre_set_cluster()
574 val = readl(qm->io_base + QM_PEH_AXUSER_CFG); in disable_flr_of_bme()
577 writel(val, qm->io_base + QM_PEH_AXUSER_CFG); in disable_flr_of_bme()
578 writel(PEH_AXUSER_CFG_ENABLE, qm->io_base + QM_PEH_AXUSER_CFG_ENABLE); in disable_flr_of_bme()
590 val = readl_relaxed(qm->io_base + HPRE_PREFETCH_CFG); in hpre_open_sva_prefetch()
592 writel(val, qm->io_base + HPRE_PREFETCH_CFG); in hpre_open_sva_prefetch()
594 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_PREFETCH_CFG, in hpre_open_sva_prefetch()
610 val = readl_relaxed(qm->io_base + HPRE_PREFETCH_CFG); in hpre_close_sva_prefetch()
612 writel(val, qm->io_base + HPRE_PREFETCH_CFG); in hpre_close_sva_prefetch()
614 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_SVA_PREFTCH_DFX, in hpre_close_sva_prefetch()
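The SVA prefetch open/close references pair a relaxed read-modify-write with readl_relaxed_poll_timeout() from <linux/iopoll.h>, waiting until the hardware reports the new prefetch state. A hedged sketch of the open path follows; the mask and the poll interval/timeout constants (HPRE_PREFETCH_DISABLE, HPRE_REG_RD_INTVRL_US, HPRE_REG_RD_TMOUT_US) are assumed names, not taken from this listing.

	/* Sketch only: clear the assumed "disable" field, then poll until the
	 * hardware acknowledges that prefetch is active.
	 */
	static void hpre_open_sva_prefetch_sketch(struct hisi_qm *qm)
	{
		u32 val;
		int ret;

		val = readl_relaxed(qm->io_base + HPRE_PREFETCH_CFG);
		val &= ~HPRE_PREFETCH_DISABLE;		/* assumed mask */
		writel(val, qm->io_base + HPRE_PREFETCH_CFG);

		ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_PREFETCH_CFG,
						 val, !(val & HPRE_PREFETCH_DISABLE),
						 HPRE_REG_RD_INTVRL_US,
						 HPRE_REG_RD_TMOUT_US);
		if (ret)
			pci_err(qm->pdev, "failed to open sva prefetch\n");
	}
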
629 val = readl(qm->io_base + HPRE_CLKGATE_CTL); in hpre_enable_clock_gate()
631 writel(val, qm->io_base + HPRE_CLKGATE_CTL); in hpre_enable_clock_gate()
633 val = readl(qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_enable_clock_gate()
635 writel(val, qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_enable_clock_gate()
637 val = readl(qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_enable_clock_gate()
639 writel(val, qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_enable_clock_gate()
641 val = readl_relaxed(qm->io_base + HPRE_CORE_SHB_CFG); in hpre_enable_clock_gate()
643 writel(val, qm->io_base + HPRE_CORE_SHB_CFG); in hpre_enable_clock_gate()
653 val = readl(qm->io_base + HPRE_CLKGATE_CTL); in hpre_disable_clock_gate()
655 writel(val, qm->io_base + HPRE_CLKGATE_CTL); in hpre_disable_clock_gate()
657 val = readl(qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_disable_clock_gate()
659 writel(val, qm->io_base + HPRE_PEH_CFG_AUTO_GATE); in hpre_disable_clock_gate()
661 val = readl(qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_disable_clock_gate()
663 writel(val, qm->io_base + HPRE_CLUSTER_DYN_CTL); in hpre_disable_clock_gate()
665 val = readl_relaxed(qm->io_base + HPRE_CORE_SHB_CFG); in hpre_disable_clock_gate()
667 writel(val, qm->io_base + HPRE_CORE_SHB_CFG); in hpre_disable_clock_gate()
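hpre_enable_clock_gate() and hpre_disable_clock_gate() walk the same four registers (HPRE_CLKGATE_CTL, HPRE_PEH_CFG_AUTO_GATE, HPRE_CLUSTER_DYN_CTL, HPRE_CORE_SHB_CFG) with symmetric read-modify-write sequences, setting a gating enable field on enable and clearing it on disable. A minimal sketch of one such pair, with an assumed mask name:

	/* Sketch only: one register of the clock-gating sequence; the real
	 * functions repeat this for each register listed above.
	 * HPRE_CLKGATE_CTL_EN is an assumed mask, not taken from this listing.
	 */
	static void hpre_clock_gate_set_sketch(struct hisi_qm *qm, bool enable)
	{
		u32 val;

		val = readl(qm->io_base + HPRE_CLKGATE_CTL);
		if (enable)
			val |= HPRE_CLKGATE_CTL_EN;
		else
			val &= ~HPRE_CLKGATE_CTL_EN;
		writel(val, qm->io_base + HPRE_CLKGATE_CTL);
	}
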
679 writel(HPRE_QM_USR_CFG_MASK, qm->io_base + QM_ARUSER_M_CFG_ENABLE); in hpre_set_user_domain_and_cache()
680 writel(HPRE_QM_USR_CFG_MASK, qm->io_base + QM_AWUSER_M_CFG_ENABLE); in hpre_set_user_domain_and_cache()
681 writel_relaxed(HPRE_QM_AXI_CFG_MASK, qm->io_base + QM_AXI_M_CFG); in hpre_set_user_domain_and_cache()
684 val = readl_relaxed(qm->io_base + HPRE_QM_ABNML_INT_MASK); in hpre_set_user_domain_and_cache()
686 writel_relaxed(val, qm->io_base + HPRE_QM_ABNML_INT_MASK); in hpre_set_user_domain_and_cache()
690 qm->io_base + HPRE_TYPES_ENB); in hpre_set_user_domain_and_cache()
692 writel(HPRE_RSA_ENB, qm->io_base + HPRE_TYPES_ENB); in hpre_set_user_domain_and_cache()
694 writel(HPRE_QM_VFG_AX_MASK, qm->io_base + HPRE_VFG_AXCACHE); in hpre_set_user_domain_and_cache()
695 writel(0x0, qm->io_base + HPRE_BD_ENDIAN); in hpre_set_user_domain_and_cache()
696 writel(0x0, qm->io_base + HPRE_INT_MASK); in hpre_set_user_domain_and_cache()
697 writel(0x0, qm->io_base + HPRE_POISON_BYPASS); in hpre_set_user_domain_and_cache()
698 writel(0x0, qm->io_base + HPRE_COMM_CNT_CLR_CE); in hpre_set_user_domain_and_cache()
699 writel(0x0, qm->io_base + HPRE_ECC_BYPASS); in hpre_set_user_domain_and_cache()
701 writel(HPRE_BD_USR_MASK, qm->io_base + HPRE_BD_ARUSR_CFG); in hpre_set_user_domain_and_cache()
702 writel(HPRE_BD_USR_MASK, qm->io_base + HPRE_BD_AWUSR_CFG); in hpre_set_user_domain_and_cache()
703 writel(0x1, qm->io_base + HPRE_RDCHN_INI_CFG); in hpre_set_user_domain_and_cache()
704 ret = readl_relaxed_poll_timeout(qm->io_base + HPRE_RDCHN_INI_ST, val, in hpre_set_user_domain_and_cache()
743 writel(0x0, qm->io_base + offset + HPRE_CLUSTER_INQURY); in hpre_cnt_regs_clear()
747 writel(0x0, qm->io_base + HPRE_CTRL_CNT_CLR_CE); in hpre_cnt_regs_clear()
756 val1 = readl(qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_master_ooo_ctrl()
767 writel(val2, qm->io_base + HPRE_OOO_SHUTDOWN_SEL); in hpre_master_ooo_ctrl()
769 writel(val1, qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_master_ooo_ctrl()
780 writel(ce | nfe | HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_INT_MASK); in hpre_hw_error_disable()
793 writel(ce | nfe | HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_HAC_SOURCE_INT); in hpre_hw_error_enable()
796 writel(ce, qm->io_base + HPRE_RAS_CE_ENB); in hpre_hw_error_enable()
797 writel(nfe, qm->io_base + HPRE_RAS_NFE_ENB); in hpre_hw_error_enable()
798 writel(HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_RAS_FE_ENB); in hpre_hw_error_enable()
804 writel(HPRE_CORE_INT_ENABLE, qm->io_base + HPRE_INT_MASK); in hpre_hw_error_enable()
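The hpre_hw_error_enable() references show the RAS bring-up order: clear any stale sources in HPRE_HAC_SOURCE_INT, program the per-severity enable registers (CE/NFE/FE), and finally unmask the core interrupt; hpre_hw_error_disable() masks them again by writing the full set back to HPRE_INT_MASK. Roughly, assuming ce and nfe come from the device's error-info configuration (not shown in this listing):

	/* Sketch only: RAS enable sequence in the order the references above imply. */
	writel(ce | nfe | HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_HAC_SOURCE_INT); /* clear stale sources */
	writel(ce, qm->io_base + HPRE_RAS_CE_ENB);			/* correctable errors */
	writel(nfe, qm->io_base + HPRE_RAS_NFE_ENB);			/* non-fatal errors */
	writel(HPRE_HAC_RAS_FE_ENABLE, qm->io_base + HPRE_RAS_FE_ENB);	/* fatal errors */
	writel(HPRE_CORE_INT_ENABLE, qm->io_base + HPRE_INT_MASK);	/* unmask core IRQ */
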
818 return readl(qm->io_base + HPRE_CTRL_CNT_CLR_CE) & in hpre_clear_enable_read()
830 tmp = (readl(qm->io_base + HPRE_CTRL_CNT_CLR_CE) & in hpre_clear_enable_write()
832 writel(tmp, qm->io_base + HPRE_CTRL_CNT_CLR_CE); in hpre_clear_enable_write()
844 return readl(qm->io_base + offset + HPRE_CLSTR_ADDR_INQRY_RSLT); in hpre_cluster_inqry_read()
854 writel(val, qm->io_base + offset + HPRE_CLUSTER_INQURY); in hpre_cluster_inqry_write()
1016 regset->base = qm->io_base; in hpre_pf_comm_regs_debugfs_init()
1046 regset->base = qm->io_base + hpre_cluster_offsets[i]; in hpre_cluster_debugfs_init()
1183 void __iomem *io_base; in hpre_show_last_regs_init() local
1192 debug->last_words[i] = readl_relaxed(qm->io_base + in hpre_show_last_regs_init()
1196 io_base = qm->io_base + hpre_cluster_offsets[i]; in hpre_show_last_regs_init()
1200 io_base + hpre_cluster_dfx_regs[j].offset); in hpre_show_last_regs_init()
1225 void __iomem *io_base; in hpre_show_last_dfx_regs() local
1234 val = readl_relaxed(qm->io_base + hpre_com_dfx_regs[i].offset); in hpre_show_last_dfx_regs()
1241 io_base = qm->io_base + hpre_cluster_offsets[i]; in hpre_show_last_dfx_regs()
1243 val = readl_relaxed(io_base + in hpre_show_last_dfx_regs()
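The debugfs "last words" support snapshots every DFX register so it can be dumped later: the common registers are read directly from qm->io_base, then each cluster is walked by adding its offset from hpre_cluster_offsets[] before reading the per-cluster DFX registers. A hedged sketch of that per-cluster loop (the count variables and index layout are assumptions):

	/* Sketch only: snapshot per-cluster DFX registers relative to each
	 * cluster base; clusters_num, cluster_dfx_regs_num and com_dfx_regs_num
	 * are assumed names for the loop bounds and index base.
	 */
	for (i = 0; i < clusters_num; i++) {
		io_base = qm->io_base + hpre_cluster_offsets[i];
		for (j = 0; j < cluster_dfx_regs_num; j++) {
			idx = com_dfx_regs_num + i * cluster_dfx_regs_num + j;
			debug->last_words[idx] =
				readl_relaxed(io_base + hpre_cluster_dfx_regs[j].offset);
		}
	}
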
1268 return readl(qm->io_base + HPRE_INT_STATUS); in hpre_get_hw_err_status()
1275 writel(err_sts, qm->io_base + HPRE_HAC_SOURCE_INT); in hpre_clear_hw_err_status()
1277 writel(nfe, qm->io_base + HPRE_RAS_NFE_ENB); in hpre_clear_hw_err_status()
1284 value = readl(qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
1286 qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
1288 qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB); in hpre_open_axi_master_ooo()
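Finally, the error-handling hooks: hpre_get_hw_err_status() returns HPRE_INT_STATUS, hpre_clear_hw_err_status() writes the status back to HPRE_HAC_SOURCE_INT and refreshes the NFE enable, and hpre_open_axi_master_ooo() re-arms the AXI master out-of-order shutdown by clearing and then re-setting its enable field. A sketch of that re-arm sequence, with the enable mask name assumed:

	/* Sketch only: pulse the OOO shutdown enable to re-open the AXI master
	 * path after an error. HPRE_AM_OOO_SHUTDOWN_ENABLE is an assumed mask.
	 */
	static void hpre_open_axi_master_ooo_sketch(struct hisi_qm *qm)
	{
		u32 value;

		value = readl(qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB);
		writel(value & ~HPRE_AM_OOO_SHUTDOWN_ENABLE,
		       qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB);
		writel(value | HPRE_AM_OOO_SHUTDOWN_ENABLE,
		       qm->io_base + HPRE_AM_OOO_SHUTDOWN_ENB);
	}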