Lines Matching +full:smmu +full:-v3 in drivers/pci/controller/dwc/pcie-qcom.c

1 // SPDX-License-Identifier: GPL-2.0
5 * Copyright (c) 2014-2015, The Linux Foundation. All rights reserved.
8 * Author: Stanimir Varbanov <svarbanov@mm-sol.com>
32 #include "pcie-designware.h"
202 #define to_qcom_pcie(x) dev_get_drvdata((x)->dev)
206 gpiod_set_value_cansleep(pcie->reset, 1); in qcom_ep_reset_assert()
214 gpiod_set_value_cansleep(pcie->reset, 0); in qcom_ep_reset_deassert()
223 if (pcie->ops->ltssm_enable) in qcom_pcie_start_link()
224 pcie->ops->ltssm_enable(pcie); in qcom_pcie_start_link()
234 val = readl(pcie->elbi + PCIE20_ELBI_SYS_CTRL); in qcom_pcie_2_1_0_ltssm_enable()
236 writel(val, pcie->elbi + PCIE20_ELBI_SYS_CTRL); in qcom_pcie_2_1_0_ltssm_enable()
241 struct qcom_pcie_resources_2_1_0 *res = &pcie->res.v2_1_0; in qcom_pcie_get_resources_2_1_0()
242 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_2_1_0()
243 struct device *dev = pci->dev; in qcom_pcie_get_resources_2_1_0()
246 res->supplies[0].supply = "vdda"; in qcom_pcie_get_resources_2_1_0()
247 res->supplies[1].supply = "vdda_phy"; in qcom_pcie_get_resources_2_1_0()
248 res->supplies[2].supply = "vdda_refclk"; in qcom_pcie_get_resources_2_1_0()
249 ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(res->supplies), in qcom_pcie_get_resources_2_1_0()
250 res->supplies); in qcom_pcie_get_resources_2_1_0()
254 res->clks[0].id = "iface"; in qcom_pcie_get_resources_2_1_0()
255 res->clks[1].id = "core"; in qcom_pcie_get_resources_2_1_0()
256 res->clks[2].id = "phy"; in qcom_pcie_get_resources_2_1_0()
257 res->clks[3].id = "aux"; in qcom_pcie_get_resources_2_1_0()
258 res->clks[4].id = "ref"; in qcom_pcie_get_resources_2_1_0()
261 ret = devm_clk_bulk_get(dev, 3, res->clks); in qcom_pcie_get_resources_2_1_0()
266 ret = devm_clk_bulk_get_optional(dev, 2, res->clks + 3); in qcom_pcie_get_resources_2_1_0()
270 res->pci_reset = devm_reset_control_get_exclusive(dev, "pci"); in qcom_pcie_get_resources_2_1_0()
271 if (IS_ERR(res->pci_reset)) in qcom_pcie_get_resources_2_1_0()
272 return PTR_ERR(res->pci_reset); in qcom_pcie_get_resources_2_1_0()
274 res->axi_reset = devm_reset_control_get_exclusive(dev, "axi"); in qcom_pcie_get_resources_2_1_0()
275 if (IS_ERR(res->axi_reset)) in qcom_pcie_get_resources_2_1_0()
276 return PTR_ERR(res->axi_reset); in qcom_pcie_get_resources_2_1_0()
278 res->ahb_reset = devm_reset_control_get_exclusive(dev, "ahb"); in qcom_pcie_get_resources_2_1_0()
279 if (IS_ERR(res->ahb_reset)) in qcom_pcie_get_resources_2_1_0()
280 return PTR_ERR(res->ahb_reset); in qcom_pcie_get_resources_2_1_0()
282 res->por_reset = devm_reset_control_get_exclusive(dev, "por"); in qcom_pcie_get_resources_2_1_0()
283 if (IS_ERR(res->por_reset)) in qcom_pcie_get_resources_2_1_0()
284 return PTR_ERR(res->por_reset); in qcom_pcie_get_resources_2_1_0()
286 res->ext_reset = devm_reset_control_get_optional_exclusive(dev, "ext"); in qcom_pcie_get_resources_2_1_0()
287 if (IS_ERR(res->ext_reset)) in qcom_pcie_get_resources_2_1_0()
288 return PTR_ERR(res->ext_reset); in qcom_pcie_get_resources_2_1_0()
290 res->phy_reset = devm_reset_control_get_exclusive(dev, "phy"); in qcom_pcie_get_resources_2_1_0()
291 return PTR_ERR_OR_ZERO(res->phy_reset); in qcom_pcie_get_resources_2_1_0()
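Editorial aside (not driver code): source lines 254-266 above fill one clk_bulk array with three required clocks and two optional ones, then fetch them in two calls. Entries the optional lookup does not find stay NULL, and the clk framework treats NULL clocks as no-ops, so the later bulk enable/disable calls can safely cover the whole ARRAY_SIZE(res->clks) range. A minimal standalone sketch of that pattern, with hypothetical function name:

#include <linux/clk.h>
#include <linux/device.h>

/* Hypothetical example mirroring the required + optional split above. */
static int example_get_pcie_clks(struct device *dev,
				 struct clk_bulk_data clks[5])
{
	int ret;

	clks[0].id = "iface";
	clks[1].id = "core";
	clks[2].id = "phy";
	clks[3].id = "aux";	/* may be absent on some boards */
	clks[4].id = "ref";	/* may be absent on some boards */

	ret = devm_clk_bulk_get(dev, 3, clks);		/* must exist */
	if (ret < 0)
		return ret;

	/* NULL clks from the optional get are no-ops in later bulk calls. */
	return devm_clk_bulk_get_optional(dev, 2, clks + 3);
}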
296 struct qcom_pcie_resources_2_1_0 *res = &pcie->res.v2_1_0; in qcom_pcie_deinit_2_1_0()
298 clk_bulk_disable_unprepare(ARRAY_SIZE(res->clks), res->clks); in qcom_pcie_deinit_2_1_0()
299 reset_control_assert(res->pci_reset); in qcom_pcie_deinit_2_1_0()
300 reset_control_assert(res->axi_reset); in qcom_pcie_deinit_2_1_0()
301 reset_control_assert(res->ahb_reset); in qcom_pcie_deinit_2_1_0()
302 reset_control_assert(res->por_reset); in qcom_pcie_deinit_2_1_0()
303 reset_control_assert(res->ext_reset); in qcom_pcie_deinit_2_1_0()
304 reset_control_assert(res->phy_reset); in qcom_pcie_deinit_2_1_0()
306 writel(1, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_deinit_2_1_0()
308 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_deinit_2_1_0()
313 struct qcom_pcie_resources_2_1_0 *res = &pcie->res.v2_1_0; in qcom_pcie_init_2_1_0()
314 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_2_1_0()
315 struct device *dev = pci->dev; in qcom_pcie_init_2_1_0()
316 struct device_node *node = dev->of_node; in qcom_pcie_init_2_1_0()
321 reset_control_assert(res->pci_reset); in qcom_pcie_init_2_1_0()
322 reset_control_assert(res->axi_reset); in qcom_pcie_init_2_1_0()
323 reset_control_assert(res->ahb_reset); in qcom_pcie_init_2_1_0()
324 reset_control_assert(res->por_reset); in qcom_pcie_init_2_1_0()
325 reset_control_assert(res->ext_reset); in qcom_pcie_init_2_1_0()
326 reset_control_assert(res->phy_reset); in qcom_pcie_init_2_1_0()
328 writel(1, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_1_0()
330 ret = regulator_bulk_enable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_1_0()
336 ret = reset_control_deassert(res->ahb_reset); in qcom_pcie_init_2_1_0()
342 ret = reset_control_deassert(res->ext_reset); in qcom_pcie_init_2_1_0()
348 ret = reset_control_deassert(res->phy_reset); in qcom_pcie_init_2_1_0()
354 ret = reset_control_deassert(res->pci_reset); in qcom_pcie_init_2_1_0()
360 ret = reset_control_deassert(res->por_reset); in qcom_pcie_init_2_1_0()
366 ret = reset_control_deassert(res->axi_reset); in qcom_pcie_init_2_1_0()
372 ret = clk_bulk_prepare_enable(ARRAY_SIZE(res->clks), res->clks); in qcom_pcie_init_2_1_0()
377 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_1_0()
379 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_1_0()
381 if (of_device_is_compatible(node, "qcom,pcie-ipq8064") || in qcom_pcie_init_2_1_0()
382 of_device_is_compatible(node, "qcom,pcie-ipq8064-v2")) { in qcom_pcie_init_2_1_0()
386 pcie->parf + PCIE20_PARF_PCS_DEEMPH); in qcom_pcie_init_2_1_0()
389 pcie->parf + PCIE20_PARF_PCS_SWING); in qcom_pcie_init_2_1_0()
390 writel(PHY_RX0_EQ(4), pcie->parf + PCIE20_PARF_CONFIG_BITS); in qcom_pcie_init_2_1_0()
393 if (of_device_is_compatible(node, "qcom,pcie-ipq8064")) { in qcom_pcie_init_2_1_0()
395 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_1_0()
398 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_1_0()
402 val = readl(pcie->parf + PCIE20_PARF_PHY_REFCLK); in qcom_pcie_init_2_1_0()
404 if (!of_device_is_compatible(node, "qcom,pcie-apq8064")) in qcom_pcie_init_2_1_0()
407 writel(val, pcie->parf + PCIE20_PARF_PHY_REFCLK); in qcom_pcie_init_2_1_0()
414 pci->dbi_base + PCIE20_AXI_MSTR_RESP_COMP_CTRL0); in qcom_pcie_init_2_1_0()
416 pci->dbi_base + PCIE20_AXI_MSTR_RESP_COMP_CTRL1); in qcom_pcie_init_2_1_0()
421 reset_control_assert(res->axi_reset); in qcom_pcie_init_2_1_0()
423 reset_control_assert(res->por_reset); in qcom_pcie_init_2_1_0()
425 reset_control_assert(res->pci_reset); in qcom_pcie_init_2_1_0()
427 reset_control_assert(res->phy_reset); in qcom_pcie_init_2_1_0()
429 reset_control_assert(res->ext_reset); in qcom_pcie_init_2_1_0()
431 reset_control_assert(res->ahb_reset); in qcom_pcie_init_2_1_0()
433 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_1_0()
440 struct qcom_pcie_resources_1_0_0 *res = &pcie->res.v1_0_0; in qcom_pcie_get_resources_1_0_0()
441 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_1_0_0()
442 struct device *dev = pci->dev; in qcom_pcie_get_resources_1_0_0()
444 res->vdda = devm_regulator_get(dev, "vdda"); in qcom_pcie_get_resources_1_0_0()
445 if (IS_ERR(res->vdda)) in qcom_pcie_get_resources_1_0_0()
446 return PTR_ERR(res->vdda); in qcom_pcie_get_resources_1_0_0()
448 res->iface = devm_clk_get(dev, "iface"); in qcom_pcie_get_resources_1_0_0()
449 if (IS_ERR(res->iface)) in qcom_pcie_get_resources_1_0_0()
450 return PTR_ERR(res->iface); in qcom_pcie_get_resources_1_0_0()
452 res->aux = devm_clk_get(dev, "aux"); in qcom_pcie_get_resources_1_0_0()
453 if (IS_ERR(res->aux)) in qcom_pcie_get_resources_1_0_0()
454 return PTR_ERR(res->aux); in qcom_pcie_get_resources_1_0_0()
456 res->master_bus = devm_clk_get(dev, "master_bus"); in qcom_pcie_get_resources_1_0_0()
457 if (IS_ERR(res->master_bus)) in qcom_pcie_get_resources_1_0_0()
458 return PTR_ERR(res->master_bus); in qcom_pcie_get_resources_1_0_0()
460 res->slave_bus = devm_clk_get(dev, "slave_bus"); in qcom_pcie_get_resources_1_0_0()
461 if (IS_ERR(res->slave_bus)) in qcom_pcie_get_resources_1_0_0()
462 return PTR_ERR(res->slave_bus); in qcom_pcie_get_resources_1_0_0()
464 res->core = devm_reset_control_get_exclusive(dev, "core"); in qcom_pcie_get_resources_1_0_0()
465 return PTR_ERR_OR_ZERO(res->core); in qcom_pcie_get_resources_1_0_0()
470 struct qcom_pcie_resources_1_0_0 *res = &pcie->res.v1_0_0; in qcom_pcie_deinit_1_0_0()
472 reset_control_assert(res->core); in qcom_pcie_deinit_1_0_0()
473 clk_disable_unprepare(res->slave_bus); in qcom_pcie_deinit_1_0_0()
474 clk_disable_unprepare(res->master_bus); in qcom_pcie_deinit_1_0_0()
475 clk_disable_unprepare(res->iface); in qcom_pcie_deinit_1_0_0()
476 clk_disable_unprepare(res->aux); in qcom_pcie_deinit_1_0_0()
477 regulator_disable(res->vdda); in qcom_pcie_deinit_1_0_0()
482 struct qcom_pcie_resources_1_0_0 *res = &pcie->res.v1_0_0; in qcom_pcie_init_1_0_0()
483 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_1_0_0()
484 struct device *dev = pci->dev; in qcom_pcie_init_1_0_0()
487 ret = reset_control_deassert(res->core); in qcom_pcie_init_1_0_0()
493 ret = clk_prepare_enable(res->aux); in qcom_pcie_init_1_0_0()
499 ret = clk_prepare_enable(res->iface); in qcom_pcie_init_1_0_0()
505 ret = clk_prepare_enable(res->master_bus); in qcom_pcie_init_1_0_0()
511 ret = clk_prepare_enable(res->slave_bus); in qcom_pcie_init_1_0_0()
517 ret = regulator_enable(res->vdda); in qcom_pcie_init_1_0_0()
524 writel(0, pcie->parf + PCIE20_PARF_DBI_BASE_ADDR); in qcom_pcie_init_1_0_0()
527 u32 val = readl(pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT); in qcom_pcie_init_1_0_0()
530 writel(val, pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT); in qcom_pcie_init_1_0_0()
535 clk_disable_unprepare(res->slave_bus); in qcom_pcie_init_1_0_0()
537 clk_disable_unprepare(res->master_bus); in qcom_pcie_init_1_0_0()
539 clk_disable_unprepare(res->iface); in qcom_pcie_init_1_0_0()
541 clk_disable_unprepare(res->aux); in qcom_pcie_init_1_0_0()
543 reset_control_assert(res->core); in qcom_pcie_init_1_0_0()
553 val = readl(pcie->parf + PCIE20_PARF_LTSSM); in qcom_pcie_2_3_2_ltssm_enable()
555 writel(val, pcie->parf + PCIE20_PARF_LTSSM); in qcom_pcie_2_3_2_ltssm_enable()
560 struct qcom_pcie_resources_2_3_2 *res = &pcie->res.v2_3_2; in qcom_pcie_get_resources_2_3_2()
561 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_2_3_2()
562 struct device *dev = pci->dev; in qcom_pcie_get_resources_2_3_2()
565 res->supplies[0].supply = "vdda"; in qcom_pcie_get_resources_2_3_2()
566 res->supplies[1].supply = "vddpe-3v3"; in qcom_pcie_get_resources_2_3_2()
567 ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(res->supplies), in qcom_pcie_get_resources_2_3_2()
568 res->supplies); in qcom_pcie_get_resources_2_3_2()
572 res->aux_clk = devm_clk_get(dev, "aux"); in qcom_pcie_get_resources_2_3_2()
573 if (IS_ERR(res->aux_clk)) in qcom_pcie_get_resources_2_3_2()
574 return PTR_ERR(res->aux_clk); in qcom_pcie_get_resources_2_3_2()
576 res->cfg_clk = devm_clk_get(dev, "cfg"); in qcom_pcie_get_resources_2_3_2()
577 if (IS_ERR(res->cfg_clk)) in qcom_pcie_get_resources_2_3_2()
578 return PTR_ERR(res->cfg_clk); in qcom_pcie_get_resources_2_3_2()
580 res->master_clk = devm_clk_get(dev, "bus_master"); in qcom_pcie_get_resources_2_3_2()
581 if (IS_ERR(res->master_clk)) in qcom_pcie_get_resources_2_3_2()
582 return PTR_ERR(res->master_clk); in qcom_pcie_get_resources_2_3_2()
584 res->slave_clk = devm_clk_get(dev, "bus_slave"); in qcom_pcie_get_resources_2_3_2()
585 if (IS_ERR(res->slave_clk)) in qcom_pcie_get_resources_2_3_2()
586 return PTR_ERR(res->slave_clk); in qcom_pcie_get_resources_2_3_2()
588 res->pipe_clk = devm_clk_get(dev, "pipe"); in qcom_pcie_get_resources_2_3_2()
589 return PTR_ERR_OR_ZERO(res->pipe_clk); in qcom_pcie_get_resources_2_3_2()
594 struct qcom_pcie_resources_2_3_2 *res = &pcie->res.v2_3_2; in qcom_pcie_deinit_2_3_2()
596 clk_disable_unprepare(res->slave_clk); in qcom_pcie_deinit_2_3_2()
597 clk_disable_unprepare(res->master_clk); in qcom_pcie_deinit_2_3_2()
598 clk_disable_unprepare(res->cfg_clk); in qcom_pcie_deinit_2_3_2()
599 clk_disable_unprepare(res->aux_clk); in qcom_pcie_deinit_2_3_2()
601 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_deinit_2_3_2()
606 struct qcom_pcie_resources_2_3_2 *res = &pcie->res.v2_3_2; in qcom_pcie_post_deinit_2_3_2()
608 clk_disable_unprepare(res->pipe_clk); in qcom_pcie_post_deinit_2_3_2()
613 struct qcom_pcie_resources_2_3_2 *res = &pcie->res.v2_3_2; in qcom_pcie_init_2_3_2()
614 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_2_3_2()
615 struct device *dev = pci->dev; in qcom_pcie_init_2_3_2()
619 ret = regulator_bulk_enable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_3_2()
625 ret = clk_prepare_enable(res->aux_clk); in qcom_pcie_init_2_3_2()
631 ret = clk_prepare_enable(res->cfg_clk); in qcom_pcie_init_2_3_2()
637 ret = clk_prepare_enable(res->master_clk); in qcom_pcie_init_2_3_2()
643 ret = clk_prepare_enable(res->slave_clk); in qcom_pcie_init_2_3_2()
650 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_3_2()
652 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_3_2()
655 writel(0, pcie->parf + PCIE20_PARF_DBI_BASE_ADDR); in qcom_pcie_init_2_3_2()
658 val = readl(pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_3_2()
660 writel(val, pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_3_2()
662 val = readl(pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_3_2()
664 writel(val, pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_3_2()
666 val = readl(pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT_V2); in qcom_pcie_init_2_3_2()
668 writel(val, pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT_V2); in qcom_pcie_init_2_3_2()
673 clk_disable_unprepare(res->master_clk); in qcom_pcie_init_2_3_2()
675 clk_disable_unprepare(res->cfg_clk); in qcom_pcie_init_2_3_2()
677 clk_disable_unprepare(res->aux_clk); in qcom_pcie_init_2_3_2()
680 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_3_2()
687 struct qcom_pcie_resources_2_3_2 *res = &pcie->res.v2_3_2; in qcom_pcie_post_init_2_3_2()
688 struct dw_pcie *pci = pcie->pci; in qcom_pcie_post_init_2_3_2()
689 struct device *dev = pci->dev; in qcom_pcie_post_init_2_3_2()
692 ret = clk_prepare_enable(res->pipe_clk); in qcom_pcie_post_init_2_3_2()
703 struct qcom_pcie_resources_2_4_0 *res = &pcie->res.v2_4_0; in qcom_pcie_get_resources_2_4_0()
704 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_2_4_0()
705 struct device *dev = pci->dev; in qcom_pcie_get_resources_2_4_0()
706 bool is_ipq = of_device_is_compatible(dev->of_node, "qcom,pcie-ipq4019"); in qcom_pcie_get_resources_2_4_0()
709 res->clks[0].id = "aux"; in qcom_pcie_get_resources_2_4_0()
710 res->clks[1].id = "master_bus"; in qcom_pcie_get_resources_2_4_0()
711 res->clks[2].id = "slave_bus"; in qcom_pcie_get_resources_2_4_0()
712 res->clks[3].id = "iface"; in qcom_pcie_get_resources_2_4_0()
714 /* qcom,pcie-ipq4019 is defined without "iface" */ in qcom_pcie_get_resources_2_4_0()
715 res->num_clks = is_ipq ? 3 : 4; in qcom_pcie_get_resources_2_4_0()
717 ret = devm_clk_bulk_get(dev, res->num_clks, res->clks); in qcom_pcie_get_resources_2_4_0()
721 res->axi_m_reset = devm_reset_control_get_exclusive(dev, "axi_m"); in qcom_pcie_get_resources_2_4_0()
722 if (IS_ERR(res->axi_m_reset)) in qcom_pcie_get_resources_2_4_0()
723 return PTR_ERR(res->axi_m_reset); in qcom_pcie_get_resources_2_4_0()
725 res->axi_s_reset = devm_reset_control_get_exclusive(dev, "axi_s"); in qcom_pcie_get_resources_2_4_0()
726 if (IS_ERR(res->axi_s_reset)) in qcom_pcie_get_resources_2_4_0()
727 return PTR_ERR(res->axi_s_reset); in qcom_pcie_get_resources_2_4_0()
734 res->pipe_reset = devm_reset_control_get_exclusive(dev, "pipe"); in qcom_pcie_get_resources_2_4_0()
735 if (IS_ERR(res->pipe_reset)) in qcom_pcie_get_resources_2_4_0()
736 return PTR_ERR(res->pipe_reset); in qcom_pcie_get_resources_2_4_0()
738 res->axi_m_vmid_reset = devm_reset_control_get_exclusive(dev, "axi_m_vmid"); in qcom_pcie_get_resources_2_4_0()
740 if (IS_ERR(res->axi_m_vmid_reset)) in qcom_pcie_get_resources_2_4_0()
741 return PTR_ERR(res->axi_m_vmid_reset); in qcom_pcie_get_resources_2_4_0()
743 res->axi_s_xpu_reset = devm_reset_control_get_exclusive(dev, "axi_s_xpu"); in qcom_pcie_get_resources_2_4_0()
745 if (IS_ERR(res->axi_s_xpu_reset)) in qcom_pcie_get_resources_2_4_0()
746 return PTR_ERR(res->axi_s_xpu_reset); in qcom_pcie_get_resources_2_4_0()
748 res->parf_reset = devm_reset_control_get_exclusive(dev, "parf"); in qcom_pcie_get_resources_2_4_0()
749 if (IS_ERR(res->parf_reset)) in qcom_pcie_get_resources_2_4_0()
750 return PTR_ERR(res->parf_reset); in qcom_pcie_get_resources_2_4_0()
752 res->phy_reset = devm_reset_control_get_exclusive(dev, "phy"); in qcom_pcie_get_resources_2_4_0()
753 if (IS_ERR(res->phy_reset)) in qcom_pcie_get_resources_2_4_0()
754 return PTR_ERR(res->phy_reset); in qcom_pcie_get_resources_2_4_0()
757 res->axi_m_sticky_reset = devm_reset_control_get_exclusive(dev, "axi_m_sticky"); in qcom_pcie_get_resources_2_4_0()
759 if (IS_ERR(res->axi_m_sticky_reset)) in qcom_pcie_get_resources_2_4_0()
760 return PTR_ERR(res->axi_m_sticky_reset); in qcom_pcie_get_resources_2_4_0()
762 res->pipe_sticky_reset = devm_reset_control_get_exclusive(dev, "pipe_sticky"); in qcom_pcie_get_resources_2_4_0()
764 if (IS_ERR(res->pipe_sticky_reset)) in qcom_pcie_get_resources_2_4_0()
765 return PTR_ERR(res->pipe_sticky_reset); in qcom_pcie_get_resources_2_4_0()
767 res->pwr_reset = devm_reset_control_get_exclusive(dev, "pwr"); in qcom_pcie_get_resources_2_4_0()
768 if (IS_ERR(res->pwr_reset)) in qcom_pcie_get_resources_2_4_0()
769 return PTR_ERR(res->pwr_reset); in qcom_pcie_get_resources_2_4_0()
771 res->ahb_reset = devm_reset_control_get_exclusive(dev, "ahb"); in qcom_pcie_get_resources_2_4_0()
772 if (IS_ERR(res->ahb_reset)) in qcom_pcie_get_resources_2_4_0()
773 return PTR_ERR(res->ahb_reset); in qcom_pcie_get_resources_2_4_0()
776 res->phy_ahb_reset = devm_reset_control_get_exclusive(dev, "phy_ahb"); in qcom_pcie_get_resources_2_4_0()
777 if (IS_ERR(res->phy_ahb_reset)) in qcom_pcie_get_resources_2_4_0()
778 return PTR_ERR(res->phy_ahb_reset); in qcom_pcie_get_resources_2_4_0()
786 struct qcom_pcie_resources_2_4_0 *res = &pcie->res.v2_4_0; in qcom_pcie_deinit_2_4_0()
788 reset_control_assert(res->axi_m_reset); in qcom_pcie_deinit_2_4_0()
789 reset_control_assert(res->axi_s_reset); in qcom_pcie_deinit_2_4_0()
790 reset_control_assert(res->pipe_reset); in qcom_pcie_deinit_2_4_0()
791 reset_control_assert(res->pipe_sticky_reset); in qcom_pcie_deinit_2_4_0()
792 reset_control_assert(res->phy_reset); in qcom_pcie_deinit_2_4_0()
793 reset_control_assert(res->phy_ahb_reset); in qcom_pcie_deinit_2_4_0()
794 reset_control_assert(res->axi_m_sticky_reset); in qcom_pcie_deinit_2_4_0()
795 reset_control_assert(res->pwr_reset); in qcom_pcie_deinit_2_4_0()
796 reset_control_assert(res->ahb_reset); in qcom_pcie_deinit_2_4_0()
797 clk_bulk_disable_unprepare(res->num_clks, res->clks); in qcom_pcie_deinit_2_4_0()
802 struct qcom_pcie_resources_2_4_0 *res = &pcie->res.v2_4_0; in qcom_pcie_init_2_4_0()
803 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_2_4_0()
804 struct device *dev = pci->dev; in qcom_pcie_init_2_4_0()
808 ret = reset_control_assert(res->axi_m_reset); in qcom_pcie_init_2_4_0()
814 ret = reset_control_assert(res->axi_s_reset); in qcom_pcie_init_2_4_0()
822 ret = reset_control_assert(res->pipe_reset); in qcom_pcie_init_2_4_0()
828 ret = reset_control_assert(res->pipe_sticky_reset); in qcom_pcie_init_2_4_0()
834 ret = reset_control_assert(res->phy_reset); in qcom_pcie_init_2_4_0()
840 ret = reset_control_assert(res->phy_ahb_reset); in qcom_pcie_init_2_4_0()
848 ret = reset_control_assert(res->axi_m_sticky_reset); in qcom_pcie_init_2_4_0()
854 ret = reset_control_assert(res->pwr_reset); in qcom_pcie_init_2_4_0()
860 ret = reset_control_assert(res->ahb_reset); in qcom_pcie_init_2_4_0()
868 ret = reset_control_deassert(res->phy_ahb_reset); in qcom_pcie_init_2_4_0()
874 ret = reset_control_deassert(res->phy_reset); in qcom_pcie_init_2_4_0()
880 ret = reset_control_deassert(res->pipe_reset); in qcom_pcie_init_2_4_0()
886 ret = reset_control_deassert(res->pipe_sticky_reset); in qcom_pcie_init_2_4_0()
894 ret = reset_control_deassert(res->axi_m_reset); in qcom_pcie_init_2_4_0()
900 ret = reset_control_deassert(res->axi_m_sticky_reset); in qcom_pcie_init_2_4_0()
906 ret = reset_control_deassert(res->axi_s_reset); in qcom_pcie_init_2_4_0()
912 ret = reset_control_deassert(res->pwr_reset); in qcom_pcie_init_2_4_0()
918 ret = reset_control_deassert(res->ahb_reset); in qcom_pcie_init_2_4_0()
926 ret = clk_bulk_prepare_enable(res->num_clks, res->clks); in qcom_pcie_init_2_4_0()
931 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_4_0()
933 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_4_0()
936 writel(0, pcie->parf + PCIE20_PARF_DBI_BASE_ADDR); in qcom_pcie_init_2_4_0()
939 val = readl(pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_4_0()
941 writel(val, pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_4_0()
943 val = readl(pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_4_0()
945 writel(val, pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_4_0()
947 val = readl(pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT_V2); in qcom_pcie_init_2_4_0()
949 writel(val, pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT_V2); in qcom_pcie_init_2_4_0()
954 reset_control_assert(res->ahb_reset); in qcom_pcie_init_2_4_0()
956 reset_control_assert(res->pwr_reset); in qcom_pcie_init_2_4_0()
958 reset_control_assert(res->axi_s_reset); in qcom_pcie_init_2_4_0()
960 reset_control_assert(res->axi_m_sticky_reset); in qcom_pcie_init_2_4_0()
962 reset_control_assert(res->axi_m_reset); in qcom_pcie_init_2_4_0()
964 reset_control_assert(res->pipe_sticky_reset); in qcom_pcie_init_2_4_0()
966 reset_control_assert(res->pipe_reset); in qcom_pcie_init_2_4_0()
968 reset_control_assert(res->phy_reset); in qcom_pcie_init_2_4_0()
970 reset_control_assert(res->phy_ahb_reset); in qcom_pcie_init_2_4_0()
976 struct qcom_pcie_resources_2_3_3 *res = &pcie->res.v2_3_3; in qcom_pcie_get_resources_2_3_3()
977 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_2_3_3()
978 struct device *dev = pci->dev; in qcom_pcie_get_resources_2_3_3()
984 res->iface = devm_clk_get(dev, "iface"); in qcom_pcie_get_resources_2_3_3()
985 if (IS_ERR(res->iface)) in qcom_pcie_get_resources_2_3_3()
986 return PTR_ERR(res->iface); in qcom_pcie_get_resources_2_3_3()
988 res->axi_m_clk = devm_clk_get(dev, "axi_m"); in qcom_pcie_get_resources_2_3_3()
989 if (IS_ERR(res->axi_m_clk)) in qcom_pcie_get_resources_2_3_3()
990 return PTR_ERR(res->axi_m_clk); in qcom_pcie_get_resources_2_3_3()
992 res->axi_s_clk = devm_clk_get(dev, "axi_s"); in qcom_pcie_get_resources_2_3_3()
993 if (IS_ERR(res->axi_s_clk)) in qcom_pcie_get_resources_2_3_3()
994 return PTR_ERR(res->axi_s_clk); in qcom_pcie_get_resources_2_3_3()
996 res->ahb_clk = devm_clk_get(dev, "ahb"); in qcom_pcie_get_resources_2_3_3()
997 if (IS_ERR(res->ahb_clk)) in qcom_pcie_get_resources_2_3_3()
998 return PTR_ERR(res->ahb_clk); in qcom_pcie_get_resources_2_3_3()
1000 res->aux_clk = devm_clk_get(dev, "aux"); in qcom_pcie_get_resources_2_3_3()
1001 if (IS_ERR(res->aux_clk)) in qcom_pcie_get_resources_2_3_3()
1002 return PTR_ERR(res->aux_clk); in qcom_pcie_get_resources_2_3_3()
1005 res->rst[i] = devm_reset_control_get(dev, rst_names[i]); in qcom_pcie_get_resources_2_3_3()
1006 if (IS_ERR(res->rst[i])) in qcom_pcie_get_resources_2_3_3()
1007 return PTR_ERR(res->rst[i]); in qcom_pcie_get_resources_2_3_3()
1015 struct qcom_pcie_resources_2_3_3 *res = &pcie->res.v2_3_3; in qcom_pcie_deinit_2_3_3()
1017 clk_disable_unprepare(res->iface); in qcom_pcie_deinit_2_3_3()
1018 clk_disable_unprepare(res->axi_m_clk); in qcom_pcie_deinit_2_3_3()
1019 clk_disable_unprepare(res->axi_s_clk); in qcom_pcie_deinit_2_3_3()
1020 clk_disable_unprepare(res->ahb_clk); in qcom_pcie_deinit_2_3_3()
1021 clk_disable_unprepare(res->aux_clk); in qcom_pcie_deinit_2_3_3()
1026 struct qcom_pcie_resources_2_3_3 *res = &pcie->res.v2_3_3; in qcom_pcie_init_2_3_3()
1027 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_2_3_3()
1028 struct device *dev = pci->dev; in qcom_pcie_init_2_3_3()
1033 for (i = 0; i < ARRAY_SIZE(res->rst); i++) { in qcom_pcie_init_2_3_3()
1034 ret = reset_control_assert(res->rst[i]); in qcom_pcie_init_2_3_3()
1043 for (i = 0; i < ARRAY_SIZE(res->rst); i++) { in qcom_pcie_init_2_3_3()
1044 ret = reset_control_deassert(res->rst[i]); in qcom_pcie_init_2_3_3()
1058 ret = clk_prepare_enable(res->iface); in qcom_pcie_init_2_3_3()
1064 ret = clk_prepare_enable(res->axi_m_clk); in qcom_pcie_init_2_3_3()
1070 ret = clk_prepare_enable(res->axi_s_clk); in qcom_pcie_init_2_3_3()
1076 ret = clk_prepare_enable(res->ahb_clk); in qcom_pcie_init_2_3_3()
1082 ret = clk_prepare_enable(res->aux_clk); in qcom_pcie_init_2_3_3()
1089 pcie->parf + PCIE20_v3_PARF_SLV_ADDR_SPACE_SIZE); in qcom_pcie_init_2_3_3()
1091 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_3_3()
1093 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_3_3()
1095 writel(0, pcie->parf + PCIE20_PARF_DBI_BASE_ADDR); in qcom_pcie_init_2_3_3()
1100 pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_3_3()
1101 writel(0, pcie->parf + PCIE20_PARF_Q2A_FLUSH); in qcom_pcie_init_2_3_3()
1103 writel(PCI_COMMAND_MASTER, pci->dbi_base + PCI_COMMAND); in qcom_pcie_init_2_3_3()
1104 writel(DBI_RO_WR_EN, pci->dbi_base + PCIE20_MISC_CONTROL_1_REG); in qcom_pcie_init_2_3_3()
1105 writel(PCIE_CAP_LINK1_VAL, pci->dbi_base + offset + PCI_EXP_SLTCAP); in qcom_pcie_init_2_3_3()
1107 val = readl(pci->dbi_base + offset + PCI_EXP_LNKCAP); in qcom_pcie_init_2_3_3()
1109 writel(val, pci->dbi_base + offset + PCI_EXP_LNKCAP); in qcom_pcie_init_2_3_3()
1111 writel(PCI_EXP_DEVCTL2_COMP_TMOUT_DIS, pci->dbi_base + offset + PCI_EXP_DEVCTL2); in qcom_pcie_init_2_3_3()
1117 clk_disable_unprepare(res->ahb_clk); in qcom_pcie_init_2_3_3()
1119 clk_disable_unprepare(res->axi_s_clk); in qcom_pcie_init_2_3_3()
1121 clk_disable_unprepare(res->axi_m_clk); in qcom_pcie_init_2_3_3()
1123 clk_disable_unprepare(res->iface); in qcom_pcie_init_2_3_3()
1129 for (i = 0; i < ARRAY_SIZE(res->rst); i++) in qcom_pcie_init_2_3_3()
1130 reset_control_assert(res->rst[i]); in qcom_pcie_init_2_3_3()
1137 struct qcom_pcie_resources_2_7_0 *res = &pcie->res.v2_7_0; in qcom_pcie_get_resources_2_7_0()
1138 struct dw_pcie *pci = pcie->pci; in qcom_pcie_get_resources_2_7_0()
1139 struct device *dev = pci->dev; in qcom_pcie_get_resources_2_7_0()
1142 res->pci_reset = devm_reset_control_get_exclusive(dev, "pci"); in qcom_pcie_get_resources_2_7_0()
1143 if (IS_ERR(res->pci_reset)) in qcom_pcie_get_resources_2_7_0()
1144 return PTR_ERR(res->pci_reset); in qcom_pcie_get_resources_2_7_0()
1146 res->supplies[0].supply = "vdda"; in qcom_pcie_get_resources_2_7_0()
1147 res->supplies[1].supply = "vddpe-3v3"; in qcom_pcie_get_resources_2_7_0()
1148 ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(res->supplies), in qcom_pcie_get_resources_2_7_0()
1149 res->supplies); in qcom_pcie_get_resources_2_7_0()
1153 res->clks[0].id = "aux"; in qcom_pcie_get_resources_2_7_0()
1154 res->clks[1].id = "cfg"; in qcom_pcie_get_resources_2_7_0()
1155 res->clks[2].id = "bus_master"; in qcom_pcie_get_resources_2_7_0()
1156 res->clks[3].id = "bus_slave"; in qcom_pcie_get_resources_2_7_0()
1157 res->clks[4].id = "slave_q2a"; in qcom_pcie_get_resources_2_7_0()
1158 res->clks[5].id = "tbu"; in qcom_pcie_get_resources_2_7_0()
1159 if (of_device_is_compatible(dev->of_node, "qcom,pcie-sm8250")) { in qcom_pcie_get_resources_2_7_0()
1160 res->clks[6].id = "ddrss_sf_tbu"; in qcom_pcie_get_resources_2_7_0()
1161 res->num_clks = 7; in qcom_pcie_get_resources_2_7_0()
1163 res->num_clks = 6; in qcom_pcie_get_resources_2_7_0()
1166 ret = devm_clk_bulk_get(dev, res->num_clks, res->clks); in qcom_pcie_get_resources_2_7_0()
1170 res->pipe_clk = devm_clk_get(dev, "pipe"); in qcom_pcie_get_resources_2_7_0()
1171 return PTR_ERR_OR_ZERO(res->pipe_clk); in qcom_pcie_get_resources_2_7_0()
1176 struct qcom_pcie_resources_2_7_0 *res = &pcie->res.v2_7_0; in qcom_pcie_init_2_7_0()
1177 struct dw_pcie *pci = pcie->pci; in qcom_pcie_init_2_7_0()
1178 struct device *dev = pci->dev; in qcom_pcie_init_2_7_0()
1182 ret = regulator_bulk_enable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_7_0()
1188 ret = clk_bulk_prepare_enable(res->num_clks, res->clks); in qcom_pcie_init_2_7_0()
1192 ret = reset_control_assert(res->pci_reset); in qcom_pcie_init_2_7_0()
1200 ret = reset_control_deassert(res->pci_reset); in qcom_pcie_init_2_7_0()
1206 ret = clk_prepare_enable(res->pipe_clk); in qcom_pcie_init_2_7_0()
1213 writel(DEVICE_TYPE_RC, pcie->parf + PCIE20_PARF_DEVICE_TYPE); in qcom_pcie_init_2_7_0()
1216 val = readl(pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_7_0()
1218 writel(val, pcie->parf + PCIE20_PARF_PHY_CTRL); in qcom_pcie_init_2_7_0()
1221 writel(0, pcie->parf + PCIE20_PARF_DBI_BASE_ADDR); in qcom_pcie_init_2_7_0()
1224 val = readl(pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_7_0()
1226 writel(val, pcie->parf + PCIE20_PARF_SYS_CTRL); in qcom_pcie_init_2_7_0()
1228 val = readl(pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_7_0()
1230 writel(val, pcie->parf + PCIE20_PARF_MHI_CLOCK_RESET_CTRL); in qcom_pcie_init_2_7_0()
1233 val = readl(pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT); in qcom_pcie_init_2_7_0()
1235 writel(val, pcie->parf + PCIE20_PARF_AXI_MSTR_WR_ADDR_HALT); in qcom_pcie_init_2_7_0()
1240 clk_bulk_disable_unprepare(res->num_clks, res->clks); in qcom_pcie_init_2_7_0()
1242 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_init_2_7_0()
1249 struct qcom_pcie_resources_2_7_0 *res = &pcie->res.v2_7_0; in qcom_pcie_deinit_2_7_0()
1251 clk_bulk_disable_unprepare(res->num_clks, res->clks); in qcom_pcie_deinit_2_7_0()
1252 regulator_bulk_disable(ARRAY_SIZE(res->supplies), res->supplies); in qcom_pcie_deinit_2_7_0()
1257 struct qcom_pcie_resources_2_7_0 *res = &pcie->res.v2_7_0; in qcom_pcie_post_init_2_7_0()
1259 return clk_prepare_enable(res->pipe_clk); in qcom_pcie_post_init_2_7_0()
1264 struct qcom_pcie_resources_2_7_0 *res = &pcie->res.v2_7_0; in qcom_pcie_post_deinit_2_7_0()
1266 clk_disable_unprepare(res->pipe_clk); in qcom_pcie_post_deinit_2_7_0()
1272 u16 val = readw(pci->dbi_base + offset + PCI_EXP_LNKSTA); in qcom_pcie_link_up()
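The matching lines stop before qcom_pcie_link_up() returns. For context, DesignWare-based hosts conventionally report link-up from the Data Link Layer Link Active bit of the Link Status register just read; a sketch of that check (assumed, not quoted from the driver):

#include <linux/pci_regs.h>
#include <linux/types.h>

/* Editorial sketch: conventional link-up test on an LNKSTA value. */
static bool example_link_is_up(u16 lnksta)
{
	return lnksta & PCI_EXP_LNKSTA_DLLLA;	/* Data Link Layer Link Active */
}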
1286 void __iomem *bdf_to_sid_base = pcie->parf + PCIE20_PARF_BDF_TO_SID_TABLE_N; in qcom_pcie_config_sid_sm8250()
1287 struct device *dev = pcie->pci->dev; in qcom_pcie_config_sid_sm8250()
1292 of_get_property(dev->of_node, "iommu-map", &size); in qcom_pcie_config_sid_sm8250()
1298 return -ENOMEM; in qcom_pcie_config_sid_sm8250()
1300 of_property_read_u32_array(dev->of_node, in qcom_pcie_config_sid_sm8250()
1301 "iommu-map", (u32 *)map, size / sizeof(u32)); in qcom_pcie_config_sid_sm8250()
1310 /* Extract the SMMU SID base from the first entry of iommu-map */ in qcom_pcie_config_sid_sm8250()
1339 val = map[i].bdf << 16 | (map[i].smmu_sid - smmu_sid_base) << 8 | 0; in qcom_pcie_config_sid_sm8250()
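Worked example for the packing at source line 1339 (editorial, with made-up numbers): each 32-bit word in the PARF BDF-to-SID table carries the requester ID (BDF) in bits [31:16] and the stream-ID offset from the base SID in bits [15:8]; bits [7:0] are left zero on this line (the driver appears to use them to chain hash-collision entries, which is not among the matching lines). For a hypothetical iommu-map entry with bdf = 0x0100 and smmu_sid = 0x1c01, with smmu_sid_base = 0x1c00 taken from the first map entry:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t bdf = 0x0100;		/* hypothetical requester ID	*/
	uint32_t smmu_sid = 0x1c01;	/* hypothetical stream ID	*/
	uint32_t smmu_sid_base = 0x1c00;/* first iommu-map entry's SID	*/

	/* Same packing as the driver line above: BDF | SID offset | 0 */
	uint32_t val = bdf << 16 | (smmu_sid - smmu_sid_base) << 8 | 0;

	printf("BDF-to-SID table word: 0x%08x\n", val);	/* 0x01000100 */
	return 0;
}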
1356 ret = pcie->ops->init(pcie); in qcom_pcie_host_init()
1360 ret = phy_power_on(pcie->phy); in qcom_pcie_host_init()
1364 if (pcie->ops->post_init) { in qcom_pcie_host_init()
1365 ret = pcie->ops->post_init(pcie); in qcom_pcie_host_init()
1372 if (pcie->ops->config_sid) { in qcom_pcie_host_init()
1373 ret = pcie->ops->config_sid(pcie); in qcom_pcie_host_init()
1382 if (pcie->ops->post_deinit) in qcom_pcie_host_init()
1383 pcie->ops->post_deinit(pcie); in qcom_pcie_host_init()
1385 phy_power_off(pcie->phy); in qcom_pcie_host_init()
1387 pcie->ops->deinit(pcie); in qcom_pcie_host_init()
1466 struct device *dev = &pdev->dev; in qcom_pcie_probe()
1474 return -ENOMEM; in qcom_pcie_probe()
1478 return -ENOMEM; in qcom_pcie_probe()
1485 pci->dev = dev; in qcom_pcie_probe()
1486 pci->ops = &dw_pcie_ops; in qcom_pcie_probe()
1487 pp = &pci->pp; in qcom_pcie_probe()
1489 pcie->pci = pci; in qcom_pcie_probe()
1491 pcie->ops = of_device_get_match_data(dev); in qcom_pcie_probe()
1493 pcie->reset = devm_gpiod_get_optional(dev, "perst", GPIOD_OUT_HIGH); in qcom_pcie_probe()
1494 if (IS_ERR(pcie->reset)) { in qcom_pcie_probe()
1495 ret = PTR_ERR(pcie->reset); in qcom_pcie_probe()
1499 pcie->parf = devm_platform_ioremap_resource_byname(pdev, "parf"); in qcom_pcie_probe()
1500 if (IS_ERR(pcie->parf)) { in qcom_pcie_probe()
1501 ret = PTR_ERR(pcie->parf); in qcom_pcie_probe()
1505 pcie->elbi = devm_platform_ioremap_resource_byname(pdev, "elbi"); in qcom_pcie_probe()
1506 if (IS_ERR(pcie->elbi)) { in qcom_pcie_probe()
1507 ret = PTR_ERR(pcie->elbi); in qcom_pcie_probe()
1511 pcie->phy = devm_phy_optional_get(dev, "pciephy"); in qcom_pcie_probe()
1512 if (IS_ERR(pcie->phy)) { in qcom_pcie_probe()
1513 ret = PTR_ERR(pcie->phy); in qcom_pcie_probe()
1517 ret = pcie->ops->get_resources(pcie); in qcom_pcie_probe()
1521 pp->ops = &qcom_pcie_dw_ops; in qcom_pcie_probe()
1523 ret = phy_init(pcie->phy); in qcom_pcie_probe()
1525 pm_runtime_disable(&pdev->dev); in qcom_pcie_probe()
1534 pm_runtime_disable(&pdev->dev); in qcom_pcie_probe()
1548 { .compatible = "qcom,pcie-apq8084", .data = &ops_1_0_0 },
1549 { .compatible = "qcom,pcie-ipq8064", .data = &ops_2_1_0 },
1550 { .compatible = "qcom,pcie-ipq8064-v2", .data = &ops_2_1_0 },
1551 { .compatible = "qcom,pcie-apq8064", .data = &ops_2_1_0 },
1552 { .compatible = "qcom,pcie-msm8996", .data = &ops_2_3_2 },
1553 { .compatible = "qcom,pcie-ipq8074", .data = &ops_2_3_3 },
1554 { .compatible = "qcom,pcie-ipq4019", .data = &ops_2_4_0 },
1555 { .compatible = "qcom,pcie-qcs404", .data = &ops_2_4_0 },
1556 { .compatible = "qcom,pcie-sdm845", .data = &ops_2_7_0 },
1557 { .compatible = "qcom,pcie-sm8250", .data = &ops_1_9_0 },
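The .data pointers in this match table select per-SoC callback sets; probe (source line 1491 above) stores that pointer in pcie->ops. The struct itself is not among the matching lines, but its shape can be read off the calls that are. A reconstruction for orientation (member order and exact types are assumptions, verify against the driver):

/* Reconstructed from the pcie->ops->... calls visible above; not verbatim. */
struct qcom_pcie;

struct qcom_pcie_ops {
	int (*get_resources)(struct qcom_pcie *pcie);	/* probe	    */
	int (*init)(struct qcom_pcie *pcie);		/* host init	    */
	int (*post_init)(struct qcom_pcie *pcie);	/* after PHY power  */
	void (*deinit)(struct qcom_pcie *pcie);		/* teardown/error   */
	void (*post_deinit)(struct qcom_pcie *pcie);	/* teardown/error   */
	void (*ltssm_enable)(struct qcom_pcie *pcie);	/* start_link	    */
	int (*config_sid)(struct qcom_pcie *pcie);	/* SM8250 SID setup */
};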
1563 dev->class = PCI_CLASS_BRIDGE_PCI << 8; in qcom_fixup_class()
1576 .name = "qcom-pcie",