Lines matching refs: __scm  (source line number, matching text, enclosing function)

55 static struct qcom_scm *__scm;  (variable definition)
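
The dereferences collected below imply the shape of the state behind this pointer. A minimal sketch of struct qcom_scm reconstructed only from those dereferences (field types are assumptions, and the real structure carries additional members):

struct qcom_scm {
	struct device *dev;	/* set from &pdev->dev in probe (line 573) */
	struct clk *core_clk;	/* lines 61, 78, 85 */
	struct clk *iface_clk;	/* lines 65, 76, 86 */
	struct clk *bus_clk;	/* lines 69, 87 */
	u64 dload_mode_addr;	/* lines 370-371; type assumed */
};
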
61 ret = clk_prepare_enable(__scm->core_clk); in qcom_scm_clk_enable()
65 ret = clk_prepare_enable(__scm->iface_clk); in qcom_scm_clk_enable()
69 ret = clk_prepare_enable(__scm->bus_clk); in qcom_scm_clk_enable()
76 clk_disable_unprepare(__scm->iface_clk); in qcom_scm_clk_enable()
78 clk_disable_unprepare(__scm->core_clk); in qcom_scm_clk_enable()
85 clk_disable_unprepare(__scm->core_clk); in qcom_scm_clk_disable()
86 clk_disable_unprepare(__scm->iface_clk); in qcom_scm_clk_disable()
87 clk_disable_unprepare(__scm->bus_clk); in qcom_scm_clk_disable()
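
Lines 61-87 are the familiar prepare/enable ladder with unwind on failure, mirrored by an unconditional disable helper. A sketch consistent with those hits (label names are assumed, and any code between the hits is elided):

static int qcom_scm_clk_enable(void)
{
	int ret;

	ret = clk_prepare_enable(__scm->core_clk);	/* line 61 */
	if (ret)
		goto bail;

	ret = clk_prepare_enable(__scm->iface_clk);	/* line 65 */
	if (ret)
		goto disable_core;

	ret = clk_prepare_enable(__scm->bus_clk);	/* line 69 */
	if (ret)
		goto disable_iface;

	return 0;

disable_iface:
	clk_disable_unprepare(__scm->iface_clk);	/* line 76 */
disable_core:
	clk_disable_unprepare(__scm->core_clk);		/* line 78 */
bail:
	return ret;
}

static void qcom_scm_clk_disable(void)
{
	clk_disable_unprepare(__scm->core_clk);		/* line 85 */
	clk_disable_unprepare(__scm->iface_clk);	/* line 86 */
	clk_disable_unprepare(__scm->bus_clk);		/* line 87 */
}
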
114 return __qcom_scm_set_warm_boot_addr(__scm->dev, entry, cpus); in qcom_scm_set_warm_boot_addr()
144 ret = __qcom_scm_is_call_available(__scm->dev, QCOM_SCM_SVC_HDCP, in qcom_scm_hdcp_available()
168 ret = __qcom_scm_hdcp_req(__scm->dev, req, req_cnt, resp); in qcom_scm_hdcp_req()
185 ret = __qcom_scm_is_call_available(__scm->dev, QCOM_SCM_SVC_PIL, in qcom_scm_pas_supported()
190 return __qcom_scm_pas_supported(__scm->dev, peripheral); in qcom_scm_pas_supported()
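
Lines 185 and 190 show the wrapper shape most of these exported helpers share: first ask the firmware whether the call exists, then delegate to the low-level backend with __scm->dev. A sketch under that assumption; the second argument to the availability check is truncated in the hit above, so the command macro used here is an assumption:

bool qcom_scm_pas_supported(u32 peripheral)
{
	int ret;

	ret = __qcom_scm_is_call_available(__scm->dev, QCOM_SCM_SVC_PIL,
			QCOM_SCM_PAS_IS_SUPPORTED_CMD);	/* command id assumed */
	if (ret <= 0)
		return false;

	return __qcom_scm_pas_supported(__scm->dev, peripheral);	/* line 190 */
}
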
217 mdata_buf = dma_alloc_coherent(__scm->dev, size, &mdata_phys, in qcom_scm_pas_init_image()
220 dev_err(__scm->dev, "Allocation of metadata buffer failed.\n"); in qcom_scm_pas_init_image()
229 ret = __qcom_scm_pas_init_image(__scm->dev, peripheral, mdata_phys); in qcom_scm_pas_init_image()
234 dma_free_coherent(__scm->dev, size, mdata_buf, mdata_phys); in qcom_scm_pas_init_image()
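
Lines 217-234 outline the metadata hand-off for PAS image authentication: a coherent buffer is allocated against __scm->dev, its physical address is passed to the secure world, and the buffer is freed afterwards. A sketch of that flow (parameter names and the copy step are assumptions; the clock handling the driver does around the SCM call is omitted):

int qcom_scm_pas_init_image(u32 peripheral, const void *metadata, size_t size)
{
	dma_addr_t mdata_phys;
	void *mdata_buf;
	int ret;

	mdata_buf = dma_alloc_coherent(__scm->dev, size, &mdata_phys,
			GFP_KERNEL);	/* line 217 */
	if (!mdata_buf) {
		dev_err(__scm->dev, "Allocation of metadata buffer failed.\n");	/* line 220 */
		return -ENOMEM;
	}
	memcpy(mdata_buf, metadata, size);

	ret = __qcom_scm_pas_init_image(__scm->dev, peripheral, mdata_phys);	/* line 229 */

	dma_free_coherent(__scm->dev, size, mdata_buf, mdata_phys);	/* line 234 */

	return ret;
}
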
257 ret = __qcom_scm_pas_mem_setup(__scm->dev, peripheral, addr, size); in qcom_scm_pas_mem_setup()
279 ret = __qcom_scm_pas_auth_and_reset(__scm->dev, peripheral); in qcom_scm_pas_auth_and_reset()
300 ret = __qcom_scm_pas_shutdown(__scm->dev, peripheral); in qcom_scm_pas_shutdown()
313 return __qcom_scm_pas_mss_reset(__scm->dev, 1); in qcom_scm_pas_reset_assert()
322 return __qcom_scm_pas_mss_reset(__scm->dev, 0); in qcom_scm_pas_reset_deassert()
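
Lines 313 and 322 are a matched pair of reset-controller callbacks that funnel into the same backend and differ only in the final flag. A sketch; the callback signatures are assumed from the reset-controller API rather than visible in the hits:

static int qcom_scm_pas_reset_assert(struct reset_controller_dev *rcdev,
		unsigned long idx)
{
	return __qcom_scm_pas_mss_reset(__scm->dev, 1);	/* line 313 */
}

static int qcom_scm_pas_reset_deassert(struct reset_controller_dev *rcdev,
		unsigned long idx)
{
	return __qcom_scm_pas_mss_reset(__scm->dev, 0);	/* line 322 */
}
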
332 return __qcom_scm_restore_sec_cfg(__scm->dev, device_id, spare); in qcom_scm_restore_sec_cfg()
338 return __qcom_scm_iommu_secure_ptbl_size(__scm->dev, spare, size); in qcom_scm_iommu_secure_ptbl_size()
344 return __qcom_scm_iommu_secure_ptbl_init(__scm->dev, addr, size, spare); in qcom_scm_iommu_secure_ptbl_init()
350 return __qcom_scm_io_readl(__scm->dev, addr, val); in qcom_scm_io_readl()
356 return __qcom_scm_io_writel(__scm->dev, addr, val); in qcom_scm_io_writel()
365 avail = __qcom_scm_is_call_available(__scm->dev, in qcom_scm_set_download_mode()
369 ret = __qcom_scm_set_dload_mode(__scm->dev, enable); in qcom_scm_set_download_mode()
370 } else if (__scm->dload_mode_addr) { in qcom_scm_set_download_mode()
371 ret = __qcom_scm_io_writel(__scm->dev, __scm->dload_mode_addr, in qcom_scm_set_download_mode()
374 dev_err(__scm->dev, in qcom_scm_set_download_mode()
379 dev_err(__scm->dev, "failed to set download mode: %d\n", ret); in qcom_scm_set_download_mode()
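
Lines 365-379 show a fallback chain for download mode: use the dedicated SCM call when the firmware advertises it, otherwise write the download-mode register through __scm->dload_mode_addr, and complain when neither path exists. A sketch of that control flow; the service/command macros, the written value, and the first error message's wording are assumptions:

static void qcom_scm_set_download_mode(bool enable)
{
	bool avail;
	int ret = 0;

	avail = __qcom_scm_is_call_available(__scm->dev,
			QCOM_SCM_SVC_BOOT,		/* service id assumed */
			QCOM_SCM_SET_DLOAD_MODE);	/* command id assumed */
	if (avail) {
		ret = __qcom_scm_set_dload_mode(__scm->dev, enable);	/* line 369 */
	} else if (__scm->dload_mode_addr) {				/* line 370 */
		ret = __qcom_scm_io_writel(__scm->dev, __scm->dload_mode_addr,
				enable ? QCOM_SCM_SET_DLOAD_MODE : 0);	/* value assumed */
	} else {
		dev_err(__scm->dev,
			"No available mechanism for setting download mode\n");	/* line 374; wording assumed */
	}

	if (ret)
		dev_err(__scm->dev, "failed to set download mode: %d\n", ret);	/* line 379 */
}
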
413 return !!__scm; in qcom_scm_is_available()
419 return __qcom_scm_set_remote_state(__scm->dev, state, id); in qcom_scm_set_remote_state()
462 ptr = dma_alloc_coherent(__scm->dev, ptr_sz, &ptr_dma, GFP_KERNEL); in qcom_scm_assign_mem()
465 ptr_phys = dma_to_phys(__scm->dev, ptr_dma); in qcom_scm_assign_mem()
491 ret = __qcom_scm_assign_mem(__scm->dev, mem_to_map_phys, mem_to_map_sz, in qcom_scm_assign_mem()
493 dma_free_coherent(__scm->dev, ptr_sz, ptr, ptr_dma); in qcom_scm_assign_mem()
495 dev_err(__scm->dev, in qcom_scm_assign_mem()
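
Lines 462-495 are the memory-assignment path: a coherent descriptor buffer is allocated, dma_to_phys() converts its DMA handle into the physical address the secure world expects, and the buffer is released once __qcom_scm_assign_mem() returns, with dev_err() reporting failure. The full argument list at line 491 is truncated above, so rather than reproducing it, here is a small hypothetical helper (qcom_scm_alloc_desc() is an illustrative name, not the driver's API) isolating just the allocation pattern:

/* Hypothetical helper: allocate a coherent descriptor buffer and report
 * the physical address that the secure world will be given.
 * dma_to_phys() may require <linux/dma-direct.h> on newer kernels. */
static void *qcom_scm_alloc_desc(size_t ptr_sz, dma_addr_t *ptr_dma,
		phys_addr_t *ptr_phys)
{
	void *ptr;

	ptr = dma_alloc_coherent(__scm->dev, ptr_sz, ptr_dma, GFP_KERNEL);	/* cf. line 462 */
	if (!ptr)
		return NULL;

	*ptr_phys = dma_to_phys(__scm->dev, *ptr_dma);	/* cf. line 465 */
	return ptr;
}

The call site then hands mem_to_map_phys and the descriptor's physical address to __qcom_scm_assign_mem() (line 491) and frees the buffer at line 493 before the failure report at line 495.
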
572 __scm = scm; in qcom_scm_probe()
573 __scm->dev = &pdev->dev; in qcom_scm_probe()
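
Lines 572-573, taken together with the check at line 413, show how availability is signalled: probe publishes the static pointer as its last step, and qcom_scm_is_available() reduces to a NULL test on it. A sketch of that tail end of probe plus the check (everything the real probe does beforehand, clock and dload-address lookup included, is elided):

static int qcom_scm_probe(struct platform_device *pdev)
{
	struct qcom_scm *scm;

	scm = devm_kzalloc(&pdev->dev, sizeof(*scm), GFP_KERNEL);
	if (!scm)
		return -ENOMEM;

	/* ... clock lookup, dload_mode_addr parsing, etc. elided ... */

	__scm = scm;			/* line 572: publish the instance */
	__scm->dev = &pdev->dev;	/* line 573 */

	return 0;
}

bool qcom_scm_is_available(void)
{
	return !!__scm;			/* line 413 */
}
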