Lines matching refs: tdev, grouped by the function each reference appears in:

In nvkm_device_tegra_power_up(), where tdev is the function argument:
 31  nvkm_device_tegra_power_up(struct nvkm_device_tegra *tdev)
 35  if (tdev->vdd) {
 36  ret = regulator_enable(tdev->vdd);
 41  ret = clk_prepare_enable(tdev->clk);
 44  ret = clk_prepare_enable(tdev->clk_ref);
 47  ret = clk_prepare_enable(tdev->clk_pwr);
 50  clk_set_rate(tdev->clk_pwr, 204000000);
 53  if (!tdev->pdev->dev.pm_domain) {
 54  reset_control_assert(tdev->rst);
 62  reset_control_deassert(tdev->rst);
 69  clk_disable_unprepare(tdev->clk_pwr);
 71  clk_disable_unprepare(tdev->clk_ref);
 73  clk_disable_unprepare(tdev->clk);
 75  if (tdev->vdd)
 76  regulator_disable(tdev->vdd);
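
Read top to bottom, these lines describe the bring-up order: enable the optional GPU supply, then the gpu/ref/pwr clocks, pin the pwr clock at 204 MHz, and cycle the reset line by hand only when no PM domain manages the GPU. A minimal sketch of how the calls fit together; the error-label names and the SoC-specific steps between reset assert and deassert are assumptions, not shown in the listing:

static int nvkm_device_tegra_power_up(struct nvkm_device_tegra *tdev)
{
	int ret;

	/* optional GPU rail */
	if (tdev->vdd) {
		ret = regulator_enable(tdev->vdd);
		if (ret)
			return ret;
	}

	ret = clk_prepare_enable(tdev->clk);
	if (ret)
		goto err_clk;			/* label names assumed */
	ret = clk_prepare_enable(tdev->clk_ref);
	if (ret)
		goto err_clk_ref;
	ret = clk_prepare_enable(tdev->clk_pwr);
	if (ret)
		goto err_clk_pwr;
	clk_set_rate(tdev->clk_pwr, 204000000);

	if (!tdev->pdev->dev.pm_domain) {
		/* no PM domain: cycle the GPU reset manually */
		reset_control_assert(tdev->rst);
		/* power-unclamp/settle steps elided; not visible in the listing */
		reset_control_deassert(tdev->rst);
	}

	return 0;

err_clk_pwr:
	clk_disable_unprepare(tdev->clk_ref);
err_clk_ref:
	clk_disable_unprepare(tdev->clk);
err_clk:
	if (tdev->vdd)
		regulator_disable(tdev->vdd);
	return ret;
}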

In nvkm_device_tegra_power_down(), where tdev is the function argument:
 82  nvkm_device_tegra_power_down(struct nvkm_device_tegra *tdev)
 86  clk_disable_unprepare(tdev->clk_pwr);
 87  clk_disable_unprepare(tdev->clk_ref);
 88  clk_disable_unprepare(tdev->clk);
 91  if (tdev->vdd) {
 92  ret = regulator_disable(tdev->vdd);
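
Power-down is the mirror image: clocks off in the reverse order they were enabled, then the regulator. A short sketch; any reset handling before the clock disables is not visible in the listing and is left out:

static int nvkm_device_tegra_power_down(struct nvkm_device_tegra *tdev)
{
	int ret;

	clk_disable_unprepare(tdev->clk_pwr);
	clk_disable_unprepare(tdev->clk_ref);
	clk_disable_unprepare(tdev->clk);

	if (tdev->vdd) {
		ret = regulator_disable(tdev->vdd);
		if (ret)
			return ret;
	}

	return 0;
}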

In nvkm_device_tegra_probe_iommu(), where tdev is the function argument:
101  nvkm_device_tegra_probe_iommu(struct nvkm_device_tegra *tdev)
104  struct device *dev = &tdev->pdev->dev;
117  if (!tdev->func->iommu_bit)
120  mutex_init(&tdev->iommu.mutex);
123  tdev->iommu.domain = iommu_domain_alloc(&platform_bus_type);
124  if (!tdev->iommu.domain)
132  pgsize_bitmap = tdev->iommu.domain->pgsize_bitmap;
134  tdev->iommu.pgshift = PAGE_SHIFT;
136  tdev->iommu.pgshift = fls(pgsize_bitmap & ~PAGE_MASK);
137  if (tdev->iommu.pgshift == 0) {
141  tdev->iommu.pgshift -= 1;
144  ret = iommu_attach_device(tdev->iommu.domain, dev);
148  ret = nvkm_mm_init(&tdev->iommu.mm, 0, 0,
149  (1ULL << tdev->func->iommu_bit) >>
150  tdev->iommu.pgshift, 1);
158  iommu_detach_device(tdev->iommu.domain, dev);
161  iommu_domain_free(tdev->iommu.domain);
164  tdev->iommu.domain = NULL;
165  tdev->iommu.pgshift = 0;
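
The IOMMU probe allocates a domain on the platform bus, derives a page shift from the domain's pgsize_bitmap, attaches the GPU's struct device, and then sets up an nvkm_mm allocator covering the (1 << iommu_bit) window in IOMMU-page units; on failure it tears everything back down and leaves iommu.domain NULL so later code treats the device as non-IOMMU. A sketch of that control flow, where the branch condition on PAGE_SIZE and the goto label names are assumptions filled in around the listed lines:

static void nvkm_device_tegra_probe_iommu(struct nvkm_device_tegra *tdev)
{
	struct device *dev = &tdev->pdev->dev;
	unsigned long pgsize_bitmap;
	int ret;

	/* chips without an addressable IOMMU window opt out */
	if (!tdev->func->iommu_bit)
		return;

	mutex_init(&tdev->iommu.mutex);

	tdev->iommu.domain = iommu_domain_alloc(&platform_bus_type);
	if (!tdev->iommu.domain)
		return;

	pgsize_bitmap = tdev->iommu.domain->pgsize_bitmap;
	if (pgsize_bitmap & PAGE_SIZE) {
		/* the domain can map CPU-sized pages directly */
		tdev->iommu.pgshift = PAGE_SHIFT;
	} else {
		/* otherwise take the largest page size below PAGE_SIZE */
		tdev->iommu.pgshift = fls(pgsize_bitmap & ~PAGE_MASK);
		if (tdev->iommu.pgshift == 0)
			goto free_domain;
		tdev->iommu.pgshift -= 1;	/* fls() is 1-based */
	}

	ret = iommu_attach_device(tdev->iommu.domain, dev);
	if (ret)
		goto free_domain;

	/* IOVA allocator spanning the addressable window, in IOMMU pages */
	ret = nvkm_mm_init(&tdev->iommu.mm, 0, 0,
			   (1ULL << tdev->func->iommu_bit) >>
			   tdev->iommu.pgshift, 1);
	if (ret)
		goto detach_device;

	return;

detach_device:
	iommu_detach_device(tdev->iommu.domain, dev);
free_domain:
	iommu_domain_free(tdev->iommu.domain);
	tdev->iommu.domain = NULL;
	tdev->iommu.pgshift = 0;
}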

In nvkm_device_tegra_remove_iommu(), where tdev is the function argument:
171  nvkm_device_tegra_remove_iommu(struct nvkm_device_tegra *tdev)
174  if (tdev->iommu.domain) {
175  nvkm_mm_fini(&tdev->iommu.mm);
176  iommu_detach_device(tdev->iommu.domain, tdev->device.dev);
177  iommu_domain_free(tdev->iommu.domain);

In nvkm_device_tegra_resource(), where tdev is a local variable:
191  struct nvkm_device_tegra *tdev = nvkm_device_tegra(device);
192  return platform_get_resource(tdev->pdev, IORESOURCE_MEM, bar);

In nvkm_device_tegra_intr(), where tdev is a local variable:
212  struct nvkm_device_tegra *tdev = arg;
213  struct nvkm_device *device = &tdev->device;

In nvkm_device_tegra_fini(), where tdev is a local variable:
224  struct nvkm_device_tegra *tdev = nvkm_device_tegra(device);
225  if (tdev->irq) {
226  free_irq(tdev->irq, tdev);
227  tdev->irq = 0;

In nvkm_device_tegra_init(), where tdev is a local variable:
234  struct nvkm_device_tegra *tdev = nvkm_device_tegra(device);
237  irq = platform_get_irq_byname(tdev->pdev, "stall");
242  IRQF_SHARED, "nvkm", tdev);
246  tdev->irq = irq;
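
Interrupt wiring is split across init and fini: init looks up the "stall" interrupt by name and requests it as a shared IRQ with nvkm_device_tegra_intr() as the handler, while fini releases it and zeroes tdev->irq so a second teardown is a no-op. A sketch of the init side, with the error checks assumed:

static int nvkm_device_tegra_init(struct nvkm_device *device)
{
	struct nvkm_device_tegra *tdev = nvkm_device_tegra(device);
	int irq, ret;

	irq = platform_get_irq_byname(tdev->pdev, "stall");
	if (irq < 0)
		return irq;

	ret = request_irq(irq, nvkm_device_tegra_intr,
			  IRQF_SHARED, "nvkm", tdev);
	if (ret)
		return ret;

	/* remembered so nvkm_device_tegra_fini() can free_irq() it */
	tdev->irq = irq;
	return 0;
}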

In nvkm_device_tegra_dtor(), where tdev is a local variable:
253  struct nvkm_device_tegra *tdev = nvkm_device_tegra(device);
254  nvkm_device_tegra_power_down(tdev);
255  nvkm_device_tegra_remove_iommu(tdev);
256  return tdev;

In nvkm_device_tegra_new(), where tdev is a local variable:
277  struct nvkm_device_tegra *tdev;
281  if (!(tdev = kzalloc(sizeof(*tdev), GFP_KERNEL)))
284  tdev->func = func;
285  tdev->pdev = pdev;
288  tdev->vdd = devm_regulator_get(&pdev->dev, "vdd");
289  if (IS_ERR(tdev->vdd)) {
290  ret = PTR_ERR(tdev->vdd);
295  tdev->rst = devm_reset_control_get(&pdev->dev, "gpu");
296  if (IS_ERR(tdev->rst)) {
297  ret = PTR_ERR(tdev->rst);
301  tdev->clk = devm_clk_get(&pdev->dev, "gpu");
302  if (IS_ERR(tdev->clk)) {
303  ret = PTR_ERR(tdev->clk);
307  rate = clk_get_rate(tdev->clk);
309  ret = clk_set_rate(tdev->clk, ULONG_MAX);
313  rate = clk_get_rate(tdev->clk);
319  tdev->clk_ref = devm_clk_get(&pdev->dev, "ref");
320  if (IS_ERR(tdev->clk_ref)) {
321  ret = PTR_ERR(tdev->clk_ref);
325  tdev->clk_pwr = devm_clk_get(&pdev->dev, "pwr");
326  if (IS_ERR(tdev->clk_pwr)) {
327  ret = PTR_ERR(tdev->clk_pwr);
334  ret = dma_set_mask(&pdev->dev, DMA_BIT_MASK(tdev->func->iommu_bit));
338  nvkm_device_tegra_probe_iommu(tdev);
340  ret = nvkm_device_tegra_power_up(tdev);
344  tdev->gpu_speedo = tegra_sku_info.gpu_speedo_value;
345  tdev->gpu_speedo_id = tegra_sku_info.gpu_speedo_id;
349  &tdev->device);
353  *pdevice = &tdev->device;
358  nvkm_device_tegra_power_down(tdev);
360  nvkm_device_tegra_remove_iommu(tdev);
362  kfree(tdev);
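
nvkm_device_tegra_new() is the constructor that ties the rest together: allocate tdev, claim the "vdd" regulator, the "gpu" reset line and the "gpu"/"ref"/"pwr" clocks through devm_* helpers, size the DMA mask from func->iommu_bit, then probe the IOMMU and power the GPU up before reading the speedo values and constructing the nvkm device; on any failure it powers down, removes the IOMMU and frees tdev. A condensed sketch of the resource-acquisition half, using a hypothetical helper name and assuming the rate == 0 condition around the clock-rate bump (error unwinding and the nvkm_device_ctor() call are left out):

/* hypothetical helper; in the driver this sequence is open-coded in the constructor */
static int tegra_acquire_resources(struct nvkm_device_tegra *tdev,
				   struct platform_device *pdev)
{
	unsigned long rate;
	int ret;

	tdev->vdd = devm_regulator_get(&pdev->dev, "vdd");
	if (IS_ERR(tdev->vdd))
		return PTR_ERR(tdev->vdd);

	tdev->rst = devm_reset_control_get(&pdev->dev, "gpu");
	if (IS_ERR(tdev->rst))
		return PTR_ERR(tdev->rst);

	tdev->clk = devm_clk_get(&pdev->dev, "gpu");
	if (IS_ERR(tdev->clk))
		return PTR_ERR(tdev->clk);

	/* if the GPU clock has no rate yet, ask the framework for its maximum */
	rate = clk_get_rate(tdev->clk);
	if (rate == 0) {
		ret = clk_set_rate(tdev->clk, ULONG_MAX);
		if (ret < 0)
			return ret;
		rate = clk_get_rate(tdev->clk);
		dev_dbg(&pdev->dev, "GPU clock set to %lu Hz\n", rate);
	}

	tdev->clk_ref = devm_clk_get(&pdev->dev, "ref");
	if (IS_ERR(tdev->clk_ref))
		return PTR_ERR(tdev->clk_ref);

	tdev->clk_pwr = devm_clk_get(&pdev->dev, "pwr");
	if (IS_ERR(tdev->clk_pwr))
		return PTR_ERR(tdev->clk_pwr);

	/* the GPU can only address as far as the IOMMU window allows */
	return dma_set_mask(&pdev->dev, DMA_BIT_MASK(tdev->func->iommu_bit));
}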