Lines matching refs:tmu: every line that references the identifier tmu (the struct sh_tmu_device pointer) in the SuperH TMU timer driver, drivers/clocksource/sh_tmu.c; each hit shows the driver line number and the containing function.
43 struct sh_tmu_device *tmu; member
93 switch (ch->tmu->model) { in sh_tmu_read()
95 return ioread8(ch->tmu->mapbase + 2); in sh_tmu_read()
97 return ioread8(ch->tmu->mapbase + 4); in sh_tmu_read()
115 switch (ch->tmu->model) { in sh_tmu_write()
117 return iowrite8(value, ch->tmu->mapbase + 2); in sh_tmu_write()
119 return iowrite8(value, ch->tmu->mapbase + 4); in sh_tmu_write()
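The read and write accessors above (driver lines 93-119) address the shared TSTR start/stop register from the device's mapbase rather than from a channel's own register block: offset 2 on SH3-style TMUs, offset 4 otherwise. A minimal standalone check of those two offsets; the enum constant names are taken from this listing, everything else is scaffolding for illustration.

#include <stdio.h>

enum sh_tmu_model { SH_TMU, SH_TMU_SH3 };

/* Offset of the shared TSTR register from the device mapbase, as read on
 * driver lines 95/97 and written on lines 117/119. */
static unsigned int tstr_offset(enum sh_tmu_model model)
{
	return model == SH_TMU_SH3 ? 2 : 4;
}

int main(void)
{
	printf("SH_TMU_SH3: TSTR at mapbase + %u\n", tstr_offset(SH_TMU_SH3));
	printf("SH_TMU:     TSTR at mapbase + %u\n", tstr_offset(SH_TMU));
	return 0;
}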
136 raw_spin_lock_irqsave(&ch->tmu->lock, flags); in sh_tmu_start_stop_ch()
145 raw_spin_unlock_irqrestore(&ch->tmu->lock, flags); in sh_tmu_start_stop_ch()
153 ret = clk_enable(ch->tmu->clk); in __sh_tmu_enable()
155 dev_err(&ch->tmu->pdev->dev, "ch%u: cannot enable clock\n", in __sh_tmu_enable()
181 pm_runtime_get_sync(&ch->tmu->pdev->dev); in sh_tmu_enable()
182 dev_pm_syscore_device(&ch->tmu->pdev->dev, true); in sh_tmu_enable()
196 clk_disable(ch->tmu->clk); in __sh_tmu_disable()
209 dev_pm_syscore_device(&ch->tmu->pdev->dev, false); in sh_tmu_disable()
210 pm_runtime_put(&ch->tmu->pdev->dev); in sh_tmu_disable()
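Driver lines 136-210 show the locking and power pattern around the shared hardware: a raw spinlock guards the start/stop register, the clock is switched on inside __sh_tmu_enable(), and the outer sh_tmu_enable() takes the runtime PM reference and marks the device syscore-managed, with the disable path undoing both in reverse. A hedged sketch of that pairing, not compilable on its own; the enable_count nesting guard and any statement not among the matched lines are assumptions.

/* Pattern sketch only; relies on the driver's own types and helpers. */
static int sh_tmu_enable(struct sh_tmu_channel *ch)
{
	if (ch->enable_count++ > 0)		/* assumed nesting guard */
		return 0;

	pm_runtime_get_sync(&ch->tmu->pdev->dev);		/* driver line 181 */
	dev_pm_syscore_device(&ch->tmu->pdev->dev, true);	/* line 182 */

	return __sh_tmu_enable(ch);		/* clk_enable() + hw setup, line 153 */
}

static void sh_tmu_disable(struct sh_tmu_channel *ch)
{
	if (--ch->enable_count > 0)		/* assumed nesting guard */
		return;

	__sh_tmu_disable(ch);			/* clk_disable(), line 196 */
	dev_pm_syscore_device(&ch->tmu->pdev->dev, false);	/* line 209 */
	pm_runtime_put(&ch->tmu->pdev->dev);			/* line 210 */
}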
299 pm_genpd_syscore_poweroff(&ch->tmu->pdev->dev); in sh_tmu_clocksource_suspend()
311 pm_genpd_syscore_poweron(&ch->tmu->pdev->dev); in sh_tmu_clocksource_resume()
331 dev_info(&ch->tmu->pdev->dev, "ch%u: used as clock source\n", in sh_tmu_register_clocksource()
334 clocksource_register_hz(cs, ch->tmu->rate); in sh_tmu_register_clocksource()
348 ch->periodic = (ch->tmu->rate + HZ/2) / HZ; in sh_tmu_clock_event_start()
371 dev_info(&ch->tmu->pdev->dev, "ch%u: used for %s clock events\n", in sh_tmu_clock_event_set_state()
401 pm_genpd_syscore_poweroff(&ced_to_sh_tmu(ced)->tmu->pdev->dev); in sh_tmu_clock_event_suspend()
406 pm_genpd_syscore_poweron(&ced_to_sh_tmu(ced)->tmu->pdev->dev); in sh_tmu_clock_event_resume()
427 dev_info(&ch->tmu->pdev->dev, "ch%u: used for clock events\n", in sh_tmu_register_clockevent()
430 clockevents_config_and_register(ced, ch->tmu->rate, 0x300, 0xffffffff); in sh_tmu_register_clockevent()
434 dev_name(&ch->tmu->pdev->dev), ch); in sh_tmu_register_clockevent()
436 dev_err(&ch->tmu->pdev->dev, "ch%u: failed to request irq %d\n", in sh_tmu_register_clockevent()
446 ch->tmu->has_clockevent = true; in sh_tmu_register()
449 ch->tmu->has_clocksource = true; in sh_tmu_register()
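The registration paths above hand the same per-device rate to both timekeeping frameworks, via clocksource_register_hz() on driver line 334 and clockevents_config_and_register() on line 430, while line 348 rounds that rate into a ticks-per-jiffy reload value for periodic mode. A standalone illustration of that rounding; the rate and HZ figures below are made-up examples rather than values from any particular SoC.

#include <stdio.h>

int main(void)
{
	unsigned long rate = 8250000;	/* hypothetical TMU count rate in Hz */
	unsigned long hz = 250;		/* hypothetical CONFIG_HZ */

	/* Driver line 348: round to the nearest whole tick per jiffy. */
	unsigned long periodic = (rate + hz / 2) / hz;

	printf("periodic reload: %lu ticks per jiffy\n", periodic);	/* 33000 */
	return 0;
}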
458 struct sh_tmu_device *tmu) in sh_tmu_channel_setup() argument
464 ch->tmu = tmu; in sh_tmu_channel_setup()
467 if (tmu->model == SH_TMU_SH3) in sh_tmu_channel_setup()
468 ch->base = tmu->mapbase + 4 + ch->index * 12; in sh_tmu_channel_setup()
470 ch->base = tmu->mapbase + 8 + ch->index * 12; in sh_tmu_channel_setup()
472 ch->irq = platform_get_irq(tmu->pdev, index); in sh_tmu_channel_setup()
474 dev_err(&tmu->pdev->dev, "ch%u: failed to get irq\n", in sh_tmu_channel_setup()
482 return sh_tmu_register(ch, dev_name(&tmu->pdev->dev), in sh_tmu_channel_setup()
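Driver lines 468 and 470 pin down the register layout per channel: each channel occupies 12 bytes, with the first block at mapbase + 4 on SH3-style TMUs and at mapbase + 8 otherwise, leaving room below it for the shared TSTR register noted earlier. A standalone arithmetic check of those bases; the three-channel count matches the default that sh_tmu_parse_dt() applies further down.

#include <stdio.h>

int main(void)
{
	/* Driver lines 468/470: base = mapbase + (SH3 ? 4 : 8) + index * 12 */
	for (unsigned int index = 0; index < 3; index++)
		printf("ch%u: SH3 at mapbase + %2u, otherwise at mapbase + %2u\n",
		       index, 4 + index * 12, 8 + index * 12);
	return 0;
}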
486 static int sh_tmu_map_memory(struct sh_tmu_device *tmu) in sh_tmu_map_memory() argument
490 res = platform_get_resource(tmu->pdev, IORESOURCE_MEM, 0); in sh_tmu_map_memory()
492 dev_err(&tmu->pdev->dev, "failed to get I/O memory\n"); in sh_tmu_map_memory()
496 tmu->mapbase = ioremap_nocache(res->start, resource_size(res)); in sh_tmu_map_memory()
497 if (tmu->mapbase == NULL) in sh_tmu_map_memory()
503 static int sh_tmu_parse_dt(struct sh_tmu_device *tmu) in sh_tmu_parse_dt() argument
505 struct device_node *np = tmu->pdev->dev.of_node; in sh_tmu_parse_dt()
507 tmu->model = SH_TMU; in sh_tmu_parse_dt()
508 tmu->num_channels = 3; in sh_tmu_parse_dt()
510 of_property_read_u32(np, "#renesas,channels", &tmu->num_channels); in sh_tmu_parse_dt()
512 if (tmu->num_channels != 2 && tmu->num_channels != 3) { in sh_tmu_parse_dt()
513 dev_err(&tmu->pdev->dev, "invalid number of channels %u\n", in sh_tmu_parse_dt()
514 tmu->num_channels); in sh_tmu_parse_dt()
521 static int sh_tmu_setup(struct sh_tmu_device *tmu, struct platform_device *pdev) in sh_tmu_setup() argument
526 tmu->pdev = pdev; in sh_tmu_setup()
528 raw_spin_lock_init(&tmu->lock); in sh_tmu_setup()
531 ret = sh_tmu_parse_dt(tmu); in sh_tmu_setup()
538 tmu->model = id->driver_data; in sh_tmu_setup()
539 tmu->num_channels = hweight8(cfg->channels_mask); in sh_tmu_setup()
541 dev_err(&tmu->pdev->dev, "missing platform data\n"); in sh_tmu_setup()
546 tmu->clk = clk_get(&tmu->pdev->dev, "fck"); in sh_tmu_setup()
547 if (IS_ERR(tmu->clk)) { in sh_tmu_setup()
548 dev_err(&tmu->pdev->dev, "cannot get clock\n"); in sh_tmu_setup()
549 return PTR_ERR(tmu->clk); in sh_tmu_setup()
552 ret = clk_prepare(tmu->clk); in sh_tmu_setup()
557 ret = clk_enable(tmu->clk); in sh_tmu_setup()
561 tmu->rate = clk_get_rate(tmu->clk) / 4; in sh_tmu_setup()
562 clk_disable(tmu->clk); in sh_tmu_setup()
565 ret = sh_tmu_map_memory(tmu); in sh_tmu_setup()
567 dev_err(&tmu->pdev->dev, "failed to remap I/O memory\n"); in sh_tmu_setup()
572 tmu->channels = kcalloc(tmu->num_channels, sizeof(*tmu->channels), in sh_tmu_setup()
574 if (tmu->channels == NULL) { in sh_tmu_setup()
583 for (i = 0; i < tmu->num_channels; ++i) { in sh_tmu_setup()
584 ret = sh_tmu_channel_setup(&tmu->channels[i], i, in sh_tmu_setup()
585 i == 0, i == 1, tmu); in sh_tmu_setup()
590 platform_set_drvdata(pdev, tmu); in sh_tmu_setup()
595 kfree(tmu->channels); in sh_tmu_setup()
596 iounmap(tmu->mapbase); in sh_tmu_setup()
598 clk_unprepare(tmu->clk); in sh_tmu_setup()
600 clk_put(tmu->clk); in sh_tmu_setup()
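sh_tmu_setup() (driver lines 521-600) fetches and prepares the "fck" clock, enables it only long enough to sample its rate, and records the count rate as that clock divided by four on line 561; the cleanup calls at lines 595-600 then release resources in the reverse order of acquisition. A hedged reconstruction of the clock-rate portion assembled around the matched lines; the goto label names and the surrounding error handling are assumptions.

	/* Sketch around driver lines 546-562; not compilable on its own. */
	tmu->clk = clk_get(&tmu->pdev->dev, "fck");
	if (IS_ERR(tmu->clk)) {
		dev_err(&tmu->pdev->dev, "cannot get clock\n");
		return PTR_ERR(tmu->clk);
	}

	ret = clk_prepare(tmu->clk);
	if (ret < 0)
		goto err_clk_put;		/* assumed label; cleanup at line 600 */

	/* Enable the clock just long enough to read its rate. */
	ret = clk_enable(tmu->clk);
	if (ret < 0)
		goto err_clk_unprepare;		/* assumed label; cleanup at line 598 */

	tmu->rate = clk_get_rate(tmu->clk) / 4;	/* TMU counts at the module clock / 4 */
	clk_disable(tmu->clk);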
606 struct sh_tmu_device *tmu = platform_get_drvdata(pdev); in sh_tmu_probe() local
614 if (tmu) { in sh_tmu_probe()
619 tmu = kzalloc(sizeof(*tmu), GFP_KERNEL); in sh_tmu_probe()
620 if (tmu == NULL) in sh_tmu_probe()
623 ret = sh_tmu_setup(tmu, pdev); in sh_tmu_probe()
625 kfree(tmu); in sh_tmu_probe()
633 if (tmu->has_clockevent || tmu->has_clocksource) in sh_tmu_probe()
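Taken together, the member accesses in this listing imply the two data structures sketched below: a per-device sh_tmu_device holding the shared state (register window, clock and rate, model, the lock that serializes TSTR updates, and the channel array) and a per-channel sh_tmu_channel that points back at it. This is a sketch inferred from those accesses, not a copy of the driver's definitions; field order, exact types, and any member not referenced above are assumptions.

#include <linux/clk.h>
#include <linux/clockchips.h>
#include <linux/clocksource.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/spinlock.h>
#include <linux/types.h>

enum sh_tmu_model {
	SH_TMU,
	SH_TMU_SH3,
};

struct sh_tmu_device;

struct sh_tmu_channel {
	struct sh_tmu_device *tmu;	/* back-pointer, driver line 43 */
	unsigned int index;		/* base/irq lookup, lines 468-472 */

	void __iomem *base;		/* per-channel registers, lines 468/470 */
	int irq;			/* lines 472-474 */

	unsigned long periodic;		/* ticks per jiffy, line 348 */
	struct clock_event_device ced;
	struct clocksource cs;
	unsigned int enable_count;	/* assumed nesting guard, not in this listing */
};

struct sh_tmu_device {
	struct platform_device *pdev;	/* line 155 and many others */

	void __iomem *mapbase;		/* lines 95/97, 496 */
	struct clk *clk;		/* "fck", lines 546-562 */
	unsigned long rate;		/* module clock / 4, line 561 */

	enum sh_tmu_model model;	/* SH_TMU or SH_TMU_SH3, lines 93, 467, 507 */

	raw_spinlock_t lock;		/* serializes TSTR updates, lines 136-145, 528 */

	struct sh_tmu_channel *channels;	/* lines 572-584 */
	unsigned int num_channels;	/* lines 508-514 */

	bool has_clockevent;		/* line 446 */
	bool has_clocksource;		/* line 449 */
};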