Lines matching "+full:4 +full:-ch" in drivers/clocksource/sh_mtu2.c (SuperH MTU2 timer support)
// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Timer Support - MTU2
 */
#define TSTR -1				/* shared register */
#define TSR 4				/* channel register */

/* Values 4 to 7 are channel-dependent */
#define TCR_TPSC_CH0_TCLKA	(4 << 0)
#define TCR_TPSC_CH1_TCLKA	(4 << 0)
#define TCR_TPSC_CH2_TCLKA	(4 << 0)
#define TCR_TPSC_CH34_P256	(4 << 0)

#define TMDR_BFA		(1 << 4)
#define TMDR_MD_PHASE_1		(4 << 0)
#define TIOC_IOCH(n)		((n) << 4)
#define TIER_TCIEV		(1 << 4)
#define TSR_TCFV		(1 << 4)

/* entry in the per-channel register offset table */
	[TIER] = 4,
static inline unsigned long sh_mtu2_read(struct sh_mtu2_channel *ch, int reg_nr)
		return ioread8(ch->mtu->mapbase + 0x280);
		return ioread16(ch->base + offs);
		return ioread8(ch->base + offs);

static inline void sh_mtu2_write(struct sh_mtu2_channel *ch, int reg_nr,
				 unsigned long value)
		return iowrite8(value, ch->mtu->mapbase + 0x280);
		iowrite16(value, ch->base + offs);
		iowrite8(value, ch->base + offs);
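These helpers pick between 8-bit and 16-bit MMIO accesses and special-case the shared TSTR register, which sits at a fixed offset from the device base rather than inside the per-channel block. Below is a rough host-side model of that dispatch, runnable in user space; the offset values and the choice of which registers are 16 bits wide are illustrative assumptions, not taken from the hardware manual.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* register ids, mirroring the defines above */
#define TSTR -1
#define TCR   0
#define TIER  3
#define TSR   4
#define TCNT  5

/* hypothetical per-channel byte offsets (illustrative values only) */
static const unsigned int reg_offs[] = {
        [TCR] = 0, [TIER] = 4, [TSR] = 5, [TCNT] = 6,
};

static uint8_t chan_space[16];  /* stand-in for ch->base                  */
static uint8_t shared_space[1]; /* stand-in for ch->mtu->mapbase + 0x280  */

static unsigned long model_read(int reg_nr)
{
        if (reg_nr == TSTR)             /* shared register sits outside the channel block */
                return shared_space[0];

        if (reg_nr == TCNT) {           /* counters modelled as 16 bits wide (assumption) */
                uint16_t v;
                memcpy(&v, &chan_space[reg_offs[reg_nr]], sizeof(v));
                return v;
        }

        return chan_space[reg_offs[reg_nr]];    /* everything else: 8 bits */
}

int main(void)
{
        shared_space[0] = 0x03;                 /* pretend two channels are running */
        chan_space[reg_offs[TIER]] = 0x01;      /* pretend one interrupt is enabled */

        printf("TSTR=%#lx TIER=%#lx TCNT=%#lx\n",
               model_read(TSTR), model_read(TIER), model_read(TCNT));
        return 0;
}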
static void sh_mtu2_start_stop_ch(struct sh_mtu2_channel *ch, int start)
	raw_spin_lock_irqsave(&ch->mtu->lock, flags);

	value = sh_mtu2_read(ch, TSTR);
	if (start)
		value |= 1 << ch->index;
	else
		value &= ~(1 << ch->index);
	sh_mtu2_write(ch, TSTR, value);

	raw_spin_unlock_irqrestore(&ch->mtu->lock, flags);
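sh_mtu2_start_stop_ch() does a read-modify-write of the shared TSTR register under the device's raw spinlock, because every channel's start bit lives in that one register. A standalone sketch of just the bit arithmetic follows; the channel indices and register width are assumptions for illustration.

#include <stdio.h>

/* model: one start bit per channel in a shared TSTR register */
static unsigned int tstr;

static void start_stop_ch(unsigned int index, int start)
{
        unsigned int value = tstr;      /* read */

        if (start)
                value |= 1u << index;   /* set this channel's start bit   */
        else
                value &= ~(1u << index);/* clear this channel's start bit */

        tstr = value;                   /* write back */
}

int main(void)
{
        start_stop_ch(0, 1);            /* start channel 0 */
        start_stop_ch(2, 1);            /* start channel 2 */
        start_stop_ch(0, 0);            /* stop channel 0  */
        printf("TSTR = %#x\n", tstr);   /* prints 0x4      */
        return 0;
}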
static int sh_mtu2_enable(struct sh_mtu2_channel *ch)
	pm_runtime_get_sync(&ch->mtu->pdev->dev);
	dev_pm_syscore_device(&ch->mtu->pdev->dev, true);

	ret = clk_enable(ch->mtu->clk);
		dev_err(&ch->mtu->pdev->dev, "ch%u: cannot enable clock\n",
			ch->index);

	sh_mtu2_start_stop_ch(ch, 0);

	rate = clk_get_rate(ch->mtu->clk) / 64;

	sh_mtu2_write(ch, TCR, TCR_CCLR_TGRA | TCR_TPSC_P64);
	sh_mtu2_write(ch, TIOR, TIOC_IOCH(TIOR_OC_0_CLEAR) |
		      TIOC_IOCL(TIOR_OC_0_CLEAR));
	sh_mtu2_write(ch, TGR, periodic);
	sh_mtu2_write(ch, TCNT, 0);
	sh_mtu2_write(ch, TMDR, TMDR_MD_NORMAL);
	sh_mtu2_write(ch, TIER, TIER_TGIEA);

	sh_mtu2_start_stop_ch(ch, 1);
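The enable path programs the channel for periodic operation: the counter runs at the input clock divided by 64 (TCR_TPSC_P64) and clears itself on a TGRA compare match (TCR_CCLR_TGRA), so the value written to TGR sets the period. A back-of-the-envelope sketch of that arithmetic follows; the input clock frequency, tick rate and rounding are assumptions, not values from the driver.

#include <stdio.h>

int main(void)
{
        unsigned long clk_rate = 33000000;      /* hypothetical input clock: 33 MHz */
        unsigned long hz = 100;                 /* hypothetical tick rate (like HZ) */

        unsigned long rate = clk_rate / 64;     /* counter rate after the /64 prescaler  */
        unsigned long periodic = (rate + hz / 2) / hz;  /* counts per tick, rounded      */

        /* with the counter clearing on TGRA match, writing this value to TGR
         * makes the channel fire roughly `hz` times per second */
        printf("counter rate %lu Hz -> TGRA compare value %lu\n", rate, periodic);
        return 0;
}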
static void sh_mtu2_disable(struct sh_mtu2_channel *ch)
	sh_mtu2_start_stop_ch(ch, 0);
	clk_disable(ch->mtu->clk);
	dev_pm_syscore_device(&ch->mtu->pdev->dev, false);
	pm_runtime_put(&ch->mtu->pdev->dev);
/* in sh_mtu2_interrupt() */
	struct sh_mtu2_channel *ch = dev_id;

	sh_mtu2_read(ch, TSR);
	sh_mtu2_write(ch, TSR, ~TSR_TGFA);

	ch->ced.event_handler(&ch->ced);
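The handler acknowledges the event by reading TSR and writing back ~TSR_TGFA, i.e. a value that is 0 only in the TGFA position. On these timers a status flag is typically cleared by reading it while set and then writing 0 to it, so this clears only the compare-match flag and leaves the others untouched. A small model of that masking; the TGFA bit position is an assumption (only TSR_TCFV appears in the listing above).

#include <stdio.h>

#define TSR_TGFA (1 << 0)       /* TGRA compare-match flag; bit position assumed */
#define TSR_TCFV (1 << 4)       /* overflow flag, as in the listing above        */

int main(void)
{
        unsigned int tsr = TSR_TGFA | TSR_TCFV;         /* both flags pending */

        /* the value written back is all-ones except in the TGFA position */
        unsigned int ack = (unsigned char)~TSR_TGFA;

        /* model of "write 0 to clear": a flag survives only where a 1 was written */
        tsr &= ack;

        printf("TSR after ack = %#x\n", tsr);   /* TGFA cleared, TCFV still set */
        return 0;
}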
/* in sh_mtu2_clock_event_shutdown() */
	struct sh_mtu2_channel *ch = ced_to_sh_mtu2(ced);
	sh_mtu2_disable(ch);

/* in sh_mtu2_clock_event_set_periodic() */
	struct sh_mtu2_channel *ch = ced_to_sh_mtu2(ced);
	sh_mtu2_disable(ch);
	dev_info(&ch->mtu->pdev->dev, "ch%u: used for periodic clock events\n",
		 ch->index);
	sh_mtu2_enable(ch);

/* in sh_mtu2_clock_event_suspend() and sh_mtu2_clock_event_resume() */
	dev_pm_genpd_suspend(&ced_to_sh_mtu2(ced)->mtu->pdev->dev);
	dev_pm_genpd_resume(&ced_to_sh_mtu2(ced)->mtu->pdev->dev);
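ced_to_sh_mtu2() itself is not among the matched lines, but since the clock_event_device is embedded in the channel structure (the driver hands out &ch->ced), it is presumably the usual container_of conversion from the embedded member back to its parent. A self-contained illustration of that pattern with trimmed-down stand-in types:

#include <stddef.h>
#include <stdio.h>

struct clock_event_device { const char *name; };        /* trimmed-down stand-in */

struct sh_mtu2_channel {
        unsigned int index;
        struct clock_event_device ced;  /* embedded, as in &ch->ced above */
};

/* container_of-style conversion: member pointer -> enclosing structure */
static struct sh_mtu2_channel *ced_to_sh_mtu2(struct clock_event_device *ced)
{
        return (struct sh_mtu2_channel *)
                ((char *)ced - offsetof(struct sh_mtu2_channel, ced));
}

int main(void)
{
        struct sh_mtu2_channel ch = { .index = 3, .ced = { .name = "mtu2" } };
        struct clock_event_device *ced = &ch.ced;       /* what a callback receives */

        printf("recovered channel index: %u\n", ced_to_sh_mtu2(ced)->index);
        return 0;
}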
static void sh_mtu2_register_clockevent(struct sh_mtu2_channel *ch,
					const char *name)
	struct clock_event_device *ced = &ch->ced;

	ced->name = name;
	ced->features = CLOCK_EVT_FEAT_PERIODIC;
	ced->rating = 200;
	ced->cpumask = cpu_possible_mask;
	ced->set_state_shutdown = sh_mtu2_clock_event_shutdown;
	ced->set_state_periodic = sh_mtu2_clock_event_set_periodic;
	ced->suspend = sh_mtu2_clock_event_suspend;
	ced->resume = sh_mtu2_clock_event_resume;

	dev_info(&ch->mtu->pdev->dev, "ch%u: used for clock events\n",
		 ch->index);

static int sh_mtu2_register(struct sh_mtu2_channel *ch, const char *name)
	ch->mtu->has_clockevent = true;
	sh_mtu2_register_clockevent(ch, name);
static int sh_mtu2_setup_channel(struct sh_mtu2_channel *ch, unsigned int index,
				 struct sh_mtu2_device *mtu)
	ch->mtu = mtu;

	irq = platform_get_irq_byname(mtu->pdev, name);
	/* tail of the request_irq() call; the channel is passed as dev_id */
			  dev_name(&ch->mtu->pdev->dev), ch);
		dev_err(&ch->mtu->pdev->dev, "ch%u: failed to request irq %d\n",

	ch->base = mtu->mapbase + sh_mtu2_channel_offsets[index];
	ch->index = index;

	return sh_mtu2_register(ch, dev_name(&mtu->pdev->dev));
/* in sh_mtu2_map_memory() */
	res = platform_get_resource(mtu->pdev, IORESOURCE_MEM, 0);
	if (res == NULL) {
		dev_err(&mtu->pdev->dev, "failed to get I/O memory\n");
		return -ENXIO;
	}

	mtu->mapbase = ioremap(res->start, resource_size(res));
	if (mtu->mapbase == NULL)
		return -ENXIO;
/* in sh_mtu2_setup() */
	mtu->pdev = pdev;

	raw_spin_lock_init(&mtu->lock);

	mtu->clk = clk_get(&mtu->pdev->dev, "fck");
	if (IS_ERR(mtu->clk)) {
		dev_err(&mtu->pdev->dev, "cannot get clock\n");
		return PTR_ERR(mtu->clk);
	}

	ret = clk_prepare(mtu->clk);

	dev_err(&mtu->pdev->dev, "failed to remap I/O memory\n");

	mtu->num_channels = min_t(unsigned int, ret,
				  ARRAY_SIZE(sh_mtu2_channel_offsets));

	mtu->channels = kcalloc(mtu->num_channels, sizeof(*mtu->channels),
				GFP_KERNEL);
	if (mtu->channels == NULL) {
		ret = -ENOMEM;

	for (i = 0; i < mtu->num_channels; ++i) {
		ret = sh_mtu2_setup_channel(&mtu->channels[i], i, mtu);

	kfree(mtu->channels);
	iounmap(mtu->mapbase);
	clk_unprepare(mtu->clk);
	clk_put(mtu->clk);
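The tail of sh_mtu2_setup() releases resources in reverse order of acquisition: free the channel array, unmap the registers, then unprepare and put the clock. In kernel code this is conventionally written as a chain of goto labels so that each failure point jumps to exactly the cleanup it needs. A minimal user-space sketch of that shape; the resources here are placeholders, not the driver's actual ones.

#include <stdio.h>
#include <stdlib.h>

/* placeholder "resources" standing in for the clock, mapbase and channel array */
static int setup(int fail_step)
{
        void *clk = NULL, *mapbase = NULL, *channels = NULL;
        int ret = 0;

        clk = malloc(1);                                /* ~ clk_get() + clk_prepare() */
        if (!clk || fail_step == 1) { ret = -1; goto err_clk; }

        mapbase = malloc(1);                            /* ~ ioremap()                 */
        if (!mapbase || fail_step == 2) { ret = -1; goto err_map; }

        channels = calloc(4, 1);                        /* ~ kcalloc() of channels     */
        if (!channels || fail_step == 3) { ret = -1; goto err_chan; }

        return 0;                                       /* success: keep everything    */

err_chan:
        free(channels);                                 /* free(NULL) is a no-op       */
err_map:
        free(mapbase);                                  /* ~ iounmap()                 */
err_clk:
        free(clk);                                      /* ~ clk_unprepare()+clk_put() */
        return ret;
}

int main(void)
{
        printf("fail at step 2 -> %d\n", setup(2));
        printf("no failure     -> %d\n", setup(0));
        return 0;
}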
/* in sh_mtu2_probe() */
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	dev_info(&pdev->dev, "kept as earlytimer\n");

	return -ENOMEM;

	pm_runtime_idle(&pdev->dev);

	if (mtu->has_clockevent)
		pm_runtime_irq_safe(&pdev->dev);
	else
		pm_runtime_idle(&pdev->dev);
/* in sh_mtu2_remove() */
	return -EBUSY; /* cannot unregister clockevent */

/* platform device id table entry */
	{ "sh-mtu2", 0 },