Lines matching references to identifier `ch` (cross-reference listing; numbers are original source line numbers)
88 static inline unsigned long sh_tmu_read(struct sh_tmu_channel *ch, int reg_nr) in sh_tmu_read() argument
93 switch (ch->tmu->model) { in sh_tmu_read()
95 return ioread8(ch->tmu->mapbase + 2); in sh_tmu_read()
97 return ioread8(ch->tmu->mapbase + 4); in sh_tmu_read()
104 return ioread16(ch->base + offs); in sh_tmu_read()
106 return ioread32(ch->base + offs); in sh_tmu_read()
109 static inline void sh_tmu_write(struct sh_tmu_channel *ch, int reg_nr, in sh_tmu_write() argument
115 switch (ch->tmu->model) { in sh_tmu_write()
117 return iowrite8(value, ch->tmu->mapbase + 2); in sh_tmu_write()
119 return iowrite8(value, ch->tmu->mapbase + 4); in sh_tmu_write()
126 iowrite16(value, ch->base + offs); in sh_tmu_write()
128 iowrite32(value, ch->base + offs); in sh_tmu_write()
131 static void sh_tmu_start_stop_ch(struct sh_tmu_channel *ch, int start) in sh_tmu_start_stop_ch() argument
136 raw_spin_lock_irqsave(&ch->tmu->lock, flags); in sh_tmu_start_stop_ch()
137 value = sh_tmu_read(ch, TSTR); in sh_tmu_start_stop_ch()
140 value |= 1 << ch->index; in sh_tmu_start_stop_ch()
142 value &= ~(1 << ch->index); in sh_tmu_start_stop_ch()
144 sh_tmu_write(ch, TSTR, value); in sh_tmu_start_stop_ch()
145 raw_spin_unlock_irqrestore(&ch->tmu->lock, flags); in sh_tmu_start_stop_ch()
148 static int __sh_tmu_enable(struct sh_tmu_channel *ch) in __sh_tmu_enable() argument
153 ret = clk_enable(ch->tmu->clk); in __sh_tmu_enable()
155 dev_err(&ch->tmu->pdev->dev, "ch%u: cannot enable clock\n", in __sh_tmu_enable()
156 ch->index); in __sh_tmu_enable()
161 sh_tmu_start_stop_ch(ch, 0); in __sh_tmu_enable()
164 sh_tmu_write(ch, TCOR, 0xffffffff); in __sh_tmu_enable()
165 sh_tmu_write(ch, TCNT, 0xffffffff); in __sh_tmu_enable()
168 ch->rate = clk_get_rate(ch->tmu->clk) / 4; in __sh_tmu_enable()
169 sh_tmu_write(ch, TCR, TCR_TPSC_CLK4); in __sh_tmu_enable()
172 sh_tmu_start_stop_ch(ch, 1); in __sh_tmu_enable()
177 static int sh_tmu_enable(struct sh_tmu_channel *ch) in sh_tmu_enable() argument
179 if (ch->enable_count++ > 0) in sh_tmu_enable()
182 pm_runtime_get_sync(&ch->tmu->pdev->dev); in sh_tmu_enable()
183 dev_pm_syscore_device(&ch->tmu->pdev->dev, true); in sh_tmu_enable()
185 return __sh_tmu_enable(ch); in sh_tmu_enable()
188 static void __sh_tmu_disable(struct sh_tmu_channel *ch) in __sh_tmu_disable() argument
191 sh_tmu_start_stop_ch(ch, 0); in __sh_tmu_disable()
194 sh_tmu_write(ch, TCR, TCR_TPSC_CLK4); in __sh_tmu_disable()
197 clk_disable(ch->tmu->clk); in __sh_tmu_disable()
200 static void sh_tmu_disable(struct sh_tmu_channel *ch) in sh_tmu_disable() argument
202 if (WARN_ON(ch->enable_count == 0)) in sh_tmu_disable()
205 if (--ch->enable_count > 0) in sh_tmu_disable()
208 __sh_tmu_disable(ch); in sh_tmu_disable()
210 dev_pm_syscore_device(&ch->tmu->pdev->dev, false); in sh_tmu_disable()
211 pm_runtime_put(&ch->tmu->pdev->dev); in sh_tmu_disable()
214 static void sh_tmu_set_next(struct sh_tmu_channel *ch, unsigned long delta, in sh_tmu_set_next() argument
218 sh_tmu_start_stop_ch(ch, 0); in sh_tmu_set_next()
221 sh_tmu_read(ch, TCR); in sh_tmu_set_next()
224 sh_tmu_write(ch, TCR, TCR_UNIE | TCR_TPSC_CLK4); in sh_tmu_set_next()
228 sh_tmu_write(ch, TCOR, delta); in sh_tmu_set_next()
230 sh_tmu_write(ch, TCOR, 0xffffffff); in sh_tmu_set_next()
232 sh_tmu_write(ch, TCNT, delta); in sh_tmu_set_next()
235 sh_tmu_start_stop_ch(ch, 1); in sh_tmu_set_next()
240 struct sh_tmu_channel *ch = dev_id; in sh_tmu_interrupt() local
243 if (clockevent_state_oneshot(&ch->ced)) in sh_tmu_interrupt()
244 sh_tmu_write(ch, TCR, TCR_TPSC_CLK4); in sh_tmu_interrupt()
246 sh_tmu_write(ch, TCR, TCR_UNIE | TCR_TPSC_CLK4); in sh_tmu_interrupt()
249 ch->ced.event_handler(&ch->ced); in sh_tmu_interrupt()
260 struct sh_tmu_channel *ch = cs_to_sh_tmu(cs); in sh_tmu_clocksource_read() local
262 return sh_tmu_read(ch, TCNT) ^ 0xffffffff; in sh_tmu_clocksource_read()
267 struct sh_tmu_channel *ch = cs_to_sh_tmu(cs); in sh_tmu_clocksource_enable() local
270 if (WARN_ON(ch->cs_enabled)) in sh_tmu_clocksource_enable()
273 ret = sh_tmu_enable(ch); in sh_tmu_clocksource_enable()
275 __clocksource_update_freq_hz(cs, ch->rate); in sh_tmu_clocksource_enable()
276 ch->cs_enabled = true; in sh_tmu_clocksource_enable()
284 struct sh_tmu_channel *ch = cs_to_sh_tmu(cs); in sh_tmu_clocksource_disable() local
286 if (WARN_ON(!ch->cs_enabled)) in sh_tmu_clocksource_disable()
289 sh_tmu_disable(ch); in sh_tmu_clocksource_disable()
290 ch->cs_enabled = false; in sh_tmu_clocksource_disable()
295 struct sh_tmu_channel *ch = cs_to_sh_tmu(cs); in sh_tmu_clocksource_suspend() local
297 if (!ch->cs_enabled) in sh_tmu_clocksource_suspend()
300 if (--ch->enable_count == 0) { in sh_tmu_clocksource_suspend()
301 __sh_tmu_disable(ch); in sh_tmu_clocksource_suspend()
302 pm_genpd_syscore_poweroff(&ch->tmu->pdev->dev); in sh_tmu_clocksource_suspend()
308 struct sh_tmu_channel *ch = cs_to_sh_tmu(cs); in sh_tmu_clocksource_resume() local
310 if (!ch->cs_enabled) in sh_tmu_clocksource_resume()
313 if (ch->enable_count++ == 0) { in sh_tmu_clocksource_resume()
314 pm_genpd_syscore_poweron(&ch->tmu->pdev->dev); in sh_tmu_clocksource_resume()
315 __sh_tmu_enable(ch); in sh_tmu_clocksource_resume()
319 static int sh_tmu_register_clocksource(struct sh_tmu_channel *ch, in sh_tmu_register_clocksource() argument
322 struct clocksource *cs = &ch->cs; in sh_tmu_register_clocksource()
334 dev_info(&ch->tmu->pdev->dev, "ch%u: used as clock source\n", in sh_tmu_register_clocksource()
335 ch->index); in sh_tmu_register_clocksource()
347 static void sh_tmu_clock_event_start(struct sh_tmu_channel *ch, int periodic) in sh_tmu_clock_event_start() argument
349 struct clock_event_device *ced = &ch->ced; in sh_tmu_clock_event_start()
351 sh_tmu_enable(ch); in sh_tmu_clock_event_start()
353 clockevents_config(ced, ch->rate); in sh_tmu_clock_event_start()
356 ch->periodic = (ch->rate + HZ/2) / HZ; in sh_tmu_clock_event_start()
357 sh_tmu_set_next(ch, ch->periodic, 1); in sh_tmu_clock_event_start()
363 struct sh_tmu_channel *ch = ced_to_sh_tmu(ced); in sh_tmu_clock_event_shutdown() local
366 sh_tmu_disable(ch); in sh_tmu_clock_event_shutdown()
373 struct sh_tmu_channel *ch = ced_to_sh_tmu(ced); in sh_tmu_clock_event_set_state() local
377 sh_tmu_disable(ch); in sh_tmu_clock_event_set_state()
379 dev_info(&ch->tmu->pdev->dev, "ch%u: used for %s clock events\n", in sh_tmu_clock_event_set_state()
380 ch->index, periodic ? "periodic" : "oneshot"); in sh_tmu_clock_event_set_state()
381 sh_tmu_clock_event_start(ch, periodic); in sh_tmu_clock_event_set_state()
398 struct sh_tmu_channel *ch = ced_to_sh_tmu(ced); in sh_tmu_clock_event_next() local
403 sh_tmu_set_next(ch, delta, 0); in sh_tmu_clock_event_next()
417 static void sh_tmu_register_clockevent(struct sh_tmu_channel *ch, in sh_tmu_register_clockevent() argument
420 struct clock_event_device *ced = &ch->ced; in sh_tmu_register_clockevent()
435 dev_info(&ch->tmu->pdev->dev, "ch%u: used for clock events\n", in sh_tmu_register_clockevent()
436 ch->index); in sh_tmu_register_clockevent()
440 ret = request_irq(ch->irq, sh_tmu_interrupt, in sh_tmu_register_clockevent()
442 dev_name(&ch->tmu->pdev->dev), ch); in sh_tmu_register_clockevent()
444 dev_err(&ch->tmu->pdev->dev, "ch%u: failed to request irq %d\n", in sh_tmu_register_clockevent()
445 ch->index, ch->irq); in sh_tmu_register_clockevent()
450 static int sh_tmu_register(struct sh_tmu_channel *ch, const char *name, in sh_tmu_register() argument
454 ch->tmu->has_clockevent = true; in sh_tmu_register()
455 sh_tmu_register_clockevent(ch, name); in sh_tmu_register()
457 ch->tmu->has_clocksource = true; in sh_tmu_register()
458 sh_tmu_register_clocksource(ch, name); in sh_tmu_register()
464 static int sh_tmu_channel_setup(struct sh_tmu_channel *ch, unsigned int index, in sh_tmu_channel_setup() argument
472 ch->tmu = tmu; in sh_tmu_channel_setup()
473 ch->index = index; in sh_tmu_channel_setup()
476 ch->base = tmu->mapbase + 4 + ch->index * 12; in sh_tmu_channel_setup()
478 ch->base = tmu->mapbase + 8 + ch->index * 12; in sh_tmu_channel_setup()
480 ch->irq = platform_get_irq(tmu->pdev, index); in sh_tmu_channel_setup()
481 if (ch->irq < 0) { in sh_tmu_channel_setup()
483 ch->index); in sh_tmu_channel_setup()
484 return ch->irq; in sh_tmu_channel_setup()
487 ch->cs_enabled = false; in sh_tmu_channel_setup()
488 ch->enable_count = 0; in sh_tmu_channel_setup()
490 return sh_tmu_register(ch, dev_name(&tmu->pdev->dev), in sh_tmu_channel_setup()