Lines matching refs:mtu — references to the mtu device pointer in the SuperH MTU2 timer driver (sh_mtu2.c); each entry gives the source line number, the matching code, and the enclosing function.

33 	struct sh_mtu2_device *mtu;  member
161 return ioread8(ch->mtu->mapbase + 0x280); in sh_mtu2_read()
177 return iowrite8(value, ch->mtu->mapbase + 0x280); in sh_mtu2_write()
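
The read and write hits above both special-case offset 0x280: that is the shared timer start/stop register (TSTR), which lives in the device-wide mapping rather than in any channel's register window. A minimal sketch of the read side of that dispatch; the register names (TSTR, TCNT, TGR) and the offset table name are recalled from the driver, not shown in the hits above, so treat them as assumptions:

    static unsigned long sh_mtu2_read(struct sh_mtu2_channel *ch, int reg_nr)
    {
            unsigned long offs;

            /* TSTR is shared by all channels, so it is addressed from the
             * device-wide mapping at a fixed offset, not from ch->base. */
            if (reg_nr == TSTR)
                    return ioread8(ch->mtu->mapbase + 0x280);

            offs = mtu2_reg_offs[reg_nr];

            /* The counter and general registers are 16-bit wide. */
            if (reg_nr == TCNT || reg_nr == TGR)
                    return ioread16(ch->base + offs);

            return ioread8(ch->base + offs);
    }
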
192 raw_spin_lock_irqsave(&ch->mtu->lock, flags); in sh_mtu2_start_stop_ch()
201 raw_spin_unlock_irqrestore(&ch->mtu->lock, flags); in sh_mtu2_start_stop_ch()
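
Because TSTR is shared between channels, starting or stopping one channel is a read-modify-write that has to hold the device-wide lock, which is what the two lock hits above protect. A sketch of the sequence, assuming one TSTR bit per channel index:

    static void sh_mtu2_start_stop_ch(struct sh_mtu2_channel *ch, int start)
    {
            unsigned long flags;
            unsigned long value;

            /* TSTR is shared by all channels of the device, so the
             * read-modify-write must be serialized by the device lock. */
            raw_spin_lock_irqsave(&ch->mtu->lock, flags);

            value = sh_mtu2_read(ch, TSTR);
            if (start)
                    value |= 1 << ch->index;
            else
                    value &= ~(1 << ch->index);
            sh_mtu2_write(ch, TSTR, value);

            raw_spin_unlock_irqrestore(&ch->mtu->lock, flags);
    }
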
210 pm_runtime_get_sync(&ch->mtu->pdev->dev); in sh_mtu2_enable()
211 dev_pm_syscore_device(&ch->mtu->pdev->dev, true); in sh_mtu2_enable()
214 ret = clk_enable(ch->mtu->clk); in sh_mtu2_enable()
216 dev_err(&ch->mtu->pdev->dev, "ch%u: cannot enable clock\n", in sh_mtu2_enable()
224 rate = clk_get_rate(ch->mtu->clk) / 64; in sh_mtu2_enable()
251 clk_disable(ch->mtu->clk); in sh_mtu2_disable()
253 dev_pm_syscore_device(&ch->mtu->pdev->dev, false); in sh_mtu2_disable()
254 pm_runtime_put(&ch->mtu->pdev->dev); in sh_mtu2_disable()
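
The enable hits and the disable hits above mirror each other: a runtime PM reference and the syscore flag are taken before the functional clock is enabled, and released in the reverse order once the clock is stopped; the channel itself counts that clock divided by 64. A condensed sketch of the pairing, with most of the register programming elided and the register names assumed:

    static int sh_mtu2_enable(struct sh_mtu2_channel *ch)
    {
            unsigned long periodic;
            unsigned long rate;
            int ret;

            /* Keep the device powered while it may be the system tick source. */
            pm_runtime_get_sync(&ch->mtu->pdev->dev);
            dev_pm_syscore_device(&ch->mtu->pdev->dev, true);

            ret = clk_enable(ch->mtu->clk);
            if (ret) {
                    dev_err(&ch->mtu->pdev->dev, "ch%u: cannot enable clock\n",
                            ch->index);
                    return ret;
            }

            /* The channel counts the functional clock divided by 64. */
            rate = clk_get_rate(ch->mtu->clk) / 64;
            periodic = (rate + HZ / 2) / HZ;

            /* ... program TCR/TIOR/TCNT/TMDR/TIER for periodic operation,
             * with the compare match value in TGRA ... */
            sh_mtu2_write(ch, TGR, periodic);

            /* Enable the channel. */
            sh_mtu2_start_stop_ch(ch, 1);

            return 0;
    }

    static void sh_mtu2_disable(struct sh_mtu2_channel *ch)
    {
            sh_mtu2_start_stop_ch(ch, 0);

            /* Undo sh_mtu2_enable() in reverse order. */
            clk_disable(ch->mtu->clk);
            dev_pm_syscore_device(&ch->mtu->pdev->dev, false);
            pm_runtime_put(&ch->mtu->pdev->dev);
    }
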
292 dev_info(&ch->mtu->pdev->dev, "ch%u: used for periodic clock events\n", in sh_mtu2_clock_event_set_periodic()
300 dev_pm_genpd_suspend(&ced_to_sh_mtu2(ced)->mtu->pdev->dev); in sh_mtu2_clock_event_suspend()
305 dev_pm_genpd_resume(&ced_to_sh_mtu2(ced)->mtu->pdev->dev); in sh_mtu2_clock_event_resume()
322 dev_info(&ch->mtu->pdev->dev, "ch%u: used for clock events\n", in sh_mtu2_register_clockevent()
329 ch->mtu->has_clockevent = true; in sh_mtu2_register()
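
The clock event hits come from the registration path: each channel embeds a struct clock_event_device that advertises periodic operation only, and registering it is what sets has_clockevent on the device so the probe path can keep runtime PM IRQ-safe. A sketch, with the rating and the callback names assumed from the driver:

    static void sh_mtu2_register_clockevent(struct sh_mtu2_channel *ch,
                                            const char *name)
    {
            struct clock_event_device *ced = &ch->ced;

            ced->name = name;
            ced->features = CLOCK_EVT_FEAT_PERIODIC;   /* periodic mode only */
            ced->rating = 200;
            ced->cpumask = cpu_possible_mask;
            ced->set_state_shutdown = sh_mtu2_clock_event_shutdown;
            ced->set_state_periodic = sh_mtu2_clock_event_set_periodic;
            ced->suspend = sh_mtu2_clock_event_suspend;
            ced->resume = sh_mtu2_clock_event_resume;

            dev_info(&ch->mtu->pdev->dev, "ch%u: used for clock events\n",
                     ch->index);

            clockevents_register_device(ced);
    }

    static int sh_mtu2_register(struct sh_mtu2_channel *ch, const char *name)
    {
            /* Lets sh_mtu2_probe() know it must keep runtime PM IRQ-safe. */
            ch->mtu->has_clockevent = true;
            sh_mtu2_register_clockevent(ch, name);

            return 0;
    }
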
340 struct sh_mtu2_device *mtu) in sh_mtu2_setup_channel() argument
346 ch->mtu = mtu; in sh_mtu2_setup_channel()
349 irq = platform_get_irq_byname(mtu->pdev, name); in sh_mtu2_setup_channel()
357 dev_name(&ch->mtu->pdev->dev), ch); in sh_mtu2_setup_channel()
359 dev_err(&ch->mtu->pdev->dev, "ch%u: failed to request irq %d\n", in sh_mtu2_setup_channel()
364 ch->base = mtu->mapbase + sh_mtu2_channel_offsets[index]; in sh_mtu2_setup_channel()
367 return sh_mtu2_register(ch, dev_name(&mtu->pdev->dev)); in sh_mtu2_setup_channel()
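
Channel setup ties a channel to the shared device: the mtu back-pointer, a named per-channel interrupt, and a register window carved out of the single mapping. A sketch assuming the driver's channel offset table and its "tgi<n>a" interrupt naming:

    static int sh_mtu2_setup_channel(struct sh_mtu2_channel *ch,
                                     unsigned int index,
                                     struct sh_mtu2_device *mtu)
    {
            char name[6];
            int irq;
            int ret;

            ch->mtu = mtu;          /* back-pointer to the shared device */
            ch->index = index;

            /* Each channel has a named IRQ resource ("tgi0a", "tgi1a", ...);
             * a channel without one is simply skipped. */
            sprintf(name, "tgi%ua", index);
            irq = platform_get_irq_byname(mtu->pdev, name);
            if (irq < 0)
                    return 0;

            ret = request_irq(irq, sh_mtu2_interrupt,
                              IRQF_TIMER | IRQF_IRQPOLL | IRQF_NOBALANCING,
                              dev_name(&ch->mtu->pdev->dev), ch);
            if (ret) {
                    dev_err(&ch->mtu->pdev->dev, "ch%u: failed to request irq %d\n",
                            index, irq);
                    return ret;
            }

            /* The channel's registers are a fixed slice of the device mapping. */
            ch->base = mtu->mapbase + sh_mtu2_channel_offsets[index];

            return sh_mtu2_register(ch, dev_name(&mtu->pdev->dev));
    }
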
370 static int sh_mtu2_map_memory(struct sh_mtu2_device *mtu) in sh_mtu2_map_memory() argument
374 res = platform_get_resource(mtu->pdev, IORESOURCE_MEM, 0); in sh_mtu2_map_memory()
376 dev_err(&mtu->pdev->dev, "failed to get I/O memory\n"); in sh_mtu2_map_memory()
380 mtu->mapbase = ioremap(res->start, resource_size(res)); in sh_mtu2_map_memory()
381 if (mtu->mapbase == NULL) in sh_mtu2_map_memory()
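
The mapping hits are the plain platform-device pattern: fetch the one MEM resource, ioremap() it, and keep the result as the device-wide mapbase from which both the TSTR accesses and the per-channel bases are derived. A sketch:

    static int sh_mtu2_map_memory(struct sh_mtu2_device *mtu)
    {
            struct resource *res;

            res = platform_get_resource(mtu->pdev, IORESOURCE_MEM, 0);
            if (!res) {
                    dev_err(&mtu->pdev->dev, "failed to get I/O memory\n");
                    return -ENXIO;
            }

            mtu->mapbase = ioremap(res->start, resource_size(res));
            if (mtu->mapbase == NULL)
                    return -ENXIO;

            return 0;
    }
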
387 static int sh_mtu2_setup(struct sh_mtu2_device *mtu, in sh_mtu2_setup() argument
393 mtu->pdev = pdev; in sh_mtu2_setup()
395 raw_spin_lock_init(&mtu->lock); in sh_mtu2_setup()
398 mtu->clk = clk_get(&mtu->pdev->dev, "fck"); in sh_mtu2_setup()
399 if (IS_ERR(mtu->clk)) { in sh_mtu2_setup()
400 dev_err(&mtu->pdev->dev, "cannot get clock\n"); in sh_mtu2_setup()
401 return PTR_ERR(mtu->clk); in sh_mtu2_setup()
404 ret = clk_prepare(mtu->clk); in sh_mtu2_setup()
409 ret = sh_mtu2_map_memory(mtu); in sh_mtu2_setup()
411 dev_err(&mtu->pdev->dev, "failed to remap I/O memory\n"); in sh_mtu2_setup()
420 mtu->num_channels = min_t(unsigned int, ret, in sh_mtu2_setup()
423 mtu->channels = kcalloc(mtu->num_channels, sizeof(*mtu->channels), in sh_mtu2_setup()
425 if (mtu->channels == NULL) { in sh_mtu2_setup()
430 for (i = 0; i < mtu->num_channels; ++i) { in sh_mtu2_setup()
431 ret = sh_mtu2_setup_channel(&mtu->channels[i], i, mtu); in sh_mtu2_setup()
436 platform_set_drvdata(pdev, mtu); in sh_mtu2_setup()
441 kfree(mtu->channels); in sh_mtu2_setup()
442 iounmap(mtu->mapbase); in sh_mtu2_setup()
444 clk_unprepare(mtu->clk); in sh_mtu2_setup()
446 clk_put(mtu->clk); in sh_mtu2_setup()
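
The setup hits end in the error path; the cleanup unwinds the earlier steps in reverse order (free the channels, unmap the I/O memory, unprepare and then put the clock). A sketch of the whole sequence, with the goto label names assumed:

    static int sh_mtu2_setup(struct sh_mtu2_device *mtu,
                             struct platform_device *pdev)
    {
            unsigned int i;
            int ret;

            mtu->pdev = pdev;
            raw_spin_lock_init(&mtu->lock);

            /* Functional clock: get it first, prepare it second. */
            mtu->clk = clk_get(&mtu->pdev->dev, "fck");
            if (IS_ERR(mtu->clk)) {
                    dev_err(&mtu->pdev->dev, "cannot get clock\n");
                    return PTR_ERR(mtu->clk);
            }

            ret = clk_prepare(mtu->clk);
            if (ret < 0)
                    goto err_clk_put;

            ret = sh_mtu2_map_memory(mtu);
            if (ret < 0) {
                    dev_err(&mtu->pdev->dev, "failed to remap I/O memory\n");
                    goto err_clk_unprepare;
            }

            /* One channel per declared IRQ, capped by the offset table size. */
            ret = platform_irq_count(pdev);
            if (ret < 0)
                    goto err_unmap;

            mtu->num_channels = min_t(unsigned int, ret,
                                      ARRAY_SIZE(sh_mtu2_channel_offsets));

            mtu->channels = kcalloc(mtu->num_channels, sizeof(*mtu->channels),
                                    GFP_KERNEL);
            if (mtu->channels == NULL) {
                    ret = -ENOMEM;
                    goto err_unmap;
            }

            for (i = 0; i < mtu->num_channels; ++i) {
                    ret = sh_mtu2_setup_channel(&mtu->channels[i], i, mtu);
                    if (ret < 0)
                            goto err_unmap;
            }

            platform_set_drvdata(pdev, mtu);
            return 0;

            /* Unwind in reverse order of acquisition. */
    err_unmap:
            kfree(mtu->channels);
            iounmap(mtu->mapbase);
    err_clk_unprepare:
            clk_unprepare(mtu->clk);
    err_clk_put:
            clk_put(mtu->clk);
            return ret;
    }
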
452 struct sh_mtu2_device *mtu = platform_get_drvdata(pdev); in sh_mtu2_probe() local
460 if (mtu) { in sh_mtu2_probe()
465 mtu = kzalloc(sizeof(*mtu), GFP_KERNEL); in sh_mtu2_probe()
466 if (mtu == NULL) in sh_mtu2_probe()
469 ret = sh_mtu2_setup(mtu, pdev); in sh_mtu2_probe()
471 kfree(mtu); in sh_mtu2_probe()
479 if (mtu->has_clockevent) in sh_mtu2_probe()
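
Finally, the probe hits show the early-timer pattern: platform drvdata may already point at an mtu instance set up during early boot, in which case probe skips allocation entirely; otherwise it allocates and runs setup, and the has_clockevent flag then decides whether runtime PM must be marked IRQ-safe. A condensed sketch (the early-platform checks and runtime PM enabling are left out):

    static int sh_mtu2_probe(struct platform_device *pdev)
    {
            struct sh_mtu2_device *mtu = platform_get_drvdata(pdev);
            int ret;

            if (mtu) {
                    /* Already set up as an early timer; nothing to allocate. */
                    goto out;
            }

            mtu = kzalloc(sizeof(*mtu), GFP_KERNEL);
            if (mtu == NULL)
                    return -ENOMEM;

            ret = sh_mtu2_setup(mtu, pdev);
            if (ret) {
                    kfree(mtu);
                    pm_runtime_idle(&pdev->dev);
                    return ret;
            }

    out:
            /* A clock event device may fire its callbacks with interrupts
             * disabled, so runtime PM has to be IRQ-safe in that case. */
            if (mtu->has_clockevent)
                    pm_runtime_irq_safe(&pdev->dev);
            else
                    pm_runtime_idle(&pdev->dev);

            return 0;
    }
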