Lines matching refs: d40c

603 static struct device *chan2dev(struct d40_chan *d40c)  in chan2dev()  argument
605 return &d40c->chan.dev->device; in chan2dev()
627 #define chan_err(d40c, format, arg...) \ argument
628 d40_err(chan2dev(d40c), format, ## arg)
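The two lines above show chan_err() forwarding to d40_err() with the channel's struct device. A minimal sketch of how such a wrapper pair is typically built on dev_err(); the d40_err() body is not in the listing, so the "[function name]" prefix shown here is an assumption:

	/* Sketch only: d40_err() assumed to be a thin dev_err() wrapper that
	 * tags each message with the calling function (<linux/device.h>). */
	#define d40_err(dev, format, arg...) \
		dev_err(dev, "[%s] " format, __func__, ## arg)

	#define chan_err(d40c, format, arg...) \
		d40_err(chan2dev(d40c), format, ## arg)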
634 static int d40_pool_lli_alloc(struct d40_chan *d40c, struct d40_desc *d40d, in d40_pool_lli_alloc() argument
637 bool is_log = chan_is_logical(d40c); in d40_pool_lli_alloc()
669 d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev, in d40_pool_lli_alloc()
674 if (dma_mapping_error(d40c->base->dev, in d40_pool_lli_alloc()
686 static void d40_pool_lli_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_pool_lli_free() argument
689 dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr, in d40_pool_lli_free()
701 static int d40_lcla_alloc_one(struct d40_chan *d40c, in d40_lcla_alloc_one() argument
708 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
715 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_alloc_one()
717 if (!d40c->base->lcla_pool.alloc_map[idx]) { in d40_lcla_alloc_one()
718 d40c->base->lcla_pool.alloc_map[idx] = d40d; in d40_lcla_alloc_one()
725 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
730 static int d40_lcla_free_all(struct d40_chan *d40c, in d40_lcla_free_all() argument
737 if (chan_is_physical(d40c)) in d40_lcla_free_all()
740 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
743 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_free_all()
745 if (d40c->base->lcla_pool.alloc_map[idx] == d40d) { in d40_lcla_free_all()
746 d40c->base->lcla_pool.alloc_map[idx] = NULL; in d40_lcla_free_all()
755 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
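d40_lcla_alloc_one() and d40_lcla_free_all() above index the LCLA alloc_map as phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + link, under the pool spinlock. A minimal sketch of the claim side, using only fields visible above; the helper name lcla_claim_slot() is hypothetical:

	/* Sketch: claim the first free LCLA link slot reserved for this physical
	 * channel; returns the slot index, or -EINVAL if all
	 * D40_LCLA_LINK_PER_EVENT_GRP slots are taken. */
	static int lcla_claim_slot(struct d40_chan *d40c, struct d40_desc *d40d)
	{
		unsigned long flags;
		int i, ret = -EINVAL;

		spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags);
		for (i = 0; i < D40_LCLA_LINK_PER_EVENT_GRP; i++) {
			int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i;

			if (!d40c->base->lcla_pool.alloc_map[idx]) {
				d40c->base->lcla_pool.alloc_map[idx] = d40d; /* slot now owned by d40d */
				ret = i;
				break;
			}
		}
		spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags);

		return ret;
	}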
766 static struct d40_desc *d40_desc_get(struct d40_chan *d40c) in d40_desc_get() argument
770 if (!list_empty(&d40c->client)) { in d40_desc_get()
774 list_for_each_entry_safe(d, _d, &d40c->client, node) { in d40_desc_get()
785 desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT); in d40_desc_get()
793 static void d40_desc_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_free() argument
796 d40_pool_lli_free(d40c, d40d); in d40_desc_free()
797 d40_lcla_free_all(d40c, d40d); in d40_desc_free()
798 kmem_cache_free(d40c->base->desc_slab, d40d); in d40_desc_free()
801 static void d40_desc_submit(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_submit() argument
803 list_add_tail(&desc->node, &d40c->active); in d40_desc_submit()
823 static void d40_desc_done(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_done() argument
825 list_add_tail(&desc->node, &d40c->done); in d40_desc_done()
940 static void d40_desc_load(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_load() argument
942 if (chan_is_physical(d40c)) { in d40_desc_load()
943 d40_phy_lli_load(d40c, d40d); in d40_desc_load()
946 d40_log_lli_to_lcxa(d40c, d40d); in d40_desc_load()
949 static struct d40_desc *d40_first_active_get(struct d40_chan *d40c) in d40_first_active_get() argument
951 return list_first_entry_or_null(&d40c->active, struct d40_desc, node); in d40_first_active_get()
955 static void d40_desc_queue(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_queue() argument
959 list_add_tail(&desc->node, &d40c->pending_queue); in d40_desc_queue()
962 static struct d40_desc *d40_first_pending(struct d40_chan *d40c) in d40_first_pending() argument
964 return list_first_entry_or_null(&d40c->pending_queue, struct d40_desc, in d40_first_pending()
968 static struct d40_desc *d40_first_queued(struct d40_chan *d40c) in d40_first_queued() argument
970 return list_first_entry_or_null(&d40c->queue, struct d40_desc, node); in d40_first_queued()
973 static struct d40_desc *d40_first_done(struct d40_chan *d40c) in d40_first_done() argument
975 return list_first_entry_or_null(&d40c->done, struct d40_desc, node); in d40_first_done()
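The accessors above all peek their list with list_first_entry_or_null() on &d40c->done, ->active, ->queue and ->pending_queue, and d40_term_all() further down drains each list with a while loop over them. The unlink step never references d40c and so is absent from the listing; plain list_del() stands in for it in this sketch:

	/* Sketch: drain the "done" list, freeing every descriptor.  Uses only
	 * helpers and fields visible in the listing; callers are expected to
	 * hold d40c->lock, as d40_terminate_all() does around d40_term_all(). */
	static void drain_done_list(struct d40_chan *d40c)
	{
		struct d40_desc *d40d;

		while ((d40d = d40_first_done(d40c)) != NULL) {
			list_del(&d40d->node);          /* unlink from d40c->done */
			d40_desc_free(d40c, d40d);      /* return LLI pool + slab entry */
		}
	}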
1038 static int __d40_execute_command_phy(struct d40_chan *d40c, in __d40_execute_command_phy() argument
1049 ret = __d40_execute_command_phy(d40c, D40_DMA_SUSPEND_REQ); in __d40_execute_command_phy()
1054 spin_lock_irqsave(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1056 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_phy()
1057 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_phy()
1059 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_phy()
1063 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1064 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1070 wmask = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num)); in __d40_execute_command_phy()
1071 writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)), in __d40_execute_command_phy()
1078 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1079 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1094 chan_err(d40c, in __d40_execute_command_phy()
1096 d40c->phy_chan->num, d40c->log_num, in __d40_execute_command_phy()
1104 spin_unlock_irqrestore(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1108 static void d40_term_all(struct d40_chan *d40c) in d40_term_all() argument
1114 while ((d40d = d40_first_done(d40c))) { in d40_term_all()
1116 d40_desc_free(d40c, d40d); in d40_term_all()
1120 while ((d40d = d40_first_active_get(d40c))) { in d40_term_all()
1122 d40_desc_free(d40c, d40d); in d40_term_all()
1126 while ((d40d = d40_first_queued(d40c))) { in d40_term_all()
1128 d40_desc_free(d40c, d40d); in d40_term_all()
1132 while ((d40d = d40_first_pending(d40c))) { in d40_term_all()
1134 d40_desc_free(d40c, d40d); in d40_term_all()
1138 if (!list_empty(&d40c->client)) in d40_term_all()
1139 list_for_each_entry_safe(d40d, _d, &d40c->client, node) { in d40_term_all()
1141 d40_desc_free(d40c, d40d); in d40_term_all()
1145 if (!list_empty(&d40c->prepare_queue)) in d40_term_all()
1147 &d40c->prepare_queue, node) { in d40_term_all()
1149 d40_desc_free(d40c, d40d); in d40_term_all()
1152 d40c->pending_tx = 0; in d40_term_all()
1155 static void __d40_config_set_event(struct d40_chan *d40c, in __d40_config_set_event() argument
1159 void __iomem *addr = chan_base(d40c) + reg; in __d40_config_set_event()
1199 chan_err(d40c, in __d40_config_set_event()
1201 "status %x\n", d40c->phy_chan->num, in __d40_config_set_event()
1202 d40c->log_num, status); in __d40_config_set_event()
1223 dev_dbg(chan2dev(d40c), in __d40_config_set_event()
1238 static void d40_config_set_event(struct d40_chan *d40c, in d40_config_set_event() argument
1241 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_config_set_event()
1244 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_config_set_event()
1245 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_config_set_event()
1246 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1249 if (d40c->dma_cfg.dir != DMA_DEV_TO_MEM) in d40_config_set_event()
1250 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1254 static u32 d40_chan_has_events(struct d40_chan *d40c) in d40_chan_has_events() argument
1256 void __iomem *chanbase = chan_base(d40c); in d40_chan_has_events()
1266 __d40_execute_command_log(struct d40_chan *d40c, enum d40_command command) in __d40_execute_command_log() argument
1273 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_log()
1274 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_log()
1276 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_log()
1279 spin_lock_irqsave(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1286 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_log()
1287 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_log()
1290 d40_config_set_event(d40c, D40_SUSPEND_REQ_EVENTLINE); in __d40_execute_command_log()
1292 d40_config_set_event(d40c, D40_DEACTIVATE_EVENTLINE); in __d40_execute_command_log()
1294 if (!d40_chan_has_events(d40c) && (command == D40_DMA_STOP)) in __d40_execute_command_log()
1295 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1301 d40_config_set_event(d40c, D40_ACTIVATE_EVENTLINE); in __d40_execute_command_log()
1302 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1310 spin_unlock_irqrestore(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1314 static int d40_channel_execute_command(struct d40_chan *d40c, in d40_channel_execute_command() argument
1317 if (chan_is_logical(d40c)) in d40_channel_execute_command()
1318 return __d40_execute_command_log(d40c, command); in d40_channel_execute_command()
1320 return __d40_execute_command_phy(d40c, command); in d40_channel_execute_command()
1323 static u32 d40_get_prmo(struct d40_chan *d40c) in d40_get_prmo() argument
1342 if (chan_is_physical(d40c)) in d40_get_prmo()
1343 return phy_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1345 return log_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1348 static void d40_config_write(struct d40_chan *d40c) in d40_config_write() argument
1354 addr_base = (d40c->phy_chan->num % 2) * 4; in d40_config_write()
1356 var = ((u32)(chan_is_logical(d40c)) + 1) << in d40_config_write()
1357 D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1358 writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base); in d40_config_write()
1361 var = d40_get_prmo(d40c) << D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1363 writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base); in d40_config_write()
1365 if (chan_is_logical(d40c)) { in d40_config_write()
1366 int lidx = (d40c->phy_chan->num << D40_SREG_ELEM_LOG_LIDX_POS) in d40_config_write()
1368 void __iomem *chanbase = chan_base(d40c); in d40_config_write()
1371 writel(d40c->src_def_cfg, chanbase + D40_CHAN_REG_SSCFG); in d40_config_write()
1372 writel(d40c->dst_def_cfg, chanbase + D40_CHAN_REG_SDCFG); in d40_config_write()
1384 static u32 d40_residue(struct d40_chan *d40c) in d40_residue() argument
1388 if (chan_is_logical(d40c)) in d40_residue()
1389 num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK) in d40_residue()
1392 u32 val = readl(chan_base(d40c) + D40_CHAN_REG_SDELT); in d40_residue()
1397 return num_elt * d40c->dma_cfg.dst_info.data_width; in d40_residue()
1400 static bool d40_tx_is_linked(struct d40_chan *d40c) in d40_tx_is_linked() argument
1404 if (chan_is_logical(d40c)) in d40_tx_is_linked()
1405 is_link = readl(&d40c->lcpa->lcsp3) & D40_MEM_LCSP3_DLOS_MASK; in d40_tx_is_linked()
1407 is_link = readl(chan_base(d40c) + D40_CHAN_REG_SDLNK) in d40_tx_is_linked()
1415 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_pause() local
1419 if (d40c->phy_chan == NULL) { in d40_pause()
1420 chan_err(d40c, "Channel is not allocated!\n"); in d40_pause()
1424 if (!d40c->busy) in d40_pause()
1427 spin_lock_irqsave(&d40c->lock, flags); in d40_pause()
1428 pm_runtime_get_sync(d40c->base->dev); in d40_pause()
1430 res = d40_channel_execute_command(d40c, D40_DMA_SUSPEND_REQ); in d40_pause()
1432 pm_runtime_mark_last_busy(d40c->base->dev); in d40_pause()
1433 pm_runtime_put_autosuspend(d40c->base->dev); in d40_pause()
1434 spin_unlock_irqrestore(&d40c->lock, flags); in d40_pause()
1440 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_resume() local
1444 if (d40c->phy_chan == NULL) { in d40_resume()
1445 chan_err(d40c, "Channel is not allocated!\n"); in d40_resume()
1449 if (!d40c->busy) in d40_resume()
1452 spin_lock_irqsave(&d40c->lock, flags); in d40_resume()
1453 pm_runtime_get_sync(d40c->base->dev); in d40_resume()
1456 if (d40_residue(d40c) || d40_tx_is_linked(d40c)) in d40_resume()
1457 res = d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_resume()
1459 pm_runtime_mark_last_busy(d40c->base->dev); in d40_resume()
1460 pm_runtime_put_autosuspend(d40c->base->dev); in d40_resume()
1461 spin_unlock_irqrestore(&d40c->lock, flags); in d40_resume()
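d40_pause() and d40_resume() above share one bracket: take d40c->lock, pm_runtime_get_sync() the base device, issue the channel command, then pm_runtime_mark_last_busy()/pm_runtime_put_autosuspend() before unlocking. A minimal sketch of that bracket as a standalone helper; the name d40_cmd_with_pm() is hypothetical:

	/* Sketch: run one channel command inside the lock + runtime-PM bracket
	 * seen in d40_pause()/d40_resume() above
	 * (<linux/spinlock.h>, <linux/pm_runtime.h>). */
	static int d40_cmd_with_pm(struct d40_chan *d40c, enum d40_command cmd)
	{
		unsigned long flags;
		int res;

		spin_lock_irqsave(&d40c->lock, flags);
		pm_runtime_get_sync(d40c->base->dev);           /* keep the controller powered */

		res = d40_channel_execute_command(d40c, cmd);

		pm_runtime_mark_last_busy(d40c->base->dev);     /* re-arm the autosuspend timer */
		pm_runtime_put_autosuspend(d40c->base->dev);
		spin_unlock_irqrestore(&d40c->lock, flags);

		return res;
	}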
1467 struct d40_chan *d40c = container_of(tx->chan, in d40_tx_submit() local
1474 spin_lock_irqsave(&d40c->lock, flags); in d40_tx_submit()
1476 d40_desc_queue(d40c, d40d); in d40_tx_submit()
1477 spin_unlock_irqrestore(&d40c->lock, flags); in d40_tx_submit()
1482 static int d40_start(struct d40_chan *d40c) in d40_start() argument
1484 return d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_start()
1487 static struct d40_desc *d40_queue_start(struct d40_chan *d40c) in d40_queue_start() argument
1493 d40d = d40_first_queued(d40c); in d40_queue_start()
1496 if (!d40c->busy) { in d40_queue_start()
1497 d40c->busy = true; in d40_queue_start()
1498 pm_runtime_get_sync(d40c->base->dev); in d40_queue_start()
1505 d40_desc_submit(d40c, d40d); in d40_queue_start()
1508 d40_desc_load(d40c, d40d); in d40_queue_start()
1511 err = d40_start(d40c); in d40_queue_start()
1521 static void dma_tc_handle(struct d40_chan *d40c) in dma_tc_handle() argument
1526 d40d = d40_first_active_get(d40c); in dma_tc_handle()
1539 && !d40_tx_is_linked(d40c) in dma_tc_handle()
1540 && !d40_residue(d40c)) { in dma_tc_handle()
1541 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1542 d40_desc_load(d40c, d40d); in dma_tc_handle()
1543 (void) d40_start(d40c); in dma_tc_handle()
1549 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1552 d40_desc_load(d40c, d40d); in dma_tc_handle()
1554 (void) d40_start(d40c); in dma_tc_handle()
1558 if (d40_queue_start(d40c) == NULL) { in dma_tc_handle()
1559 d40c->busy = false; in dma_tc_handle()
1561 pm_runtime_mark_last_busy(d40c->base->dev); in dma_tc_handle()
1562 pm_runtime_put_autosuspend(d40c->base->dev); in dma_tc_handle()
1566 d40_desc_done(d40c, d40d); in dma_tc_handle()
1569 d40c->pending_tx++; in dma_tc_handle()
1570 tasklet_schedule(&d40c->tasklet); in dma_tc_handle()
1576 struct d40_chan *d40c = from_tasklet(d40c, t, tasklet); in dma_tasklet() local
1582 spin_lock_irqsave(&d40c->lock, flags); in dma_tasklet()
1585 d40d = d40_first_done(d40c); in dma_tasklet()
1588 d40d = d40_first_active_get(d40c); in dma_tasklet()
1600 if (d40c->pending_tx == 0) { in dma_tasklet()
1601 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1612 d40_desc_free(d40c, d40d); in dma_tasklet()
1615 d40_lcla_free_all(d40c, d40d); in dma_tasklet()
1616 list_add_tail(&d40d->node, &d40c->client); in dma_tasklet()
1621 d40c->pending_tx--; in dma_tasklet()
1623 if (d40c->pending_tx) in dma_tasklet()
1624 tasklet_schedule(&d40c->tasklet); in dma_tasklet()
1626 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1634 if (d40c->pending_tx > 0) in dma_tasklet()
1635 d40c->pending_tx--; in dma_tasklet()
1636 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1645 struct d40_chan *d40c; in d40_handle_interrupt() local
1670 d40c = base->lookup_phy_chans[idx]; in d40_handle_interrupt()
1672 d40c = base->lookup_log_chans[il[row].offset + idx]; in d40_handle_interrupt()
1674 if (!d40c) { in d40_handle_interrupt()
1685 spin_lock(&d40c->lock); in d40_handle_interrupt()
1688 dma_tc_handle(d40c); in d40_handle_interrupt()
1693 spin_unlock(&d40c->lock); in d40_handle_interrupt()
1701 static int d40_validate_conf(struct d40_chan *d40c, in d40_validate_conf() argument
1708 chan_err(d40c, "Invalid direction.\n"); in d40_validate_conf()
1712 if ((is_log && conf->dev_type > d40c->base->num_log_chans) || in d40_validate_conf()
1713 (!is_log && conf->dev_type > d40c->base->num_phy_chans) || in d40_validate_conf()
1715 chan_err(d40c, "Invalid device type (%d)\n", conf->dev_type); in d40_validate_conf()
1724 chan_err(d40c, "periph to periph not supported\n"); in d40_validate_conf()
1737 chan_err(d40c, "src (burst x width) != dst (burst x width)\n"); in d40_validate_conf()
1831 static int d40_allocate_channel(struct d40_chan *d40c, bool *first_phy_user) in d40_allocate_channel() argument
1833 int dev_type = d40c->dma_cfg.dev_type; in d40_allocate_channel()
1842 bool is_log = d40c->dma_cfg.mode == STEDMA40_MODE_LOGICAL; in d40_allocate_channel()
1844 phys = d40c->base->phy_res; in d40_allocate_channel()
1845 num_phy_chans = d40c->base->num_phy_chans; in d40_allocate_channel()
1847 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_allocate_channel()
1850 } else if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_allocate_channel()
1851 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1862 if (d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1864 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1865 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1879 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1892 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1893 d40c->log_num = D40_PHY_CHAN; in d40_allocate_channel()
1900 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1903 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1904 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1907 dev_err(chan2dev(d40c), in d40_allocate_channel()
1916 dev_err(chan2dev(d40c), in d40_allocate_channel()
1945 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1946 d40c->log_num = log_num; in d40_allocate_channel()
1950 d40c->base->lookup_log_chans[d40c->log_num] = d40c; in d40_allocate_channel()
1952 d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c; in d40_allocate_channel()
1958 static int d40_config_memcpy(struct d40_chan *d40c) in d40_config_memcpy() argument
1960 dma_cap_mask_t cap = d40c->chan.device->cap_mask; in d40_config_memcpy()
1963 d40c->dma_cfg = dma40_memcpy_conf_log; in d40_config_memcpy()
1964 d40c->dma_cfg.dev_type = dma40_memcpy_channels[d40c->chan.chan_id]; in d40_config_memcpy()
1966 d40_log_cfg(&d40c->dma_cfg, in d40_config_memcpy()
1967 &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_config_memcpy()
1971 d40c->dma_cfg = dma40_memcpy_conf_phy; in d40_config_memcpy()
1974 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_TIM_POS); in d40_config_memcpy()
1977 d40c->src_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
1978 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
1981 chan_err(d40c, "No memcpy\n"); in d40_config_memcpy()
1988 static int d40_free_dma(struct d40_chan *d40c) in d40_free_dma() argument
1992 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_free_dma()
1993 struct d40_phy_res *phy = d40c->phy_chan; in d40_free_dma()
1997 d40_term_all(d40c); in d40_free_dma()
2000 chan_err(d40c, "phy == null\n"); in d40_free_dma()
2006 chan_err(d40c, "channel already free\n"); in d40_free_dma()
2010 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_free_dma()
2011 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) in d40_free_dma()
2013 else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_free_dma()
2016 chan_err(d40c, "Unknown direction\n"); in d40_free_dma()
2020 pm_runtime_get_sync(d40c->base->dev); in d40_free_dma()
2021 res = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_free_dma()
2023 chan_err(d40c, "stop failed\n"); in d40_free_dma()
2027 d40_alloc_mask_free(phy, is_src, chan_is_logical(d40c) ? event : 0); in d40_free_dma()
2029 if (chan_is_logical(d40c)) in d40_free_dma()
2030 d40c->base->lookup_log_chans[d40c->log_num] = NULL; in d40_free_dma()
2032 d40c->base->lookup_phy_chans[phy->num] = NULL; in d40_free_dma()
2034 if (d40c->busy) { in d40_free_dma()
2035 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2036 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2039 d40c->busy = false; in d40_free_dma()
2040 d40c->phy_chan = NULL; in d40_free_dma()
2041 d40c->configured = false; in d40_free_dma()
2043 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2044 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2048 static bool d40_is_paused(struct d40_chan *d40c) in d40_is_paused() argument
2050 void __iomem *chanbase = chan_base(d40c); in d40_is_paused()
2055 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_is_paused()
2057 spin_lock_irqsave(&d40c->lock, flags); in d40_is_paused()
2059 if (chan_is_physical(d40c)) { in d40_is_paused()
2060 if (d40c->phy_chan->num % 2 == 0) in d40_is_paused()
2061 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in d40_is_paused()
2063 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in d40_is_paused()
2066 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in d40_is_paused()
2067 D40_CHAN_POS(d40c->phy_chan->num); in d40_is_paused()
2073 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_is_paused()
2074 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_is_paused()
2076 } else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_is_paused()
2079 chan_err(d40c, "Unknown direction\n"); in d40_is_paused()
2089 spin_unlock_irqrestore(&d40c->lock, flags); in d40_is_paused()
2096 struct d40_chan *d40c = in stedma40_residue() local
2101 spin_lock_irqsave(&d40c->lock, flags); in stedma40_residue()
2102 bytes_left = d40_residue(d40c); in stedma40_residue()
2103 spin_unlock_irqrestore(&d40c->lock, flags); in stedma40_residue()
2274 struct d40_chan *d40c = in stedma40_filter() local
2279 err = d40_validate_conf(d40c, info); in stedma40_filter()
2281 d40c->dma_cfg = *info; in stedma40_filter()
2283 err = d40_config_memcpy(d40c); in stedma40_filter()
2286 d40c->configured = true; in stedma40_filter()
2292 static void __d40_set_prio_rt(struct d40_chan *d40c, int dev_type, bool src) in __d40_set_prio_rt() argument
2294 bool realtime = d40c->dma_cfg.realtime; in __d40_set_prio_rt()
2295 bool highprio = d40c->dma_cfg.high_priority; in __d40_set_prio_rt()
2301 struct d40_gen_dmac *dmac = &d40c->base->gen_dmac; in __d40_set_prio_rt()
2312 if (!src && chan_is_logical(d40c)) in __d40_set_prio_rt()
2321 writel(bit, d40c->base->virtbase + prioreg + group * 4); in __d40_set_prio_rt()
2322 writel(bit, d40c->base->virtbase + rtreg + group * 4); in __d40_set_prio_rt()
2325 static void d40_set_prio_realtime(struct d40_chan *d40c) in d40_set_prio_realtime() argument
2327 if (d40c->base->rev < 3) in d40_set_prio_realtime()
2330 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_set_prio_realtime()
2331 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2332 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, true); in d40_set_prio_realtime()
2334 if ((d40c->dma_cfg.dir == DMA_MEM_TO_DEV) || in d40_set_prio_realtime()
2335 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2336 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, false); in d40_set_prio_realtime()
2392 struct d40_chan *d40c = in d40_alloc_chan_resources() local
2395 spin_lock_irqsave(&d40c->lock, flags); in d40_alloc_chan_resources()
2400 if (!d40c->configured) { in d40_alloc_chan_resources()
2401 err = d40_config_memcpy(d40c); in d40_alloc_chan_resources()
2403 chan_err(d40c, "Failed to configure memcpy channel\n"); in d40_alloc_chan_resources()
2408 err = d40_allocate_channel(d40c, &is_free_phy); in d40_alloc_chan_resources()
2410 chan_err(d40c, "Failed to allocate channel\n"); in d40_alloc_chan_resources()
2411 d40c->configured = false; in d40_alloc_chan_resources()
2415 pm_runtime_get_sync(d40c->base->dev); in d40_alloc_chan_resources()
2417 d40_set_prio_realtime(d40c); in d40_alloc_chan_resources()
2419 if (chan_is_logical(d40c)) { in d40_alloc_chan_resources()
2420 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_alloc_chan_resources()
2421 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2422 d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE; in d40_alloc_chan_resources()
2424 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2425 d40c->dma_cfg.dev_type * in d40_alloc_chan_resources()
2429 d40c->src_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2430 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2433 dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n", in d40_alloc_chan_resources()
2434 chan_is_logical(d40c) ? "logical" : "physical", in d40_alloc_chan_resources()
2435 d40c->phy_chan->num, in d40_alloc_chan_resources()
2436 d40c->dma_cfg.use_fixed_channel ? ", fixed" : ""); in d40_alloc_chan_resources()
2445 d40_config_write(d40c); in d40_alloc_chan_resources()
2447 pm_runtime_mark_last_busy(d40c->base->dev); in d40_alloc_chan_resources()
2448 pm_runtime_put_autosuspend(d40c->base->dev); in d40_alloc_chan_resources()
2449 spin_unlock_irqrestore(&d40c->lock, flags); in d40_alloc_chan_resources()
2455 struct d40_chan *d40c = in d40_free_chan_resources() local
2460 if (d40c->phy_chan == NULL) { in d40_free_chan_resources()
2461 chan_err(d40c, "Cannot free unallocated channel\n"); in d40_free_chan_resources()
2465 spin_lock_irqsave(&d40c->lock, flags); in d40_free_chan_resources()
2467 err = d40_free_dma(d40c); in d40_free_chan_resources()
2470 chan_err(d40c, "Failed to free channel\n"); in d40_free_chan_resources()
2471 spin_unlock_irqrestore(&d40c->lock, flags); in d40_free_chan_resources()
2541 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_tx_status() local
2544 if (d40c->phy_chan == NULL) { in d40_tx_status()
2545 chan_err(d40c, "Cannot read status of unallocated channel\n"); in d40_tx_status()
2553 if (d40_is_paused(d40c)) in d40_tx_status()
2561 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_issue_pending() local
2564 if (d40c->phy_chan == NULL) { in d40_issue_pending()
2565 chan_err(d40c, "Channel is not allocated!\n"); in d40_issue_pending()
2569 spin_lock_irqsave(&d40c->lock, flags); in d40_issue_pending()
2571 list_splice_tail_init(&d40c->pending_queue, &d40c->queue); in d40_issue_pending()
2574 if (!d40c->busy) in d40_issue_pending()
2575 (void) d40_queue_start(d40c); in d40_issue_pending()
2577 spin_unlock_irqrestore(&d40c->lock, flags); in d40_issue_pending()
2583 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_terminate_all() local
2586 if (d40c->phy_chan == NULL) { in d40_terminate_all()
2587 chan_err(d40c, "Channel is not allocated!\n"); in d40_terminate_all()
2591 spin_lock_irqsave(&d40c->lock, flags); in d40_terminate_all()
2593 pm_runtime_get_sync(d40c->base->dev); in d40_terminate_all()
2594 ret = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_terminate_all()
2596 chan_err(d40c, "Failed to stop channel\n"); in d40_terminate_all()
2598 d40_term_all(d40c); in d40_terminate_all()
2599 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2600 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2601 if (d40c->busy) { in d40_terminate_all()
2602 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2603 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2605 d40c->busy = false; in d40_terminate_all()
2607 spin_unlock_irqrestore(&d40c->lock, flags); in d40_terminate_all()
2612 dma40_config_to_halfchannel(struct d40_chan *d40c, in dma40_config_to_halfchannel() argument
2618 if (chan_is_logical(d40c)) { in dma40_config_to_halfchannel()
2647 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config() local
2649 memcpy(&d40c->slave_config, config, sizeof(*config)); in d40_set_runtime_config()
2659 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config_write() local
2660 struct stedma40_chan_cfg *cfg = &d40c->dma_cfg; in d40_set_runtime_config_write()
2666 if (d40c->phy_chan == NULL) { in d40_set_runtime_config_write()
2667 chan_err(d40c, "Channel is not allocated!\n"); in d40_set_runtime_config_write()
2680 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2696 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2708 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2715 dev_err(d40c->base->dev, "no address supplied\n"); in d40_set_runtime_config_write()
2720 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2749 ret = dma40_config_to_halfchannel(d40c, &cfg->src_info, in d40_set_runtime_config_write()
2754 ret = dma40_config_to_halfchannel(d40c, &cfg->dst_info, in d40_set_runtime_config_write()
2760 if (chan_is_logical(d40c)) in d40_set_runtime_config_write()
2761 d40_log_cfg(cfg, &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_set_runtime_config_write()
2763 d40_phy_cfg(cfg, &d40c->src_def_cfg, &d40c->dst_def_cfg); in d40_set_runtime_config_write()
2766 d40c->runtime_addr = config_addr; in d40_set_runtime_config_write()
2767 d40c->runtime_direction = direction; in d40_set_runtime_config_write()
2768 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2786 struct d40_chan *d40c; in d40_chan_init() local
2791 d40c = &chans[i]; in d40_chan_init()
2792 d40c->base = base; in d40_chan_init()
2793 d40c->chan.device = dma; in d40_chan_init()
2795 spin_lock_init(&d40c->lock); in d40_chan_init()
2797 d40c->log_num = D40_PHY_CHAN; in d40_chan_init()
2799 INIT_LIST_HEAD(&d40c->done); in d40_chan_init()
2800 INIT_LIST_HEAD(&d40c->active); in d40_chan_init()
2801 INIT_LIST_HEAD(&d40c->queue); in d40_chan_init()
2802 INIT_LIST_HEAD(&d40c->pending_queue); in d40_chan_init()
2803 INIT_LIST_HEAD(&d40c->client); in d40_chan_init()
2804 INIT_LIST_HEAD(&d40c->prepare_queue); in d40_chan_init()
2806 tasklet_setup(&d40c->tasklet, dma_tasklet); in d40_chan_init()
2808 list_add_tail(&d40c->chan.device_node, in d40_chan_init()
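d40_chan_init() above wires each channel: back-pointer to the base, lock, empty descriptor lists, tasklet, then the channel is appended to the dmaengine device. A minimal sketch of one iteration of that per-channel setup; struct d40_base is an assumed type name for d40c->base, and the &dma->channels target follows the standard struct dma_device layout rather than anything shown in the listing:

	/* Sketch: per-channel setup mirroring the d40_chan_init() fragments above. */
	static void d40_one_chan_setup(struct d40_base *base, struct dma_device *dma,
				       struct d40_chan *d40c)
	{
		d40c->base = base;
		d40c->chan.device = dma;

		spin_lock_init(&d40c->lock);
		d40c->log_num = D40_PHY_CHAN;           /* no logical channel assigned yet */

		INIT_LIST_HEAD(&d40c->done);
		INIT_LIST_HEAD(&d40c->active);
		INIT_LIST_HEAD(&d40c->queue);
		INIT_LIST_HEAD(&d40c->pending_queue);
		INIT_LIST_HEAD(&d40c->client);
		INIT_LIST_HEAD(&d40c->prepare_queue);

		tasklet_setup(&d40c->tasklet, dma_tasklet);     /* completion bottom half */

		list_add_tail(&d40c->chan.device_node, &dma->channels);
	}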