Lines matching refs:chan
61 axi_chan_iowrite32(struct axi_dma_chan *chan, u32 reg, u32 val) in axi_chan_iowrite32() argument
63 iowrite32(val, chan->chan_regs + reg); in axi_chan_iowrite32()
66 static inline u32 axi_chan_ioread32(struct axi_dma_chan *chan, u32 reg) in axi_chan_ioread32() argument
68 return ioread32(chan->chan_regs + reg); in axi_chan_ioread32()
72 axi_chan_iowrite64(struct axi_dma_chan *chan, u32 reg, u64 val) in axi_chan_iowrite64() argument
78 iowrite32(lower_32_bits(val), chan->chan_regs + reg); in axi_chan_iowrite64()
79 iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4); in axi_chan_iowrite64()
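
The function and macro names throughout this listing match the Synopsys DesignWare AXI DMA controller driver (dw-axi-dmac) in the Linux kernel. The two 32-bit accessors above are thin wrappers around iowrite32()/ioread32() at an offset into the channel's MMIO window; the 64-bit variant splits the value into two 32-bit writes, low word at reg and high word at reg + 4, which is how wide registers such as CH_LLP are exposed. A compilable model of that split, with plain memory standing in for the MMIO window and a made-up register offset:

#include <stdint.h>
#include <stdio.h>

/* Plain-memory stand-in for the kernel's iowrite32(). */
static void model_iowrite32(uint32_t val, volatile uint32_t *addr)
{
        *addr = val;
}

/* Mirrors axi_chan_iowrite64(): low half first, high half at reg + 4. */
static void model_iowrite64(volatile uint32_t *regs, unsigned int reg,
                            uint64_t val)
{
        model_iowrite32((uint32_t)val, regs + reg / 4);
        model_iowrite32((uint32_t)(val >> 32), regs + reg / 4 + 1);
}

int main(void)
{
        uint32_t regs[8] = { 0 };

        model_iowrite64(regs, 0x10, 0x11223344aabbccddULL); /* 0x10: made up */
        printf("lo=%08x hi=%08x\n", regs[4], regs[5]); /* aabbccdd 11223344 */
        return 0;
}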
118 static inline void axi_chan_irq_disable(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_disable() argument
123 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, DWAXIDMAC_IRQ_NONE); in axi_chan_irq_disable()
125 val = axi_chan_ioread32(chan, CH_INTSTATUS_ENA); in axi_chan_irq_disable()
127 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, val); in axi_chan_irq_disable()
131 static inline void axi_chan_irq_set(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_set() argument
133 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, irq_mask); in axi_chan_irq_set()
136 static inline void axi_chan_irq_sig_set(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_sig_set() argument
138 axi_chan_iowrite32(chan, CH_INTSIGNAL_ENA, irq_mask); in axi_chan_irq_sig_set()
141 static inline void axi_chan_irq_clear(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_clear() argument
143 axi_chan_iowrite32(chan, CH_INTCLEAR, irq_mask); in axi_chan_irq_clear()
146 static inline u32 axi_chan_irq_read(struct axi_dma_chan *chan) in axi_chan_irq_read() argument
148 return axi_chan_ioread32(chan, CH_INTSTATUS); in axi_chan_irq_read()
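
Five small helpers cover the per-channel interrupt plumbing: CH_INTSTATUS_ENA gates which events get latched, CH_INTSIGNAL_ENA gates which latched events actually raise the interrupt line, CH_INTCLEAR acknowledges them, and CH_INTSTATUS is the read side. Note the shortcut in axi_chan_irq_disable(): masking everything is a single write of DWAXIDMAC_IRQ_NONE, while a partial mask takes a read-modify-write. A self-contained model of that decision (the mask values are stand-ins, not the hardware's):

#include <assert.h>
#include <stdint.h>

#define IRQ_NONE 0x0u           /* stand-ins for DWAXIDMAC_IRQ_NONE/_ALL */
#define IRQ_ALL  0xffffffffu

static uint32_t irq_disable_mask(uint32_t ena, uint32_t irq_mask)
{
        if (irq_mask == IRQ_ALL)
                return IRQ_NONE;        /* one write, no read back */
        return ena & ~irq_mask;         /* read-modify-write path */
}

int main(void)
{
        assert(irq_disable_mask(0xff, IRQ_ALL) == IRQ_NONE);
        assert(irq_disable_mask(0xff, 0x0f) == 0xf0);
        return 0;
}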
151 static inline void axi_chan_disable(struct axi_dma_chan *chan) in axi_chan_disable() argument
155 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_disable()
156 val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT); in axi_chan_disable()
157 val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT; in axi_chan_disable()
158 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_disable()
161 static inline void axi_chan_enable(struct axi_dma_chan *chan) in axi_chan_enable() argument
165 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_enable()
166 val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT | in axi_chan_enable()
167 BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT; in axi_chan_enable()
168 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_enable()
171 static inline bool axi_chan_is_hw_enable(struct axi_dma_chan *chan) in axi_chan_is_hw_enable() argument
175 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_is_hw_enable()
177 return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT)); in axi_chan_is_hw_enable()
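
DMAC_CHEN pairs every channel-enable bit with a write-enable bit: the controller latches enable bit i only when the matching bit above DMAC_CHAN_EN_WE_SHIFT is set in the same write, so one channel can be started or stopped without corrupting its neighbours. A compilable model of both updates (the shift values are illustrative assumptions, not the hardware's):

#include <assert.h>
#include <stdint.h>

#define CHAN_EN_SHIFT    0      /* illustrative field offsets */
#define CHAN_EN_WE_SHIFT 8

static uint32_t chen_enable(uint32_t chen, unsigned int id)
{
        return chen | (1u << id) << CHAN_EN_SHIFT |
                      (1u << id) << CHAN_EN_WE_SHIFT;
}

static uint32_t chen_disable(uint32_t chen, unsigned int id)
{
        chen &= ~((1u << id) << CHAN_EN_SHIFT);         /* clear EN bit */
        return chen | (1u << id) << CHAN_EN_WE_SHIFT;   /* arm its WE bit */
}

int main(void)
{
        assert(chen_enable(0, 2) == ((1u << 2) | (1u << 10)));
        assert(chen_disable(1u << 2, 2) == (1u << 10));
        return 0;
}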
185 axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL); in axi_dma_hw_init()
186 axi_chan_disable(&chip->dw->chan[i]); in axi_dma_hw_init()
190 static u32 axi_chan_get_xfer_width(struct axi_dma_chan *chan, dma_addr_t src, in axi_chan_get_xfer_width() argument
193 u32 max_width = chan->chip->dw->hdata->m_data_width; in axi_chan_get_xfer_width()
198 static inline const char *axi_chan_name(struct axi_dma_chan *chan) in axi_chan_name() argument
200 return dma_chan_name(&chan->vc.chan); in axi_chan_name()
220 static struct axi_dma_lli *axi_desc_get(struct axi_dma_chan *chan, in axi_desc_get() argument
226 lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys); in axi_desc_get()
228 dev_err(chan2dev(chan), "%s: not enough descriptors available\n", in axi_desc_get()
229 axi_chan_name(chan)); in axi_desc_get()
233 atomic_inc(&chan->descs_allocated); in axi_desc_get()
241 struct axi_dma_chan *chan = desc->chan; in axi_desc_put() local
242 int count = atomic_read(&chan->descs_allocated); in axi_desc_put()
248 dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp); in axi_desc_put()
253 atomic_sub(descs_put, &chan->descs_allocated); in axi_desc_put()
254 dev_vdbg(chan2dev(chan), "%s: %d descs put, %d still allocated\n", in axi_desc_put()
255 axi_chan_name(chan), descs_put, in axi_desc_put()
256 atomic_read(&chan->descs_allocated)); in axi_desc_put()
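
axi_desc_get()/axi_desc_put() manage the hardware link-list items (LLIs) from a per-channel dma_pool: allocation uses GFP_NOWAIT because the prep callbacks may run in atomic context, and an atomic counter tracks how many LLIs a channel has outstanding for the debug message above. A kernel-style sketch of the pairing, with struct layouts inferred from the listing:

/* Sketch only; field names follow the listing. */
static struct axi_dma_lli *desc_get(struct axi_dma_chan *chan, dma_addr_t *phys)
{
        struct axi_dma_lli *lli;

        /* GFP_NOWAIT: prep_* may be called with spinlocks held */
        lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, phys);
        if (!lli)
                return NULL;

        atomic_inc(&chan->descs_allocated);
        return lli;
}

static void desc_put_one(struct axi_dma_chan *chan, struct axi_dma_hw_desc *hw)
{
        dma_pool_free(chan->desc_pool, hw->lli, hw->llp);
        atomic_dec(&chan->descs_allocated);
}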
268 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_tx_status() local
282 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_tx_status()
284 vdesc = vchan_find_desc(&chan->vc, cookie); in dma_chan_tx_status()
293 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_tx_status()
304 static void write_chan_llp(struct axi_dma_chan *chan, dma_addr_t adr) in write_chan_llp() argument
306 axi_chan_iowrite64(chan, CH_LLP, adr); in write_chan_llp()
309 static void dw_axi_dma_set_byte_halfword(struct axi_dma_chan *chan, bool set) in dw_axi_dma_set_byte_halfword() argument
314 if (!chan->chip->apb_regs) { in dw_axi_dma_set_byte_halfword()
315 dev_dbg(chan->chip->dev, "apb_regs not initialized\n"); in dw_axi_dma_set_byte_halfword()
319 reg_width = __ffs(chan->config.dst_addr_width); in dw_axi_dma_set_byte_halfword()
323 val = ioread32(chan->chip->apb_regs + offset); in dw_axi_dma_set_byte_halfword()
326 val |= BIT(chan->id); in dw_axi_dma_set_byte_halfword()
328 val &= ~BIT(chan->id); in dw_axi_dma_set_byte_halfword()
330 iowrite32(val, chan->chip->apb_regs + offset); in dw_axi_dma_set_byte_halfword()
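
dw_axi_dma_set_byte_halfword() only matters on parts that expose the extra APB register block: for 8- and 16-bit destination widths it sets or clears the channel's bit in a byte- or halfword-write-enable register. A kernel-style sketch of the width dispatch; the offset macro names are assumed from the driver's header, since the listing elides them:

/* Sketch: pick the APB write-enable register from the configured
 * destination width; macro names are assumptions. */
u32 offset, val, reg_width = __ffs(chan->config.dst_addr_width);

if (reg_width == DWAXIDMAC_TRANS_WIDTH_8)
        offset = DMAC_APB_BYTE_WR_CH_EN;
else if (reg_width == DWAXIDMAC_TRANS_WIDTH_16)
        offset = DMAC_APB_HALFWORD_WR_CH_EN;
else
        return;                         /* wider accesses need no help */

val = ioread32(chan->chip->apb_regs + offset);
if (set)
        val |= BIT(chan->id);
else
        val &= ~BIT(chan->id);
iowrite32(val, chan->chip->apb_regs + offset);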
333 static void axi_chan_block_xfer_start(struct axi_dma_chan *chan, in axi_chan_block_xfer_start() argument
336 u32 priority = chan->chip->dw->hdata->priority[chan->id]; in axi_chan_block_xfer_start()
340 if (unlikely(axi_chan_is_hw_enable(chan))) { in axi_chan_block_xfer_start()
341 dev_err(chan2dev(chan), "%s is non-idle!\n", in axi_chan_block_xfer_start()
342 axi_chan_name(chan)); in axi_chan_block_xfer_start()
347 axi_dma_enable(chan->chip); in axi_chan_block_xfer_start()
351 axi_chan_iowrite32(chan, CH_CFG_L, reg); in axi_chan_block_xfer_start()
357 switch (chan->direction) { in axi_chan_block_xfer_start()
359 dw_axi_dma_set_byte_halfword(chan, true); in axi_chan_block_xfer_start()
360 reg |= (chan->config.device_fc ? in axi_chan_block_xfer_start()
364 if (chan->chip->apb_regs) in axi_chan_block_xfer_start()
365 reg |= (chan->id << CH_CFG_H_DST_PER_POS); in axi_chan_block_xfer_start()
368 reg |= (chan->config.device_fc ? in axi_chan_block_xfer_start()
372 if (chan->chip->apb_regs) in axi_chan_block_xfer_start()
373 reg |= (chan->id << CH_CFG_H_SRC_PER_POS); in axi_chan_block_xfer_start()
378 axi_chan_iowrite32(chan, CH_CFG_H, reg); in axi_chan_block_xfer_start()
380 write_chan_llp(chan, first->hw_desc[0].llp | lms); in axi_chan_block_xfer_start()
383 axi_chan_irq_sig_set(chan, irq_mask); in axi_chan_block_xfer_start()
387 axi_chan_irq_set(chan, irq_mask); in axi_chan_block_xfer_start()
389 axi_chan_enable(chan); in axi_chan_block_xfer_start()
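
The ordering in axi_chan_block_xfer_start() is the important part: refuse to touch a live channel, enable the controller globally, program the low and high config words (flow control, and the per-channel handshake interface when the APB block exists), point CH_LLP at the first LLI, unmask interrupts, and only then set the channel-enable bit. Condensed from the listing, with cfg_lo/cfg_hi standing for the values assembled in the elided lines:

if (unlikely(axi_chan_is_hw_enable(chan)))      /* 1. must be idle */
        return;
axi_dma_enable(chan->chip);                     /* 2. global DMAC enable */
axi_chan_iowrite32(chan, CH_CFG_L, cfg_lo);     /* 3. priority etc. */
axi_chan_iowrite32(chan, CH_CFG_H, cfg_hi);     /* 4. direction, flow ctl */
write_chan_llp(chan, first->hw_desc[0].llp | lms); /* 5. first LLI + master */
axi_chan_irq_sig_set(chan, irq_mask);           /* 6. route to IRQ line */
axi_chan_irq_set(chan, irq_mask);               /* 7. latch events */
axi_chan_enable(chan);                          /* 8. go */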
392 static void axi_chan_start_first_queued(struct axi_dma_chan *chan) in axi_chan_start_first_queued() argument
397 vd = vchan_next_desc(&chan->vc); in axi_chan_start_first_queued()
402 dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan), in axi_chan_start_first_queued()
404 axi_chan_block_xfer_start(chan, desc); in axi_chan_start_first_queued()
409 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_issue_pending() local
412 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_issue_pending()
413 if (vchan_issue_pending(&chan->vc)) in dma_chan_issue_pending()
414 axi_chan_start_first_queued(chan); in dma_chan_issue_pending()
415 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_issue_pending()
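
Submission follows the stock virt-dma pattern: under vc.lock, vchan_issue_pending() moves submitted descriptors onto the issued list, and the hardware is kicked only when something is actually pending; axi_chan_start_first_queued() then pulls the head of that list with vchan_next_desc(). A minimal sketch of the start side:

/* Sketch: called with chan->vc.lock held. */
static void start_first_queued(struct axi_dma_chan *chan)
{
        struct virt_dma_desc *vd;

        vd = vchan_next_desc(&chan->vc);        /* head of the issued list */
        if (!vd)
                return;                         /* nothing queued */

        axi_chan_block_xfer_start(chan, vd_to_axi_desc(vd));
}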
420 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_synchronize() local
422 vchan_synchronize(&chan->vc); in dw_axi_dma_synchronize()
427 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_alloc_chan_resources() local
430 if (axi_chan_is_hw_enable(chan)) { in dma_chan_alloc_chan_resources()
431 dev_err(chan2dev(chan), "%s is non-idle!\n", in dma_chan_alloc_chan_resources()
432 axi_chan_name(chan)); in dma_chan_alloc_chan_resources()
437 chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)), in dma_chan_alloc_chan_resources()
438 chan->chip->dev, in dma_chan_alloc_chan_resources()
441 if (!chan->desc_pool) { in dma_chan_alloc_chan_resources()
442 dev_err(chan2dev(chan), "No memory for descriptors\n"); in dma_chan_alloc_chan_resources()
445 dev_vdbg(dchan2dev(dchan), "%s: allocating\n", axi_chan_name(chan)); in dma_chan_alloc_chan_resources()
447 pm_runtime_get(chan->chip->dev); in dma_chan_alloc_chan_resources()
454 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_free_chan_resources() local
457 if (axi_chan_is_hw_enable(chan)) in dma_chan_free_chan_resources()
459 axi_chan_name(chan)); in dma_chan_free_chan_resources()
461 axi_chan_disable(chan); in dma_chan_free_chan_resources()
462 axi_chan_irq_disable(chan, DWAXIDMAC_IRQ_ALL); in dma_chan_free_chan_resources()
464 vchan_free_chan_resources(&chan->vc); in dma_chan_free_chan_resources()
466 dma_pool_destroy(chan->desc_pool); in dma_chan_free_chan_resources()
467 chan->desc_pool = NULL; in dma_chan_free_chan_resources()
470 axi_chan_name(chan), atomic_read(&chan->descs_allocated)); in dma_chan_free_chan_resources()
472 pm_runtime_put(chan->chip->dev); in dma_chan_free_chan_resources()
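
Channel resource handling is symmetric: allocation creates a dma_pool sized for one LLI and takes a runtime-PM reference; teardown disables the channel and its interrupts, lets virt-dma drain outstanding callbacks, destroys the pool, and drops the reference. A sketch of the pairing; the pool's size and alignment arguments are assumptions, since the listing elides them:

static int alloc_resources(struct axi_dma_chan *chan)
{
        /* 64-byte alignment is an assumption for illustration */
        chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)),
                                          chan->chip->dev,
                                          sizeof(struct axi_dma_lli), 64, 0);
        if (!chan->desc_pool)
                return -ENOMEM;

        pm_runtime_get(chan->chip->dev);
        return 0;
}

static void free_resources(struct axi_dma_chan *chan)
{
        axi_chan_disable(chan);
        axi_chan_irq_disable(chan, DWAXIDMAC_IRQ_ALL);
        vchan_free_chan_resources(&chan->vc);   /* waits for callbacks */
        dma_pool_destroy(chan->desc_pool);
        chan->desc_pool = NULL;
        pm_runtime_put(chan->chip->dev);
}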
475 static void dw_axi_dma_set_hw_channel(struct axi_dma_chan *chan, bool set) in dw_axi_dma_set_hw_channel() argument
477 struct axi_dma_chip *chip = chan->chip; in dw_axi_dma_set_hw_channel()
491 val = chan->hw_handshake_num; in dw_axi_dma_set_hw_channel()
501 (chan->id * DMA_APB_HS_SEL_BIT_SIZE)); in dw_axi_dma_set_hw_channel()
502 reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE)); in dw_axi_dma_set_hw_channel()
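
dw_axi_dma_set_hw_channel() routes a device's handshake number to a channel through an APB mux register that packs one fixed-width field per channel: the channel's field is masked out, then the new handshake value is OR-ed in at id * DMA_APB_HS_SEL_BIT_SIZE. A compilable model of that field update (the field width and mask are illustrative):

#include <assert.h>
#include <stdint.h>

#define HS_SEL_BIT_SIZE 8       /* per-channel field width; illustrative */
#define HS_SEL_MASK     0xffu

static uint32_t hs_sel_update(uint32_t reg, unsigned int id, uint32_t hs_num)
{
        reg &= ~(HS_SEL_MASK << (id * HS_SEL_BIT_SIZE)); /* clear the field */
        return reg | (hs_num << (id * HS_SEL_BIT_SIZE)); /* insert new value */
}

int main(void)
{
        assert(hs_sel_update(0xffffffff, 1, 0x05) == 0xffff05ff);
        return 0;
}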
550 if (desc->chan->chip->dw->hdata->nr_masters > 1) in set_desc_dest_master()
558 static int dw_axi_dma_set_hw_desc(struct axi_dma_chan *chan, in dw_axi_dma_set_hw_desc() argument
562 unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width); in dw_axi_dma_set_hw_desc()
571 axi_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in dw_axi_dma_set_hw_desc()
578 dev_err(chan->chip->dev, "invalid buffer alignment\n"); in dw_axi_dma_set_hw_desc()
582 switch (chan->direction) { in dw_axi_dma_set_hw_desc()
584 reg_width = __ffs(chan->config.dst_addr_width); in dw_axi_dma_set_hw_desc()
585 device_addr = chan->config.dst_addr; in dw_axi_dma_set_hw_desc()
593 reg_width = __ffs(chan->config.src_addr_width); in dw_axi_dma_set_hw_desc()
594 device_addr = chan->config.src_addr; in dw_axi_dma_set_hw_desc()
608 hw_desc->lli = axi_desc_get(chan, &hw_desc->llp); in dw_axi_dma_set_hw_desc()
614 if (chan->chip->dw->hdata->restrict_axi_burst_len) { in dw_axi_dma_set_hw_desc()
615 burst_len = chan->chip->dw->hdata->axi_rw_burst_len; in dw_axi_dma_set_hw_desc()
623 if (chan->direction == DMA_MEM_TO_DEV) { in dw_axi_dma_set_hw_desc()
643 static size_t calculate_block_len(struct axi_dma_chan *chan, in calculate_block_len() argument
650 axi_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in calculate_block_len()
654 data_width = BIT(chan->chip->dw->hdata->m_data_width); in calculate_block_len()
662 reg_width = __ffs(chan->config.src_addr_width); in calculate_block_len()
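
calculate_block_len() converts the controller's per-channel block-size limit, counted in transfer units, into bytes: the unit is the configured register width for slave transfers (or the master data width otherwise), so one LLI's capacity is the block size shifted left by the log2 width. A runnable model:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Max bytes one LLI can carry: block size in transfer units times the
 * per-transfer width (given as log2 of the byte count). */
static size_t max_block_bytes(uint32_t axi_block_ts, unsigned int width_log2)
{
        return (size_t)axi_block_ts << width_log2;
}

int main(void)
{
        /* 4096-unit blocks of 32-bit words -> 16 KiB per LLI */
        assert(max_block_bytes(4096, 2) == 16384);
        return 0;
}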
678 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_prep_cyclic() local
693 axi_block_len = calculate_block_len(chan, dma_addr, buf_len, direction); in dw_axi_dma_chan_prep_cyclic()
706 chan->direction = direction; in dw_axi_dma_chan_prep_cyclic()
707 desc->chan = chan; in dw_axi_dma_chan_prep_cyclic()
708 chan->cyclic = true; in dw_axi_dma_chan_prep_cyclic()
715 status = dw_axi_dma_set_hw_desc(chan, hw_desc, src_addr, in dw_axi_dma_chan_prep_cyclic()
738 dw_axi_dma_set_hw_channel(chan, true); in dw_axi_dma_chan_prep_cyclic()
740 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dw_axi_dma_chan_prep_cyclic()
755 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_prep_slave_sg() local
775 axi_block_len = calculate_block_len(chan, mem, len, direction); in dw_axi_dma_chan_prep_slave_sg()
786 desc->chan = chan; in dw_axi_dma_chan_prep_slave_sg()
788 chan->direction = direction; in dw_axi_dma_chan_prep_slave_sg()
798 status = dw_axi_dma_set_hw_desc(chan, hw_desc, mem, segment_len); in dw_axi_dma_chan_prep_slave_sg()
818 dw_axi_dma_set_hw_channel(chan, true); in dw_axi_dma_chan_prep_slave_sg()
820 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dw_axi_dma_chan_prep_slave_sg()
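
Both prep paths share one shape: bound the segment size via calculate_block_len(), allocate a descriptor with one hw_desc per segment, record the direction on the channel, program each segment with dw_axi_dma_set_hw_desc(), claim the hardware handshake with dw_axi_dma_set_hw_channel(), and hand the result to vchan_tx_prep(). A condensed kernel-style sketch of the scatter-gather case; splitting oversized segments against axi_block_len is elided:

/* Sketch of dw_axi_dma_chan_prep_slave_sg()'s core loop. */
desc->chan = chan;
chan->direction = direction;

for_each_sg(sgl, sg, sg_len, i) {
        mem = sg_dma_address(sg);
        len = sg_dma_len(sg);

        if (dw_axi_dma_set_hw_desc(chan, &desc->hw_desc[i], mem, len)) {
                axi_desc_put(desc);     /* frees the partial chain */
                return NULL;
        }
}

dw_axi_dma_set_hw_channel(chan, true);
return vchan_tx_prep(&chan->vc, &desc->vd, flags);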
833 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_prep_dma_memcpy() local
841 dev_dbg(chan2dev(chan), "%s: memcpy: src: %pad dst: %pad length: %zd flags: %#lx", in dma_chan_prep_dma_memcpy()
842 axi_chan_name(chan), &src_adr, &dst_adr, len, flags); in dma_chan_prep_dma_memcpy()
844 max_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in dma_chan_prep_dma_memcpy()
845 xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, len); in dma_chan_prep_dma_memcpy()
851 desc->chan = chan; in dma_chan_prep_dma_memcpy()
863 xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, xfer_len); in dma_chan_prep_dma_memcpy()
876 hw_desc->lli = axi_desc_get(chan, &hw_desc->llp); in dma_chan_prep_dma_memcpy()
885 if (chan->chip->dw->hdata->restrict_axi_burst_len) { in dma_chan_prep_dma_memcpy()
886 u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len; in dma_chan_prep_dma_memcpy()
924 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dma_chan_prep_dma_memcpy()
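
dma_chan_prep_dma_memcpy() chunks len by repeatedly picking the widest transfer the current addresses allow: axi_chan_get_xfer_width() is in effect the lowest set bit of src | dst | len, capped by the master data width, and each chunk is further clamped to max_block_ts units of that width. A runnable model of the width pick:

#include <assert.h>
#include <stdint.h>

/* Widest power-of-two width (log2 bytes) that src, dst and len are all
 * aligned to, capped by the master interface width. Models
 * axi_chan_get_xfer_width(); requires len > 0. */
static uint32_t xfer_width(uint64_t src, uint64_t dst, uint64_t len,
                           uint32_t max_width)
{
        uint32_t width = __builtin_ctzll(src | dst | len);

        return width < max_width ? width : max_width;
}

int main(void)
{
        /* everything 8-byte aligned, 64-bit bus: full-width bursts */
        assert(xfer_width(0x1000, 0x2000, 256, 3) == 3);
        /* an odd length forces byte-wide transfers */
        assert(xfer_width(0x1000, 0x2000, 255, 3) == 0);
        return 0;
}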
935 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_slave_config() local
937 memcpy(&chan->config, config, sizeof(*config)); in dw_axi_dma_chan_slave_config()
942 static void axi_chan_dump_lli(struct axi_dma_chan *chan, in axi_chan_dump_lli() argument
946 dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n"); in axi_chan_dump_lli()
950 dev_err(dchan2dev(&chan->vc.chan), in axi_chan_dump_lli()
960 static void axi_chan_list_dump_lli(struct axi_dma_chan *chan, in axi_chan_list_dump_lli() argument
963 int count = atomic_read(&chan->descs_allocated); in axi_chan_list_dump_lli()
967 axi_chan_dump_lli(chan, &desc_head->hw_desc[i]); in axi_chan_list_dump_lli()
970 static noinline void axi_chan_handle_err(struct axi_dma_chan *chan, u32 status) in axi_chan_handle_err() argument
975 spin_lock_irqsave(&chan->vc.lock, flags); in axi_chan_handle_err()
977 axi_chan_disable(chan); in axi_chan_handle_err()
980 vd = vchan_next_desc(&chan->vc); in axi_chan_handle_err()
982 dev_err(chan2dev(chan), "BUG: %s, IRQ with no descriptors\n", in axi_chan_handle_err()
983 axi_chan_name(chan)); in axi_chan_handle_err()
990 dev_err(chan2dev(chan), in axi_chan_handle_err()
992 axi_chan_name(chan), vd->tx.cookie, status); in axi_chan_handle_err()
993 axi_chan_list_dump_lli(chan, vd_to_axi_desc(vd)); in axi_chan_handle_err()
998 axi_chan_start_first_queued(chan); in axi_chan_handle_err()
1001 spin_unlock_irqrestore(&chan->vc.lock, flags); in axi_chan_handle_err()
1004 static void axi_chan_block_xfer_complete(struct axi_dma_chan *chan) in axi_chan_block_xfer_complete() argument
1006 int count = atomic_read(&chan->descs_allocated); in axi_chan_block_xfer_complete()
1014 spin_lock_irqsave(&chan->vc.lock, flags); in axi_chan_block_xfer_complete()
1015 if (unlikely(axi_chan_is_hw_enable(chan))) { in axi_chan_block_xfer_complete()
1016 dev_err(chan2dev(chan), "BUG: %s caught DWAXIDMAC_IRQ_DMA_TRF, but channel not idle!\n", in axi_chan_block_xfer_complete()
1017 axi_chan_name(chan)); in axi_chan_block_xfer_complete()
1018 axi_chan_disable(chan); in axi_chan_block_xfer_complete()
1022 vd = vchan_next_desc(&chan->vc); in axi_chan_block_xfer_complete()
1024 dev_err(chan2dev(chan), "BUG: %s, IRQ with no descriptors\n", in axi_chan_block_xfer_complete()
1025 axi_chan_name(chan)); in axi_chan_block_xfer_complete()
1029 if (chan->cyclic) { in axi_chan_block_xfer_complete()
1032 llp = lo_hi_readq(chan->chan_regs + CH_LLP); in axi_chan_block_xfer_complete()
1036 axi_chan_irq_clear(chan, hw_desc->lli->status_lo); in axi_chan_block_xfer_complete()
1046 axi_chan_enable(chan); in axi_chan_block_xfer_complete()
1054 axi_chan_start_first_queued(chan); in axi_chan_block_xfer_complete()
1058 spin_unlock_irqrestore(&chan->vc.lock, flags); in axi_chan_block_xfer_complete()
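
For cyclic transfers the completion handler cannot simply pop the descriptor, because it never retires: instead it reads CH_LLP back with lo_hi_readq() to learn which LLI the controller is fetching, acknowledges that block's status, and restarts the channel if it raced to a stop. A sketch of the branch, with the bookkeeping in the elided lines approximated:

/* Sketch: runs under chan->vc.lock in the completion path. */
if (chan->cyclic) {
        u64 llp = lo_hi_readq(chan->chan_regs + CH_LLP);

        for (i = 0; i < count; i++) {
                hw_desc = &desc->hw_desc[i];
                if (hw_desc->llp == llp) {
                        axi_chan_irq_clear(chan, hw_desc->lli->status_lo);
                        /* ... mark block i done, fire the period
                         * callback via vchan_cyclic_callback() ... */
                        break;
                }
        }

        if (unlikely(!axi_chan_is_hw_enable(chan)))
                axi_chan_enable(chan);  /* keep the ring running */
}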
1065 struct axi_dma_chan *chan; in dw_axi_dma_interrupt() local
1074 chan = &dw->chan[i]; in dw_axi_dma_interrupt()
1075 status = axi_chan_irq_read(chan); in dw_axi_dma_interrupt()
1076 axi_chan_irq_clear(chan, status); in dw_axi_dma_interrupt()
1079 axi_chan_name(chan), i, status); in dw_axi_dma_interrupt()
1082 axi_chan_handle_err(chan, status); in dw_axi_dma_interrupt()
1084 axi_chan_block_xfer_complete(chan); in dw_axi_dma_interrupt()
1095 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_terminate_all() local
1096 u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT; in dma_chan_terminate_all()
1102 axi_chan_disable(chan); in dma_chan_terminate_all()
1104 ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val, in dma_chan_terminate_all()
1108 "%s failed to stop\n", axi_chan_name(chan)); in dma_chan_terminate_all()
1110 if (chan->direction != DMA_MEM_TO_MEM) in dma_chan_terminate_all()
1111 dw_axi_dma_set_hw_channel(chan, false); in dma_chan_terminate_all()
1112 if (chan->direction == DMA_MEM_TO_DEV) in dma_chan_terminate_all()
1113 dw_axi_dma_set_byte_halfword(chan, false); in dma_chan_terminate_all()
1115 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_terminate_all()
1117 vchan_get_all_descriptors(&chan->vc, &head); in dma_chan_terminate_all()
1119 chan->cyclic = false; in dma_chan_terminate_all()
1120 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_terminate_all()
1122 vchan_dma_desc_free_list(&chan->vc, &head); in dma_chan_terminate_all()
1124 dev_vdbg(dchan2dev(dchan), "terminated: %s\n", axi_chan_name(chan)); in dma_chan_terminate_all()
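
Termination is stop-then-reap: clear the enable bit, spin (atomically, since terminate_all may be called from any context) on DMAC_CHEN until the hardware confirms the channel stopped, release the handshake and byte/halfword routing for slave directions, and free every queued descriptor through virt-dma. A sketch of the stop-and-wait; the poll interval and timeout are assumptions:

u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT;
u32 val;
int ret;

axi_chan_disable(chan);

/* 1 us poll interval, 50 us budget: illustrative values */
ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val,
                                !(val & chan_active), 1, 50);
if (ret == -ETIMEDOUT)
        dev_warn(dchan2dev(dchan),
                 "%s failed to stop\n", axi_chan_name(chan));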
1131 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_pause() local
1136 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_pause()
1138 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in dma_chan_pause()
1139 val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT | in dma_chan_pause()
1140 BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT; in dma_chan_pause()
1141 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in dma_chan_pause()
1144 if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED) in dma_chan_pause()
1150 axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED); in dma_chan_pause()
1152 chan->is_paused = true; in dma_chan_pause()
1154 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_pause()
1160 static inline void axi_chan_resume(struct axi_dma_chan *chan) in axi_chan_resume() argument
1164 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_resume()
1165 val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT); in axi_chan_resume()
1166 val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT); in axi_chan_resume()
1167 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_resume()
1169 chan->is_paused = false; in axi_chan_resume()
1174 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_resume() local
1177 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_resume()
1179 if (chan->is_paused) in dma_chan_resume()
1180 axi_chan_resume(chan); in dma_chan_resume()
1182 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_resume()
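
Pause reuses the write-enable convention on the SUSP bits, then confirms the suspend took effect by polling the latched DWAXIDMAC_IRQ_SUSPENDED event before acknowledging it; resume clears SUSP with its WE bit armed. A sketch of the pause handshake, with the retry count and delay assumed (the listing elides them):

u32 val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
int timeout = 20;                       /* illustrative retry budget */

val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
       BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);

do {
        if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED)
                break;                  /* channel drained its FIFO */
        udelay(2);
} while (--timeout);

axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED);
chan->is_paused = true;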
1234 struct axi_dma_chan *chan; in dw_axi_dma_of_xlate() local
1241 chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_of_xlate()
1242 chan->hw_handshake_num = dma_spec->args[0]; in dw_axi_dma_of_xlate()
1367 dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels, in dw_probe()
1368 sizeof(*dw->chan), GFP_KERNEL); in dw_probe()
1369 if (!dw->chan) in dw_probe()
1379 struct axi_dma_chan *chan = &dw->chan[i]; in dw_probe() local
1381 chan->chip = chip; in dw_probe()
1382 chan->id = i; in dw_probe()
1383 chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN; in dw_probe()
1384 atomic_set(&chan->descs_allocated, 0); in dw_probe()
1386 chan->vc.desc_free = vchan_desc_put; in dw_probe()
1387 vchan_init(&chan->vc, &dw->dma); in dw_probe()
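
Probe wires each channel up once: a back-pointer to the chip, its index, and a register window carved out of the shared MMIO block at COMMON_REG_LEN + i * CHAN_REG_LEN, after which vchan_init() hooks the channel into the dmaengine device. The loop, stitched together from the fragments above:

for (i = 0; i < hdata->nr_channels; i++) {
        struct axi_dma_chan *chan = &dw->chan[i];

        chan->chip = chip;
        chan->id = i;
        chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
        atomic_set(&chan->descs_allocated, 0);

        chan->vc.desc_free = vchan_desc_put;    /* virt-dma free hook */
        vchan_init(&chan->vc, &dw->dma);
}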
1471 struct axi_dma_chan *chan, *_chan; in dw_remove() local
1479 axi_chan_disable(&chip->dw->chan[i]); in dw_remove()
1480 axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL); in dw_remove()
1491 list_for_each_entry_safe(chan, _chan, &dw->dma.channels, in dw_remove()
1492 vc.chan.device_node) { in dw_remove()
1493 list_del(&chan->vc.chan.device_node); in dw_remove()
1494 tasklet_kill(&chan->vc.task); in dw_remove()