
Lines Matching refs:chan

Each entry below is one source line of the Synopsys DesignWare AXI DMA controller (dw-axi-dmac) driver that references "chan": the leading number is the line's position in the source file, the trailing "in function()" names the enclosing function, and the "argument"/"local" tags mark whether chan is a function parameter or a local variable at that reference.

66 axi_chan_iowrite32(struct axi_dma_chan *chan, u32 reg, u32 val) in axi_chan_iowrite32() argument
68 iowrite32(val, chan->chan_regs + reg); in axi_chan_iowrite32()
71 static inline u32 axi_chan_ioread32(struct axi_dma_chan *chan, u32 reg) in axi_chan_ioread32() argument
73 return ioread32(chan->chan_regs + reg); in axi_chan_ioread32()
77 axi_chan_iowrite64(struct axi_dma_chan *chan, u32 reg, u64 val) in axi_chan_iowrite64() argument
83 iowrite32(lower_32_bits(val), chan->chan_regs + reg); in axi_chan_iowrite64()
84 iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4); in axi_chan_iowrite64()
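
The 64-bit helper never issues a single 64-bit store: the value is split with lower_32_bits()/upper_32_bits() and written as two 32-bit accesses at reg and reg + 4, matching the lo/hi register pair layout. A minimal user-space sketch of the same split, with a plain array standing in for the MMIO window (the array and offsets are illustrative, not the driver's):

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-ins for the kernel's lower_32_bits()/upper_32_bits(). */
    #define lower_32_bits(v) ((uint32_t)((v) & 0xffffffffu))
    #define upper_32_bits(v) ((uint32_t)((v) >> 32))

    static uint32_t fake_regs[16];          /* models chan->chan_regs */

    /* Two 32-bit stores: low word at reg, high word at reg + 4. */
    static void chan_iowrite64(unsigned int reg, uint64_t val)
    {
        fake_regs[reg / 4]     = lower_32_bits(val);
        fake_regs[reg / 4 + 1] = upper_32_bits(val);
    }

    int main(void)
    {
        chan_iowrite64(8, 0x1122334455667788ULL);
        printf("lo=%08x hi=%08x\n", fake_regs[2], fake_regs[3]);
        return 0;
    }
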
87 static inline void axi_chan_config_write(struct axi_dma_chan *chan, in axi_chan_config_write() argument
94 if (chan->chip->dw->hdata->reg_map_8_channels && in axi_chan_config_write()
95 !chan->chip->dw->hdata->use_cfg2) { in axi_chan_config_write()
110 axi_chan_iowrite32(chan, CH_CFG_L, cfg_lo); in axi_chan_config_write()
111 axi_chan_iowrite32(chan, CH_CFG_H, cfg_hi); in axi_chan_config_write()
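
axi_chan_config_write() picks a field layout depending on whether the controller uses the legacy 8-channel register map without CFG2, but either way the packed configuration leaves the CPU as two 32-bit writes to CH_CFG_L and CH_CFG_H. A sketch of packing a configuration into a lo/hi pair; the field positions here are invented for illustration and do not match the real CH_CFG_L/CH_CFG_H layouts:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical field positions, for illustration only; the real
     * layouts differ between the 8-channel and newer register maps. */
    struct cfg { uint32_t tt_fc, src_per, dst_per, prior; };

    static void pack_cfg(const struct cfg *c, uint32_t *lo, uint32_t *hi)
    {
        *lo = (c->src_per & 0xf) << 0 | (c->dst_per & 0xf) << 4;
        *hi = (c->tt_fc & 0x7) << 0 | (c->prior & 0x7) << 17;
    }

    int main(void)
    {
        struct cfg c = { .tt_fc = 1, .src_per = 2, .dst_per = 3, .prior = 4 };
        uint32_t lo, hi;

        pack_cfg(&c, &lo, &hi);
        printf("cfg_lo=%08x cfg_hi=%08x\n", lo, hi); /* then two iowrite32()s */
        return 0;
    }
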
150 static inline void axi_chan_irq_disable(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_disable() argument
155 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, DWAXIDMAC_IRQ_NONE); in axi_chan_irq_disable()
157 val = axi_chan_ioread32(chan, CH_INTSTATUS_ENA); in axi_chan_irq_disable()
159 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, val); in axi_chan_irq_disable()
163 static inline void axi_chan_irq_set(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_set() argument
165 axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, irq_mask); in axi_chan_irq_set()
168 static inline void axi_chan_irq_sig_set(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_sig_set() argument
170 axi_chan_iowrite32(chan, CH_INTSIGNAL_ENA, irq_mask); in axi_chan_irq_sig_set()
173 static inline void axi_chan_irq_clear(struct axi_dma_chan *chan, u32 irq_mask) in axi_chan_irq_clear() argument
175 axi_chan_iowrite32(chan, CH_INTCLEAR, irq_mask); in axi_chan_irq_clear()
178 static inline u32 axi_chan_irq_read(struct axi_dma_chan *chan) in axi_chan_irq_read() argument
180 return axi_chan_ioread32(chan, CH_INTSTATUS); in axi_chan_irq_read()
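
The interrupt helpers come in two flavors: axi_chan_irq_set() simply overwrites CH_INTSTATUS_ENA with the caller's mask, while axi_chan_irq_disable() (when it cannot just write DWAXIDMAC_IRQ_NONE) must read-modify-write so that only the requested sources are cleared. A sketch of both patterns against a modeled enable register:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t int_ena;  /* models CH_INTSTATUS_ENA */

    /* Overwrite: enable exactly the bits in the mask. */
    static void irq_set(uint32_t mask)     { int_ena = mask; }

    /* Read-modify-write: clear only the requested bits, keep the rest. */
    static void irq_disable(uint32_t mask) { int_ena &= ~mask; }

    int main(void)
    {
        irq_set(0x0f);       /* enable four sources */
        irq_disable(0x05);   /* later, mask two of them */
        printf("ena=%02x\n", int_ena); /* 0a: untouched bits survive */
        return 0;
    }
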
183 static inline void axi_chan_disable(struct axi_dma_chan *chan) in axi_chan_disable() argument
187 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_disable()
188 val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT); in axi_chan_disable()
189 if (chan->chip->dw->hdata->reg_map_8_channels) in axi_chan_disable()
190 val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT; in axi_chan_disable()
192 val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT; in axi_chan_disable()
193 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_disable()
196 static inline void axi_chan_enable(struct axi_dma_chan *chan) in axi_chan_enable() argument
200 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_enable()
201 if (chan->chip->dw->hdata->reg_map_8_channels) in axi_chan_enable()
202 val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT | in axi_chan_enable()
203 BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT; in axi_chan_enable()
205 val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT | in axi_chan_enable()
206 BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT; in axi_chan_enable()
207 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_enable()
210 static inline bool axi_chan_is_hw_enable(struct axi_dma_chan *chan) in axi_chan_is_hw_enable() argument
214 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_is_hw_enable()
216 return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT)); in axi_chan_is_hw_enable()
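
DMAC_CHEN pairs each channel-enable bit with a write-enable bit (DMAC_CHAN_EN_WE_SHIFT on the 8-channel map, DMAC_CHAN_EN2_WE_SHIFT on the newer one): the hardware only latches an enable bit whose write-enable companion is set in the same store, so one write can flip a single channel without clobbering the others. A sketch that models those semantics; the shift values are illustrative, not the real register layout:

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1u << (n))

    /* Illustrative shifts: EN bits in [7:0], write-enable bits in [15:8]. */
    #define EN_SHIFT    0
    #define EN_WE_SHIFT 8

    static uint32_t chen;  /* models DMAC_CHEN */

    /* Model of the hardware: an EN bit only changes when its WE bit is
     * set in the same write, so other channels are never disturbed. */
    static void chen_write(uint32_t val)
    {
        for (unsigned int id = 0; id < 8; id++)
            if (val & BIT(id + EN_WE_SHIFT)) {
                chen &= ~BIT(id + EN_SHIFT);
                chen |= val & BIT(id + EN_SHIFT);
            }
    }

    int main(void)
    {
        chen_write(BIT(0) | BIT(0 + EN_WE_SHIFT)); /* enable channel 0 */
        chen_write(BIT(3) | BIT(3 + EN_WE_SHIFT)); /* enable channel 3 */
        chen_write(BIT(3 + EN_WE_SHIFT));          /* disable channel 3 only */
        printf("chen=%02x\n", chen & 0xff);        /* 01: channel 0 untouched */
        return 0;
    }
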
225 axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL); in axi_dma_hw_init()
226 axi_chan_disable(&chip->dw->chan[i]); in axi_dma_hw_init()
233 static u32 axi_chan_get_xfer_width(struct axi_dma_chan *chan, dma_addr_t src, in axi_chan_get_xfer_width() argument
236 u32 max_width = chan->chip->dw->hdata->m_data_width; in axi_chan_get_xfer_width()
241 static inline const char *axi_chan_name(struct axi_dma_chan *chan) in axi_chan_name() argument
243 return dma_chan_name(&chan->vc.chan); in axi_chan_name()
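
axi_chan_get_xfer_width() derives the widest safe bus width from alignment alone: OR-ing src, dst and len and taking __ffs() of the result (with BIT(m_data_width) OR-ed in as an upper bound) yields the largest power-of-two unit that divides all three. A sketch with a ctz-based stand-in for __ffs():

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1u << (n))

    /* Stand-in for the kernel's __ffs(): index of the lowest set bit. */
    static unsigned int my_ffs(uint64_t v) { return __builtin_ctzll(v); }

    /* Widest transfer width (log2 bytes) that src, dst and len all allow,
     * capped by the controller's maximum data width. */
    static uint32_t xfer_width(uint64_t src, uint64_t dst, uint64_t len,
                               uint32_t max_width)
    {
        return my_ffs(src | dst | len | BIT(max_width));
    }

    int main(void)
    {
        /* 0x1000/0x2008/0x98: all multiples of 8, so width = 3 (64 bit). */
        printf("width=%u\n", xfer_width(0x1000, 0x2008, 0x98, 5));
        return 0;
    }
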
264 static struct axi_dma_lli *axi_desc_get(struct axi_dma_chan *chan, in axi_desc_get() argument
270 lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys); in axi_desc_get()
272 dev_err(chan2dev(chan), "%s: not enough descriptors available\n", in axi_desc_get()
273 axi_chan_name(chan)); in axi_desc_get()
277 atomic_inc(&chan->descs_allocated); in axi_desc_get()
285 struct axi_dma_chan *chan = desc->chan; in axi_desc_put() local
292 dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp); in axi_desc_put()
297 atomic_sub(descs_put, &chan->descs_allocated); in axi_desc_put()
298 dev_vdbg(chan2dev(chan), "%s: %d descs put, %d still allocated\n", in axi_desc_put()
299 axi_chan_name(chan), descs_put, in axi_desc_put()
300 atomic_read(&chan->descs_allocated)); in axi_desc_put()
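
axi_desc_get()/axi_desc_put() keep a per-channel atomic count of live descriptors, which the free path uses for the "descs put / still allocated" diagnostic above. A sketch of that bookkeeping with C11 atomics, with malloc/free standing in for the DMA pool calls:

    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    static atomic_int descs_allocated;  /* mirrors chan->descs_allocated */

    static void *desc_get(void)
    {
        void *lli = calloc(1, 64);      /* stand-in for dma_pool_zalloc() */

        if (lli)
            atomic_fetch_add(&descs_allocated, 1);
        return lli;
    }

    static void desc_put(void **llis, int n)
    {
        for (int i = 0; i < n; i++)
            free(llis[i]);              /* stand-in for dma_pool_free() */
        atomic_fetch_sub(&descs_allocated, n);
        printf("%d descs put, %d still allocated\n", n,
               atomic_load(&descs_allocated));
    }

    int main(void)
    {
        void *d[3] = { desc_get(), desc_get(), desc_get() };

        desc_put(d, 2);                 /* "2 descs put, 1 still allocated" */
        desc_put(&d[2], 1);
        return 0;
    }
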
312 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_tx_status() local
326 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_tx_status()
328 vdesc = vchan_find_desc(&chan->vc, cookie); in dma_chan_tx_status()
337 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_tx_status()
348 static void write_chan_llp(struct axi_dma_chan *chan, dma_addr_t adr) in write_chan_llp() argument
350 axi_chan_iowrite64(chan, CH_LLP, adr); in write_chan_llp()
353 static void dw_axi_dma_set_byte_halfword(struct axi_dma_chan *chan, bool set) in dw_axi_dma_set_byte_halfword() argument
358 if (!chan->chip->apb_regs) { in dw_axi_dma_set_byte_halfword()
359 dev_dbg(chan->chip->dev, "apb_regs not initialized\n"); in dw_axi_dma_set_byte_halfword()
363 reg_width = __ffs(chan->config.dst_addr_width); in dw_axi_dma_set_byte_halfword()
367 val = ioread32(chan->chip->apb_regs + offset); in dw_axi_dma_set_byte_halfword()
370 val |= BIT(chan->id); in dw_axi_dma_set_byte_halfword()
372 val &= ~BIT(chan->id); in dw_axi_dma_set_byte_halfword()
374 iowrite32(val, chan->chip->apb_regs + offset); in dw_axi_dma_set_byte_halfword()
377 static void axi_chan_block_xfer_start(struct axi_dma_chan *chan, in axi_chan_block_xfer_start() argument
380 u32 priority = chan->chip->dw->hdata->priority[chan->id]; in axi_chan_block_xfer_start()
385 if (unlikely(axi_chan_is_hw_enable(chan))) { in axi_chan_block_xfer_start()
386 dev_err(chan2dev(chan), "%s is non-idle!\n", in axi_chan_block_xfer_start()
387 axi_chan_name(chan)); in axi_chan_block_xfer_start()
392 axi_dma_enable(chan->chip); in axi_chan_block_xfer_start()
400 switch (chan->direction) { in axi_chan_block_xfer_start()
402 dw_axi_dma_set_byte_halfword(chan, true); in axi_chan_block_xfer_start()
403 config.tt_fc = chan->config.device_fc ? in axi_chan_block_xfer_start()
406 if (chan->chip->apb_regs) in axi_chan_block_xfer_start()
407 config.dst_per = chan->id; in axi_chan_block_xfer_start()
409 config.dst_per = chan->hw_handshake_num; in axi_chan_block_xfer_start()
412 config.tt_fc = chan->config.device_fc ? in axi_chan_block_xfer_start()
415 if (chan->chip->apb_regs) in axi_chan_block_xfer_start()
416 config.src_per = chan->id; in axi_chan_block_xfer_start()
418 config.src_per = chan->hw_handshake_num; in axi_chan_block_xfer_start()
423 axi_chan_config_write(chan, &config); in axi_chan_block_xfer_start()
425 write_chan_llp(chan, first->hw_desc[0].llp | lms); in axi_chan_block_xfer_start()
428 axi_chan_irq_sig_set(chan, irq_mask); in axi_chan_block_xfer_start()
432 axi_chan_irq_set(chan, irq_mask); in axi_chan_block_xfer_start()
434 axi_chan_enable(chan); in axi_chan_block_xfer_start()
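
axi_chan_block_xfer_start() follows a fixed programming order: bail out if the hardware still reports the channel enabled, turn the controller on, write the direction-dependent channel configuration, point CH_LLP at the first hardware descriptor, unmask the interrupts, and only then set the enable bit. A stubbed sketch of that ordering; the helpers are placeholders that merely trace the sequence:

    #include <stdbool.h>
    #include <stdio.h>

    /* Placeholder helpers; each would be an MMIO access in the driver. */
    static bool chan_is_hw_enabled(void) { return false; }
    static void dma_enable(void)         { puts("1. controller on"); }
    static void chan_config_write(void)  { puts("2. CH_CFG_L/H"); }
    static void chan_write_llp(void)     { puts("3. CH_LLP -> first LLI"); }
    static void chan_irq_unmask(void)    { puts("4. unmask IRQs"); }
    static void chan_enable(void)        { puts("5. DMAC_CHEN enable"); }

    static void block_xfer_start(void)
    {
        if (chan_is_hw_enabled()) {      /* starting a busy channel is a bug */
            fprintf(stderr, "channel is non-idle!\n");
            return;
        }
        dma_enable();
        chan_config_write();
        chan_write_llp();
        chan_irq_unmask();
        chan_enable();                   /* transfer begins here */
    }

    int main(void) { block_xfer_start(); return 0; }
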
437 static void axi_chan_start_first_queued(struct axi_dma_chan *chan) in axi_chan_start_first_queued() argument
442 vd = vchan_next_desc(&chan->vc); in axi_chan_start_first_queued()
447 dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan), in axi_chan_start_first_queued()
449 axi_chan_block_xfer_start(chan, desc); in axi_chan_start_first_queued()
454 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_issue_pending() local
457 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_issue_pending()
458 if (vchan_issue_pending(&chan->vc)) in dma_chan_issue_pending()
459 axi_chan_start_first_queued(chan); in dma_chan_issue_pending()
460 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_issue_pending()
465 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_synchronize() local
467 vchan_synchronize(&chan->vc); in dw_axi_dma_synchronize()
472 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_alloc_chan_resources() local
475 if (axi_chan_is_hw_enable(chan)) { in dma_chan_alloc_chan_resources()
476 dev_err(chan2dev(chan), "%s is non-idle!\n", in dma_chan_alloc_chan_resources()
477 axi_chan_name(chan)); in dma_chan_alloc_chan_resources()
482 chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)), in dma_chan_alloc_chan_resources()
483 chan->chip->dev, in dma_chan_alloc_chan_resources()
486 if (!chan->desc_pool) { in dma_chan_alloc_chan_resources()
487 dev_err(chan2dev(chan), "No memory for descriptors\n"); in dma_chan_alloc_chan_resources()
490 dev_vdbg(dchan2dev(dchan), "%s: allocating\n", axi_chan_name(chan)); in dma_chan_alloc_chan_resources()
492 pm_runtime_get(chan->chip->dev); in dma_chan_alloc_chan_resources()
499 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_free_chan_resources() local
502 if (axi_chan_is_hw_enable(chan)) in dma_chan_free_chan_resources()
504 axi_chan_name(chan)); in dma_chan_free_chan_resources()
506 axi_chan_disable(chan); in dma_chan_free_chan_resources()
507 axi_chan_irq_disable(chan, DWAXIDMAC_IRQ_ALL); in dma_chan_free_chan_resources()
509 vchan_free_chan_resources(&chan->vc); in dma_chan_free_chan_resources()
511 dma_pool_destroy(chan->desc_pool); in dma_chan_free_chan_resources()
512 chan->desc_pool = NULL; in dma_chan_free_chan_resources()
515 axi_chan_name(chan), atomic_read(&chan->descs_allocated)); in dma_chan_free_chan_resources()
517 pm_runtime_put(chan->chip->dev); in dma_chan_free_chan_resources()
520 static void dw_axi_dma_set_hw_channel(struct axi_dma_chan *chan, bool set) in dw_axi_dma_set_hw_channel() argument
522 struct axi_dma_chip *chip = chan->chip; in dw_axi_dma_set_hw_channel()
536 val = chan->hw_handshake_num; in dw_axi_dma_set_hw_channel()
546 (chan->id * DMA_APB_HS_SEL_BIT_SIZE)); in dw_axi_dma_set_hw_channel()
547 reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE)); in dw_axi_dma_set_hw_channel()
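
dw_axi_dma_set_hw_channel() updates a multi-bit per-channel field inside a shared APB handshake-select register: clear the channel's DMA_APB_HS_SEL_BIT_SIZE-wide slot, then OR in the new handshake number shifted to that slot. A sketch of the generic "replace field i of width w in a packed word" pattern (the 4-bit width is assumed for the example):

    #include <stdint.h>
    #include <stdio.h>

    /* Replace the w-bit field at index i of a packed register value. */
    static uint32_t set_field(uint32_t reg, unsigned int i, unsigned int w,
                              uint32_t val)
    {
        uint32_t mask = ((1u << w) - 1) << (i * w);

        reg &= ~mask;                    /* clear the channel's slot */
        reg |= (val << (i * w)) & mask;  /* install the new selection */
        return reg;
    }

    int main(void)
    {
        uint32_t reg = 0;

        reg = set_field(reg, 0, 4, 0x9); /* chan 0 -> handshake 9 */
        reg = set_field(reg, 2, 4, 0x5); /* chan 2 -> handshake 5 */
        printf("hs_sel=%08x\n", reg);    /* 00000509 */
        return 0;
    }
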
595 if (desc->chan->chip->dw->hdata->nr_masters > 1) in set_desc_dest_master()
603 static int dw_axi_dma_set_hw_desc(struct axi_dma_chan *chan, in dw_axi_dma_set_hw_desc() argument
607 unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width); in dw_axi_dma_set_hw_desc()
616 axi_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in dw_axi_dma_set_hw_desc()
623 dev_err(chan->chip->dev, "invalid buffer alignment\n"); in dw_axi_dma_set_hw_desc()
627 switch (chan->direction) { in dw_axi_dma_set_hw_desc()
629 reg_width = __ffs(chan->config.dst_addr_width); in dw_axi_dma_set_hw_desc()
630 device_addr = chan->config.dst_addr; in dw_axi_dma_set_hw_desc()
638 reg_width = __ffs(chan->config.src_addr_width); in dw_axi_dma_set_hw_desc()
639 device_addr = chan->config.src_addr; in dw_axi_dma_set_hw_desc()
653 hw_desc->lli = axi_desc_get(chan, &hw_desc->llp); in dw_axi_dma_set_hw_desc()
659 if (chan->chip->dw->hdata->restrict_axi_burst_len) { in dw_axi_dma_set_hw_desc()
660 burst_len = chan->chip->dw->hdata->axi_rw_burst_len; in dw_axi_dma_set_hw_desc()
668 if (chan->direction == DMA_MEM_TO_DEV) { in dw_axi_dma_set_hw_desc()
688 static size_t calculate_block_len(struct axi_dma_chan *chan, in calculate_block_len() argument
695 axi_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in calculate_block_len()
699 data_width = BIT(chan->chip->dw->hdata->m_data_width); in calculate_block_len()
707 reg_width = __ffs(chan->config.src_addr_width); in calculate_block_len()
723 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_prep_cyclic() local
738 axi_block_len = calculate_block_len(chan, dma_addr, buf_len, direction); in dw_axi_dma_chan_prep_cyclic()
751 chan->direction = direction; in dw_axi_dma_chan_prep_cyclic()
752 desc->chan = chan; in dw_axi_dma_chan_prep_cyclic()
753 chan->cyclic = true; in dw_axi_dma_chan_prep_cyclic()
760 status = dw_axi_dma_set_hw_desc(chan, hw_desc, src_addr, in dw_axi_dma_chan_prep_cyclic()
783 dw_axi_dma_set_hw_channel(chan, true); in dw_axi_dma_chan_prep_cyclic()
785 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dw_axi_dma_chan_prep_cyclic()
800 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_prep_slave_sg() local
820 axi_block_len = calculate_block_len(chan, mem, len, direction); in dw_axi_dma_chan_prep_slave_sg()
831 desc->chan = chan; in dw_axi_dma_chan_prep_slave_sg()
833 chan->direction = direction; in dw_axi_dma_chan_prep_slave_sg()
843 status = dw_axi_dma_set_hw_desc(chan, hw_desc, mem, segment_len); in dw_axi_dma_chan_prep_slave_sg()
863 dw_axi_dma_set_hw_channel(chan, true); in dw_axi_dma_chan_prep_slave_sg()
865 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dw_axi_dma_chan_prep_slave_sg()
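
The slave-sg prep path cannot hand the controller arbitrarily long buffers: each LLI covers at most the per-channel block size, so every scatterlist entry is chopped into segments of at most axi_block_len bytes and dw_axi_dma_set_hw_desc() is called once per segment. A sketch of the split loop with made-up sizes:

    #include <stddef.h>
    #include <stdio.h>

    /* Chop one buffer into hardware-sized segments; one LLI per segment. */
    static size_t build_segments(size_t mem, size_t len, size_t max_seg)
    {
        size_t num = 0;

        while (len) {
            size_t seg = len < max_seg ? len : max_seg;

            /* dw_axi_dma_set_hw_desc(chan, hw_desc, mem, seg) here */
            printf("LLI %zu: addr=%#zx len=%#zx\n", num, mem, seg);
            mem += seg;
            len -= seg;
            num++;
        }
        return num;
    }

    int main(void)
    {
        build_segments(0x80000000, 0x5000, 0x2000); /* -> 3 LLIs */
        return 0;
    }
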
878 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_prep_dma_memcpy() local
886 dev_dbg(chan2dev(chan), "%s: memcpy: src: %pad dst: %pad length: %zd flags: %#lx", in dma_chan_prep_dma_memcpy()
887 axi_chan_name(chan), &src_adr, &dst_adr, len, flags); in dma_chan_prep_dma_memcpy()
889 max_block_ts = chan->chip->dw->hdata->block_size[chan->id]; in dma_chan_prep_dma_memcpy()
890 xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, len); in dma_chan_prep_dma_memcpy()
896 desc->chan = chan; in dma_chan_prep_dma_memcpy()
908 xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, xfer_len); in dma_chan_prep_dma_memcpy()
921 hw_desc->lli = axi_desc_get(chan, &hw_desc->llp); in dma_chan_prep_dma_memcpy()
930 if (chan->chip->dw->hdata->restrict_axi_burst_len) { in dma_chan_prep_dma_memcpy()
931 u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len; in dma_chan_prep_dma_memcpy()
969 return vchan_tx_prep(&chan->vc, &desc->vd, flags); in dma_chan_prep_dma_memcpy()
980 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_chan_slave_config() local
982 memcpy(&chan->config, config, sizeof(*config)); in dw_axi_dma_chan_slave_config()
987 static void axi_chan_dump_lli(struct axi_dma_chan *chan, in axi_chan_dump_lli() argument
991 dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n"); in axi_chan_dump_lli()
995 dev_err(dchan2dev(&chan->vc.chan), in axi_chan_dump_lli()
1005 static void axi_chan_list_dump_lli(struct axi_dma_chan *chan, in axi_chan_list_dump_lli() argument
1008 int count = atomic_read(&chan->descs_allocated); in axi_chan_list_dump_lli()
1012 axi_chan_dump_lli(chan, &desc_head->hw_desc[i]); in axi_chan_list_dump_lli()
1015 static noinline void axi_chan_handle_err(struct axi_dma_chan *chan, u32 status) in axi_chan_handle_err() argument
1020 spin_lock_irqsave(&chan->vc.lock, flags); in axi_chan_handle_err()
1022 axi_chan_disable(chan); in axi_chan_handle_err()
1025 vd = vchan_next_desc(&chan->vc); in axi_chan_handle_err()
1027 dev_err(chan2dev(chan), "BUG: %s, IRQ with no descriptors\n", in axi_chan_handle_err()
1028 axi_chan_name(chan)); in axi_chan_handle_err()
1035 dev_err(chan2dev(chan), in axi_chan_handle_err()
1037 axi_chan_name(chan), vd->tx.cookie, status); in axi_chan_handle_err()
1038 axi_chan_list_dump_lli(chan, vd_to_axi_desc(vd)); in axi_chan_handle_err()
1043 axi_chan_start_first_queued(chan); in axi_chan_handle_err()
1046 spin_unlock_irqrestore(&chan->vc.lock, flags); in axi_chan_handle_err()
1049 static void axi_chan_block_xfer_complete(struct axi_dma_chan *chan) in axi_chan_block_xfer_complete() argument
1051 int count = atomic_read(&chan->descs_allocated); in axi_chan_block_xfer_complete()
1059 spin_lock_irqsave(&chan->vc.lock, flags); in axi_chan_block_xfer_complete()
1060 if (unlikely(axi_chan_is_hw_enable(chan))) { in axi_chan_block_xfer_complete()
1061 dev_err(chan2dev(chan), "BUG: %s caught DWAXIDMAC_IRQ_DMA_TRF, but channel not idle!\n", in axi_chan_block_xfer_complete()
1062 axi_chan_name(chan)); in axi_chan_block_xfer_complete()
1063 axi_chan_disable(chan); in axi_chan_block_xfer_complete()
1067 vd = vchan_next_desc(&chan->vc); in axi_chan_block_xfer_complete()
1069 dev_err(chan2dev(chan), "BUG: %s, IRQ with no descriptors\n", in axi_chan_block_xfer_complete()
1070 axi_chan_name(chan)); in axi_chan_block_xfer_complete()
1074 if (chan->cyclic) { in axi_chan_block_xfer_complete()
1077 llp = lo_hi_readq(chan->chan_regs + CH_LLP); in axi_chan_block_xfer_complete()
1081 axi_chan_irq_clear(chan, hw_desc->lli->status_lo); in axi_chan_block_xfer_complete()
1091 axi_chan_enable(chan); in axi_chan_block_xfer_complete()
1100 spin_unlock_irqrestore(&chan->vc.lock, flags); in axi_chan_block_xfer_complete()
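
For a cyclic transfer the completion handler does not retire the descriptor: it reads CH_LLP back with lo_hi_readq() and scans the descriptor ring for the LLI whose address matches, which tells it how far the hardware has advanced before it fires the period callback and re-arms. A sketch of the pointer-matching idea with made-up addresses:

    #include <stdint.h>
    #include <stdio.h>

    #define PERIODS 4

    /* DMA addresses of the ring's LLIs, as stored in hw_desc[i].llp. */
    static uint64_t ring_llp[PERIODS] = { 0x1000, 0x1040, 0x1080, 0x10c0 };

    /* Locate the hardware's position in the ring from a CH_LLP readback;
     * the driver uses the matching index as its completed-blocks count. */
    static int ring_index(uint64_t hw_llp)
    {
        for (int i = 0; i < PERIODS; i++)
            if (ring_llp[i] == hw_llp)
                return i;
        return -1;  /* readback not in the ring: treat as an error */
    }

    int main(void)
    {
        printf("hw is at LLI %d\n", ring_index(0x1080)); /* prints 2 */
        return 0;
    }
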
1107 struct axi_dma_chan *chan; in dw_axi_dma_interrupt() local
1116 chan = &dw->chan[i]; in dw_axi_dma_interrupt()
1117 status = axi_chan_irq_read(chan); in dw_axi_dma_interrupt()
1118 axi_chan_irq_clear(chan, status); in dw_axi_dma_interrupt()
1121 axi_chan_name(chan), i, status); in dw_axi_dma_interrupt()
1124 axi_chan_handle_err(chan, status); in dw_axi_dma_interrupt()
1126 axi_chan_block_xfer_complete(chan); in dw_axi_dma_interrupt()
1137 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_terminate_all() local
1138 u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT; in dma_chan_terminate_all()
1144 axi_chan_disable(chan); in dma_chan_terminate_all()
1146 ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val, in dma_chan_terminate_all()
1150 "%s failed to stop\n", axi_chan_name(chan)); in dma_chan_terminate_all()
1152 if (chan->direction != DMA_MEM_TO_MEM) in dma_chan_terminate_all()
1153 dw_axi_dma_set_hw_channel(chan, false); in dma_chan_terminate_all()
1154 if (chan->direction == DMA_MEM_TO_DEV) in dma_chan_terminate_all()
1155 dw_axi_dma_set_byte_halfword(chan, false); in dma_chan_terminate_all()
1157 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_terminate_all()
1159 vchan_get_all_descriptors(&chan->vc, &head); in dma_chan_terminate_all()
1161 chan->cyclic = false; in dma_chan_terminate_all()
1162 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_terminate_all()
1164 vchan_dma_desc_free_list(&chan->vc, &head); in dma_chan_terminate_all()
1166 dev_vdbg(dchan2dev(dchan), "terminated: %s\n", axi_chan_name(chan)); in dma_chan_terminate_all()
1173 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_pause() local
1178 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_pause()
1180 if (chan->chip->dw->hdata->reg_map_8_channels) { in dma_chan_pause()
1181 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in dma_chan_pause()
1182 val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT | in dma_chan_pause()
1183 BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT; in dma_chan_pause()
1184 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in dma_chan_pause()
1186 val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG); in dma_chan_pause()
1187 val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT | in dma_chan_pause()
1188 BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT; in dma_chan_pause()
1189 axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val); in dma_chan_pause()
1193 if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED) in dma_chan_pause()
1199 axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED); in dma_chan_pause()
1201 chan->is_paused = true; in dma_chan_pause()
1203 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_pause()
1209 static inline void axi_chan_resume(struct axi_dma_chan *chan) in axi_chan_resume() argument
1213 if (chan->chip->dw->hdata->reg_map_8_channels) { in axi_chan_resume()
1214 val = axi_dma_ioread32(chan->chip, DMAC_CHEN); in axi_chan_resume()
1215 val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT); in axi_chan_resume()
1216 val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT); in axi_chan_resume()
1217 axi_dma_iowrite32(chan->chip, DMAC_CHEN, val); in axi_chan_resume()
1219 val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG); in axi_chan_resume()
1220 val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT); in axi_chan_resume()
1221 val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT); in axi_chan_resume()
1222 axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val); in axi_chan_resume()
1225 chan->is_paused = false; in axi_chan_resume()
1230 struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan); in dma_chan_resume() local
1233 spin_lock_irqsave(&chan->vc.lock, flags); in dma_chan_resume()
1235 if (chan->is_paused) in dma_chan_resume()
1236 axi_chan_resume(chan); in dma_chan_resume()
1238 spin_unlock_irqrestore(&chan->vc.lock, flags); in dma_chan_resume()
1290 struct axi_dma_chan *chan; in dw_axi_dma_of_xlate() local
1297 chan = dchan_to_axi_dma_chan(dchan); in dw_axi_dma_of_xlate()
1298 chan->hw_handshake_num = dma_spec->args[0]; in dw_axi_dma_of_xlate()
1437 dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels, in dw_probe()
1438 sizeof(*dw->chan), GFP_KERNEL); in dw_probe()
1439 if (!dw->chan) in dw_probe()
1449 struct axi_dma_chan *chan = &dw->chan[i]; in dw_probe() local
1451 chan->chip = chip; in dw_probe()
1452 chan->id = i; in dw_probe()
1453 chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN; in dw_probe()
1454 atomic_set(&chan->descs_allocated, 0); in dw_probe()
1456 chan->vc.desc_free = vchan_desc_put; in dw_probe()
1457 vchan_init(&chan->vc, &dw->dma); in dw_probe()
1540 struct axi_dma_chan *chan, *_chan; in dw_remove() local
1548 axi_chan_disable(&chip->dw->chan[i]); in dw_remove()
1549 axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL); in dw_remove()
1560 list_for_each_entry_safe(chan, _chan, &dw->dma.channels, in dw_remove()
1561 vc.chan.device_node) { in dw_remove()
1562 list_del(&chan->vc.chan.device_node); in dw_remove()
1563 tasklet_kill(&chan->vc.task); in dw_remove()