Lines Matching refs:c

Each entry below is a source line from the Intel LGM DMA driver that references the identifier c, annotated with its line number, the enclosing function, and whether c is that function's argument or a local there.

305 static inline bool ldma_chan_tx(struct ldma_chan *c) in ldma_chan_tx() argument
307 return !!(c->flags & DMA_TX_CH); in ldma_chan_tx()
310 static inline bool ldma_chan_is_hw_desc(struct ldma_chan *c) in ldma_chan_is_hw_desc() argument
312 return !!(c->flags & DMA_HW_DESC); in ldma_chan_is_hw_desc()
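
The two predicates at lines 305 and 310 are visible almost in full; only the braces fall outside the match. A minimal reconstruction, with DMA_TX_CH and DMA_HW_DESC being flag bits carried in c->flags:

static inline bool ldma_chan_tx(struct ldma_chan *c)
{
    /* True if the channel was configured for the TX direction */
    return !!(c->flags & DMA_TX_CH);
}

static inline bool ldma_chan_is_hw_desc(struct ldma_chan *c)
{
    /* True if the channel's descriptors are managed by hardware */
    return !!(c->flags & DMA_HW_DESC);
}
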
519 static int ldma_chan_cctrl_cfg(struct ldma_chan *c, u32 val) in ldma_chan_cctrl_cfg() argument
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg()
527 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_cctrl_cfg()
531 c->flags |= DMA_TX_CH; in ldma_chan_cctrl_cfg()
533 c->flags |= DMA_RX_CH; in ldma_chan_cctrl_cfg()
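
Lines 519-533 show the driver's recurring register idiom: write the channel number through DMA_CS_MASK into DMA_CS to select that channel's register window, then program the windowed registers. A sketch of the surrounding function; the lock name (dev_lock), the CCTRL register offset, and the direction test for the elided lines 528-530 are assumptions:

static int ldma_chan_cctrl_cfg(struct ldma_chan *c, u32 val)
{
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    unsigned long flags;
    u32 reg;

    spin_lock_irqsave(&d->dev_lock, flags);        /* lock name assumed */
    /* Select this channel's register window */
    ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);
    reg = readl(d->base + DMA_CCTRL);              /* register name assumed */
    /* Cache the channel direction in the software flags */
    if (reg & DMA_CCTRL_DIR_TX)                    /* bit name assumed */
        c->flags |= DMA_TX_CH;
    else
        c->flags |= DMA_RX_CH;
    writel(reg | val, d->base + DMA_CCTRL);
    spin_unlock_irqrestore(&d->dev_lock, flags);

    return 0;
}
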
548 static void ldma_chan_irq_init(struct ldma_chan *c) in ldma_chan_irq_init() argument
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init()
555 if (c->nr < MAX_LOWER_CHANS) { in ldma_chan_irq_init()
563 cn_bit = BIT(c->nr & MASK_LOWER_CHANS); in ldma_chan_irq_init()
565 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_irq_init()
576 static void ldma_chan_set_class(struct ldma_chan *c, u32 val) in ldma_chan_set_class() argument
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class()
589 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_set_class()
594 static int ldma_chan_on(struct ldma_chan *c) in ldma_chan_on() argument
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on()
600 if (WARN_ON(!c->desc_init)) in ldma_chan_on()
604 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_on()
608 c->onoff = DMA_CH_ON; in ldma_chan_on()
613 static int ldma_chan_off(struct ldma_chan *c) in ldma_chan_off() argument
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off()
621 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_off()
630 c->onoff = DMA_CH_OFF; in ldma_chan_off()
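
ldma_chan_on() refuses to start a channel whose descriptor base has not been programmed yet (c->desc_init, set by ldma_chan_desc_hw_cfg() at line 655); ldma_chan_off() is its mirror image, clearing the same CCTRL bit. A sketch of the on path, reusing the select-then-program idiom; the lock name and the exact CCTRL update are assumed:

static int ldma_chan_on(struct ldma_chan *c)
{
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    unsigned long flags;

    /* A channel without initialized descriptors must not be started */
    if (WARN_ON(!c->desc_init))
        return -EINVAL;

    spin_lock_irqsave(&d->dev_lock, flags);        /* lock name assumed */
    ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);
    ldma_update_bits(d, DMA_CCTRL_ON, DMA_CCTRL_ON, DMA_CCTRL);
    spin_unlock_irqrestore(&d->dev_lock, flags);

    c->onoff = DMA_CH_ON;

    return 0;
}
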
635 static void ldma_chan_desc_hw_cfg(struct ldma_chan *c, dma_addr_t desc_base, in ldma_chan_desc_hw_cfg() argument
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg()
642 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_hw_cfg()
655 c->desc_init = true; in ldma_chan_desc_hw_cfg()
661 struct ldma_chan *c = to_ldma_chan(chan); in ldma_chan_desc_cfg() local
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg()
668 c->nr); in ldma_chan_desc_cfg()
674 c->nr, desc_num); in ldma_chan_desc_cfg()
678 ldma_chan_desc_hw_cfg(c, desc_base, desc_num); in ldma_chan_desc_cfg()
680 c->flags |= DMA_HW_DESC; in ldma_chan_desc_cfg()
681 c->desc_cnt = desc_num; in ldma_chan_desc_cfg()
682 c->desc_phys = desc_base; in ldma_chan_desc_cfg()
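
ldma_chan_desc_cfg() validates the caller-supplied descriptor base and count, hands them to ldma_chan_desc_hw_cfg(), and records them so ldma_chan_cfg() can replay the setup later. A sketch; the signature, the upper-bound macro, and the dev_err() wording (only the c->nr and desc_num arguments at lines 668 and 674 are visible) are assumptions:

static int ldma_chan_desc_cfg(struct dma_chan *chan, dma_addr_t desc_base,
                              int desc_num)
{
    struct ldma_chan *c = to_ldma_chan(chan);
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);

    if (!desc_num) {
        dev_err(d->dev, "descriptor count missing for channel %d\n",
                c->nr);                        /* message wording assumed */
        return -EINVAL;
    }

    if (desc_num > DMA_MAX_DESC_NUM) {         /* limit macro assumed */
        dev_err(d->dev, "channel %d: descriptor count %d out of range\n",
                c->nr, desc_num);              /* message wording assumed */
        return -EINVAL;
    }

    ldma_chan_desc_hw_cfg(c, desc_base, desc_num);

    c->flags |= DMA_HW_DESC;
    c->desc_cnt = desc_num;
    c->desc_phys = desc_base;

    return 0;
}
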
694 static int ldma_chan_reset(struct ldma_chan *c) in ldma_chan_reset() argument
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset()
701 ret = ldma_chan_off(c); in ldma_chan_reset()
706 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_reset()
715 c->rst = 1; in ldma_chan_reset()
716 c->desc_init = false; in ldma_chan_reset()
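
ldma_chan_reset() stops the channel, asserts the self-clearing CCTRL reset bit, and invalidates the descriptor state so the channel cannot be restarted before ldma_chan_desc_hw_cfg() runs again. The wait between lines 706 and 715 is elided from the matches; the sketch below assumes a readl_poll_timeout_atomic() on the reset bit:

static int ldma_chan_reset(struct ldma_chan *c)
{
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    unsigned long flags;
    u32 val;
    int ret;

    /* Stop the channel before asserting reset */
    ret = ldma_chan_off(c);
    if (ret)
        return ret;

    spin_lock_irqsave(&d->dev_lock, flags);        /* lock name assumed */
    ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);
    ldma_update_bits(d, DMA_CCTRL_RST, DMA_CCTRL_RST, DMA_CCTRL);
    /* Wait for hardware to clear the self-resetting bit (poll assumed) */
    ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val,
                                    !(val & DMA_CCTRL_RST), 10, 100);
    spin_unlock_irqrestore(&d->dev_lock, flags);
    if (ret)
        return ret;

    c->rst = 1;
    c->desc_init = false;

    return 0;
}
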
721 static void ldma_chan_byte_offset_cfg(struct ldma_chan *c, u32 boff_len) in ldma_chan_byte_offset_cfg() argument
723 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_byte_offset_cfg()
732 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_byte_offset_cfg()
736 static void ldma_chan_data_endian_cfg(struct ldma_chan *c, bool enable, in ldma_chan_data_endian_cfg() argument
739 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_data_endian_cfg()
748 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_data_endian_cfg()
752 static void ldma_chan_desc_endian_cfg(struct ldma_chan *c, bool enable, in ldma_chan_desc_endian_cfg() argument
755 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_endian_cfg()
764 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_endian_cfg()
768 static void ldma_chan_hdr_mode_cfg(struct ldma_chan *c, u32 hdr_len, bool csum) in ldma_chan_hdr_mode_cfg() argument
770 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_hdr_mode_cfg()
783 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_hdr_mode_cfg()
787 static void ldma_chan_rxwr_np_cfg(struct ldma_chan *c, bool enable) in ldma_chan_rxwr_np_cfg() argument
789 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_rxwr_np_cfg()
793 if (ldma_chan_tx(c)) in ldma_chan_rxwr_np_cfg()
799 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_rxwr_np_cfg()
803 static void ldma_chan_abc_cfg(struct ldma_chan *c, bool enable) in ldma_chan_abc_cfg() argument
805 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_abc_cfg()
808 if (d->ver < DMA_VER32 || ldma_chan_tx(c)) in ldma_chan_abc_cfg()
814 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_abc_cfg()
860 static int ldma_chan_cfg(struct ldma_chan *c) in ldma_chan_cfg() argument
862 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cfg()
866 reg = c->pden ? DMA_CCTRL_PDEN : 0; in ldma_chan_cfg()
867 reg |= c->onoff ? DMA_CCTRL_ON : 0; in ldma_chan_cfg()
868 reg |= c->rst ? DMA_CCTRL_RST : 0; in ldma_chan_cfg()
870 ldma_chan_cctrl_cfg(c, reg); in ldma_chan_cfg()
871 ldma_chan_irq_init(c); in ldma_chan_cfg()
877 ldma_chan_set_class(c, c->nr); in ldma_chan_cfg()
878 ldma_chan_byte_offset_cfg(c, c->boff_len); in ldma_chan_cfg()
879 ldma_chan_data_endian_cfg(c, c->data_endian_en, c->data_endian); in ldma_chan_cfg()
880 ldma_chan_desc_endian_cfg(c, c->desc_endian_en, c->desc_endian); in ldma_chan_cfg()
881 ldma_chan_hdr_mode_cfg(c, c->hdrm_len, c->hdrm_csum); in ldma_chan_cfg()
882 ldma_chan_rxwr_np_cfg(c, c->desc_rx_np); in ldma_chan_cfg()
883 ldma_chan_abc_cfg(c, c->abc_en); in ldma_chan_cfg()
886 if (ldma_chan_is_hw_desc(c)) in ldma_chan_cfg()
887 ldma_chan_desc_hw_cfg(c, c->desc_phys, c->desc_cnt); in ldma_chan_cfg()
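
ldma_chan_cfg() replays the cached software state of a channel into hardware: it rebuilds CCTRL from the pden/onoff/rst flags, re-arms the interrupt routing, and reapplies every per-channel knob. A sketch; the version gate for the elided lines 872-876 is an assumption (line 808 shows the driver keys behavior off d->ver), and each helper is taken to do its own locking, as in the sketches above:

static int ldma_chan_cfg(struct ldma_chan *c)
{
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    u32 reg;

    /* Rebuild CCTRL from the cached per-channel state */
    reg = c->pden ? DMA_CCTRL_PDEN : 0;
    reg |= c->onoff ? DMA_CCTRL_ON : 0;
    reg |= c->rst ? DMA_CCTRL_RST : 0;

    ldma_chan_cctrl_cfg(c, reg);
    ldma_chan_irq_init(c);

    /* The remaining knobs only exist on newer hardware (gate assumed) */
    if (d->ver <= DMA_VER22)
        return 0;

    ldma_chan_set_class(c, c->nr);
    ldma_chan_byte_offset_cfg(c, c->boff_len);
    ldma_chan_data_endian_cfg(c, c->data_endian_en, c->data_endian);
    ldma_chan_desc_endian_cfg(c, c->desc_endian_en, c->desc_endian);
    ldma_chan_hdr_mode_cfg(c, c->hdrm_len, c->hdrm_csum);
    ldma_chan_rxwr_np_cfg(c, c->desc_rx_np);
    ldma_chan_abc_cfg(c, c->abc_en);

    /* Channels that own hardware descriptors get their base reprogrammed */
    if (ldma_chan_is_hw_desc(c))
        ldma_chan_desc_hw_cfg(c, c->desc_phys, c->desc_cnt);

    return 0;
}
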
896 struct ldma_chan *c; in ldma_dev_init() local
912 c = &d->chans[j]; in ldma_dev_init()
913 ldma_chan_cfg(c); in ldma_dev_init()
968 struct ldma_chan *c = ds->chan; in dma_free_desc_resource() local
970 dma_pool_free(c->desc_pool, ds->desc_hw, ds->desc_phys); in dma_free_desc_resource()
975 dma_alloc_desc_resource(int num, struct ldma_chan *c) in dma_alloc_desc_resource() argument
977 struct device *dev = c->vchan.chan.device->dev; in dma_alloc_desc_resource()
980 if (num > c->desc_num) { in dma_alloc_desc_resource()
981 dev_err(dev, "sg num %d exceed max %d\n", num, c->desc_num); in dma_alloc_desc_resource()
989 ds->chan = c; in dma_alloc_desc_resource()
990 ds->desc_hw = dma_pool_zalloc(c->desc_pool, GFP_ATOMIC, in dma_alloc_desc_resource()
1002 static void ldma_chan_irq_en(struct ldma_chan *c) in ldma_chan_irq_en() argument
1004 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_en()
1008 writel(c->nr, d->base + DMA_CS); in ldma_chan_irq_en()
1010 writel(BIT(c->nr), d->base + DMA_IRNEN); in ldma_chan_irq_en()
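
ldma_chan_irq_en() pairs a channel-level enable with the top-level per-channel mask bit: select the channel via DMA_CS, then set BIT(c->nr) in DMA_IRNEN. Line 1009 falls between the two matched writes and is elided; a sketch, with the lock name assumed:

static void ldma_chan_irq_en(struct ldma_chan *c)
{
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    unsigned long flags;

    spin_lock_irqsave(&d->dev_lock, flags);        /* lock name assumed */
    /* Select the channel's register window */
    writel(c->nr, d->base + DMA_CS);
    /* Line 1009 (elided): presumably enables the channel-level source */
    /* Unmask the channel in the top-level interrupt enable register */
    writel(BIT(c->nr), d->base + DMA_IRNEN);
    spin_unlock_irqrestore(&d->dev_lock, flags);
}
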
1016 struct ldma_chan *c = to_ldma_chan(chan); in ldma_issue_pending() local
1017 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_issue_pending()
1021 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_issue_pending()
1022 if (vchan_issue_pending(&c->vchan)) { in ldma_issue_pending()
1026 vdesc = vchan_next_desc(&c->vchan); in ldma_issue_pending()
1028 c->ds = NULL; in ldma_issue_pending()
1029 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
1033 c->ds = to_lgm_dma_desc(vdesc); in ldma_issue_pending()
1034 ldma_chan_desc_hw_cfg(c, c->ds->desc_phys, c->ds->desc_cnt); in ldma_issue_pending()
1035 ldma_chan_irq_en(c); in ldma_issue_pending()
1037 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_issue_pending()
1039 ldma_chan_on(c); in ldma_issue_pending()
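
ldma_issue_pending() follows the standard virt-dma pattern: under the vchan lock, move cookies from submitted to issued (vchan_issue_pending()), pick the head descriptor, program it into hardware, and enable the channel interrupt, then start the channel outside the lock. A sketch; only the version gate around the locked section is an assumption, as the matches at lines 1021-1039 ground everything else:

static void ldma_issue_pending(struct dma_chan *chan)
{
    struct ldma_chan *c = to_ldma_chan(chan);
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    unsigned long flags;

    if (d->ver == DMA_VER22) {        /* version gate assumed */
        spin_lock_irqsave(&c->vchan.lock, flags);
        if (vchan_issue_pending(&c->vchan)) {
            struct virt_dma_desc *vdesc;

            /* Take the head of the issued list */
            vdesc = vchan_next_desc(&c->vchan);
            if (!vdesc) {
                c->ds = NULL;
                spin_unlock_irqrestore(&c->vchan.lock, flags);
                return;
            }
            c->ds = to_lgm_dma_desc(vdesc);
            /* Point the hardware at this descriptor chain */
            ldma_chan_desc_hw_cfg(c, c->ds->desc_phys, c->ds->desc_cnt);
            ldma_chan_irq_en(c);
        }
        spin_unlock_irqrestore(&c->vchan.lock, flags);
    }
    ldma_chan_on(c);
}
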
1044 struct ldma_chan *c = to_ldma_chan(chan); in ldma_synchronize() local
1050 cancel_work_sync(&c->work); in ldma_synchronize()
1051 vchan_synchronize(&c->vchan); in ldma_synchronize()
1052 if (c->ds) in ldma_synchronize()
1053 dma_free_desc_resource(&c->ds->vdesc); in ldma_synchronize()
1058 struct ldma_chan *c = to_ldma_chan(chan); in ldma_terminate_all() local
1062 spin_lock_irqsave(&c->vchan.lock, flags); in ldma_terminate_all()
1063 vchan_get_all_descriptors(&c->vchan, &head); in ldma_terminate_all()
1064 spin_unlock_irqrestore(&c->vchan.lock, flags); in ldma_terminate_all()
1065 vchan_dma_desc_free_list(&c->vchan, &head); in ldma_terminate_all()
1067 return ldma_chan_reset(c); in ldma_terminate_all()
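
ldma_terminate_all() is fully visible apart from its signature and the list head: detach every queued descriptor under the lock, free them outside it, then hard-reset the channel:

static int ldma_terminate_all(struct dma_chan *chan)
{
    struct ldma_chan *c = to_ldma_chan(chan);
    unsigned long flags;
    LIST_HEAD(head);

    /* Steal all descriptors while holding the vchan lock... */
    spin_lock_irqsave(&c->vchan.lock, flags);
    vchan_get_all_descriptors(&c->vchan, &head);
    spin_unlock_irqrestore(&c->vchan.lock, flags);
    /* ...and free them once the lock is dropped */
    vchan_dma_desc_free_list(&c->vchan, &head);

    return ldma_chan_reset(c);
}
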
1072 struct ldma_chan *c = to_ldma_chan(chan); in ldma_resume_chan() local
1074 ldma_chan_on(c); in ldma_resume_chan()
1081 struct ldma_chan *c = to_ldma_chan(chan); in ldma_pause_chan() local
1083 return ldma_chan_off(c); in ldma_pause_chan()
1090 struct ldma_chan *c = to_ldma_chan(chan); in ldma_tx_status() local
1091 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_tx_status()
1102 struct ldma_chan *c = data; in dma_chan_irq() local
1103 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in dma_chan_irq()
1107 writel(c->nr, d->base + DMA_CS); in dma_chan_irq()
1114 queue_work(d->wq, &c->work); in dma_chan_irq()
1120 struct ldma_chan *c; in dma_interrupt() local
1136 c = &d->chans[cid]; in dma_interrupt()
1137 dma_chan_irq(irq, c); in dma_interrupt()
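
dma_interrupt() is the shared handler: it reads the pending-channel bitmap and dispatches each set bit to dma_chan_irq(), which (per lines 1102-1114) selects the channel window, latches the status, and defers completion to the work queue. A sketch; the IRNCR status register name and the mask step are assumptions, while d->chan_nrs and DMA_IRNEN are grounded by lines 1654 and 1010:

static irqreturn_t dma_interrupt(int irq, void *dev_id)
{
    struct ldma_dev *d = dev_id;
    struct ldma_chan *c;
    unsigned long irncr;
    u32 cid;

    irncr = readl(d->base + DMA_IRNCR);    /* status register name assumed */
    if (!irncr)
        return IRQ_NONE;

    /* Dispatch every channel whose status bit is set */
    for_each_set_bit(cid, &irncr, d->chan_nrs) {
        /* Mask the channel until its deferred work has run (assumed) */
        writel(readl(d->base + DMA_IRNEN) & ~BIT(cid), d->base + DMA_IRNEN);

        c = &d->chans[cid];
        dma_chan_irq(irq, c);
    }

    return IRQ_HANDLED;
}
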
1143 static void prep_slave_burst_len(struct ldma_chan *c) in prep_slave_burst_len() argument
1145 struct ldma_port *p = c->port; in prep_slave_burst_len()
1146 struct dma_slave_config *cfg = &c->config; in prep_slave_burst_len()
1161 struct ldma_chan *c = to_ldma_chan(chan); in ldma_prep_slave_sg() local
1162 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_prep_slave_sg()
1182 ds = dma_alloc_desc_resource(num, c); in ldma_prep_slave_sg()
1186 c->ds = ds; in ldma_prep_slave_sg()
1254 prep_slave_burst_len(c); in ldma_prep_slave_sg()
1256 return vchan_tx_prep(&c->vchan, &ds->vdesc, DMA_CTRL_ACK); in ldma_prep_slave_sg()
1262 struct ldma_chan *c = to_ldma_chan(chan); in ldma_slave_config() local
1264 memcpy(&c->config, cfg, sizeof(c->config)); in ldma_slave_config()
1271 struct ldma_chan *c = to_ldma_chan(chan); in ldma_alloc_chan_resources() local
1272 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_alloc_chan_resources()
1273 struct device *dev = c->vchan.chan.device->dev; in ldma_alloc_chan_resources()
1277 c->flags |= CHAN_IN_USE; in ldma_alloc_chan_resources()
1281 if (c->desc_pool) in ldma_alloc_chan_resources()
1282 return c->desc_num; in ldma_alloc_chan_resources()
1284 desc_sz = c->desc_num * sizeof(struct dw2_desc); in ldma_alloc_chan_resources()
1285 c->desc_pool = dma_pool_create(c->name, dev, desc_sz, in ldma_alloc_chan_resources()
1288 if (!c->desc_pool) { in ldma_alloc_chan_resources()
1293 return c->desc_num; in ldma_alloc_chan_resources()
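
ldma_alloc_chan_resources() creates a per-channel dma_pool sized for desc_num hardware descriptors (struct dw2_desc), from which dma_alloc_desc_resource() later zallocs descriptor chains; channels on newer hardware only mark themselves in use. A sketch; the version gate and the pool alignment/boundary arguments are assumptions:

static int ldma_alloc_chan_resources(struct dma_chan *chan)
{
    struct ldma_chan *c = to_ldma_chan(chan);
    struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);
    struct device *dev = c->vchan.chan.device->dev;
    size_t desc_sz;

    if (d->ver > DMA_VER22) {        /* version gate assumed */
        c->flags |= CHAN_IN_USE;
        return 0;
    }

    if (c->desc_pool)
        return c->desc_num;

    desc_sz = c->desc_num * sizeof(struct dw2_desc);
    c->desc_pool = dma_pool_create(c->name, dev, desc_sz,
                                   __alignof__(struct dw2_desc), 0);
    if (!c->desc_pool) {
        dev_err(dev, "unable to allocate descriptor pool\n");
        return -ENOMEM;
    }

    return c->desc_num;
}
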
1298 struct ldma_chan *c = to_ldma_chan(chan); in ldma_free_chan_resources() local
1299 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_free_chan_resources()
1302 dma_pool_destroy(c->desc_pool); in ldma_free_chan_resources()
1303 c->desc_pool = NULL; in ldma_free_chan_resources()
1305 ldma_chan_reset(c); in ldma_free_chan_resources()
1307 c->flags &= ~CHAN_IN_USE; in ldma_free_chan_resources()
1313 struct ldma_chan *c = container_of(work, struct ldma_chan, work); in dma_work() local
1314 struct dma_async_tx_descriptor *tx = &c->ds->vdesc.tx; in dma_work()
1315 struct virt_dma_chan *vc = &c->vchan; in dma_work()
1321 spin_lock_irqsave(&c->vchan.lock, flags); in dma_work()
1323 spin_unlock_irqrestore(&c->vchan.lock, flags); in dma_work()
1336 c->ds = NULL; in dma_work()
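
dma_work() is the deferred completion half queued by dma_chan_irq(): it splices the completed descriptors off the vchan under its lock, completes the cookie and runs the client callback outside the lock, then frees the descriptor memory. The body between lines 1315 and 1336 is largely elided; a sketch assuming the standard dmaengine callback helpers:

static void dma_work(struct work_struct *work)
{
    struct ldma_chan *c = container_of(work, struct ldma_chan, work);
    struct dma_async_tx_descriptor *tx = &c->ds->vdesc.tx;
    struct virt_dma_chan *vc = &c->vchan;
    struct dmaengine_desc_callback cb;
    struct virt_dma_desc *vd, *_vd;
    unsigned long flags;
    LIST_HEAD(head);

    /* Detach completed descriptors while holding the vchan lock */
    spin_lock_irqsave(&c->vchan.lock, flags);
    list_splice_tail_init(&vc->desc_completed, &head);
    spin_unlock_irqrestore(&c->vchan.lock, flags);

    /* Complete the cookie and notify the client outside the lock */
    dmaengine_desc_get_callback(tx, &cb);
    dma_cookie_complete(tx);
    dmaengine_desc_callback_invoke(&cb, NULL);

    list_for_each_entry_safe(vd, _vd, &head, node)
        dma_free_desc_resource(vd);

    c->ds = NULL;
}
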
1340 update_burst_len_v22(struct ldma_chan *c, struct ldma_port *p, u32 burst) in update_burst_len_v22() argument
1342 if (ldma_chan_tx(c)) in update_burst_len_v22()
1349 update_burst_len_v3X(struct ldma_chan *c, struct ldma_port *p, u32 burst) in update_burst_len_v3X() argument
1351 if (ldma_chan_tx(c)) in update_burst_len_v3X()
1365 struct ldma_chan *c; in update_client_configs() local
1371 c = &d->chans[chan_id]; in update_client_configs()
1372 c->port = p; in update_client_configs()
1375 update_burst_len_v22(c, p, burst); in update_client_configs()
1377 update_burst_len_v3X(c, p, burst); in update_client_configs()
1406 struct ldma_chan *c; in ldma_dma_init_v22() local
1408 c = &d->chans[i]; in ldma_dma_init_v22()
1409 c->nr = i; /* Real channel number */ in ldma_dma_init_v22()
1410 c->rst = DMA_CHAN_RST; in ldma_dma_init_v22()
1411 c->desc_num = DMA_DFT_DESC_NUM; in ldma_dma_init_v22()
1412 snprintf(c->name, sizeof(c->name), "chan%d", c->nr); in ldma_dma_init_v22()
1413 INIT_WORK(&c->work, dma_work); in ldma_dma_init_v22()
1414 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v22()
1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22()
1420 struct ldma_chan *c; in ldma_dma_init_v3X() local
1422 c = &d->chans[i]; in ldma_dma_init_v3X()
1423 c->data_endian = DMA_DFT_ENDIAN; in ldma_dma_init_v3X()
1424 c->desc_endian = DMA_DFT_ENDIAN; in ldma_dma_init_v3X()
1425 c->data_endian_en = false; in ldma_dma_init_v3X()
1426 c->desc_endian_en = false; in ldma_dma_init_v3X()
1427 c->desc_rx_np = false; in ldma_dma_init_v3X()
1428 c->flags |= DEVICE_ALLOC_DESC; in ldma_dma_init_v3X()
1429 c->onoff = DMA_CH_OFF; in ldma_dma_init_v3X()
1430 c->rst = DMA_CHAN_RST; in ldma_dma_init_v3X()
1431 c->abc_en = true; in ldma_dma_init_v3X()
1432 c->hdrm_csum = false; in ldma_dma_init_v3X()
1433 c->boff_len = 0; in ldma_dma_init_v3X()
1434 c->nr = i; in ldma_dma_init_v3X()
1435 c->vchan.desc_free = dma_free_desc_resource; in ldma_dma_init_v3X()
1436 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v3X()
1569 struct ldma_chan *c; in intel_ldma_probe() local
1654 d->chans = devm_kcalloc(d->dev, d->chan_nrs, sizeof(*c), GFP_KERNEL); in intel_ldma_probe()