
Lines matching refs: ioat_chan

72 static void ioat_eh(struct ioatdma_chan *ioat_chan);
74 static void ioat_print_chanerrs(struct ioatdma_chan *ioat_chan, u32 chanerr) in ioat_print_chanerrs() argument
80 dev_err(to_dev(ioat_chan), "Err(%d): %s\n", in ioat_print_chanerrs()
94 struct ioatdma_chan *ioat_chan; in ioat_dma_do_interrupt() local
111 ioat_chan = ioat_chan_by_index(instance, bit); in ioat_dma_do_interrupt()
112 if (test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_dma_do_interrupt()
113 tasklet_schedule(&ioat_chan->cleanup_task); in ioat_dma_do_interrupt()
127 struct ioatdma_chan *ioat_chan = data; in ioat_dma_do_interrupt_msix() local
129 if (test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_dma_do_interrupt_msix()
130 tasklet_schedule(&ioat_chan->cleanup_task); in ioat_dma_do_interrupt_msix()
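
The two handlers above differ only in fan-out: the legacy INTx/MSI path must consult an attention bitmap to learn which channels fired, while each MSI-X vector is registered with its channel as the handler data. A hedged reconstruction of both (IOAT_INTRCTRL_OFFSET, IOAT_ATTNSTATUS_OFFSET and ioat_chan_by_index() are driver names assumed from context, not shown in the fragments above):

    /* Sketch: legacy interrupt, fans out over the attention-status bits. */
    static irqreturn_t ioat_dma_do_interrupt(int irq, void *data)
    {
        struct ioatdma_device *instance = data;
        struct ioatdma_chan *ioat_chan;
        unsigned long attnstatus;
        u8 intrctrl;
        int bit;

        intrctrl = readb(instance->reg_base + IOAT_INTRCTRL_OFFSET);
        if (!(intrctrl & IOAT_INTRCTRL_MASTER_INT_EN))
            return IRQ_NONE; /* not ours */

        attnstatus = readl(instance->reg_base + IOAT_ATTNSTATUS_OFFSET);
        for_each_set_bit(bit, &attnstatus, BITS_PER_LONG) {
            ioat_chan = ioat_chan_by_index(instance, bit);
            if (test_bit(IOAT_RUN, &ioat_chan->state))
                tasklet_schedule(&ioat_chan->cleanup_task);
        }

        writeb(intrctrl, instance->reg_base + IOAT_INTRCTRL_OFFSET);
        return IRQ_HANDLED;
    }

    /* Sketch: MSI-X is already per channel, so no fan-out is needed. */
    static irqreturn_t ioat_dma_do_interrupt_msix(int irq, void *data)
    {
        struct ioatdma_chan *ioat_chan = data;

        if (test_bit(IOAT_RUN, &ioat_chan->state))
            tasklet_schedule(&ioat_chan->cleanup_task);
        return IRQ_HANDLED;
    }

In both paths the IOAT_RUN test keeps a channel that is being torn down from re-arming its cleanup tasklet; ioat_stop() below relies on exactly this ordering.
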
135 void ioat_stop(struct ioatdma_chan *ioat_chan) in ioat_stop() argument
137 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_stop()
139 int chan_id = chan_num(ioat_chan); in ioat_stop()
145 clear_bit(IOAT_RUN, &ioat_chan->state); in ioat_stop()
162 del_timer_sync(&ioat_chan->timer); in ioat_stop()
165 tasklet_kill(&ioat_chan->cleanup_task); in ioat_stop()
168 ioat_cleanup_event(&ioat_chan->cleanup_task); in ioat_stop()
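
The fragments show the teardown order that makes the IOAT_RUN test in the interrupt handlers safe: clear the bit first, then flush each source of deferred work in turn. A hedged sketch of the whole sequence; the synchronize_irq() step in the gap between lines 145 and 162 is inferred from the irq_mode/msix_entries fields of struct ioatdma_device and is an assumption, not a fragment:

    void ioat_stop(struct ioatdma_chan *ioat_chan)
    {
        struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
        struct pci_dev *pdev = ioat_dma->pdev;
        int chan_id = chan_num(ioat_chan);

        /* 1) stop the IRQ handlers from scheduling new tasklet runs */
        clear_bit(IOAT_RUN, &ioat_chan->state);

        /* 2) wait out any handler already running (assumed detail) */
        switch (ioat_dma->irq_mode) {
        case IOAT_MSIX:
            synchronize_irq(ioat_dma->msix_entries[chan_id].vector);
            break;
        case IOAT_MSI:
        case IOAT_INTX:
            synchronize_irq(pdev->irq);
            break;
        default:
            break;
        }

        /* 3) flush the in-flight timer */
        del_timer_sync(&ioat_chan->timer);

        /* 4) flush any tasklet run already scheduled */
        tasklet_kill(&ioat_chan->cleanup_task);

        /* 5) one final cleanup pass now that nothing can re-arm */
        ioat_cleanup_event(&ioat_chan->cleanup_task);
    }
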
171 static void __ioat_issue_pending(struct ioatdma_chan *ioat_chan) in __ioat_issue_pending() argument
173 ioat_chan->dmacount += ioat_ring_pending(ioat_chan); in __ioat_issue_pending()
174 ioat_chan->issued = ioat_chan->head; in __ioat_issue_pending()
175 writew(ioat_chan->dmacount, in __ioat_issue_pending()
176 ioat_chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET); in __ioat_issue_pending()
177 dev_dbg(to_dev(ioat_chan), in __ioat_issue_pending()
179 __func__, ioat_chan->head, ioat_chan->tail, in __ioat_issue_pending()
180 ioat_chan->issued, ioat_chan->dmacount); in __ioat_issue_pending()
185 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_issue_pending() local
187 if (ioat_ring_pending(ioat_chan)) { in ioat_issue_pending()
188 spin_lock_bh(&ioat_chan->prep_lock); in ioat_issue_pending()
189 __ioat_issue_pending(ioat_chan); in ioat_issue_pending()
190 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_issue_pending()
201 static void ioat_update_pending(struct ioatdma_chan *ioat_chan) in ioat_update_pending() argument
203 if (ioat_ring_pending(ioat_chan) > ioat_pending_level) in ioat_update_pending()
204 __ioat_issue_pending(ioat_chan); in ioat_update_pending()
207 static void __ioat_start_null_desc(struct ioatdma_chan *ioat_chan) in __ioat_start_null_desc() argument
212 if (ioat_ring_space(ioat_chan) < 1) { in __ioat_start_null_desc()
213 dev_err(to_dev(ioat_chan), in __ioat_start_null_desc()
218 dev_dbg(to_dev(ioat_chan), in __ioat_start_null_desc()
220 __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); in __ioat_start_null_desc()
221 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head); in __ioat_start_null_desc()
233 ioat_set_chainaddr(ioat_chan, desc->txd.phys); in __ioat_start_null_desc()
234 dump_desc_dbg(ioat_chan, desc); in __ioat_start_null_desc()
237 ioat_chan->head += 1; in __ioat_start_null_desc()
238 __ioat_issue_pending(ioat_chan); in __ioat_start_null_desc()
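
Between lines 221 and 233 the listing omits the hardware-descriptor fill. A hedged sketch of what that gap plausibly contains, based on the ioat descriptor layout (the hw field names are assumptions): the descriptor is marked as a NULL operation with interrupt and completion-write enabled, given a non-zero size, and a write barrier orders the fill against the head advance:

    hw = desc->hw;
    hw->ctl = 0;
    hw->ctl_f.null = 1;        /* NULL transfer: (re)starts the engine */
    hw->ctl_f.int_en = 1;      /* interrupt on completion */
    hw->ctl_f.compl_write = 1; /* DMA-write the completion address */
    hw->size = NULL_DESC_BUFFER_SIZE; /* size 0 would be rejected */
    hw->src_addr = 0;
    hw->dst_addr = 0;
    async_tx_ack(&desc->txd);  /* no client ever waits on this one */

    ioat_set_chainaddr(ioat_chan, desc->txd.phys);
    dump_desc_dbg(ioat_chan, desc);

    wmb(); /* descriptor must be visible to the device before issue */
    ioat_chan->head += 1;
    __ioat_issue_pending(ioat_chan);
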
241 void ioat_start_null_desc(struct ioatdma_chan *ioat_chan) in ioat_start_null_desc() argument
243 spin_lock_bh(&ioat_chan->prep_lock); in ioat_start_null_desc()
244 if (!test_bit(IOAT_CHAN_DOWN, &ioat_chan->state)) in ioat_start_null_desc()
245 __ioat_start_null_desc(ioat_chan); in ioat_start_null_desc()
246 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_start_null_desc()
249 static void __ioat_restart_chan(struct ioatdma_chan *ioat_chan) in __ioat_restart_chan() argument
252 ioat_chan->issued = ioat_chan->tail; in __ioat_restart_chan()
253 ioat_chan->dmacount = 0; in __ioat_restart_chan()
254 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in __ioat_restart_chan()
256 dev_dbg(to_dev(ioat_chan), in __ioat_restart_chan()
258 __func__, ioat_chan->head, ioat_chan->tail, in __ioat_restart_chan()
259 ioat_chan->issued, ioat_chan->dmacount); in __ioat_restart_chan()
261 if (ioat_ring_pending(ioat_chan)) { in __ioat_restart_chan()
264 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); in __ioat_restart_chan()
265 ioat_set_chainaddr(ioat_chan, desc->txd.phys); in __ioat_restart_chan()
266 __ioat_issue_pending(ioat_chan); in __ioat_restart_chan()
268 __ioat_start_null_desc(ioat_chan); in __ioat_restart_chan()
271 static int ioat_quiesce(struct ioatdma_chan *ioat_chan, unsigned long tmo) in ioat_quiesce() argument
277 status = ioat_chansts(ioat_chan); in ioat_quiesce()
279 ioat_suspend(ioat_chan); in ioat_quiesce()
285 status = ioat_chansts(ioat_chan); in ioat_quiesce()
292 static int ioat_reset_sync(struct ioatdma_chan *ioat_chan, unsigned long tmo) in ioat_reset_sync() argument
297 ioat_reset(ioat_chan); in ioat_reset_sync()
298 while (ioat_reset_pending(ioat_chan)) { in ioat_reset_sync()
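
Both helpers poll a hardware condition against a jiffies deadline; the fragments show only the register accessors. A hedged reconstruction of the loops, assuming the driver's usual convention that tmo == 0 means wait indefinitely (is_ioat_active()/is_ioat_idle() are driver predicates not visible in the fragments):

    static int ioat_quiesce(struct ioatdma_chan *ioat_chan, unsigned long tmo)
    {
        unsigned long end = jiffies + tmo;
        u64 status;
        int err = 0;

        status = ioat_chansts(ioat_chan);
        if (is_ioat_active(status) || is_ioat_idle(status))
            ioat_suspend(ioat_chan);
        while (is_ioat_active(status) || is_ioat_idle(status)) {
            if (tmo && time_after(jiffies, end)) {
                err = -ETIMEDOUT;
                break;
            }
            status = ioat_chansts(ioat_chan);
            cpu_relax();
        }
        return err;
    }

    static int ioat_reset_sync(struct ioatdma_chan *ioat_chan, unsigned long tmo)
    {
        unsigned long end = jiffies + tmo;
        int err = 0;

        ioat_reset(ioat_chan);
        while (ioat_reset_pending(ioat_chan)) {
            if (tmo && time_after(jiffies, end)) {
                err = -ETIMEDOUT;
                break;
            }
            cpu_relax();
        }
        return err;
    }
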
310 __releases(&ioat_chan->prep_lock) in ioat_tx_submit_unlock()
313 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_tx_submit_unlock() local
317 dev_dbg(to_dev(ioat_chan), "%s: cookie: %d\n", __func__, cookie); in ioat_tx_submit_unlock()
319 if (!test_and_set_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state)) in ioat_tx_submit_unlock()
320 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_tx_submit_unlock()
328 ioat_chan->head += ioat_chan->produce; in ioat_tx_submit_unlock()
330 ioat_update_pending(ioat_chan); in ioat_tx_submit_unlock()
331 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_tx_submit_unlock()
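
ioat_tx_submit_unlock() is the release half of a lock taken in ioat_check_space_lock() further down; the __releases() annotation at line 310 documents that asymmetry for sparse. A hedged sketch of the full submit path; the wmb() in the gap between lines 320 and 328 is an assumption, placed where the head advance publishes the descriptors to the device:

    static dma_cookie_t ioat_tx_submit_unlock(struct dma_async_tx_descriptor *tx)
        __releases(&ioat_chan->prep_lock)
    {
        struct dma_chan *c = tx->chan;
        struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
        dma_cookie_t cookie;

        cookie = dma_cookie_assign(tx);
        dev_dbg(to_dev(ioat_chan), "%s: cookie: %d\n", __func__, cookie);

        /* first submission on an idle channel arms the watchdog timer */
        if (!test_and_set_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state))
            mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);

        /* full barrier, not smp_wmb(): the descriptors are being
         * published to the DMA device, not merely to other CPUs */
        wmb();

        ioat_chan->head += ioat_chan->produce;

        ioat_update_pending(ioat_chan);
        spin_unlock_bh(&ioat_chan->prep_lock);

        return cookie;
    }
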
341 struct ioatdma_chan *ioat_chan = to_ioat_chan(chan); in ioat_alloc_ring_ent() local
350 pos = (u8 *)ioat_chan->descs[chunk].virt + offs; in ioat_alloc_ring_ent()
351 phys = ioat_chan->descs[chunk].hw + offs; in ioat_alloc_ring_ent()
374 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_alloc_ring() local
375 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_alloc_ring()
386 ioat_chan->desc_chunks = chunks; in ioat_alloc_ring()
389 struct ioat_descs *descs = &ioat_chan->descs[i]; in ioat_alloc_ring()
391 descs->virt = dma_alloc_coherent(to_dev(ioat_chan), in ioat_alloc_ring()
397 descs = &ioat_chan->descs[idx]; in ioat_alloc_ring()
398 dma_free_coherent(to_dev(ioat_chan), in ioat_alloc_ring()
405 ioat_chan->desc_chunks = 0; in ioat_alloc_ring()
419 for (idx = 0; idx < ioat_chan->desc_chunks; idx++) { in ioat_alloc_ring()
420 dma_free_coherent(to_dev(ioat_chan), in ioat_alloc_ring()
422 ioat_chan->descs[idx].virt, in ioat_alloc_ring()
423 ioat_chan->descs[idx].hw); in ioat_alloc_ring()
424 ioat_chan->descs[idx].virt = NULL; in ioat_alloc_ring()
425 ioat_chan->descs[idx].hw = 0; in ioat_alloc_ring()
428 ioat_chan->desc_chunks = 0; in ioat_alloc_ring()
451 writew(drsctl, ioat_chan->reg_base + IOAT_CHAN_DRSCTL_OFFSET); in ioat_alloc_ring()
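
The allocation fragments show two unwind paths: lines 397-405 roll back the chunks allocated so far when dma_alloc_coherent() fails mid-loop, and lines 419-428 free every chunk on a later failure. A hedged sketch of the chunked allocation loop itself (IOAT_CHUNK_SIZE and the ring array are assumptions from context; the carving of individual ring entries out of the chunks is left out):

    /* carve the descriptor ring out of desc_chunks coherent chunks */
    for (i = 0; i < chunks; i++) {
        struct ioat_descs *descs = &ioat_chan->descs[i];

        descs->virt = dma_alloc_coherent(to_dev(ioat_chan),
                                         IOAT_CHUNK_SIZE,
                                         &descs->hw, flags);
        if (!descs->virt) {
            int idx;

            /* unwind: free only the chunks already allocated */
            for (idx = 0; idx < i; idx++) {
                descs = &ioat_chan->descs[idx];
                dma_free_coherent(to_dev(ioat_chan), IOAT_CHUNK_SIZE,
                                  descs->virt, descs->hw);
                descs->virt = NULL;
                descs->hw = 0;
            }
            ioat_chan->desc_chunks = 0;
            kfree(ring); /* assumed: the software ring array */
            return NULL;
        }
    }
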
463 int ioat_check_space_lock(struct ioatdma_chan *ioat_chan, int num_descs) in ioat_check_space_lock() argument
464 __acquires(&ioat_chan->prep_lock) in ioat_check_space_lock()
466 spin_lock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
471 if (likely(ioat_ring_space(ioat_chan) > num_descs)) { in ioat_check_space_lock()
472 dev_dbg(to_dev(ioat_chan), "%s: num_descs: %d (%x:%x:%x)\n", in ioat_check_space_lock()
473 __func__, num_descs, ioat_chan->head, in ioat_check_space_lock()
474 ioat_chan->tail, ioat_chan->issued); in ioat_check_space_lock()
475 ioat_chan->produce = num_descs; in ioat_check_space_lock()
478 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
480 dev_dbg_ratelimited(to_dev(ioat_chan), in ioat_check_space_lock()
482 __func__, num_descs, ioat_chan->head, in ioat_check_space_lock()
483 ioat_chan->tail, ioat_chan->issued); in ioat_check_space_lock()
489 if (time_is_before_jiffies(ioat_chan->timer.expires) in ioat_check_space_lock()
490 && timer_pending(&ioat_chan->timer)) { in ioat_check_space_lock()
491 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_check_space_lock()
492 ioat_timer_event(&ioat_chan->timer); in ioat_check_space_lock()
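
ioat_check_space_lock() is the acquire half of the prep_lock protocol: on success it returns 0 with the lock still held, and ioat_tx_submit_unlock() above drops it once the descriptors are written. A hedged reconstruction; the strict `>` comparison presumably keeps at least one ring slot free, and the closing timer kick forces a reclaim pass even when the caller runs with bottom halves disabled (debug prints elided):

    int ioat_check_space_lock(struct ioatdma_chan *ioat_chan, int num_descs)
        __acquires(&ioat_chan->prep_lock)
    {
        spin_lock_bh(&ioat_chan->prep_lock);
        /* strictly greater-than: never hand out the last slot */
        if (likely(ioat_ring_space(ioat_chan) > num_descs)) {
            ioat_chan->produce = num_descs;
            return 0; /* returns with prep_lock held */
        }
        spin_unlock_bh(&ioat_chan->prep_lock);

        /* ring full: run the timer handler directly so descriptor
         * reclaim makes progress even in bh-disabled context */
        if (time_is_before_jiffies(ioat_chan->timer.expires)
            && timer_pending(&ioat_chan->timer)) {
            mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
            ioat_timer_event(&ioat_chan->timer);
        }

        return -ENOMEM;
    }
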
529 static u64 ioat_get_current_completion(struct ioatdma_chan *ioat_chan) in ioat_get_current_completion() argument
534 completion = *ioat_chan->completion; in ioat_get_current_completion()
537 dev_dbg(to_dev(ioat_chan), "%s: phys_complete: %#llx\n", __func__, in ioat_get_current_completion()
543 static bool ioat_cleanup_preamble(struct ioatdma_chan *ioat_chan, in ioat_cleanup_preamble() argument
546 *phys_complete = ioat_get_current_completion(ioat_chan); in ioat_cleanup_preamble()
547 if (*phys_complete == ioat_chan->last_completion) in ioat_cleanup_preamble()
550 clear_bit(IOAT_COMPLETION_ACK, &ioat_chan->state); in ioat_cleanup_preamble()
551 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_cleanup_preamble()
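
Progress detection is a simple comparison: the device DMA-writes its last completed descriptor address into *ioat_chan->completion, and cleanup proceeds only when that value has moved past last_completion. A hedged sketch of the pair; ioat_chansts_to_addr(), which masks the status bits out of the raw completion word, is assumed from the driver headers:

    static u64 ioat_get_current_completion(struct ioatdma_chan *ioat_chan)
    {
        u64 phys_complete;
        u64 completion;

        completion = *ioat_chan->completion; /* DMA-written by the device */
        phys_complete = ioat_chansts_to_addr(completion);

        dev_dbg(to_dev(ioat_chan), "%s: phys_complete: %#llx\n", __func__,
                (unsigned long long)phys_complete);

        return phys_complete;
    }

    static bool ioat_cleanup_preamble(struct ioatdma_chan *ioat_chan,
                                      u64 *phys_complete)
    {
        *phys_complete = ioat_get_current_completion(ioat_chan);
        if (*phys_complete == ioat_chan->last_completion)
            return false; /* no forward progress since the last pass */

        clear_bit(IOAT_COMPLETION_ACK, &ioat_chan->state);
        mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);

        return true;
    }
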
557 desc_get_errstat(struct ioatdma_chan *ioat_chan, struct ioat_ring_ent *desc) in desc_get_errstat() argument
591 static void __cleanup(struct ioatdma_chan *ioat_chan, dma_addr_t phys_complete) in __cleanup() argument
593 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in __cleanup()
596 int idx = ioat_chan->tail, i; in __cleanup()
599 dev_dbg(to_dev(ioat_chan), "%s: head: %#x tail: %#x issued: %#x\n", in __cleanup()
600 __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); in __cleanup()
612 active = ioat_ring_active(ioat_chan); in __cleanup()
616 prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1)); in __cleanup()
617 desc = ioat_get_ring_ent(ioat_chan, idx + i); in __cleanup()
618 dump_desc_dbg(ioat_chan, desc); in __cleanup()
622 desc_get_errstat(ioat_chan, desc); in __cleanup()
651 ioat_chan->tail = idx + i; in __cleanup()
654 ioat_chan->last_completion = phys_complete; in __cleanup()
657 dev_dbg(to_dev(ioat_chan), "%s: cancel completion timeout\n", in __cleanup()
659 mod_timer_pending(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in __cleanup()
663 if (ioat_chan->intr_coalesce != ioat_chan->prev_intr_coalesce) { in __cleanup()
664 writew(min((ioat_chan->intr_coalesce * (active - i)), in __cleanup()
666 ioat_chan->ioat_dma->reg_base + IOAT_INTRDELAY_OFFSET); in __cleanup()
667 ioat_chan->prev_intr_coalesce = ioat_chan->intr_coalesce; in __cleanup()
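
__cleanup() walks the ring from tail toward head, completing descriptors until it passes the one whose physical address matches phys_complete. A hedged, condensed sketch of the loop body the listing elides (seen_current, the DWBES capability test, and the dmaengine callback helpers are assumptions from context):

    bool seen_current = false;

    for (i = 0; i < active && !seen_current; i++) {
        struct dma_async_tx_descriptor *tx;

        prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1));
        desc = ioat_get_ring_ent(ioat_chan, idx + i);
        dump_desc_dbg(ioat_chan, desc);

        /* capture per-descriptor error status where supported */
        if (ioat_dma->cap & IOAT_CAP_DWBES)
            desc_get_errstat(ioat_chan, desc);

        tx = &desc->txd;
        if (tx->cookie) {
            dma_cookie_complete(tx);
            dma_descriptor_unmap(tx);
            dmaengine_desc_get_callback_invoke(tx, NULL);
            tx->callback = NULL;
        }

        /* stop once we pass the last descriptor the device completed */
        if (tx->phys == phys_complete)
            seen_current = true;
    }

    ioat_chan->tail = idx + i;
    ioat_chan->last_completion = phys_complete;

The interrupt-coalescing write at lines 663-667 then retunes IOAT_INTRDELAY_OFFSET according to how much work was still outstanding.
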
671 static void ioat_cleanup(struct ioatdma_chan *ioat_chan) in ioat_cleanup() argument
675 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_cleanup()
677 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_cleanup()
678 __cleanup(ioat_chan, phys_complete); in ioat_cleanup()
680 if (is_ioat_halted(*ioat_chan->completion)) { in ioat_cleanup()
681 u32 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_cleanup()
685 mod_timer_pending(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in ioat_cleanup()
686 ioat_eh(ioat_chan); in ioat_cleanup()
690 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_cleanup()
695 struct ioatdma_chan *ioat_chan = from_tasklet(ioat_chan, t, cleanup_task); in ioat_cleanup_event() local
697 ioat_cleanup(ioat_chan); in ioat_cleanup_event()
698 if (!test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_cleanup_event()
700 writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET); in ioat_cleanup_event()
703 static void ioat_restart_channel(struct ioatdma_chan *ioat_chan) in ioat_restart_channel() argument
708 writel(lower_32_bits(ioat_chan->completion_dma), in ioat_restart_channel()
709 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW); in ioat_restart_channel()
710 writel(upper_32_bits(ioat_chan->completion_dma), in ioat_restart_channel()
711 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH); in ioat_restart_channel()
713 ioat_quiesce(ioat_chan, 0); in ioat_restart_channel()
714 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_restart_channel()
715 __cleanup(ioat_chan, phys_complete); in ioat_restart_channel()
717 __ioat_restart_chan(ioat_chan); in ioat_restart_channel()
721 static void ioat_abort_descs(struct ioatdma_chan *ioat_chan) in ioat_abort_descs() argument
723 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_abort_descs()
726 int idx = ioat_chan->tail, i; in ioat_abort_descs()
733 active = ioat_ring_active(ioat_chan); in ioat_abort_descs()
739 prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1)); in ioat_abort_descs()
740 desc = ioat_get_ring_ent(ioat_chan, idx + i); in ioat_abort_descs()
768 ioat_chan->tail = idx + active; in ioat_abort_descs()
770 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); in ioat_abort_descs()
771 ioat_chan->last_completion = *ioat_chan->completion = desc->txd.phys; in ioat_abort_descs()
774 static void ioat_eh(struct ioatdma_chan *ioat_chan) in ioat_eh() argument
776 struct pci_dev *pdev = to_pdev(ioat_chan); in ioat_eh()
788 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_eh()
789 __cleanup(ioat_chan, phys_complete); in ioat_eh()
791 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_eh()
794 dev_dbg(to_dev(ioat_chan), "%s: error = %x:%x\n", in ioat_eh()
797 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); in ioat_eh()
799 dump_desc_dbg(ioat_chan, desc); in ioat_eh()
836 dev_err(to_dev(ioat_chan), "%s: fatal error (%x:%x)\n", in ioat_eh()
838 dev_err(to_dev(ioat_chan), "Errors handled:\n"); in ioat_eh()
839 ioat_print_chanerrs(ioat_chan, err_handled); in ioat_eh()
840 dev_err(to_dev(ioat_chan), "Errors not handled:\n"); in ioat_eh()
841 ioat_print_chanerrs(ioat_chan, (chanerr & ~err_handled)); in ioat_eh()
857 *ioat_chan->completion = desc->txd.phys; in ioat_eh()
859 spin_lock_bh(&ioat_chan->prep_lock); in ioat_eh()
862 ioat_abort_descs(ioat_chan); in ioat_eh()
864 ioat_reset_hw(ioat_chan); in ioat_eh()
867 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_eh()
870 ioat_restart_channel(ioat_chan); in ioat_eh()
871 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_eh()
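
ioat_eh() triages the CHANERR bits: conditions it can absorb (for example, XOR/CRC validate failures that are really data-integrity outcomes, reported back through desc->result) accumulate in err_handled, and any remaining bit is treated as fatal, triggering the abort-and-reset path visible at lines 859-871. A hedged skeleton of the elided triage (operation and flag names are assumptions modeled on the XOR-validate case):

    switch (hw->ctl_f.op) {
    case IOAT_OP_XOR_VAL:
        if (chanerr & IOAT_CHANERR_XOR_P_OR_CRC_ERR) {
            *desc->result |= SUM_CHECK_P_RESULT;
            err_handled |= IOAT_CHANERR_XOR_P_OR_CRC_ERR;
        }
        break;
    /* ... analogous cases for the PQ validate operations ... */
    }

    if (chanerr ^ err_handled) {
        /* some error bit was not absorbed: fatal, abort and reset */
        abort = true;
    }
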
874 static void check_active(struct ioatdma_chan *ioat_chan) in check_active() argument
876 if (ioat_ring_active(ioat_chan)) { in check_active()
877 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in check_active()
881 if (test_and_clear_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state)) in check_active()
882 mod_timer_pending(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in check_active()
885 static void ioat_reboot_chan(struct ioatdma_chan *ioat_chan) in ioat_reboot_chan() argument
887 spin_lock_bh(&ioat_chan->prep_lock); in ioat_reboot_chan()
888 set_bit(IOAT_CHAN_DOWN, &ioat_chan->state); in ioat_reboot_chan()
889 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_reboot_chan()
891 ioat_abort_descs(ioat_chan); in ioat_reboot_chan()
892 dev_warn(to_dev(ioat_chan), "Reset channel...\n"); in ioat_reboot_chan()
893 ioat_reset_hw(ioat_chan); in ioat_reboot_chan()
894 dev_warn(to_dev(ioat_chan), "Restart channel...\n"); in ioat_reboot_chan()
895 ioat_restart_channel(ioat_chan); in ioat_reboot_chan()
897 spin_lock_bh(&ioat_chan->prep_lock); in ioat_reboot_chan()
898 clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state); in ioat_reboot_chan()
899 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_reboot_chan()
904 struct ioatdma_chan *ioat_chan = from_timer(ioat_chan, t, timer); in ioat_timer_event() local
908 status = ioat_chansts(ioat_chan); in ioat_timer_event()
916 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_timer_event()
917 dev_err(to_dev(ioat_chan), "%s: Channel halted (%x)\n", in ioat_timer_event()
919 dev_err(to_dev(ioat_chan), "Errors:\n"); in ioat_timer_event()
920 ioat_print_chanerrs(ioat_chan, chanerr); in ioat_timer_event()
922 if (test_bit(IOAT_RUN, &ioat_chan->state)) { in ioat_timer_event()
923 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
924 ioat_reboot_chan(ioat_chan); in ioat_timer_event()
925 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
931 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
934 if (!ioat_ring_active(ioat_chan)) { in ioat_timer_event()
935 spin_lock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
936 check_active(ioat_chan); in ioat_timer_event()
937 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
942 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) { in ioat_timer_event()
946 __cleanup(ioat_chan, phys_complete); in ioat_timer_event()
954 if (test_bit(IOAT_COMPLETION_ACK, &ioat_chan->state)) { in ioat_timer_event()
957 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_timer_event()
958 dev_err(to_dev(ioat_chan), "CHANSTS: %#Lx CHANERR: %#x\n", in ioat_timer_event()
960 dev_err(to_dev(ioat_chan), "Errors:\n"); in ioat_timer_event()
961 ioat_print_chanerrs(ioat_chan, chanerr); in ioat_timer_event()
963 dev_dbg(to_dev(ioat_chan), "Active descriptors: %d\n", in ioat_timer_event()
964 ioat_ring_active(ioat_chan)); in ioat_timer_event()
966 ioat_reboot_chan(ioat_chan); in ioat_timer_event()
972 if (ioat_ring_pending(ioat_chan)) { in ioat_timer_event()
973 dev_warn(to_dev(ioat_chan), in ioat_timer_event()
975 spin_lock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
976 __ioat_issue_pending(ioat_chan); in ioat_timer_event()
977 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
980 set_bit(IOAT_COMPLETION_ACK, &ioat_chan->state); in ioat_timer_event()
981 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_timer_event()
983 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
990 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_tx_status() local
997 ioat_cleanup(ioat_chan); in ioat_tx_status()
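
ioat_tx_status() follows the standard dmaengine polling pattern: check the cookie, and only when it is not yet complete, run a cleanup pass and check again. A hedged reconstruction:

    static enum dma_status
    ioat_tx_status(struct dma_chan *c, dma_cookie_t cookie,
                   struct dma_tx_state *txstate)
    {
        struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
        enum dma_status ret;

        ret = dma_cookie_status(c, cookie, txstate);
        if (ret == DMA_COMPLETE)
            return ret;

        /* not complete yet: reap finished descriptors and re-check */
        ioat_cleanup(ioat_chan);

        return dma_cookie_status(c, cookie, txstate);
    }
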
1002 int ioat_reset_hw(struct ioatdma_chan *ioat_chan) in ioat_reset_hw() argument
1007 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_reset_hw()
1013 ioat_quiesce(ioat_chan, msecs_to_jiffies(100)); in ioat_reset_hw()
1015 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_reset_hw()
1016 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_reset_hw()
1048 err = ioat_reset_sync(ioat_chan, msecs_to_jiffies(200)); in ioat_reset_hw()