
Lines matching refs:txq (identifier cross-reference, Intel iwlwifi TX queue code)

20 struct iwl_txq *txq, u16 byte_cnt, in iwl_pcie_gen2_update_byte_tbl() argument
23 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_pcie_gen2_update_byte_tbl()
28 if (WARN(idx >= txq->n_window, "%d >= %d\n", idx, txq->n_window)) in iwl_pcie_gen2_update_byte_tbl()
44 struct iwl_gen3_bc_tbl *scd_bc_tbl_gen3 = txq->bc_tbl.addr; in iwl_pcie_gen2_update_byte_tbl()
52 struct iwlagn_scd_bc_tbl *scd_bc_tbl = txq->bc_tbl.addr; in iwl_pcie_gen2_update_byte_tbl()
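The matches at 44 and 52 are the two arms of a device-generation dispatch: newer hardware uses the gen3 byte-count table layout, older gen2 devices the agn-era one. A minimal sketch of that branch; the tfd_offset field and the IWL_DEVICE_FAMILY_AX210 cutoff are assumptions from the surrounding driver:

if (trans->trans_cfg->device_family >= IWL_DEVICE_FAMILY_AX210) {
	/* newer devices: byte count goes into the gen3 table layout */
	struct iwl_gen3_bc_tbl *scd_bc_tbl_gen3 = txq->bc_tbl.addr;

	scd_bc_tbl_gen3->tfd_offset[idx] = bc_ent;
} else {
	struct iwlagn_scd_bc_tbl *scd_bc_tbl = txq->bc_tbl.addr;

	scd_bc_tbl->tfd_offset[idx] = bc_ent;
}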
66 void iwl_txq_inc_wr_ptr(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_inc_wr_ptr() argument
68 lockdep_assert_held(&txq->lock); in iwl_txq_inc_wr_ptr()
70 IWL_DEBUG_TX(trans, "Q:%d WR: 0x%x\n", txq->id, txq->write_ptr); in iwl_txq_inc_wr_ptr()
76 iwl_write32(trans, HBUS_TARG_WRPTR, txq->write_ptr | (txq->id << 16)); in iwl_txq_inc_wr_ptr()
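Lines 66-76 give the write-pointer protocol in full: the shadow pointer advances under txq->lock, then the hardware register mirrors it with the queue id packed into the upper 16 bits. A caller-side sketch, assuming the usual pairing with iwl_txq_inc_wrap():

/* Sketch: bump the software pointer, then tell the hardware. */
lockdep_assert_held(&txq->lock);
txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
iwl_write32(trans, HBUS_TARG_WRPTR, txq->write_ptr | (txq->id << 16));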
115 void iwl_txq_gen2_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_gen2_free_tfd() argument
120 int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_gen2_free_tfd()
123 lockdep_assert_held(&txq->lock); in iwl_txq_gen2_free_tfd()
125 if (!txq->entries) in iwl_txq_gen2_free_tfd()
128 iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta, in iwl_txq_gen2_free_tfd()
129 iwl_txq_get_tfd(trans, txq, idx)); in iwl_txq_gen2_free_tfd()
131 skb = txq->entries[idx].skb; in iwl_txq_gen2_free_tfd()
139 txq->entries[idx].skb = NULL; in iwl_txq_gen2_free_tfd()
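The free path at 115-139 runs strictly under txq->lock: translate read_ptr into a window index, DMA-unmap the TFD, then clear the skb slot so the frame can only be handed back once. A condensed sketch (the skb consumption between 131 and 139 is elided in the matches):

int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr);

lockdep_assert_held(&txq->lock);
if (!txq->entries)
	return;
iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta,
		       iwl_txq_get_tfd(trans, txq, idx));
skb = txq->entries[idx].skb;	/* handed off to the skb free path */
txq->entries[idx].skb = NULL;	/* slot must never be freed twice */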
454 struct iwl_txq *txq, in iwl_txq_gen2_build_tx_amsdu() argument
461 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tx_amsdu()
462 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx_amsdu()
467 tb_phys = iwl_txq_get_first_tb_dma(txq, idx); in iwl_txq_gen2_build_tx_amsdu()
503 memcpy(&txq->first_tb_bufs[idx], dev_cmd, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx_amsdu()
541 struct iwl_txq *txq, in iwl_txq_gen2_build_tx() argument
549 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tx()
550 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx()
556 tb_phys = iwl_txq_get_first_tb_dma(txq, idx); in iwl_txq_gen2_build_tx()
559 memcpy(&txq->first_tb_bufs[idx], dev_cmd, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx()
636 struct iwl_txq *txq, in iwl_txq_gen2_build_tfd() argument
642 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tfd()
643 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tfd()
669 return iwl_txq_gen2_build_tx_amsdu(trans, txq, dev_cmd, skb, in iwl_txq_gen2_build_tfd()
671 return iwl_txq_gen2_build_tx(trans, txq, dev_cmd, skb, out_meta, in iwl_txq_gen2_build_tfd()
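Lines 669-671 show iwl_txq_gen2_build_tfd() ending as a thin dispatcher over the two builders above. A sketch of that tail; the amsdu predicate and the trailing arguments are assumptions from context:

/* Sketch: A-MSDU frames take the segmenting builder, everything
 * else the plain one. */
if (amsdu)
	return iwl_txq_gen2_build_tx_amsdu(trans, txq, dev_cmd, skb,
					   out_meta, hdr_len, len);
return iwl_txq_gen2_build_tx(trans, txq, dev_cmd, skb, out_meta,
			     hdr_len, len, !amsdu);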
708 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_tx() local
726 spin_lock(&txq->lock); in iwl_txq_gen2_tx()
728 if (iwl_txq_space(trans, txq) < txq->high_mark) { in iwl_txq_gen2_tx()
729 iwl_txq_stop(trans, txq); in iwl_txq_gen2_tx()
732 if (unlikely(iwl_txq_space(trans, txq) < 3)) { in iwl_txq_gen2_tx()
739 __skb_queue_tail(&txq->overflow_q, skb); in iwl_txq_gen2_tx()
740 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
745 idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_tx()
748 txq->entries[idx].skb = skb; in iwl_txq_gen2_tx()
749 txq->entries[idx].cmd = dev_cmd; in iwl_txq_gen2_tx()
756 out_meta = &txq->entries[idx].meta; in iwl_txq_gen2_tx()
759 tfd = iwl_txq_gen2_build_tfd(trans, txq, dev_cmd, skb, out_meta); in iwl_txq_gen2_tx()
761 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
778 iwl_pcie_gen2_update_byte_tbl(trans, txq, cmd_len, in iwl_txq_gen2_tx()
782 if (txq->read_ptr == txq->write_ptr && txq->wd_timeout) in iwl_txq_gen2_tx()
783 mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout); in iwl_txq_gen2_tx()
786 txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr); in iwl_txq_gen2_tx()
787 iwl_txq_inc_wr_ptr(trans, txq); in iwl_txq_gen2_tx()
792 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
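Read in order, the iwl_txq_gen2_tx() matches trace the whole enqueue path. A condensed sketch of the function body; error handling, the cmd_len computation, and the byte-count arguments between 759 and 778 are filled in by assumption:

spin_lock(&txq->lock);
if (iwl_txq_space(trans, txq) < txq->high_mark) {
	iwl_txq_stop(trans, txq);
	/* almost full: park the frame instead of dropping it */
	if (unlikely(iwl_txq_space(trans, txq) < 3)) {
		__skb_queue_tail(&txq->overflow_q, skb);
		spin_unlock(&txq->lock);
		return 0;
	}
}
idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);
txq->entries[idx].skb = skb;
txq->entries[idx].cmd = dev_cmd;
out_meta = &txq->entries[idx].meta;
tfd = iwl_txq_gen2_build_tfd(trans, txq, dev_cmd, skb, out_meta);
iwl_pcie_gen2_update_byte_tbl(trans, txq, cmd_len,
			      le16_to_cpu(tfd->num_tbs)); /* field assumed */
/* arm the watchdog only when the queue goes non-empty */
if (txq->read_ptr == txq->write_ptr && txq->wd_timeout)
	mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);
txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
iwl_txq_inc_wr_ptr(trans, txq);	/* hardware sees the frame last */
spin_unlock(&txq->lock);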
803 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_unmap() local
805 spin_lock_bh(&txq->lock); in iwl_txq_gen2_unmap()
806 while (txq->write_ptr != txq->read_ptr) { in iwl_txq_gen2_unmap()
808 txq_id, txq->read_ptr); in iwl_txq_gen2_unmap()
811 int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_gen2_unmap()
812 struct sk_buff *skb = txq->entries[idx].skb; in iwl_txq_gen2_unmap()
817 iwl_txq_gen2_free_tfd(trans, txq); in iwl_txq_gen2_unmap()
818 txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr); in iwl_txq_gen2_unmap()
821 while (!skb_queue_empty(&txq->overflow_q)) { in iwl_txq_gen2_unmap()
822 struct sk_buff *skb = __skb_dequeue(&txq->overflow_q); in iwl_txq_gen2_unmap()
827 spin_unlock_bh(&txq->lock); in iwl_txq_gen2_unmap()
830 iwl_wake_queue(trans, txq); in iwl_txq_gen2_unmap()
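The unmap loop drains TFDs from read_ptr up to write_ptr, then disposes of anything still parked on the overflow queue before waking the upper layer. Sketch; the skb disposal helper is an assumption:

spin_lock_bh(&txq->lock);
while (txq->write_ptr != txq->read_ptr) {
	iwl_txq_gen2_free_tfd(trans, txq);
	txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr);
}
while (!skb_queue_empty(&txq->overflow_q)) {
	struct sk_buff *skb = __skb_dequeue(&txq->overflow_q);

	iwl_op_mode_free_skb(trans->op_mode, skb);	/* assumed helper */
}
spin_unlock_bh(&txq->lock);
iwl_wake_queue(trans, txq);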
834 struct iwl_txq *txq) in iwl_txq_gen2_free_memory() argument
839 if (txq->tfds) { in iwl_txq_gen2_free_memory()
841 trans->txqs.tfd.size * txq->n_window, in iwl_txq_gen2_free_memory()
842 txq->tfds, txq->dma_addr); in iwl_txq_gen2_free_memory()
844 sizeof(*txq->first_tb_bufs) * txq->n_window, in iwl_txq_gen2_free_memory()
845 txq->first_tb_bufs, txq->first_tb_dma); in iwl_txq_gen2_free_memory()
848 kfree(txq->entries); in iwl_txq_gen2_free_memory()
849 if (txq->bc_tbl.addr) in iwl_txq_gen2_free_memory()
851 txq->bc_tbl.addr, txq->bc_tbl.dma); in iwl_txq_gen2_free_memory()
852 kfree(txq); in iwl_txq_gen2_free_memory()
865 struct iwl_txq *txq; in iwl_txq_gen2_free() local
872 txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_free()
874 if (WARN_ON(!txq)) in iwl_txq_gen2_free()
881 for (i = 0; i < txq->n_window; i++) { in iwl_txq_gen2_free()
882 kfree_sensitive(txq->entries[i].cmd); in iwl_txq_gen2_free()
883 kfree_sensitive(txq->entries[i].free_buf); in iwl_txq_gen2_free()
885 del_timer_sync(&txq->stuck_timer); in iwl_txq_gen2_free()
887 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_gen2_free()
889 trans->txqs.txq[txq_id] = NULL; in iwl_txq_gen2_free()
920 int iwl_txq_init(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num, in iwl_txq_init() argument
927 txq->need_update = false; in iwl_txq_init()
937 ret = iwl_queue_init(txq, slots_num); in iwl_txq_init()
941 spin_lock_init(&txq->lock); in iwl_txq_init()
946 lockdep_set_class(&txq->lock, &iwl_txq_cmd_queue_lock_class); in iwl_txq_init()
949 __skb_queue_head_init(&txq->overflow_q); in iwl_txq_init()
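Lines 941-946 carry a detail worth spelling out: the command queue's lock gets its own lockdep class, so holding it while a data queue's lock is taken does not look like recursive locking. A sketch of that init sequence, assuming the key is declared locally:

spin_lock_init(&txq->lock);
if (cmd_queue) {
	static struct lock_class_key iwl_txq_cmd_queue_lock_class;

	lockdep_set_class(&txq->lock, &iwl_txq_cmd_queue_lock_class);
}
__skb_queue_head_init(&txq->overflow_q);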
972 void iwl_txq_log_scd_error(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_log_scd_error() argument
974 u32 txq_id = txq->id; in iwl_txq_log_scd_error()
981 txq->read_ptr, txq->write_ptr); in iwl_txq_log_scd_error()
993 jiffies_to_msecs(txq->wd_timeout), in iwl_txq_log_scd_error()
994 txq->read_ptr, txq->write_ptr, in iwl_txq_log_scd_error()
1004 struct iwl_txq *txq = from_timer(txq, t, stuck_timer); in iwl_txq_stuck_timer() local
1005 struct iwl_trans *trans = txq->trans; in iwl_txq_stuck_timer()
1007 spin_lock(&txq->lock); in iwl_txq_stuck_timer()
1009 if (txq->read_ptr == txq->write_ptr) { in iwl_txq_stuck_timer()
1010 spin_unlock(&txq->lock); in iwl_txq_stuck_timer()
1013 spin_unlock(&txq->lock); in iwl_txq_stuck_timer()
1015 iwl_txq_log_scd_error(trans, txq); in iwl_txq_stuck_timer()
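The stuck-timer callback is a textbook from_timer() pattern: recover the containing iwl_txq, check progress under the lock, and log only if the queue is genuinely wedged. Reassembled from the matches above:

static void iwl_txq_stuck_timer(struct timer_list *t)
{
	struct iwl_txq *txq = from_timer(txq, t, stuck_timer);
	struct iwl_trans *trans = txq->trans;

	spin_lock(&txq->lock);
	if (txq->read_ptr == txq->write_ptr) {
		/* queue drained since the timer was armed */
		spin_unlock(&txq->lock);
		return;
	}
	spin_unlock(&txq->lock);
	iwl_txq_log_scd_error(trans, txq);
}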
1020 int iwl_txq_alloc(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num, in iwl_txq_alloc() argument
1028 if (WARN_ON(txq->entries || txq->tfds)) in iwl_txq_alloc()
1034 timer_setup(&txq->stuck_timer, iwl_txq_stuck_timer, 0); in iwl_txq_alloc()
1035 txq->trans = trans; in iwl_txq_alloc()
1037 txq->n_window = slots_num; in iwl_txq_alloc()
1039 txq->entries = kcalloc(slots_num, in iwl_txq_alloc()
1043 if (!txq->entries) in iwl_txq_alloc()
1048 txq->entries[i].cmd = in iwl_txq_alloc()
1051 if (!txq->entries[i].cmd) in iwl_txq_alloc()
1057 txq->tfds = dma_alloc_coherent(trans->dev, tfd_sz, in iwl_txq_alloc()
1058 &txq->dma_addr, GFP_KERNEL); in iwl_txq_alloc()
1059 if (!txq->tfds) in iwl_txq_alloc()
1062 BUILD_BUG_ON(sizeof(*txq->first_tb_bufs) != IWL_FIRST_TB_SIZE_ALIGN); in iwl_txq_alloc()
1064 tb0_buf_sz = sizeof(*txq->first_tb_bufs) * slots_num; in iwl_txq_alloc()
1066 txq->first_tb_bufs = dma_alloc_coherent(trans->dev, tb0_buf_sz, in iwl_txq_alloc()
1067 &txq->first_tb_dma, in iwl_txq_alloc()
1069 if (!txq->first_tb_bufs) in iwl_txq_alloc()
1074 dma_free_coherent(trans->dev, tfd_sz, txq->tfds, txq->dma_addr); in iwl_txq_alloc()
1075 txq->tfds = NULL; in iwl_txq_alloc()
1077 if (txq->entries && cmd_queue) in iwl_txq_alloc()
1079 kfree(txq->entries[i].cmd); in iwl_txq_alloc()
1080 kfree(txq->entries); in iwl_txq_alloc()
1081 txq->entries = NULL; in iwl_txq_alloc()
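Lines 1074-1081 are the error unwind of iwl_txq_alloc(), and the order matters: the TFD ring goes first, per-slot command buffers exist only for the command queue, and the entries array is freed last. Sketch with assumed labels:

err_free_tfds:					/* assumed label */
	dma_free_coherent(trans->dev, tfd_sz, txq->tfds, txq->dma_addr);
	txq->tfds = NULL;
error:						/* assumed label */
	if (txq->entries && cmd_queue)
		for (i = 0; i < slots_num; i++)
			kfree(txq->entries[i].cmd);
	kfree(txq->entries);
	txq->entries = NULL;
	return -ENOMEM;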
1091 struct iwl_txq *txq; in iwl_txq_dyn_alloc_dma() local
1102 txq = kzalloc(sizeof(*txq), GFP_KERNEL); in iwl_txq_dyn_alloc_dma()
1103 if (!txq) in iwl_txq_dyn_alloc_dma()
1106 txq->bc_tbl.addr = dma_pool_alloc(trans->txqs.bc_pool, GFP_KERNEL, in iwl_txq_dyn_alloc_dma()
1107 &txq->bc_tbl.dma); in iwl_txq_dyn_alloc_dma()
1108 if (!txq->bc_tbl.addr) { in iwl_txq_dyn_alloc_dma()
1110 kfree(txq); in iwl_txq_dyn_alloc_dma()
1114 ret = iwl_txq_alloc(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
1119 ret = iwl_txq_init(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
1125 txq->wd_timeout = msecs_to_jiffies(timeout); in iwl_txq_dyn_alloc_dma()
1127 *intxq = txq; in iwl_txq_dyn_alloc_dma()
1131 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc_dma()
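iwl_txq_dyn_alloc_dma() strings the pieces together: zeroed struct, byte-count table from the bc_pool, then the generic alloc/init pair, with one shared error exit. A sketch reassembled from the matches above (only the gotos are assumptions):

txq = kzalloc(sizeof(*txq), GFP_KERNEL);
if (!txq)
	return -ENOMEM;
txq->bc_tbl.addr = dma_pool_alloc(trans->txqs.bc_pool, GFP_KERNEL,
				  &txq->bc_tbl.dma);
if (!txq->bc_tbl.addr) {
	kfree(txq);			/* nothing else allocated yet */
	return -ENOMEM;
}
ret = iwl_txq_alloc(trans, txq, size, false);
if (ret)
	goto error;
ret = iwl_txq_init(trans, txq, size, false);
if (ret)
	goto error;
txq->wd_timeout = msecs_to_jiffies(timeout);
*intxq = txq;
return 0;
error:
	iwl_txq_gen2_free_memory(trans, txq);	/* frees bc_tbl and txq too */
	return ret;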
1135 static int iwl_txq_alloc_response(struct iwl_trans *trans, struct iwl_txq *txq, in iwl_txq_alloc_response() argument
1152 if (qid >= ARRAY_SIZE(trans->txqs.txq)) { in iwl_txq_alloc_response()
1164 if (WARN_ONCE(trans->txqs.txq[qid], in iwl_txq_alloc_response()
1170 txq->id = qid; in iwl_txq_alloc_response()
1171 trans->txqs.txq[qid] = txq; in iwl_txq_alloc_response()
1175 txq->read_ptr = wr_ptr; in iwl_txq_alloc_response()
1176 txq->write_ptr = wr_ptr; in iwl_txq_alloc_response()
1185 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_alloc_response()
1192 struct iwl_txq *txq = NULL; in iwl_txq_dyn_alloc() local
1206 ret = iwl_txq_dyn_alloc_dma(trans, &txq, size, timeout); in iwl_txq_dyn_alloc()
1210 cmd.tfdq_addr = cpu_to_le64(txq->dma_addr); in iwl_txq_dyn_alloc()
1211 cmd.byte_cnt_addr = cpu_to_le64(txq->bc_tbl.dma); in iwl_txq_dyn_alloc()
1218 return iwl_txq_alloc_response(trans, txq, &hcmd); in iwl_txq_dyn_alloc()
1221 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc()
1255 for (i = 0; i < ARRAY_SIZE(trans->txqs.txq); i++) { in iwl_txq_gen2_tx_free()
1256 if (!trans->txqs.txq[i]) in iwl_txq_gen2_tx_free()
1269 if (!trans->txqs.txq[txq_id]) { in iwl_txq_gen2_init()
1275 trans->txqs.txq[txq_id] = queue; in iwl_txq_gen2_init()
1282 queue = trans->txqs.txq[txq_id]; in iwl_txq_gen2_init()
1291 trans->txqs.txq[txq_id]->id = txq_id; in iwl_txq_gen2_init()
1335 struct iwl_txq *txq, int index) in iwl_txq_gen1_tfd_unmap() argument
1338 void *tfd = iwl_txq_get_tfd(trans, txq, index); in iwl_txq_gen1_tfd_unmap()
1388 struct iwl_txq *txq, u16 byte_cnt, in iwl_txq_gen1_update_byte_cnt_tbl() argument
1392 int write_ptr = txq->write_ptr; in iwl_txq_gen1_update_byte_cnt_tbl()
1393 int txq_id = txq->id; in iwl_txq_gen1_update_byte_cnt_tbl()
1397 struct iwl_device_tx_cmd *dev_cmd = txq->entries[txq->write_ptr].cmd; in iwl_txq_gen1_update_byte_cnt_tbl()
1432 struct iwl_txq *txq) in iwl_txq_gen1_inval_byte_cnt_tbl() argument
1435 int txq_id = txq->id; in iwl_txq_gen1_inval_byte_cnt_tbl()
1436 int read_ptr = txq->read_ptr; in iwl_txq_gen1_inval_byte_cnt_tbl()
1439 struct iwl_device_tx_cmd *dev_cmd = txq->entries[read_ptr].cmd; in iwl_txq_gen1_inval_byte_cnt_tbl()
1465 void iwl_txq_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_free_tfd() argument
1470 int rd_ptr = txq->read_ptr; in iwl_txq_free_tfd()
1471 int idx = iwl_txq_get_cmd_index(txq, rd_ptr); in iwl_txq_free_tfd()
1474 lockdep_assert_held(&txq->lock); in iwl_txq_free_tfd()
1476 if (!txq->entries) in iwl_txq_free_tfd()
1482 iwl_txq_gen1_tfd_unmap(trans, &txq->entries[idx].meta, txq, rd_ptr); in iwl_txq_free_tfd()
1485 skb = txq->entries[idx].skb; in iwl_txq_free_tfd()
1493 txq->entries[idx].skb = NULL; in iwl_txq_free_tfd()
1497 void iwl_txq_progress(struct iwl_txq *txq) in iwl_txq_progress() argument
1499 lockdep_assert_held(&txq->lock); in iwl_txq_progress()
1501 if (!txq->wd_timeout) in iwl_txq_progress()
1508 if (txq->frozen) in iwl_txq_progress()
1515 if (txq->read_ptr == txq->write_ptr) in iwl_txq_progress()
1516 del_timer(&txq->stuck_timer); in iwl_txq_progress()
1518 mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout); in iwl_txq_progress()
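iwl_txq_progress() is the watchdog's rearm point: no timeout configured or a frozen queue means hands off; otherwise an empty queue kills the timer and a busy one pushes the deadline out. Reassembled from the matches:

lockdep_assert_held(&txq->lock);
if (!txq->wd_timeout)
	return;
if (txq->frozen)
	return;		/* frozen queues keep their saved expiry */
if (txq->read_ptr == txq->write_ptr)
	del_timer(&txq->stuck_timer);	/* drained: nothing to watch */
else
	mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);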
1525 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_reclaim() local
1526 int tfd_num = iwl_txq_get_cmd_index(txq, ssn); in iwl_txq_reclaim()
1527 int read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_reclaim()
1534 spin_lock_bh(&txq->lock); in iwl_txq_reclaim()
1546 txq_id, txq->read_ptr, tfd_num, ssn); in iwl_txq_reclaim()
1552 if (!iwl_txq_used(txq, last_to_free)) { in iwl_txq_reclaim()
1557 txq->write_ptr, txq->read_ptr); in iwl_txq_reclaim()
1570 txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr), in iwl_txq_reclaim()
1571 read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr)) { in iwl_txq_reclaim()
1572 struct sk_buff *skb = txq->entries[read_ptr].skb; in iwl_txq_reclaim()
1581 txq->entries[read_ptr].skb = NULL; in iwl_txq_reclaim()
1584 iwl_txq_gen1_inval_byte_cnt_tbl(trans, txq); in iwl_txq_reclaim()
1586 iwl_txq_free_tfd(trans, txq); in iwl_txq_reclaim()
1589 iwl_txq_progress(txq); in iwl_txq_reclaim()
1591 if (iwl_txq_space(trans, txq) > txq->low_mark && in iwl_txq_reclaim()
1597 skb_queue_splice_init(&txq->overflow_q, in iwl_txq_reclaim()
1607 txq->overflow_tx = true; in iwl_txq_reclaim()
1616 spin_unlock_bh(&txq->lock); in iwl_txq_reclaim()
1632 if (iwl_txq_space(trans, txq) > txq->low_mark) in iwl_txq_reclaim()
1633 iwl_wake_queue(trans, txq); in iwl_txq_reclaim()
1635 spin_lock_bh(&txq->lock); in iwl_txq_reclaim()
1636 txq->overflow_tx = false; in iwl_txq_reclaim()
1640 spin_unlock_bh(&txq->lock); in iwl_txq_reclaim()
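The reclaim loop walks read_ptr forward to the caller's ssn, detaching each skb before freeing its TFD; once space climbs back above low_mark, the overflow queue is replayed with overflow_tx set so the resubmitted frames bypass the stopped-queue check. Condensed sketch; the gen1 guard and the queue_used test are assumptions:

for (; read_ptr != tfd_num;
     txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr),
     read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr)) {
	struct sk_buff *skb = txq->entries[read_ptr].skb;

	__skb_queue_tail(skbs, skb);		/* back to the caller */
	txq->entries[read_ptr].skb = NULL;
	if (!trans->trans_cfg->use_tfh)		/* gen1 only, assumed */
		iwl_txq_gen1_inval_byte_cnt_tbl(trans, txq);
	iwl_txq_free_tfd(trans, txq);
}
iwl_txq_progress(txq);
if (iwl_txq_space(trans, txq) > txq->low_mark &&
    test_bit(txq_id, trans->txqs.queue_used)) {	/* assumed bitmap */
	skb_queue_splice_init(&txq->overflow_q, &overflow_skbs);
	txq->overflow_tx = true;	/* replayed frames bypass the stop */
}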
1646 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_set_q_ptrs() local
1648 spin_lock_bh(&txq->lock); in iwl_txq_set_q_ptrs()
1650 txq->write_ptr = ptr; in iwl_txq_set_q_ptrs()
1651 txq->read_ptr = txq->write_ptr; in iwl_txq_set_q_ptrs()
1653 spin_unlock_bh(&txq->lock); in iwl_txq_set_q_ptrs()
1662 struct iwl_txq *txq = trans->txqs.txq[queue]; in iwl_trans_txq_freeze_timer() local
1665 spin_lock_bh(&txq->lock); in iwl_trans_txq_freeze_timer()
1669 if (txq->frozen == freeze) in iwl_trans_txq_freeze_timer()
1675 txq->frozen = freeze; in iwl_trans_txq_freeze_timer()
1677 if (txq->read_ptr == txq->write_ptr) in iwl_trans_txq_freeze_timer()
1682 txq->stuck_timer.expires))) { in iwl_trans_txq_freeze_timer()
1690 txq->frozen_expiry_remainder = in iwl_trans_txq_freeze_timer()
1691 txq->stuck_timer.expires - now; in iwl_trans_txq_freeze_timer()
1692 del_timer(&txq->stuck_timer); in iwl_trans_txq_freeze_timer()
1700 mod_timer(&txq->stuck_timer, in iwl_trans_txq_freeze_timer()
1701 now + txq->frozen_expiry_remainder); in iwl_trans_txq_freeze_timer()
1704 spin_unlock_bh(&txq->lock); in iwl_trans_txq_freeze_timer()
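The freeze path's trick is in lines 1690-1701: freezing saves how much watchdog time remained, and unfreezing rearms with exactly that remainder, so a frozen queue neither loses nor gains watchdog budget. Sketch:

if (freeze) {
	/* remember the unexpired part of the watchdog */
	txq->frozen_expiry_remainder = txq->stuck_timer.expires - now;
	del_timer(&txq->stuck_timer);
} else {
	/* resume the watchdog where it left off */
	mod_timer(&txq->stuck_timer,
		  now + txq->frozen_expiry_remainder);
}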
1714 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id]; in iwl_trans_txq_send_hcmd_sync() local
1745 txq->read_ptr, txq->write_ptr); in iwl_trans_txq_send_hcmd_sync()
1786 txq->entries[cmd_idx].meta.flags &= ~CMD_WANT_SKB; in iwl_trans_txq_send_hcmd_sync()