Lines matching refs:trans — identifier cross-reference for the struct iwl_trans pointer through the iwlwifi PCIe TX path. Each hit shows the source line number, the matching code, and the enclosing function; "argument" and "local" mark how trans is bound at that site.
105 int iwl_queue_space(struct iwl_trans *trans, const struct iwl_txq *q) in iwl_queue_space() argument
116 if (q->n_window < trans->trans_cfg->base_params->max_tfd_queue_size) in iwl_queue_space()
119 max = trans->trans_cfg->base_params->max_tfd_queue_size - 1; in iwl_queue_space()
126 (trans->trans_cfg->base_params->max_tfd_queue_size - 1); in iwl_queue_space()
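
The hits at 105-126 compute the free slots in a TX ring. A sketch of how the full body likely reads, reconstructed around the fragments above (the WARN_ON guard and the max/used locals are assumptions):

int iwl_queue_space(struct iwl_trans *trans, const struct iwl_txq *q)
{
        unsigned int max, used;

        /* Keep one slot unused so a full ring is distinguishable from an
         * empty one; a small window needs no reserved slot. */
        if (q->n_window < trans->trans_cfg->base_params->max_tfd_queue_size)
                max = q->n_window;
        else
                max = trans->trans_cfg->base_params->max_tfd_queue_size - 1;

        /* max_tfd_queue_size is a power of two, so the mask implements the
         * modulo for the wrapped pointer difference. */
        used = (q->write_ptr - q->read_ptr) &
               (trans->trans_cfg->base_params->max_tfd_queue_size - 1);

        if (WARN_ON(used > max))
                return 0;

        return max - used;
}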
160 int iwl_pcie_alloc_dma_ptr(struct iwl_trans *trans, in iwl_pcie_alloc_dma_ptr() argument
166 ptr->addr = dma_alloc_coherent(trans->dev, size, in iwl_pcie_alloc_dma_ptr()
174 void iwl_pcie_free_dma_ptr(struct iwl_trans *trans, struct iwl_dma_ptr *ptr) in iwl_pcie_free_dma_ptr() argument
179 dma_free_coherent(trans->dev, ptr->size, ptr->addr, ptr->dma); in iwl_pcie_free_dma_ptr()
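
Lines 160-179 pair a coherent-DMA allocator with its release helper. A sketch of both, assuming the usual guard against double allocation:

int iwl_pcie_alloc_dma_ptr(struct iwl_trans *trans,
                           struct iwl_dma_ptr *ptr, size_t size)
{
        if (WARN_ON(ptr->addr))
                return -EINVAL;

        ptr->addr = dma_alloc_coherent(trans->dev, size,
                                       &ptr->dma, GFP_KERNEL);
        if (!ptr->addr)
                return -ENOMEM;
        ptr->size = size;
        return 0;
}

void iwl_pcie_free_dma_ptr(struct iwl_trans *trans, struct iwl_dma_ptr *ptr)
{
        if (unlikely(!ptr->addr))
                return;

        dma_free_coherent(trans->dev, ptr->size, ptr->addr, ptr->dma);
        memset(ptr, 0, sizeof(*ptr));
}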
187 struct iwl_trans *trans = iwl_trans_pcie_get_trans(trans_pcie); in iwl_pcie_txq_stuck_timer() local
197 iwl_trans_pcie_log_scd_error(trans, txq); in iwl_pcie_txq_stuck_timer()
199 iwl_force_nmi(trans); in iwl_pcie_txq_stuck_timer()
205 static void iwl_pcie_txq_update_byte_cnt_tbl(struct iwl_trans *trans, in iwl_pcie_txq_update_byte_cnt_tbl() argument
210 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_update_byte_cnt_tbl()
250 static void iwl_pcie_txq_inval_byte_cnt_tbl(struct iwl_trans *trans, in iwl_pcie_txq_inval_byte_cnt_tbl() argument
254 IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_inval_byte_cnt_tbl()
280 static void iwl_pcie_txq_inc_wr_ptr(struct iwl_trans *trans, in iwl_pcie_txq_inc_wr_ptr() argument
283 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_inc_wr_ptr()
295 if (!trans->trans_cfg->base_params->shadow_reg_enable && in iwl_pcie_txq_inc_wr_ptr()
297 test_bit(STATUS_TPOWER_PMI, &trans->status)) { in iwl_pcie_txq_inc_wr_ptr()
303 reg = iwl_read32(trans, CSR_UCODE_DRV_GP1); in iwl_pcie_txq_inc_wr_ptr()
306 IWL_DEBUG_INFO(trans, "Tx queue %d requesting wakeup, GP1 = 0x%x\n", in iwl_pcie_txq_inc_wr_ptr()
308 iwl_set_bit(trans, CSR_GP_CNTRL, in iwl_pcie_txq_inc_wr_ptr()
309 BIT(trans->trans_cfg->csr->flag_mac_access_req)); in iwl_pcie_txq_inc_wr_ptr()
319 IWL_DEBUG_TX(trans, "Q:%d WR: 0x%x\n", txq_id, txq->write_ptr); in iwl_pcie_txq_inc_wr_ptr()
321 iwl_write32(trans, HBUS_TARG_WRPTR, in iwl_pcie_txq_inc_wr_ptr()
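
Lines 280-321 update the hardware write pointer, waking a power-managed NIC first when shadow registers are unavailable. A sketch of the control flow (the cmd-queue exemption and the need_update deferral are assumptions not visible in the hits above):

static void iwl_pcie_txq_inc_wr_ptr(struct iwl_trans *trans,
                                    struct iwl_txq *txq)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        int txq_id = txq->id;
        u32 reg;

        /* Without shadow registers, a sleeping NIC would miss the write;
         * request a wakeup and retry from the next interrupt instead. */
        if (!trans->trans_cfg->base_params->shadow_reg_enable &&
            txq_id != trans_pcie->cmd_queue &&
            test_bit(STATUS_TPOWER_PMI, &trans->status)) {
                reg = iwl_read32(trans, CSR_UCODE_DRV_GP1);
                if (reg & CSR_UCODE_DRV_GP1_BIT_MAC_SLEEP) {
                        IWL_DEBUG_INFO(trans,
                                       "Tx queue %d requesting wakeup, GP1 = 0x%x\n",
                                       txq_id, reg);
                        iwl_set_bit(trans, CSR_GP_CNTRL,
                                    BIT(trans->trans_cfg->csr->flag_mac_access_req));
                        txq->need_update = true;
                        return;
                }
        }

        IWL_DEBUG_TX(trans, "Q:%d WR: 0x%x\n", txq_id, txq->write_ptr);
        iwl_write32(trans, HBUS_TARG_WRPTR, txq->write_ptr | (txq_id << 8));
}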
325 void iwl_pcie_txq_check_wrptrs(struct iwl_trans *trans) in iwl_pcie_txq_check_wrptrs() argument
327 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_check_wrptrs()
330 for (i = 0; i < trans->trans_cfg->base_params->num_of_queues; i++) { in iwl_pcie_txq_check_wrptrs()
338 iwl_pcie_txq_inc_wr_ptr(trans, txq); in iwl_pcie_txq_check_wrptrs()
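
Lines 325-338 replay deferred write-pointer updates once the NIC is awake again. A plausible reconstruction (the queue_used bitmap check is an assumption):

void iwl_pcie_txq_check_wrptrs(struct iwl_trans *trans)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        int i;

        for (i = 0; i < trans->trans_cfg->base_params->num_of_queues; i++) {
                struct iwl_txq *txq = trans_pcie->txq[i];

                if (!test_bit(i, trans_pcie->queue_used))
                        continue;

                spin_lock_bh(&txq->lock);
                if (txq->need_update) {
                        iwl_pcie_txq_inc_wr_ptr(trans, txq);
                        txq->need_update = false;
                }
                spin_unlock_bh(&txq->lock);
        }
}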
345 static inline dma_addr_t iwl_pcie_tfd_tb_get_addr(struct iwl_trans *trans, in iwl_pcie_tfd_tb_get_addr() argument
349 if (trans->trans_cfg->use_tfh) { in iwl_pcie_tfd_tb_get_addr()
374 static inline void iwl_pcie_tfd_set_tb(struct iwl_trans *trans, void *tfd, in iwl_pcie_tfd_set_tb() argument
390 static inline u8 iwl_pcie_tfd_get_num_tbs(struct iwl_trans *trans, void *_tfd) in iwl_pcie_tfd_get_num_tbs() argument
392 if (trans->trans_cfg->use_tfh) { in iwl_pcie_tfd_get_num_tbs()
403 static void iwl_pcie_tfd_unmap(struct iwl_trans *trans, in iwl_pcie_tfd_unmap() argument
407 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tfd_unmap()
409 void *tfd = iwl_pcie_get_tfd(trans, txq, index); in iwl_pcie_tfd_unmap()
412 num_tbs = iwl_pcie_tfd_get_num_tbs(trans, tfd); in iwl_pcie_tfd_unmap()
415 IWL_ERR(trans, "Too many chunks: %i\n", num_tbs); in iwl_pcie_tfd_unmap()
424 dma_unmap_page(trans->dev, in iwl_pcie_tfd_unmap()
425 iwl_pcie_tfd_tb_get_addr(trans, tfd, i), in iwl_pcie_tfd_unmap()
426 iwl_pcie_tfd_tb_get_len(trans, tfd, i), in iwl_pcie_tfd_unmap()
429 dma_unmap_single(trans->dev, in iwl_pcie_tfd_unmap()
430 iwl_pcie_tfd_tb_get_addr(trans, tfd, in iwl_pcie_tfd_unmap()
432 iwl_pcie_tfd_tb_get_len(trans, tfd, in iwl_pcie_tfd_unmap()
439 if (trans->trans_cfg->use_tfh) { in iwl_pcie_tfd_unmap()
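
Lines 345-439 are the TFD accessors plus the unmap routine that undoes every DMA mapping of one descriptor. A sketch of the unmap loop; the meta->tbs bitmap deciding page-vs-single unmaps matches the two call shapes visible above (the max_tbs bound and the TB0 exemption are assumptions):

static void iwl_pcie_tfd_unmap(struct iwl_trans *trans,
                               struct iwl_cmd_meta *meta,
                               struct iwl_txq *txq, int index)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        void *tfd = iwl_pcie_get_tfd(trans, txq, index);
        int i, num_tbs = iwl_pcie_tfd_get_num_tbs(trans, tfd);

        if (num_tbs > trans_pcie->max_tbs) {
                IWL_ERR(trans, "Too many chunks: %i\n", num_tbs);
                return;
        }

        /* TB0 is the driver's own bounce buffer and is never unmapped here;
         * meta->tbs remembers which TBs were mapped from pages. */
        for (i = 1; i < num_tbs; i++) {
                if (meta->tbs & BIT(i))
                        dma_unmap_page(trans->dev,
                                       iwl_pcie_tfd_tb_get_addr(trans, tfd, i),
                                       iwl_pcie_tfd_tb_get_len(trans, tfd, i),
                                       DMA_TO_DEVICE);
                else
                        dma_unmap_single(trans->dev,
                                         iwl_pcie_tfd_tb_get_addr(trans, tfd, i),
                                         iwl_pcie_tfd_tb_get_len(trans, tfd, i),
                                         DMA_TO_DEVICE);
        }
        meta->tbs = 0;

        /* zero the TB count in whichever descriptor format is in use */
        if (trans->trans_cfg->use_tfh)
                ((struct iwl_tfh_tfd *)tfd)->num_tbs = 0;
        else
                ((struct iwl_tfd *)tfd)->num_tbs = 0;
}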
460 void iwl_pcie_txq_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_pcie_txq_free_tfd() argument
473 iwl_pcie_tfd_unmap(trans, &txq->entries[idx].meta, txq, rd_ptr); in iwl_pcie_txq_free_tfd()
486 iwl_op_mode_free_skb(trans->op_mode, skb); in iwl_pcie_txq_free_tfd()
492 static int iwl_pcie_txq_build_tfd(struct iwl_trans *trans, struct iwl_txq *txq, in iwl_pcie_txq_build_tfd() argument
495 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_build_tfd()
504 num_tbs = iwl_pcie_tfd_get_num_tbs(trans, tfd); in iwl_pcie_txq_build_tfd()
508 IWL_ERR(trans, "Error can not send more than %d chunks\n", in iwl_pcie_txq_build_tfd()
517 iwl_pcie_tfd_set_tb(trans, tfd, num_tbs, addr, len); in iwl_pcie_txq_build_tfd()
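
Lines 492-517 append one transfer buffer to the TFD under construction at the write pointer. A sketch (the reset flag and returning the new TB index are assumptions):

static int iwl_pcie_txq_build_tfd(struct iwl_trans *trans, struct iwl_txq *txq,
                                  dma_addr_t addr, u16 len, bool reset)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        void *tfd = iwl_pcie_get_tfd(trans, txq, txq->write_ptr);
        u32 num_tbs;

        if (reset)
                memset(tfd, 0, trans_pcie->tfd_size);

        num_tbs = iwl_pcie_tfd_get_num_tbs(trans, tfd);

        /* Each TFD can carry at most max_tbs transfer buffers */
        if (num_tbs >= trans_pcie->max_tbs) {
                IWL_ERR(trans, "Error can not send more than %d chunks\n",
                        trans_pcie->max_tbs);
                return -EINVAL;
        }

        iwl_pcie_tfd_set_tb(trans, tfd, num_tbs, addr, len);

        return num_tbs;
}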
522 int iwl_pcie_txq_alloc(struct iwl_trans *trans, struct iwl_txq *txq, in iwl_pcie_txq_alloc() argument
525 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_alloc()
527 trans->trans_cfg->base_params->max_tfd_queue_size; in iwl_pcie_txq_alloc()
534 if (trans->trans_cfg->use_tfh) in iwl_pcie_txq_alloc()
560 txq->tfds = dma_alloc_coherent(trans->dev, tfd_sz, in iwl_pcie_txq_alloc()
569 txq->first_tb_bufs = dma_alloc_coherent(trans->dev, tb0_buf_sz, in iwl_pcie_txq_alloc()
577 dma_free_coherent(trans->dev, tfd_sz, txq->tfds, txq->dma_addr); in iwl_pcie_txq_alloc()
589 int iwl_pcie_txq_init(struct iwl_trans *trans, struct iwl_txq *txq, in iwl_pcie_txq_init() argument
594 trans->trans_cfg->base_params->max_tfd_queue_size; in iwl_pcie_txq_init()
636 static void iwl_pcie_clear_cmd_in_flight(struct iwl_trans *trans) in iwl_pcie_clear_cmd_in_flight() argument
638 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_clear_cmd_in_flight()
642 if (!trans->trans_cfg->base_params->apmg_wake_up_wa) in iwl_pcie_clear_cmd_in_flight()
648 __iwl_trans_pcie_clear_bit(trans, CSR_GP_CNTRL, in iwl_pcie_clear_cmd_in_flight()
649 BIT(trans->trans_cfg->csr->flag_mac_access_req)); in iwl_pcie_clear_cmd_in_flight()
655 static void iwl_pcie_txq_unmap(struct iwl_trans *trans, int txq_id) in iwl_pcie_txq_unmap() argument
657 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_unmap()
662 IWL_DEBUG_TX_REPLY(trans, "Q %d Free %d\n", in iwl_pcie_txq_unmap()
673 iwl_pcie_txq_free_tfd(trans, txq); in iwl_pcie_txq_unmap()
674 txq->read_ptr = iwl_queue_inc_wrap(trans, txq->read_ptr); in iwl_pcie_txq_unmap()
681 iwl_pcie_clear_cmd_in_flight(trans); in iwl_pcie_txq_unmap()
689 iwl_op_mode_free_skb(trans->op_mode, skb); in iwl_pcie_txq_unmap()
695 iwl_wake_queue(trans, txq); in iwl_pcie_txq_unmap()
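
Lines 655-695 drain a queue: free every outstanding TFD between the pointers, then flush the overflow backlog. A condensed sketch (TSO-page bookkeeping and the reg_lock around the in-flight clear are elided):

static void iwl_pcie_txq_unmap(struct iwl_trans *trans, int txq_id)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        struct iwl_txq *txq = trans_pcie->txq[txq_id];

        spin_lock_bh(&txq->lock);
        while (txq->write_ptr != txq->read_ptr) {
                IWL_DEBUG_TX_REPLY(trans, "Q %d Free %d\n",
                                   txq_id, txq->read_ptr);

                iwl_pcie_txq_free_tfd(trans, txq);
                txq->read_ptr = iwl_queue_inc_wrap(trans, txq->read_ptr);

                /* once the command queue is empty, the NIC may sleep again */
                if (txq->read_ptr == txq->write_ptr &&
                    txq_id == trans_pcie->cmd_queue)
                        iwl_pcie_clear_cmd_in_flight(trans);
        }

        while (!skb_queue_empty(&txq->overflow_q)) {
                struct sk_buff *skb = __skb_dequeue(&txq->overflow_q);

                iwl_op_mode_free_skb(trans->op_mode, skb);
        }
        spin_unlock_bh(&txq->lock);

        /* just in case - this queue may have been stopped */
        iwl_wake_queue(trans, txq);
}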
706 static void iwl_pcie_txq_free(struct iwl_trans *trans, int txq_id) in iwl_pcie_txq_free() argument
708 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_free()
710 struct device *dev = trans->dev; in iwl_pcie_txq_free()
716 iwl_pcie_txq_unmap(trans, txq_id); in iwl_pcie_txq_free()
729 trans->trans_cfg->base_params->max_tfd_queue_size, in iwl_pcie_txq_free()
748 void iwl_pcie_tx_start(struct iwl_trans *trans, u32 scd_base_addr) in iwl_pcie_tx_start() argument
750 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_start()
751 int nq = trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_start()
762 iwl_read_prph(trans, SCD_SRAM_BASE_ADDR); in iwl_pcie_tx_start()
768 iwl_trans_write_mem(trans, trans_pcie->scd_base_addr + in iwl_pcie_tx_start()
772 iwl_write_prph(trans, SCD_DRAM_BASE_ADDR, in iwl_pcie_tx_start()
778 if (trans->trans_cfg->base_params->scd_chain_ext_wa) in iwl_pcie_tx_start()
779 iwl_write_prph(trans, SCD_CHAINEXT_EN, 0); in iwl_pcie_tx_start()
781 iwl_trans_ac_txq_enable(trans, trans_pcie->cmd_queue, in iwl_pcie_tx_start()
786 iwl_scd_activate_fifos(trans); in iwl_pcie_tx_start()
790 iwl_write_direct32(trans, FH_TCSR_CHNL_TX_CONFIG_REG(chan), in iwl_pcie_tx_start()
795 reg_val = iwl_read_direct32(trans, FH_TX_CHICKEN_BITS_REG); in iwl_pcie_tx_start()
796 iwl_write_direct32(trans, FH_TX_CHICKEN_BITS_REG, in iwl_pcie_tx_start()
800 if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_8000) in iwl_pcie_tx_start()
801 iwl_clear_bits_prph(trans, APMG_PCIDEV_STT_REG, in iwl_pcie_tx_start()
805 void iwl_trans_pcie_tx_reset(struct iwl_trans *trans) in iwl_trans_pcie_tx_reset() argument
807 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_tx_reset()
814 if (WARN_ON_ONCE(trans->trans_cfg->gen2)) in iwl_trans_pcie_tx_reset()
817 for (txq_id = 0; txq_id < trans->trans_cfg->base_params->num_of_queues; in iwl_trans_pcie_tx_reset()
820 if (trans->trans_cfg->use_tfh) in iwl_trans_pcie_tx_reset()
821 iwl_write_direct64(trans, in iwl_trans_pcie_tx_reset()
822 FH_MEM_CBBC_QUEUE(trans, txq_id), in iwl_trans_pcie_tx_reset()
825 iwl_write_direct32(trans, in iwl_trans_pcie_tx_reset()
826 FH_MEM_CBBC_QUEUE(trans, txq_id), in iwl_trans_pcie_tx_reset()
828 iwl_pcie_txq_unmap(trans, txq_id); in iwl_trans_pcie_tx_reset()
834 iwl_write_direct32(trans, FH_KW_MEM_ADDR_REG, in iwl_trans_pcie_tx_reset()
842 iwl_pcie_tx_start(trans, 0); in iwl_trans_pcie_tx_reset()
845 static void iwl_pcie_tx_stop_fh(struct iwl_trans *trans) in iwl_pcie_tx_stop_fh() argument
847 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_stop_fh()
854 if (!iwl_trans_grab_nic_access(trans, &flags)) in iwl_pcie_tx_stop_fh()
859 iwl_write32(trans, FH_TCSR_CHNL_TX_CONFIG_REG(ch), 0x0); in iwl_pcie_tx_stop_fh()
864 ret = iwl_poll_bit(trans, FH_TSSR_TX_STATUS_REG, mask, mask, 5000); in iwl_pcie_tx_stop_fh()
866 IWL_ERR(trans, in iwl_pcie_tx_stop_fh()
868 ch, iwl_read32(trans, FH_TSSR_TX_STATUS_REG)); in iwl_pcie_tx_stop_fh()
870 iwl_trans_release_nic_access(trans, &flags); in iwl_pcie_tx_stop_fh()
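
Lines 845-870 quiesce the FH TX DMA engine under NIC access: zero each channel's config register, then poll the status register until every channel reports idle. A sketch (the irq_lock around the sequence is an assumption):

static void iwl_pcie_tx_stop_fh(struct iwl_trans *trans)
{
        struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
        unsigned long flags;
        u32 mask = 0;
        int ch, ret;

        spin_lock(&trans_pcie->irq_lock);

        if (!iwl_trans_grab_nic_access(trans, &flags))
                goto out;

        /* Stop each Tx DMA channel */
        for (ch = 0; ch < FH_TCSR_CHNL_NUM; ch++) {
                iwl_write32(trans, FH_TCSR_CHNL_TX_CONFIG_REG(ch), 0x0);
                mask |= FH_TSSR_TX_STATUS_REG_MSK_CHNL_IDLE(ch);
        }

        /* Wait for the channels to go idle */
        ret = iwl_poll_bit(trans, FH_TSSR_TX_STATUS_REG, mask, mask, 5000);
        if (ret < 0)
                IWL_ERR(trans,
                        "Failing on timeout while stopping DMA channel %d [0x%08x]\n",
                        ch, iwl_read32(trans, FH_TSSR_TX_STATUS_REG));

        iwl_trans_release_nic_access(trans, &flags);

out:
        spin_unlock(&trans_pcie->irq_lock);
}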
879 int iwl_pcie_tx_stop(struct iwl_trans *trans) in iwl_pcie_tx_stop() argument
881 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_stop()
885 iwl_scd_deactivate_fifos(trans); in iwl_pcie_tx_stop()
888 iwl_pcie_tx_stop_fh(trans); in iwl_pcie_tx_stop()
903 for (txq_id = 0; txq_id < trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_stop()
905 iwl_pcie_txq_unmap(trans, txq_id); in iwl_pcie_tx_stop()
915 void iwl_pcie_tx_free(struct iwl_trans *trans) in iwl_pcie_tx_free() argument
918 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_free()
925 txq_id < trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_free()
927 iwl_pcie_txq_free(trans, txq_id); in iwl_pcie_tx_free()
935 iwl_pcie_free_dma_ptr(trans, &trans_pcie->kw); in iwl_pcie_tx_free()
937 iwl_pcie_free_dma_ptr(trans, &trans_pcie->scd_bc_tbls); in iwl_pcie_tx_free()
944 static int iwl_pcie_tx_alloc(struct iwl_trans *trans) in iwl_pcie_tx_alloc() argument
948 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_alloc()
949 u16 bc_tbls_size = trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_alloc()
951 bc_tbls_size *= (trans->trans_cfg->device_family >= in iwl_pcie_tx_alloc()
963 ret = iwl_pcie_alloc_dma_ptr(trans, &trans_pcie->scd_bc_tbls, in iwl_pcie_tx_alloc()
966 IWL_ERR(trans, "Scheduler BC Table allocation failed\n"); in iwl_pcie_tx_alloc()
971 ret = iwl_pcie_alloc_dma_ptr(trans, &trans_pcie->kw, IWL_KW_SIZE); in iwl_pcie_tx_alloc()
973 IWL_ERR(trans, "Keep Warm allocation failed\n"); in iwl_pcie_tx_alloc()
978 kcalloc(trans->trans_cfg->base_params->num_of_queues, in iwl_pcie_tx_alloc()
981 IWL_ERR(trans, "Not enough memory for txq\n"); in iwl_pcie_tx_alloc()
987 for (txq_id = 0; txq_id < trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_alloc()
993 trans->cfg->min_txq_size); in iwl_pcie_tx_alloc()
996 trans->cfg->min_256_ba_txq_size); in iwl_pcie_tx_alloc()
998 ret = iwl_pcie_txq_alloc(trans, trans_pcie->txq[txq_id], in iwl_pcie_tx_alloc()
1001 IWL_ERR(trans, "Tx %d queue alloc failed\n", txq_id); in iwl_pcie_tx_alloc()
1010 iwl_pcie_tx_free(trans); in iwl_pcie_tx_alloc()
1015 int iwl_pcie_tx_init(struct iwl_trans *trans) in iwl_pcie_tx_init() argument
1017 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_tx_init()
1023 ret = iwl_pcie_tx_alloc(trans); in iwl_pcie_tx_init()
1032 iwl_scd_deactivate_fifos(trans); in iwl_pcie_tx_init()
1035 iwl_write_direct32(trans, FH_KW_MEM_ADDR_REG, in iwl_pcie_tx_init()
1041 for (txq_id = 0; txq_id < trans->trans_cfg->base_params->num_of_queues; in iwl_pcie_tx_init()
1047 trans->cfg->min_txq_size); in iwl_pcie_tx_init()
1050 trans->cfg->min_256_ba_txq_size); in iwl_pcie_tx_init()
1051 ret = iwl_pcie_txq_init(trans, trans_pcie->txq[txq_id], in iwl_pcie_tx_init()
1054 IWL_ERR(trans, "Tx %d queue init failed\n", txq_id); in iwl_pcie_tx_init()
1064 iwl_write_direct32(trans, FH_MEM_CBBC_QUEUE(trans, txq_id), in iwl_pcie_tx_init()
1068 iwl_set_bits_prph(trans, SCD_GP_CTRL, SCD_GP_CTRL_AUTO_ACTIVE_MODE); in iwl_pcie_tx_init()
1069 if (trans->trans_cfg->base_params->num_of_queues > 20) in iwl_pcie_tx_init()
1070 iwl_set_bits_prph(trans, SCD_GP_CTRL, in iwl_pcie_tx_init()
1077 iwl_pcie_tx_free(trans); in iwl_pcie_tx_init()
1106 void iwl_trans_pcie_reclaim(struct iwl_trans *trans, int txq_id, int ssn, in iwl_trans_pcie_reclaim() argument
1109 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_reclaim()
1122 IWL_DEBUG_TX_QUEUES(trans, "Q %d inactive - ignoring idx %d\n", in iwl_trans_pcie_reclaim()
1130 IWL_DEBUG_TX_REPLY(trans, "[Q %d] %d -> %d (%d)\n", in iwl_trans_pcie_reclaim()
1135 last_to_free = iwl_queue_dec_wrap(trans, tfd_num); in iwl_trans_pcie_reclaim()
1138 IWL_ERR(trans, in iwl_trans_pcie_reclaim()
1141 trans->trans_cfg->base_params->max_tfd_queue_size, in iwl_trans_pcie_reclaim()
1151 txq->read_ptr = iwl_queue_inc_wrap(trans, txq->read_ptr), in iwl_trans_pcie_reclaim()
1164 if (!trans->trans_cfg->use_tfh) in iwl_trans_pcie_reclaim()
1165 iwl_pcie_txq_inval_byte_cnt_tbl(trans, txq); in iwl_trans_pcie_reclaim()
1167 iwl_pcie_txq_free_tfd(trans, txq); in iwl_trans_pcie_reclaim()
1172 if (iwl_queue_space(trans, txq) > txq->low_mark && in iwl_trans_pcie_reclaim()
1209 iwl_trans_tx(trans, skb, dev_cmd_ptr, txq_id); in iwl_trans_pcie_reclaim()
1212 if (iwl_queue_space(trans, txq) > txq->low_mark) in iwl_trans_pcie_reclaim()
1213 iwl_wake_queue(trans, txq); in iwl_trans_pcie_reclaim()
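
Lines 1106-1213 reclaim completed data frames: everything from the read pointer up to (but excluding) the firmware-reported index is unlinked and handed back. A sketch of the central loop; skbs is the caller's list, and folding indices into the window via iwl_pcie_get_cmd_index is an assumption:

        for (; read_ptr != tfd_num;
             txq->read_ptr = iwl_queue_inc_wrap(trans, txq->read_ptr),
             read_ptr = iwl_pcie_get_cmd_index(txq, txq->read_ptr)) {
                struct sk_buff *skb = txq->entries[read_ptr].skb;

                if (WARN_ON_ONCE(!skb))
                        continue;

                __skb_queue_tail(skbs, skb);
                txq->entries[read_ptr].skb = NULL;

                /* legacy queues keep a scheduler byte-count table that
                 * must be invalidated for the reclaimed entry */
                if (!trans->trans_cfg->use_tfh)
                        iwl_pcie_txq_inval_byte_cnt_tbl(trans, txq);

                iwl_pcie_txq_free_tfd(trans, txq);
        }

        if (iwl_queue_space(trans, txq) > txq->low_mark)
                iwl_wake_queue(trans, txq);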
1224 void iwl_trans_pcie_set_q_ptrs(struct iwl_trans *trans, int txq_id, int ptr) in iwl_trans_pcie_set_q_ptrs() argument
1226 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_set_q_ptrs()
1237 static int iwl_pcie_set_cmd_in_flight(struct iwl_trans *trans, in iwl_pcie_set_cmd_in_flight() argument
1240 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_set_cmd_in_flight()
1246 if (test_bit(STATUS_TRANS_DEAD, &trans->status)) in iwl_pcie_set_cmd_in_flight()
1255 if (trans->trans_cfg->base_params->apmg_wake_up_wa && in iwl_pcie_set_cmd_in_flight()
1257 __iwl_trans_pcie_set_bit(trans, CSR_GP_CNTRL, in iwl_pcie_set_cmd_in_flight()
1258 BIT(trans->trans_cfg->csr->flag_mac_access_req)); in iwl_pcie_set_cmd_in_flight()
1260 ret = iwl_poll_bit(trans, CSR_GP_CNTRL, in iwl_pcie_set_cmd_in_flight()
1261 BIT(trans->trans_cfg->csr->flag_val_mac_access_en), in iwl_pcie_set_cmd_in_flight()
1262 (BIT(trans->trans_cfg->csr->flag_mac_clock_ready) | in iwl_pcie_set_cmd_in_flight()
1266 __iwl_trans_pcie_clear_bit(trans, CSR_GP_CNTRL, in iwl_pcie_set_cmd_in_flight()
1267 BIT(trans->trans_cfg->csr->flag_mac_access_req)); in iwl_pcie_set_cmd_in_flight()
1268 IWL_ERR(trans, "Failed to wake NIC for hcmd\n"); in iwl_pcie_set_cmd_in_flight()
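
Lines 1237-1268 hold the NIC awake for the duration of a host command on devices with the APMG wake workaround. A sketch of the request/poll/rollback sequence (the cmd_hold_nic_awake latch and the 15000 poll budget are assumptions):

        if (trans->trans_cfg->base_params->apmg_wake_up_wa &&
            !trans_pcie->cmd_hold_nic_awake) {
                __iwl_trans_pcie_set_bit(trans, CSR_GP_CNTRL,
                                         BIT(trans->trans_cfg->csr->flag_mac_access_req));

                ret = iwl_poll_bit(trans, CSR_GP_CNTRL,
                                   BIT(trans->trans_cfg->csr->flag_val_mac_access_en),
                                   (BIT(trans->trans_cfg->csr->flag_mac_clock_ready) |
                                    CSR_GP_CNTRL_REG_FLAG_GOING_TO_SLEEP),
                                   15000);
                if (ret < 0) {
                        /* roll back the request so the NIC can sleep */
                        __iwl_trans_pcie_clear_bit(trans, CSR_GP_CNTRL,
                                                   BIT(trans->trans_cfg->csr->flag_mac_access_req));
                        IWL_ERR(trans, "Failed to wake NIC for hcmd\n");
                        return -EIO;
                }
                trans_pcie->cmd_hold_nic_awake = true;
        }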
1284 void iwl_pcie_cmdq_reclaim(struct iwl_trans *trans, int txq_id, int idx) in iwl_pcie_cmdq_reclaim() argument
1286 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_cmdq_reclaim()
1297 if (idx >= trans->trans_cfg->base_params->max_tfd_queue_size || in iwl_pcie_cmdq_reclaim()
1302 trans->trans_cfg->base_params->max_tfd_queue_size, in iwl_pcie_cmdq_reclaim()
1307 for (idx = iwl_queue_inc_wrap(trans, idx); r != idx; in iwl_pcie_cmdq_reclaim()
1308 r = iwl_queue_inc_wrap(trans, r)) { in iwl_pcie_cmdq_reclaim()
1309 txq->read_ptr = iwl_queue_inc_wrap(trans, txq->read_ptr); in iwl_pcie_cmdq_reclaim()
1312 IWL_ERR(trans, "HCMD skipped: index (%d) %d %d\n", in iwl_pcie_cmdq_reclaim()
1314 iwl_force_nmi(trans); in iwl_pcie_cmdq_reclaim()
1320 iwl_pcie_clear_cmd_in_flight(trans); in iwl_pcie_cmdq_reclaim()
1327 static int iwl_pcie_txq_set_ratid_map(struct iwl_trans *trans, u16 ra_tid, in iwl_pcie_txq_set_ratid_map() argument
1330 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_set_ratid_map()
1340 tbl_dw = iwl_trans_read_mem32(trans, tbl_dw_addr); in iwl_pcie_txq_set_ratid_map()
1347 iwl_trans_write_mem32(trans, tbl_dw_addr, tbl_dw); in iwl_pcie_txq_set_ratid_map()
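
Lines 1327-1347 patch one queue's RA/TID mapping into the scheduler's translation table in SRAM; two queues share each dword, so the update is a read-modify-write on the matching half. A sketch (macro names beyond those in the hits are assumptions):

        u32 tbl_dw_addr = trans_pcie->scd_base_addr +
                          SCD_TRANS_TBL_OFFSET_QUEUE(txq_id);
        u16 scd_q2ratid = ra_tid & SCD_QUEUE_RA_TID_MAP_RATID_MSK;
        u32 tbl_dw;

        tbl_dw = iwl_trans_read_mem32(trans, tbl_dw_addr);

        /* odd queues live in the high half-word, even queues in the low */
        if (txq_id & 0x1)
                tbl_dw = (scd_q2ratid << 16) | (tbl_dw & 0x0000FFFF);
        else
                tbl_dw = scd_q2ratid | (tbl_dw & 0xFFFF0000);

        iwl_trans_write_mem32(trans, tbl_dw_addr, tbl_dw);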
1356 bool iwl_trans_pcie_txq_enable(struct iwl_trans *trans, int txq_id, u16 ssn, in iwl_trans_pcie_txq_enable() argument
1360 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_txq_enable()
1376 iwl_scd_enable_set_active(trans, 0); in iwl_trans_pcie_txq_enable()
1379 iwl_scd_txq_set_inactive(trans, txq_id); in iwl_trans_pcie_txq_enable()
1383 iwl_scd_txq_set_chain(trans, txq_id); in iwl_trans_pcie_txq_enable()
1389 iwl_pcie_txq_set_ratid_map(trans, ra_tid, txq_id); in iwl_trans_pcie_txq_enable()
1392 iwl_scd_txq_enable_agg(trans, txq_id); in iwl_trans_pcie_txq_enable()
1400 iwl_scd_txq_disable_agg(trans, txq_id); in iwl_trans_pcie_txq_enable()
1416 scd_bug = !trans->trans_cfg->mq_rx_supported && in iwl_trans_pcie_txq_enable()
1427 iwl_write_direct32(trans, HBUS_TARG_WRPTR, in iwl_trans_pcie_txq_enable()
1433 iwl_write_prph(trans, SCD_QUEUE_RDPTR(txq_id), ssn); in iwl_trans_pcie_txq_enable()
1436 iwl_trans_write_mem32(trans, trans_pcie->scd_base_addr + in iwl_trans_pcie_txq_enable()
1438 iwl_trans_write_mem32(trans, in iwl_trans_pcie_txq_enable()
1445 iwl_write_prph(trans, SCD_QUEUE_STATUS_BITS(txq_id), in iwl_trans_pcie_txq_enable()
1454 iwl_scd_enable_set_active(trans, BIT(txq_id)); in iwl_trans_pcie_txq_enable()
1456 IWL_DEBUG_TX_QUEUES(trans, in iwl_trans_pcie_txq_enable()
1460 IWL_DEBUG_TX_QUEUES(trans, in iwl_trans_pcie_txq_enable()
1468 void iwl_trans_pcie_txq_set_shared_mode(struct iwl_trans *trans, u32 txq_id, in iwl_trans_pcie_txq_set_shared_mode() argument
1471 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_txq_set_shared_mode()
1477 void iwl_trans_pcie_txq_disable(struct iwl_trans *trans, int txq_id, in iwl_trans_pcie_txq_disable() argument
1480 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_txq_disable()
1495 WARN_ONCE(test_bit(STATUS_DEVICE_ENABLED, &trans->status), in iwl_trans_pcie_txq_disable()
1501 iwl_scd_txq_set_inactive(trans, txq_id); in iwl_trans_pcie_txq_disable()
1503 iwl_trans_write_mem(trans, stts_addr, (void *)zero_val, in iwl_trans_pcie_txq_disable()
1507 iwl_pcie_txq_unmap(trans, txq_id); in iwl_trans_pcie_txq_disable()
1510 IWL_DEBUG_TX_QUEUES(trans, "Deactivate queue %d\n", txq_id); in iwl_trans_pcie_txq_disable()
1524 static int iwl_pcie_enqueue_hcmd(struct iwl_trans *trans, in iwl_pcie_enqueue_hcmd() argument
1527 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_enqueue_hcmd()
1543 if (WARN(!trans->wide_cmd_header && in iwl_pcie_enqueue_hcmd()
1619 iwl_get_cmd_string(trans, cmd->id), in iwl_pcie_enqueue_hcmd()
1627 if (iwl_queue_space(trans, txq) < ((cmd->flags & CMD_ASYNC) ? 2 : 1)) { in iwl_pcie_enqueue_hcmd()
1630 IWL_ERR(trans, "No space in command queue\n"); in iwl_pcie_enqueue_hcmd()
1631 iwl_op_mode_cmd_queue_full(trans->op_mode); in iwl_pcie_enqueue_hcmd()
1708 IWL_DEBUG_HC(trans, in iwl_pcie_enqueue_hcmd()
1710 iwl_get_cmd_string(trans, cmd->id), in iwl_pcie_enqueue_hcmd()
1718 iwl_pcie_txq_build_tfd(trans, txq, in iwl_pcie_enqueue_hcmd()
1724 phys_addr = dma_map_single(trans->dev, in iwl_pcie_enqueue_hcmd()
1728 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_enqueue_hcmd()
1729 iwl_pcie_tfd_unmap(trans, out_meta, txq, in iwl_pcie_enqueue_hcmd()
1735 iwl_pcie_txq_build_tfd(trans, txq, phys_addr, in iwl_pcie_enqueue_hcmd()
1750 phys_addr = dma_map_single(trans->dev, (void *)data, in iwl_pcie_enqueue_hcmd()
1752 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_enqueue_hcmd()
1753 iwl_pcie_tfd_unmap(trans, out_meta, txq, in iwl_pcie_enqueue_hcmd()
1759 iwl_pcie_txq_build_tfd(trans, txq, phys_addr, cmdlen[i], false); in iwl_pcie_enqueue_hcmd()
1768 trace_iwlwifi_dev_hcmd(trans->dev, cmd, cmd_size, &out_cmd->hdr_wide); in iwl_pcie_enqueue_hcmd()
1775 ret = iwl_pcie_set_cmd_in_flight(trans, cmd); in iwl_pcie_enqueue_hcmd()
1783 txq->write_ptr = iwl_queue_inc_wrap(trans, txq->write_ptr); in iwl_pcie_enqueue_hcmd()
1784 iwl_pcie_txq_inc_wr_ptr(trans, txq); in iwl_pcie_enqueue_hcmd()
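
Lines 1524-1784 enqueue a host command. A sketch of the admission check and the final commit, with the copy/map middle elided (the -ENOSPC return and the locking around the in-flight marking are assumptions):

        spin_lock_bh(&txq->lock);

        /* an async command keeps one spare slot free for a sync one */
        if (iwl_queue_space(trans, txq) < ((cmd->flags & CMD_ASYNC) ? 2 : 1)) {
                spin_unlock_bh(&txq->lock);

                IWL_ERR(trans, "No space in command queue\n");
                iwl_op_mode_cmd_queue_full(trans->op_mode);
                return -ENOSPC;
        }

        /* ... copy the command into the slot, build TB0/TB1, dma_map any
         * NOCOPY/DUP fragments, trace ... */

        ret = iwl_pcie_set_cmd_in_flight(trans, cmd);
        if (ret < 0)
                goto out;

        /* Increment and update queue's write index */
        txq->write_ptr = iwl_queue_inc_wrap(trans, txq->write_ptr);
        iwl_pcie_txq_inc_wr_ptr(trans, txq);

out:
        spin_unlock_bh(&txq->lock);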
1800 void iwl_pcie_hcmd_complete(struct iwl_trans *trans, in iwl_pcie_hcmd_complete() argument
1812 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_hcmd_complete()
1822 iwl_print_hex_error(trans, pkt, 32); in iwl_pcie_hcmd_complete()
1834 iwl_pcie_tfd_unmap(trans, meta, txq, index); in iwl_pcie_hcmd_complete()
1846 iwl_op_mode_async_cb(trans->op_mode, cmd); in iwl_pcie_hcmd_complete()
1848 iwl_pcie_cmdq_reclaim(trans, txq_id, index); in iwl_pcie_hcmd_complete()
1851 if (!test_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status)) { in iwl_pcie_hcmd_complete()
1852 IWL_WARN(trans, in iwl_pcie_hcmd_complete()
1854 iwl_get_cmd_string(trans, cmd_id)); in iwl_pcie_hcmd_complete()
1856 clear_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status); in iwl_pcie_hcmd_complete()
1857 IWL_DEBUG_INFO(trans, "Clearing HCMD_ACTIVE for command %s\n", in iwl_pcie_hcmd_complete()
1858 iwl_get_cmd_string(trans, cmd_id)); in iwl_pcie_hcmd_complete()
1869 static int iwl_pcie_send_hcmd_async(struct iwl_trans *trans, in iwl_pcie_send_hcmd_async() argument
1878 ret = iwl_pcie_enqueue_hcmd(trans, cmd); in iwl_pcie_send_hcmd_async()
1880 IWL_ERR(trans, in iwl_pcie_send_hcmd_async()
1882 iwl_get_cmd_string(trans, cmd->id), ret); in iwl_pcie_send_hcmd_async()
1888 static int iwl_pcie_send_hcmd_sync(struct iwl_trans *trans, in iwl_pcie_send_hcmd_sync() argument
1891 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_send_hcmd_sync()
1896 IWL_DEBUG_INFO(trans, "Attempting to send sync command %s\n", in iwl_pcie_send_hcmd_sync()
1897 iwl_get_cmd_string(trans, cmd->id)); in iwl_pcie_send_hcmd_sync()
1900 &trans->status), in iwl_pcie_send_hcmd_sync()
1902 iwl_get_cmd_string(trans, cmd->id))) in iwl_pcie_send_hcmd_sync()
1905 IWL_DEBUG_INFO(trans, "Setting HCMD_ACTIVE for command %s\n", in iwl_pcie_send_hcmd_sync()
1906 iwl_get_cmd_string(trans, cmd->id)); in iwl_pcie_send_hcmd_sync()
1908 cmd_idx = iwl_pcie_enqueue_hcmd(trans, cmd); in iwl_pcie_send_hcmd_sync()
1911 clear_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status); in iwl_pcie_send_hcmd_sync()
1912 IWL_ERR(trans, in iwl_pcie_send_hcmd_sync()
1914 iwl_get_cmd_string(trans, cmd->id), ret); in iwl_pcie_send_hcmd_sync()
1920 &trans->status), in iwl_pcie_send_hcmd_sync()
1923 IWL_ERR(trans, "Error sending %s: time out after %dms.\n", in iwl_pcie_send_hcmd_sync()
1924 iwl_get_cmd_string(trans, cmd->id), in iwl_pcie_send_hcmd_sync()
1927 IWL_ERR(trans, "Current CMD queue read_ptr %d write_ptr %d\n", in iwl_pcie_send_hcmd_sync()
1930 clear_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status); in iwl_pcie_send_hcmd_sync()
1931 IWL_DEBUG_INFO(trans, "Clearing HCMD_ACTIVE for command %s\n", in iwl_pcie_send_hcmd_sync()
1932 iwl_get_cmd_string(trans, cmd->id)); in iwl_pcie_send_hcmd_sync()
1935 iwl_trans_pcie_sync_nmi(trans); in iwl_pcie_send_hcmd_sync()
1939 if (test_bit(STATUS_FW_ERROR, &trans->status)) { in iwl_pcie_send_hcmd_sync()
1940 iwl_trans_pcie_dump_regs(trans); in iwl_pcie_send_hcmd_sync()
1941 IWL_ERR(trans, "FW error in SYNC CMD %s\n", in iwl_pcie_send_hcmd_sync()
1942 iwl_get_cmd_string(trans, cmd->id)); in iwl_pcie_send_hcmd_sync()
1949 test_bit(STATUS_RFKILL_OPMODE, &trans->status)) { in iwl_pcie_send_hcmd_sync()
1950 IWL_DEBUG_RF_KILL(trans, "RFKILL in SYNC CMD... no rsp\n"); in iwl_pcie_send_hcmd_sync()
1956 IWL_ERR(trans, "Error: Response NULL in '%s'\n", in iwl_pcie_send_hcmd_sync()
1957 iwl_get_cmd_string(trans, cmd->id)); in iwl_pcie_send_hcmd_sync()
1983 int iwl_trans_pcie_send_hcmd(struct iwl_trans *trans, struct iwl_host_cmd *cmd) in iwl_trans_pcie_send_hcmd() argument
1986 if (test_bit(STATUS_TRANS_DEAD, &trans->status)) in iwl_trans_pcie_send_hcmd()
1990 test_bit(STATUS_RFKILL_OPMODE, &trans->status)) { in iwl_trans_pcie_send_hcmd()
1991 IWL_DEBUG_RF_KILL(trans, "Dropping CMD 0x%x: RF KILL\n", in iwl_trans_pcie_send_hcmd()
1997 return iwl_pcie_send_hcmd_async(trans, cmd); in iwl_trans_pcie_send_hcmd()
2000 return iwl_pcie_send_hcmd_sync(trans, cmd); in iwl_trans_pcie_send_hcmd()
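
Lines 1983-2000 are the dispatch wrapper: refuse commands on a dead transport or under RF-kill (unless explicitly allowed), then route to the async or sync sender. A near-complete sketch:

int iwl_trans_pcie_send_hcmd(struct iwl_trans *trans, struct iwl_host_cmd *cmd)
{
        /* Make sure the flow by which we drop the trans is finished */
        if (test_bit(STATUS_TRANS_DEAD, &trans->status))
                return -ENODEV;

        if (!(cmd->flags & CMD_SEND_IN_RFKILL) &&
            test_bit(STATUS_RFKILL_OPMODE, &trans->status)) {
                IWL_DEBUG_RF_KILL(trans, "Dropping CMD 0x%x: RF KILL\n",
                                  cmd->id);
                return -ERFKILL;
        }

        if (cmd->flags & CMD_ASYNC)
                return iwl_pcie_send_hcmd_async(trans, cmd);

        /* We can still fail on RFKILL asserted while we wait */
        return iwl_pcie_send_hcmd_sync(trans, cmd);
}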
2003 static int iwl_fill_data_tbs(struct iwl_trans *trans, struct sk_buff *skb, in iwl_fill_data_tbs() argument
2017 dma_addr_t tb_phys = dma_map_single(trans->dev, in iwl_fill_data_tbs()
2020 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_fill_data_tbs()
2022 trace_iwlwifi_dev_tx_tb(trans->dev, skb, in iwl_fill_data_tbs()
2025 iwl_pcie_txq_build_tfd(trans, txq, tb_phys, head_tb_len, false); in iwl_fill_data_tbs()
2037 tb_phys = skb_frag_dma_map(trans->dev, frag, 0, in iwl_fill_data_tbs()
2040 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_fill_data_tbs()
2042 trace_iwlwifi_dev_tx_tb(trans->dev, skb, in iwl_fill_data_tbs()
2045 tb_idx = iwl_pcie_txq_build_tfd(trans, txq, tb_phys, in iwl_fill_data_tbs()
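
Lines 2003-2045 map the data portion of an skb: first the remainder of the linear head, then one TB per paged fragment. A sketch of the fragment loop; the tracing calls visible above are elided, and out_meta->tbs recording page mappings is an assumption:

        for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
                const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];
                dma_addr_t tb_phys;
                int tb_idx;

                if (!skb_frag_size(frag))
                        continue;

                tb_phys = skb_frag_dma_map(trans->dev, frag, 0,
                                           skb_frag_size(frag), DMA_TO_DEVICE);
                if (unlikely(dma_mapping_error(trans->dev, tb_phys)))
                        return -EINVAL;

                tb_idx = iwl_pcie_txq_build_tfd(trans, txq, tb_phys,
                                                skb_frag_size(frag), false);
                if (tb_idx < 0)
                        return tb_idx;

                /* mark as a page mapping so unmap later uses dma_unmap_page */
                out_meta->tbs |= BIT(tb_idx);
        }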
2057 struct iwl_tso_hdr_page *get_page_hdr(struct iwl_trans *trans, size_t len) in get_page_hdr() argument
2059 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in get_page_hdr()
2099 static int iwl_fill_data_tbs_amsdu(struct iwl_trans *trans, struct sk_buff *skb, in iwl_fill_data_tbs_amsdu() argument
2120 trace_iwlwifi_dev_tx(trans->dev, skb, in iwl_fill_data_tbs_amsdu()
2121 iwl_pcie_get_tfd(trans, txq, txq->write_ptr), in iwl_fill_data_tbs_amsdu()
2135 hdr_page = get_page_hdr(trans, hdr_room); in iwl_fill_data_tbs_amsdu()
2216 hdr_tb_phys = dma_map_single(trans->dev, start_hdr, in iwl_fill_data_tbs_amsdu()
2218 if (unlikely(dma_mapping_error(trans->dev, hdr_tb_phys))) { in iwl_fill_data_tbs_amsdu()
2222 iwl_pcie_txq_build_tfd(trans, txq, hdr_tb_phys, in iwl_fill_data_tbs_amsdu()
2224 trace_iwlwifi_dev_tx_tb(trans->dev, skb, start_hdr, in iwl_fill_data_tbs_amsdu()
2241 tb_phys = dma_map_single(trans->dev, tso.data, in iwl_fill_data_tbs_amsdu()
2243 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) { in iwl_fill_data_tbs_amsdu()
2248 iwl_pcie_txq_build_tfd(trans, txq, tb_phys, in iwl_fill_data_tbs_amsdu()
2250 trace_iwlwifi_dev_tx_tb(trans->dev, skb, tso.data, in iwl_fill_data_tbs_amsdu()
2267 dma_sync_single_for_cpu(trans->dev, hdr_tb_phys, in iwl_fill_data_tbs_amsdu()
2270 dma_sync_single_for_device(trans->dev, hdr_tb_phys, in iwl_fill_data_tbs_amsdu()
2281 static int iwl_fill_data_tbs_amsdu(struct iwl_trans *trans, struct sk_buff *skb, in iwl_fill_data_tbs_amsdu() argument
2293 int iwl_trans_pcie_tx(struct iwl_trans *trans, struct sk_buff *skb, in iwl_trans_pcie_tx() argument
2296 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_tx()
2346 if (iwl_queue_space(trans, txq) < txq->high_mark) { in iwl_trans_pcie_tx()
2347 iwl_stop_queue(trans, txq); in iwl_trans_pcie_tx()
2350 if (unlikely(iwl_queue_space(trans, txq) < 3)) { in iwl_trans_pcie_tx()
2419 iwl_pcie_txq_build_tfd(trans, txq, tb0_phys, in iwl_trans_pcie_tx()
2427 tb1_phys = dma_map_single(trans->dev, tb1_addr, tb1_len, DMA_TO_DEVICE); in iwl_trans_pcie_tx()
2428 if (unlikely(dma_mapping_error(trans->dev, tb1_phys))) in iwl_trans_pcie_tx()
2430 iwl_pcie_txq_build_tfd(trans, txq, tb1_phys, tb1_len, false); in iwl_trans_pcie_tx()
2432 trace_iwlwifi_dev_tx(trans->dev, skb, in iwl_trans_pcie_tx()
2433 iwl_pcie_get_tfd(trans, txq, in iwl_trans_pcie_tx()
2446 if (unlikely(iwl_fill_data_tbs_amsdu(trans, skb, txq, hdr_len, in iwl_trans_pcie_tx()
2453 if (unlikely(iwl_fill_data_tbs(trans, skb, txq, hdr_len, in iwl_trans_pcie_tx()
2458 if (unlikely(iwl_fill_data_tbs(trans, frag, txq, 0, in iwl_trans_pcie_tx()
2467 tfd = iwl_pcie_get_tfd(trans, txq, txq->write_ptr); in iwl_trans_pcie_tx()
2469 iwl_pcie_txq_update_byte_cnt_tbl(trans, txq, le16_to_cpu(tx_cmd->len), in iwl_trans_pcie_tx()
2470 iwl_pcie_tfd_get_num_tbs(trans, tfd)); in iwl_trans_pcie_tx()
2490 txq->write_ptr = iwl_queue_inc_wrap(trans, txq->write_ptr); in iwl_trans_pcie_tx()
2492 iwl_pcie_txq_inc_wr_ptr(trans, txq); in iwl_trans_pcie_tx()
2501 iwl_pcie_tfd_unmap(trans, out_meta, txq, txq->write_ptr); in iwl_trans_pcie_tx()
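
Lines 2293-2501 are the data TX hot path; 2501 is its unwind target. A sketch of the closing commit and the error exit (the wait_write_ptr deferral and the stuck-timer arming are assumptions):

        /* arm the stuck-queue watchdog when the queue was empty */
        if (txq->read_ptr == txq->write_ptr && txq->wd_timeout)
                mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);

        /* Tell the device the write index just past this filled TFD */
        txq->write_ptr = iwl_queue_inc_wrap(trans, txq->write_ptr);
        if (!wait_write_ptr)
                iwl_pcie_txq_inc_wr_ptr(trans, txq);

        spin_unlock(&txq->lock);
        return 0;

out_err:
        iwl_pcie_tfd_unmap(trans, out_meta, txq, txq->write_ptr);
        spin_unlock(&txq->lock);
        return -1;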