Lines matching refs:trans — each entry gives the source line number, the matching code, and the enclosing function
18 static struct page *get_workaround_page(struct iwl_trans *trans, in get_workaround_page() argument
21 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in get_workaround_page()
36 phys = dma_map_page_attrs(trans->dev, ret, 0, PAGE_SIZE, in get_workaround_page()
38 if (unlikely(dma_mapping_error(trans->dev, phys))) { in get_workaround_page()
60 static int iwl_txq_gen2_set_tb_with_wa(struct iwl_trans *trans, in iwl_txq_gen2_set_tb_with_wa() argument
71 if (unlikely(dma_mapping_error(trans->dev, phys))) in iwl_txq_gen2_set_tb_with_wa()
75 ret = iwl_txq_gen2_set_tb(trans, tfd, phys, len); in iwl_txq_gen2_set_tb_with_wa()
101 page = get_workaround_page(trans, skb); in iwl_txq_gen2_set_tb_with_wa()
120 phys = dma_map_single(trans->dev, page_address(page), len, in iwl_txq_gen2_set_tb_with_wa()
122 if (unlikely(dma_mapping_error(trans->dev, phys))) in iwl_txq_gen2_set_tb_with_wa()
126 dma_sync_single_for_device(trans->dev, phys, len, in iwl_txq_gen2_set_tb_with_wa()
130 ret = iwl_txq_gen2_set_tb(trans, tfd, phys, len); in iwl_txq_gen2_set_tb_with_wa()
138 IWL_DEBUG_TX(trans, in iwl_txq_gen2_set_tb_with_wa()
149 dma_unmap_page(trans->dev, oldphys, len, DMA_TO_DEVICE); in iwl_txq_gen2_set_tb_with_wa()
151 dma_unmap_single(trans->dev, oldphys, len, DMA_TO_DEVICE); in iwl_txq_gen2_set_tb_with_wa()
153 trace_iwlwifi_dev_tx_tb(trans->dev, skb, virt, phys, len); in iwl_txq_gen2_set_tb_with_wa()
158 static int iwl_txq_gen2_build_amsdu(struct iwl_trans *trans, in iwl_txq_gen2_build_amsdu() argument
178 trace_iwlwifi_dev_tx(trans->dev, skb, tfd, sizeof(*tfd), in iwl_txq_gen2_build_amsdu()
191 sgt = iwl_pcie_prep_tso(trans, skb, out_meta, &start_hdr, hdr_room); in iwl_txq_gen2_build_amsdu()
250 iwl_txq_gen2_set_tb(trans, tfd, tb_phys, tb_len); in iwl_txq_gen2_build_amsdu()
251 trace_iwlwifi_dev_tx_tb(trans->dev, skb, start_hdr, in iwl_txq_gen2_build_amsdu()
270 ret = iwl_txq_gen2_set_tb_with_wa(trans, skb, tfd, in iwl_txq_gen2_build_amsdu()
282 dma_sync_single_for_device(trans->dev, start_hdr_phys, hdr_room, in iwl_txq_gen2_build_amsdu()
296 iwl_tfh_tfd *iwl_txq_gen2_build_tx_amsdu(struct iwl_trans *trans, in iwl_txq_gen2_build_tx_amsdu() argument
305 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx_amsdu()
317 iwl_txq_gen2_set_tb(trans, tfd, tb_phys, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx_amsdu()
332 tb_phys = dma_map_single(trans->dev, tb1_addr, len, DMA_TO_DEVICE); in iwl_txq_gen2_build_tx_amsdu()
333 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_txq_gen2_build_tx_amsdu()
339 iwl_txq_gen2_set_tb(trans, tfd, tb_phys, len); in iwl_txq_gen2_build_tx_amsdu()
341 if (iwl_txq_gen2_build_amsdu(trans, skb, tfd, out_meta, in iwl_txq_gen2_build_tx_amsdu()
350 iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd); in iwl_txq_gen2_build_tx_amsdu()
354 static int iwl_txq_gen2_tx_add_frags(struct iwl_trans *trans, in iwl_txq_gen2_tx_add_frags() argument
370 tb_phys = skb_frag_dma_map(trans->dev, frag, 0, in iwl_txq_gen2_tx_add_frags()
372 ret = iwl_txq_gen2_set_tb_with_wa(trans, skb, tfd, tb_phys, in iwl_txq_gen2_tx_add_frags()
383 iwl_tfh_tfd *iwl_txq_gen2_build_tx(struct iwl_trans *trans, in iwl_txq_gen2_build_tx() argument
393 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx()
409 iwl_txq_gen2_set_tb(trans, tfd, tb_phys, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx()
427 tb_phys = dma_map_single(trans->dev, tb1_addr, tb1_len, DMA_TO_DEVICE); in iwl_txq_gen2_build_tx()
428 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_txq_gen2_build_tx()
434 iwl_txq_gen2_set_tb(trans, tfd, tb_phys, tb1_len); in iwl_txq_gen2_build_tx()
435 trace_iwlwifi_dev_tx(trans->dev, skb, tfd, sizeof(*tfd), &dev_cmd->hdr, in iwl_txq_gen2_build_tx()
444 tb_phys = dma_map_single(trans->dev, skb->data + hdr_len, in iwl_txq_gen2_build_tx()
446 ret = iwl_txq_gen2_set_tb_with_wa(trans, skb, tfd, tb_phys, in iwl_txq_gen2_build_tx()
453 if (iwl_txq_gen2_tx_add_frags(trans, skb, tfd, out_meta)) in iwl_txq_gen2_build_tx()
459 tb_phys = dma_map_single(trans->dev, frag->data, in iwl_txq_gen2_build_tx()
461 ret = iwl_txq_gen2_set_tb_with_wa(trans, skb, tfd, tb_phys, in iwl_txq_gen2_build_tx()
467 if (iwl_txq_gen2_tx_add_frags(trans, frag, tfd, out_meta)) in iwl_txq_gen2_build_tx()
474 iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd); in iwl_txq_gen2_build_tx()
479 struct iwl_tfh_tfd *iwl_txq_gen2_build_tfd(struct iwl_trans *trans, in iwl_txq_gen2_build_tfd() argument
487 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tfd()
503 if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210) in iwl_txq_gen2_build_tfd()
520 return iwl_txq_gen2_build_tx_amsdu(trans, txq, dev_cmd, skb, in iwl_txq_gen2_build_tfd()
522 return iwl_txq_gen2_build_tx(trans, txq, dev_cmd, skb, out_meta, in iwl_txq_gen2_build_tfd()
526 int iwl_txq_space(struct iwl_trans *trans, const struct iwl_txq *q) in iwl_txq_space() argument
537 if (q->n_window < trans->trans_cfg->base_params->max_tfd_queue_size) in iwl_txq_space()
540 max = trans->trans_cfg->base_params->max_tfd_queue_size - 1; in iwl_txq_space()
547 (trans->trans_cfg->base_params->max_tfd_queue_size - 1); in iwl_txq_space()
558 static void iwl_pcie_gen2_update_byte_tbl(struct iwl_trans *trans, in iwl_pcie_gen2_update_byte_tbl() argument
562 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_gen2_update_byte_tbl()
583 if (trans->trans_cfg->device_family >= IWL_DEVICE_FAMILY_AX210) { in iwl_pcie_gen2_update_byte_tbl()
608 int iwl_txq_gen2_set_tb(struct iwl_trans *trans, struct iwl_tfh_tfd *tfd, in iwl_txq_gen2_set_tb() argument
611 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_set_tb()
631 IWL_ERR(trans, "Error can not send more than %d chunks\n", in iwl_txq_gen2_set_tb()
644 void iwl_txq_gen2_tfd_unmap(struct iwl_trans *trans, in iwl_txq_gen2_tfd_unmap() argument
648 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tfd_unmap()
655 IWL_ERR(trans, "Too many chunks: %i\n", num_tbs); in iwl_txq_gen2_tfd_unmap()
666 dma_unmap_page(trans->dev, in iwl_txq_gen2_tfd_unmap()
671 dma_unmap_single(trans->dev, in iwl_txq_gen2_tfd_unmap()
677 iwl_txq_set_tfd_invalid_gen2(trans, tfd); in iwl_txq_gen2_tfd_unmap()
680 static void iwl_txq_gen2_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_gen2_free_tfd() argument
693 iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta, in iwl_txq_gen2_free_tfd()
694 iwl_txq_get_tfd(trans, txq, idx)); in iwl_txq_gen2_free_tfd()
703 iwl_op_mode_free_skb(trans->op_mode, skb); in iwl_txq_gen2_free_tfd()
711 static void iwl_txq_inc_wr_ptr(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_inc_wr_ptr() argument
715 IWL_DEBUG_TX(trans, "Q:%d WR: 0x%x\n", txq->id, txq->write_ptr); in iwl_txq_inc_wr_ptr()
721 iwl_write32(trans, HBUS_TARG_WRPTR, txq->write_ptr | (txq->id << 16)); in iwl_txq_inc_wr_ptr()
724 int iwl_txq_gen2_tx(struct iwl_trans *trans, struct sk_buff *skb, in iwl_txq_gen2_tx() argument
727 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tx()
749 if (iwl_txq_space(trans, txq) < txq->high_mark) { in iwl_txq_gen2_tx()
750 iwl_txq_stop(trans, txq); in iwl_txq_gen2_tx()
753 if (unlikely(iwl_txq_space(trans, txq) < 3)) { in iwl_txq_gen2_tx()
780 tfd = iwl_txq_gen2_build_tfd(trans, txq, dev_cmd, skb, out_meta); in iwl_txq_gen2_tx()
786 if (trans->trans_cfg->device_family >= IWL_DEVICE_FAMILY_AX210) { in iwl_txq_gen2_tx()
799 iwl_pcie_gen2_update_byte_tbl(trans, txq, cmd_len, in iwl_txq_gen2_tx()
807 txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr); in iwl_txq_gen2_tx()
808 iwl_txq_inc_wr_ptr(trans, txq); in iwl_txq_gen2_tx()
822 static void iwl_txq_gen2_unmap(struct iwl_trans *trans, int txq_id) in iwl_txq_gen2_unmap() argument
824 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_unmap()
830 IWL_DEBUG_TX_REPLY(trans, "Q %d Free %d\n", in iwl_txq_gen2_unmap()
839 iwl_pcie_free_tso_pages(trans, skb, cmd_meta); in iwl_txq_gen2_unmap()
841 iwl_txq_gen2_free_tfd(trans, txq); in iwl_txq_gen2_unmap()
842 txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr); in iwl_txq_gen2_unmap()
848 iwl_op_mode_free_skb(trans->op_mode, skb); in iwl_txq_gen2_unmap()
855 iwl_trans_pcie_wake_queue(trans, txq); in iwl_txq_gen2_unmap()
858 static void iwl_txq_gen2_free_memory(struct iwl_trans *trans, in iwl_txq_gen2_free_memory() argument
861 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_free_memory()
862 struct device *dev = trans->dev; in iwl_txq_gen2_free_memory()
889 static void iwl_txq_gen2_free(struct iwl_trans *trans, int txq_id) in iwl_txq_gen2_free() argument
891 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_free()
904 iwl_txq_gen2_unmap(trans, txq_id); in iwl_txq_gen2_free()
914 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_gen2_free()
922 iwl_txq_dyn_alloc_dma(struct iwl_trans *trans, int size, unsigned int timeout) in iwl_txq_dyn_alloc_dma() argument
924 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_alloc_dma()
944 IWL_ERR(trans, "Scheduler BC Table allocation failed\n"); in iwl_txq_dyn_alloc_dma()
949 ret = iwl_pcie_txq_alloc(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
951 IWL_ERR(trans, "Tx queue alloc failed\n"); in iwl_txq_dyn_alloc_dma()
954 ret = iwl_txq_init(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
956 IWL_ERR(trans, "Tx queue init failed\n"); in iwl_txq_dyn_alloc_dma()
965 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc_dma()
969 static int iwl_pcie_txq_alloc_response(struct iwl_trans *trans, in iwl_pcie_txq_alloc_response() argument
973 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_alloc_response()
1008 wr_ptr &= (trans->trans_cfg->base_params->max_tfd_queue_size - 1); in iwl_pcie_txq_alloc_response()
1014 IWL_DEBUG_TX_QUEUES(trans, "Activate queue %d\n", qid); in iwl_pcie_txq_alloc_response()
1021 iwl_txq_gen2_free_memory(trans, txq); in iwl_pcie_txq_alloc_response()
1025 int iwl_txq_dyn_alloc(struct iwl_trans *trans, u32 flags, u32 sta_mask, in iwl_txq_dyn_alloc() argument
1028 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_alloc()
1044 if (trans->trans_cfg->device_family == IWL_DEVICE_FAMILY_BZ && in iwl_txq_dyn_alloc()
1045 trans->hw_rev_step == SILICON_A_STEP) { in iwl_txq_dyn_alloc()
1047 txq = iwl_txq_dyn_alloc_dma(trans, size, timeout); in iwl_txq_dyn_alloc()
1050 txq = iwl_txq_dyn_alloc_dma(trans, size, timeout); in iwl_txq_dyn_alloc()
1054 IWL_DEBUG_TX_QUEUES(trans, in iwl_txq_dyn_alloc()
1100 ret = iwl_trans_send_cmd(trans, &hcmd); in iwl_txq_dyn_alloc()
1104 return iwl_pcie_txq_alloc_response(trans, txq, &hcmd); in iwl_txq_dyn_alloc()
1107 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc()
1111 void iwl_txq_dyn_free(struct iwl_trans *trans, int queue) in iwl_txq_dyn_free() argument
1113 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_free()
1126 WARN_ONCE(test_bit(STATUS_DEVICE_ENABLED, &trans->status), in iwl_txq_dyn_free()
1131 iwl_txq_gen2_free(trans, queue); in iwl_txq_dyn_free()
1133 IWL_DEBUG_TX_QUEUES(trans, "Deactivate queue %d\n", queue); in iwl_txq_dyn_free()
1136 void iwl_txq_gen2_tx_free(struct iwl_trans *trans) in iwl_txq_gen2_tx_free() argument
1138 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tx_free()
1149 iwl_txq_gen2_free(trans, i); in iwl_txq_gen2_tx_free()
1153 int iwl_txq_gen2_init(struct iwl_trans *trans, int txq_id, int queue_size) in iwl_txq_gen2_init() argument
1155 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_init()
1163 IWL_ERR(trans, "Not enough memory for tx queue\n"); in iwl_txq_gen2_init()
1167 ret = iwl_pcie_txq_alloc(trans, queue, queue_size, true); in iwl_txq_gen2_init()
1169 IWL_ERR(trans, "Tx %d queue init failed\n", txq_id); in iwl_txq_gen2_init()
1176 ret = iwl_txq_init(trans, queue, queue_size, in iwl_txq_gen2_init()
1179 IWL_ERR(trans, "Tx %d queue alloc failed\n", txq_id); in iwl_txq_gen2_init()
1188 iwl_txq_gen2_tx_free(trans); in iwl_txq_gen2_init()
1203 int iwl_pcie_gen2_enqueue_hcmd(struct iwl_trans *trans, in iwl_pcie_gen2_enqueue_hcmd() argument
1206 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_gen2_enqueue_hcmd()
1286 iwl_get_cmd_string(trans, cmd->id), cmd->id, copy_size)) { in iwl_pcie_gen2_enqueue_hcmd()
1294 tfd = iwl_txq_get_tfd(trans, txq, txq->write_ptr); in iwl_pcie_gen2_enqueue_hcmd()
1297 if (iwl_txq_space(trans, txq) < ((cmd->flags & CMD_ASYNC) ? 2 : 1)) { in iwl_pcie_gen2_enqueue_hcmd()
1300 IWL_ERR(trans, "No space in command queue\n"); in iwl_pcie_gen2_enqueue_hcmd()
1301 iwl_op_mode_cmd_queue_full(trans->op_mode); in iwl_pcie_gen2_enqueue_hcmd()
1366 IWL_DEBUG_HC(trans, in iwl_pcie_gen2_enqueue_hcmd()
1368 iwl_get_cmd_string(trans, cmd->id), group_id, in iwl_pcie_gen2_enqueue_hcmd()
1375 iwl_txq_gen2_set_tb(trans, tfd, iwl_txq_get_first_tb_dma(txq, idx), in iwl_pcie_gen2_enqueue_hcmd()
1380 phys_addr = dma_map_single(trans->dev, in iwl_pcie_gen2_enqueue_hcmd()
1384 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_gen2_enqueue_hcmd()
1386 iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd); in iwl_pcie_gen2_enqueue_hcmd()
1389 iwl_txq_gen2_set_tb(trans, tfd, phys_addr, in iwl_pcie_gen2_enqueue_hcmd()
1404 phys_addr = dma_map_single(trans->dev, data, in iwl_pcie_gen2_enqueue_hcmd()
1406 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_gen2_enqueue_hcmd()
1408 iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd); in iwl_pcie_gen2_enqueue_hcmd()
1411 iwl_txq_gen2_set_tb(trans, tfd, phys_addr, cmdlen[i]); in iwl_pcie_gen2_enqueue_hcmd()
1420 trace_iwlwifi_dev_hcmd(trans->dev, cmd, cmd_size, &out_cmd->hdr_wide); in iwl_pcie_gen2_enqueue_hcmd()
1428 txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr); in iwl_pcie_gen2_enqueue_hcmd()
1429 iwl_txq_inc_wr_ptr(trans, txq); in iwl_pcie_gen2_enqueue_hcmd()