Lines Matching refs:trans_pcie
21 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in get_workaround_page() local
27 page_ptr = (void *)((u8 *)skb->cb + trans_pcie->txqs.page_offs); in get_workaround_page()
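
These first two hits show the accessor pattern that repeats through the whole listing: IWL_TRANS_GET_PCIE_TRANS() recovers the PCIe-private state from the generic transport, and a pointer is then parked inside the skb control buffer at a transport-chosen offset. A minimal sketch of the cb-offset trick, assuming page_offs was validated at init time to leave room for a pointer within the 48-byte skb->cb:

    /* Sketch: stash/retrieve a per-skb page pointer in skb->cb.
     * The offset is runtime data, so several layers can share cb
     * without hard-coding each other's layout.
     */
    struct page **page_ptr =
        (void *)((u8 *)skb->cb + trans_pcie->txqs.page_offs);

    *page_ptr = page;   /* producer (alloc path) records the page   */
    page = *page_ptr;   /* consumer (free path) reads it back later */
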
562 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_gen2_update_byte_tbl() local
587 WARN_ON(trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
595 WARN_ON(!trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
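
The paired WARN_ON()s at 587/595 guard the two branches of the byte-count-table update: bc_table_dword records whether the hardware expects the length in dwords (older devices) or in plain bytes (newer ones). A hedged sketch of that branch; the entry layout and the shift for num_fetch_chunks are assumptions:

    /* Sketch: write one byte-count entry for TFD slot 'idx'.
     * bc_table_dword == true  -> HW wants the count in dwords
     * bc_table_dword == false -> HW wants the count in bytes
     */
    u16 len = byte_cnt;

    if (trans_pcie->txqs.bc_table_dword)
        len = DIV_ROUND_UP(len, 4);             /* bytes -> dwords */

    bc_tbl[idx] = cpu_to_le16(len | (num_fetch_chunks << 12)); /* shift assumed */
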
611 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_set_tb() local
630 if (le16_to_cpu(tfd->num_tbs) >= trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_set_tb()
632 trans_pcie->txqs.tfd.max_tbs); in iwl_txq_gen2_set_tb()
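
Lines 630/632 are the capacity check before appending a transfer buffer to a TFD: num_tbs lives little-endian inside the DMA-visible descriptor, so it is converted before the comparison against the per-device max_tbs. A sketch of the check, assuming the gen2 TFD keeps an inline tbs[] array:

    /* Sketch: bail out instead of overflowing the TB array. */
    int idx = le16_to_cpu(tfd->num_tbs);

    if (idx >= trans_pcie->txqs.tfd.max_tbs) {
        IWL_ERR(trans, "Error can not send more than %d chunks\n",
                trans_pcie->txqs.tfd.max_tbs);
        return -EINVAL;
    }
    /* ...program tfd->tbs[idx], then store cpu_to_le16(idx + 1) back */
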
648 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tfd_unmap() local
654 if (num_tbs > trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_tfd_unmap()
727 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tx() local
729 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_tx()
738 if (WARN_ONCE(!test_bit(txq_id, trans_pcie->txqs.queue_used), in iwl_txq_gen2_tx()
743 skb_shinfo(skb)->nr_frags > IWL_TRANS_PCIE_MAX_FRAGS(trans_pcie) && in iwl_txq_gen2_tx()
757 trans_pcie->txqs.dev_cmd_offs); in iwl_txq_gen2_tx()
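
The TX fast path at 727..757 does three things visible here: reject queues never marked in the queue_used bitmap (738), linearize skbs carrying more fragments than one TFD can describe (743), and park the device command pointer in skb->cb at dev_cmd_offs (757) so the reclaim path can retrieve it, the same cb trick as line 27. A condensed sketch; the WARN message text is illustrative:

    /* Sketch: validate the queue, then remember dev_cmd for reclaim. */
    struct iwl_device_tx_cmd **dev_cmd_ptr;

    if (WARN_ONCE(!test_bit(txq_id, trans_pcie->txqs.queue_used),
                  "TX on unused queue %d\n", txq_id))
        return -EINVAL;

    if (skb_is_nonlinear(skb) &&
        skb_shinfo(skb)->nr_frags > IWL_TRANS_PCIE_MAX_FRAGS(trans_pcie) &&
        __skb_linearize(skb))
        return -ENOMEM;

    dev_cmd_ptr = (void *)((u8 *)skb->cb + trans_pcie->txqs.dev_cmd_offs);
    *dev_cmd_ptr = dev_cmd;
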
824 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_unmap() local
825 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_unmap()
833 if (txq_id != trans_pcie->txqs.cmd.q_id) { in iwl_txq_gen2_unmap()
861 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_free_memory() local
867 trans_pcie->txqs.tfd.size * txq->n_window, in iwl_txq_gen2_free_memory()
876 dma_pool_free(trans_pcie->txqs.bc_pool, in iwl_txq_gen2_free_memory()
891 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_free() local
899 txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_free()
907 if (txq_id == trans_pcie->txqs.cmd.q_id) in iwl_txq_gen2_free()
916 trans_pcie->txqs.txq[txq_id] = NULL; in iwl_txq_gen2_free()
918 clear_bit(txq_id, trans_pcie->txqs.queue_used); in iwl_txq_gen2_free()
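
Teardown at 891..918 is the mirror image of allocation, and the order matters: iwl_txq_gen2_unmap() first drops in-flight buffers (skipping skb freeing for the command queue, which owns command buffers instead, lines 833/907), iwl_txq_gen2_free_memory() then returns the TFD ring (sized tfd.size * n_window, line 867) and hands the byte-count table back to bc_pool (876), and only afterwards are the slot and the used bit cleared. Sketch, with argument lists inferred from the listing:

    /* Sketch: free a dynamically allocated TX queue, in order. */
    iwl_txq_gen2_unmap(trans, txq_id);        /* 1: drop in-flight bufs  */
    iwl_txq_gen2_free_memory(trans, txq);     /* 2: TFD ring + bc table  */
    trans_pcie->txqs.txq[txq_id] = NULL;      /* 3: unpublish the queue  */
    clear_bit(txq_id, trans_pcie->txqs.queue_used); /* 4: allow id reuse */
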
924 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_alloc_dma() local
929 WARN_ON(!trans_pcie->txqs.bc_tbl_size); in iwl_txq_dyn_alloc_dma()
931 bc_tbl_size = trans_pcie->txqs.bc_tbl_size; in iwl_txq_dyn_alloc_dma()
941 txq->bc_tbl.addr = dma_pool_alloc(trans_pcie->txqs.bc_pool, GFP_KERNEL, in iwl_txq_dyn_alloc_dma()
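
Dynamic queues draw their byte-count tables from a shared DMA pool instead of a fresh coherent allocation each time; bc_tbl_size, asserted non-zero at 929, is the pool's fixed element size. A sketch of the allocation, assuming bc_pool was created at init with dma_pool_create():

    /* Sketch: one fixed-size byte-count table per queue, from a pool. */
    WARN_ON(!trans_pcie->txqs.bc_tbl_size);

    txq->bc_tbl.addr = dma_pool_alloc(trans_pcie->txqs.bc_pool, GFP_KERNEL,
                                      &txq->bc_tbl.dma);
    if (!txq->bc_tbl.addr) {
        IWL_ERR(trans, "Scheduler BC Table allocation failed\n");
        return -ENOMEM;
    }
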
973 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_txq_alloc_response() local
988 if (qid >= ARRAY_SIZE(trans_pcie->txqs.txq)) { in iwl_pcie_txq_alloc_response()
994 if (test_and_set_bit(qid, trans_pcie->txqs.queue_used)) { in iwl_pcie_txq_alloc_response()
1000 if (WARN_ONCE(trans_pcie->txqs.txq[qid], in iwl_pcie_txq_alloc_response()
1007 trans_pcie->txqs.txq[qid] = txq; in iwl_pcie_txq_alloc_response()
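
When the firmware answers the allocation command it picks the queue id itself, so the response handler validates it defensively: bounds-check against the txq[] array (988), atomically claim the bit in queue_used (994), and WARN if the slot is somehow already populated (1000) before publishing the queue (1007). A sketch of that ladder; error paths normally also free the queue, elided here:

    /* Sketch: trust-but-verify a firmware-assigned queue id. */
    if (qid >= ARRAY_SIZE(trans_pcie->txqs.txq))
        return -EIO;                        /* id out of range          */

    if (test_and_set_bit(qid, trans_pcie->txqs.queue_used))
        return -EIO;                        /* fw handed out a live id  */

    if (WARN_ONCE(trans_pcie->txqs.txq[qid],
                  "queue %d already allocated\n", qid))
        return -EIO;                        /* slot unexpectedly in use */

    trans_pcie->txqs.txq[qid] = txq;        /* publish after all checks */
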
1028 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_alloc() local
1040 size = min_t(u32, size, trans_pcie->txqs.bc_tbl_size / sizeof(u16)); in iwl_txq_dyn_alloc()
1065 if (trans_pcie->txqs.queue_alloc_cmd_ver == 0) { in iwl_txq_dyn_alloc()
1082 } else if (trans_pcie->txqs.queue_alloc_cmd_ver == 3) { in iwl_txq_dyn_alloc()
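
queue_alloc_cmd_ver selects the wire format of the queue-allocation host command: version 0 is the legacy layout, version 3 the current one, and anything else should be refused. The requested size is first clamped at 1040 so the queue's byte-count table, assumed here to be one __le16 slot per entry, still fits a bc_pool element. A skeleton of the dispatch; the command layouts are not shown and the fallback error code is an assumption:

    /* Sketch: pick the host-command format the firmware speaks. */
    size = min_t(u32, size, trans_pcie->txqs.bc_tbl_size / sizeof(u16));

    if (trans_pcie->txqs.queue_alloc_cmd_ver == 0) {
        /* build the legacy allocation command */
    } else if (trans_pcie->txqs.queue_alloc_cmd_ver == 3) {
        /* build the current allocation command */
    } else {
        return -EOPNOTSUPP;                 /* unknown fw API version */
    }
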
1113 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_dyn_free() local
1125 if (!test_and_clear_bit(queue, trans_pcie->txqs.queue_used)) { in iwl_txq_dyn_free()
1138 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_tx_free() local
1141 memset(trans_pcie->txqs.queue_used, 0, in iwl_txq_gen2_tx_free()
1142 sizeof(trans_pcie->txqs.queue_used)); in iwl_txq_gen2_tx_free()
1145 for (i = 0; i < ARRAY_SIZE(trans_pcie->txqs.txq); i++) { in iwl_txq_gen2_tx_free()
1146 if (!trans_pcie->txqs.txq[i]) in iwl_txq_gen2_tx_free()
1155 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_txq_gen2_init() local
1160 if (!trans_pcie->txqs.txq[txq_id]) { in iwl_txq_gen2_init()
1166 trans_pcie->txqs.txq[txq_id] = queue; in iwl_txq_gen2_init()
1173 queue = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_init()
1177 (txq_id == trans_pcie->txqs.cmd.q_id)); in iwl_txq_gen2_init()
1182 trans_pcie->txqs.txq[txq_id]->id = txq_id; in iwl_txq_gen2_init()
1183 set_bit(txq_id, trans_pcie->txqs.queue_used); in iwl_txq_gen2_init()
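
Init at 1155..1183 is lazy and idempotent: if the slot for txq_id is empty, a queue struct is allocated and published (1160/1166); otherwise the existing one is reused (1173). The comparison at 1177 flags the command queue for the init callee, and the final id assignment plus set_bit (1182/1183) make the queue visible to the TX path. Sketch; the iwl_txq_init() signature is inferred from the call pattern, and ring setup between the steps is elided:

    /* Sketch: allocate the queue struct once, then (re)activate it. */
    if (!trans_pcie->txqs.txq[txq_id]) {
        queue = kzalloc(sizeof(*queue), GFP_KERNEL);
        if (!queue)
            return -ENOMEM;
        trans_pcie->txqs.txq[txq_id] = queue;
    } else {
        queue = trans_pcie->txqs.txq[txq_id];
    }

    ret = iwl_txq_init(trans, queue, queue_size,
                       txq_id == trans_pcie->txqs.cmd.q_id); /* cmd queue? */
    if (ret)
        return ret;

    trans_pcie->txqs.txq[txq_id]->id = txq_id;
    set_bit(txq_id, trans_pcie->txqs.queue_used);
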
1206 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_gen2_enqueue_hcmd() local
1207 struct iwl_txq *txq = trans_pcie->txqs.txq[trans_pcie->txqs.cmd.q_id]; in iwl_pcie_gen2_enqueue_hcmd()
1322 cpu_to_le16(QUEUE_TO_SEQ(trans_pcie->txqs.cmd.q_id) | in iwl_pcie_gen2_enqueue_hcmd()
1370 cmd_size, txq->write_ptr, idx, trans_pcie->txqs.cmd.q_id); in iwl_pcie_gen2_enqueue_hcmd()
1426 spin_lock(&trans_pcie->reg_lock); in iwl_pcie_gen2_enqueue_hcmd()
1430 spin_unlock(&trans_pcie->reg_lock); in iwl_pcie_gen2_enqueue_hcmd()
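
For host commands the sequence field doubles as routing data: QUEUE_TO_SEQ() folds the command queue's id into the header (1322) so completions can be matched back, and reg_lock (1426/1430) serializes the keep-the-NIC-awake handshake around the doorbell write. A sketch, assuming the usual INDEX_TO_SEQ() companion macro for the write pointer:

    /* Sketch: stamp the command header, then hand off under reg_lock
     * so the 'hold NIC awake' reference cannot race with release.
     */
    out_cmd->hdr.sequence =
        cpu_to_le16(QUEUE_TO_SEQ(trans_pcie->txqs.cmd.q_id) |
                    INDEX_TO_SEQ(txq->write_ptr));

    spin_lock(&trans_pcie->reg_lock);
    /* take a 'NIC awake' reference, bump write_ptr, ring the doorbell */
    spin_unlock(&trans_pcie->reg_lock);
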