Lines matching references to txq

20 struct iwl_txq *txq, u16 byte_cnt, in iwl_pcie_gen2_update_byte_tbl() argument
23 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_pcie_gen2_update_byte_tbl()
28 if (WARN(idx >= txq->n_window, "%d >= %d\n", idx, txq->n_window)) in iwl_pcie_gen2_update_byte_tbl()
44 struct iwl_gen3_bc_tbl *scd_bc_tbl_gen3 = txq->bc_tbl.addr; in iwl_pcie_gen2_update_byte_tbl()
52 struct iwlagn_scd_bc_tbl *scd_bc_tbl = txq->bc_tbl.addr; in iwl_pcie_gen2_update_byte_tbl()
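
The fragments above from iwl_pcie_gen2_update_byte_tbl() show the per-TFD byte-count entry being written into one of two table layouts hanging off txq->bc_tbl.addr. A minimal sketch of that branch follows; the bc_ent encoding and the device-family check are assumptions, only the bound check and the two table types appear in the listing:

    int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);
    __le16 bc_ent;

    if (WARN(idx >= txq->n_window, "%d >= %d\n", idx, txq->n_window))
        return;

    /* assumption: the real encoding also folds in fetch-chunk info
     * derived from the filled TFD size */
    bc_ent = cpu_to_le16(byte_cnt);

    if (trans->trans_cfg->device_family >= IWL_DEVICE_FAMILY_AX210) {    /* assumption */
        struct iwl_gen3_bc_tbl *scd_bc_tbl_gen3 = txq->bc_tbl.addr;

        scd_bc_tbl_gen3->tfd_offset[idx] = bc_ent;
    } else {
        struct iwlagn_scd_bc_tbl *scd_bc_tbl = txq->bc_tbl.addr;

        scd_bc_tbl->tfd_offset[idx] = bc_ent;
    }
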
66 void iwl_txq_inc_wr_ptr(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_inc_wr_ptr() argument
68 lockdep_assert_held(&txq->lock); in iwl_txq_inc_wr_ptr()
70 IWL_DEBUG_TX(trans, "Q:%d WR: 0x%x\n", txq->id, txq->write_ptr); in iwl_txq_inc_wr_ptr()
76 iwl_write32(trans, HBUS_TARG_WRPTR, txq->write_ptr | (txq->id << 16)); in iwl_txq_inc_wr_ptr()
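
iwl_txq_inc_wr_ptr() only asserts txq->lock and mirrors the software write pointer into HBUS_TARG_WRPTR (write_ptr in the low bits, queue id shifted into the high half). The caller-side pattern, visible again in the iwl_txq_gen2_tx() fragments further down, is roughly:

    /* sketch of the usual handoff; the caller holds txq->lock */
    spin_lock(&txq->lock);
    /* ... TFD for txq->write_ptr has been filled ... */
    txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
    iwl_txq_inc_wr_ptr(trans, txq);    /* writes write_ptr | (id << 16) to HBUS_TARG_WRPTR */
    spin_unlock(&txq->lock);
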
115 void iwl_txq_gen2_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_gen2_free_tfd() argument
120 int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_gen2_free_tfd()
123 lockdep_assert_held(&txq->lock); in iwl_txq_gen2_free_tfd()
125 if (!txq->entries) in iwl_txq_gen2_free_tfd()
128 iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta, in iwl_txq_gen2_free_tfd()
129 iwl_txq_get_tfd(trans, txq, idx)); in iwl_txq_gen2_free_tfd()
131 skb = txq->entries[idx].skb; in iwl_txq_gen2_free_tfd()
139 txq->entries[idx].skb = NULL; in iwl_txq_gen2_free_tfd()
454 struct iwl_txq *txq, in iwl_txq_gen2_build_tx_amsdu() argument
461 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tx_amsdu()
462 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx_amsdu()
467 tb_phys = iwl_txq_get_first_tb_dma(txq, idx); in iwl_txq_gen2_build_tx_amsdu()
503 memcpy(&txq->first_tb_bufs[idx], dev_cmd, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx_amsdu()
541 struct iwl_txq *txq, in iwl_txq_gen2_build_tx() argument
549 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tx()
550 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tx()
556 tb_phys = iwl_txq_get_first_tb_dma(txq, idx); in iwl_txq_gen2_build_tx()
559 memcpy(&txq->first_tb_bufs[idx], dev_cmd, IWL_FIRST_TB_SIZE); in iwl_txq_gen2_build_tx()
636 struct iwl_txq *txq, in iwl_txq_gen2_build_tfd() argument
642 int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_build_tfd()
643 struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx); in iwl_txq_gen2_build_tfd()
669 return iwl_txq_gen2_build_tx_amsdu(trans, txq, dev_cmd, skb, in iwl_txq_gen2_build_tfd()
671 return iwl_txq_gen2_build_tx(trans, txq, dev_cmd, skb, out_meta, in iwl_txq_gen2_build_tfd()
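
iwl_txq_gen2_build_tfd() picks the TFD slot for the current write pointer and then dispatches to one of the two builders shown above. A hedged sketch of that dispatch; the A-MSDU test is an assumption, and both call sites are left truncated because the listing truncates their argument lists:

    int idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);
    struct iwl_tfh_tfd *tfd = iwl_txq_get_tfd(trans, txq, idx);
    bool amsdu = false;    /* assumption: derived from the skb's QoS control field */

    if (amsdu)
        return iwl_txq_gen2_build_tx_amsdu(trans, txq, dev_cmd, skb,
                                           /* ... args elided in the listing */);

    return iwl_txq_gen2_build_tx(trans, txq, dev_cmd, skb, out_meta,
                                 /* ... args elided in the listing */);
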
708 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_tx() local
726 spin_lock(&txq->lock); in iwl_txq_gen2_tx()
728 if (iwl_txq_space(trans, txq) < txq->high_mark) { in iwl_txq_gen2_tx()
729 iwl_txq_stop(trans, txq); in iwl_txq_gen2_tx()
732 if (unlikely(iwl_txq_space(trans, txq) < 3)) { in iwl_txq_gen2_tx()
739 __skb_queue_tail(&txq->overflow_q, skb); in iwl_txq_gen2_tx()
740 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
745 idx = iwl_txq_get_cmd_index(txq, txq->write_ptr); in iwl_txq_gen2_tx()
748 txq->entries[idx].skb = skb; in iwl_txq_gen2_tx()
749 txq->entries[idx].cmd = dev_cmd; in iwl_txq_gen2_tx()
756 out_meta = &txq->entries[idx].meta; in iwl_txq_gen2_tx()
759 tfd = iwl_txq_gen2_build_tfd(trans, txq, dev_cmd, skb, out_meta); in iwl_txq_gen2_tx()
761 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
778 iwl_pcie_gen2_update_byte_tbl(trans, txq, cmd_len, in iwl_txq_gen2_tx()
782 if (txq->read_ptr == txq->write_ptr && txq->wd_timeout) in iwl_txq_gen2_tx()
783 mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout); in iwl_txq_gen2_tx()
786 txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr); in iwl_txq_gen2_tx()
787 iwl_txq_inc_wr_ptr(trans, txq); in iwl_txq_gen2_tx()
792 spin_unlock(&txq->lock); in iwl_txq_gen2_tx()
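
Taken together, the iwl_txq_gen2_tx() fragments above outline the gen2 transmit path: take the queue lock, stop the queue and divert to the overflow list when space runs low, fill the ring entry, build the TFD, account the frame in the byte-count table, arm the watchdog if the ring was empty, then advance and publish the write pointer. A condensed reconstruction; return values and the truncated arguments are assumptions:

    spin_lock(&txq->lock);

    if (iwl_txq_space(trans, txq) < txq->high_mark) {
        iwl_txq_stop(trans, txq);

        /* almost full: park the frame on the overflow queue instead */
        if (unlikely(iwl_txq_space(trans, txq) < 3)) {
            /* assumption: dev_cmd is stashed in skb->cb before queueing */
            __skb_queue_tail(&txq->overflow_q, skb);
            spin_unlock(&txq->lock);
            return 0;
        }
    }

    idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);

    /* driver bookkeeping for this slot */
    txq->entries[idx].skb = skb;
    txq->entries[idx].cmd = dev_cmd;
    out_meta = &txq->entries[idx].meta;

    tfd = iwl_txq_gen2_build_tfd(trans, txq, dev_cmd, skb, out_meta);
    if (!tfd) {
        spin_unlock(&txq->lock);
        return -1;    /* assumption: exact error code not visible above */
    }

    /* byte-count table entry for the scheduler */
    iwl_pcie_gen2_update_byte_tbl(trans, txq, cmd_len,
                                  num_tbs /* assumption: elided above */);

    /* start the stuck-queue timer if the ring was empty */
    if (txq->read_ptr == txq->write_ptr && txq->wd_timeout)
        mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);

    /* tell the device about the TFD just past this one */
    txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
    iwl_txq_inc_wr_ptr(trans, txq);

    spin_unlock(&txq->lock);
    return 0;
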
803 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_unmap() local
805 spin_lock_bh(&txq->lock); in iwl_txq_gen2_unmap()
806 while (txq->write_ptr != txq->read_ptr) { in iwl_txq_gen2_unmap()
808 txq_id, txq->read_ptr); in iwl_txq_gen2_unmap()
811 int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_gen2_unmap()
812 struct sk_buff *skb = txq->entries[idx].skb; in iwl_txq_gen2_unmap()
817 iwl_txq_gen2_free_tfd(trans, txq); in iwl_txq_gen2_unmap()
818 txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr); in iwl_txq_gen2_unmap()
821 while (!skb_queue_empty(&txq->overflow_q)) { in iwl_txq_gen2_unmap()
822 struct sk_buff *skb = __skb_dequeue(&txq->overflow_q); in iwl_txq_gen2_unmap()
827 spin_unlock_bh(&txq->lock); in iwl_txq_gen2_unmap()
830 iwl_wake_queue(trans, txq); in iwl_txq_gen2_unmap()
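
The iwl_txq_gen2_unmap() fragments show the drain pattern used when a queue is torn down: walk read_ptr up to write_ptr, freeing each TFD and (for data queues) handing the skb back, then flush anything still parked on overflow_q and wake the queue. A sketch; the command-queue check and iwl_op_mode_free_skb() are assumptions, the rest follows the listing:

    spin_lock_bh(&txq->lock);
    while (txq->write_ptr != txq->read_ptr) {
        if (txq_id != trans->txqs.cmd.q_id) {    /* assumption: skbs only on data queues */
            int idx = iwl_txq_get_cmd_index(txq, txq->read_ptr);
            struct sk_buff *skb = txq->entries[idx].skb;

            if (!WARN_ON_ONCE(!skb))
                iwl_op_mode_free_skb(trans->op_mode, skb);
        }
        iwl_txq_gen2_free_tfd(trans, txq);
        txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr);
    }

    while (!skb_queue_empty(&txq->overflow_q)) {
        struct sk_buff *skb = __skb_dequeue(&txq->overflow_q);

        iwl_op_mode_free_skb(trans->op_mode, skb);
    }
    spin_unlock_bh(&txq->lock);

    /* the queue may have been stopped while it was full */
    iwl_wake_queue(trans, txq);
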
834 struct iwl_txq *txq) in iwl_txq_gen2_free_memory() argument
839 if (txq->tfds) { in iwl_txq_gen2_free_memory()
841 trans->txqs.tfd.size * txq->n_window, in iwl_txq_gen2_free_memory()
842 txq->tfds, txq->dma_addr); in iwl_txq_gen2_free_memory()
844 sizeof(*txq->first_tb_bufs) * txq->n_window, in iwl_txq_gen2_free_memory()
845 txq->first_tb_bufs, txq->first_tb_dma); in iwl_txq_gen2_free_memory()
848 kfree(txq->entries); in iwl_txq_gen2_free_memory()
849 if (txq->bc_tbl.addr) in iwl_txq_gen2_free_memory()
851 txq->bc_tbl.addr, txq->bc_tbl.dma); in iwl_txq_gen2_free_memory()
852 kfree(txq); in iwl_txq_gen2_free_memory()
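
iwl_txq_gen2_free_memory() releases resources in the reverse order of allocation: the two coherent DMA regions, the entries array, the byte-count table, then the queue structure itself. A sketch; the dma_pool_free() call is inferred from the dma_pool_alloc() visible in iwl_txq_dyn_alloc_dma() further down:

    struct device *dev = trans->dev;

    if (txq->tfds) {
        dma_free_coherent(dev,
                          trans->txqs.tfd.size * txq->n_window,
                          txq->tfds, txq->dma_addr);
        dma_free_coherent(dev,
                          sizeof(*txq->first_tb_bufs) * txq->n_window,
                          txq->first_tb_bufs, txq->first_tb_dma);
    }

    kfree(txq->entries);

    if (txq->bc_tbl.addr)
        dma_pool_free(trans->txqs.bc_pool, txq->bc_tbl.addr, txq->bc_tbl.dma);

    kfree(txq);
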
865 struct iwl_txq *txq; in iwl_txq_gen2_free() local
872 txq = trans->txqs.txq[txq_id]; in iwl_txq_gen2_free()
874 if (WARN_ON(!txq)) in iwl_txq_gen2_free()
881 for (i = 0; i < txq->n_window; i++) { in iwl_txq_gen2_free()
882 kfree_sensitive(txq->entries[i].cmd); in iwl_txq_gen2_free()
883 kfree_sensitive(txq->entries[i].free_buf); in iwl_txq_gen2_free()
885 del_timer_sync(&txq->stuck_timer); in iwl_txq_gen2_free()
887 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_gen2_free()
889 trans->txqs.txq[txq_id] = NULL; in iwl_txq_gen2_free()
920 int iwl_txq_init(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num, in iwl_txq_init() argument
927 txq->need_update = false; in iwl_txq_init()
937 ret = iwl_queue_init(txq, slots_num); in iwl_txq_init()
941 spin_lock_init(&txq->lock); in iwl_txq_init()
946 lockdep_set_class(&txq->lock, &iwl_txq_cmd_queue_lock_class); in iwl_txq_init()
949 __skb_queue_head_init(&txq->overflow_q); in iwl_txq_init()
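
The iwl_txq_init() fragments give the initialization order: ring indices first via iwl_queue_init(), then the lock, a dedicated lockdep class when the queue is the command queue, and finally the overflow queue head. Sketch, with the cmd_queue condition assumed (the listing shows the lockdep call but not the surrounding if):

    txq->need_update = false;

    ret = iwl_queue_init(txq, slots_num);
    if (ret)
        return ret;

    spin_lock_init(&txq->lock);

    if (cmd_queue) {    /* assumption: condition elided above */
        static struct lock_class_key iwl_txq_cmd_queue_lock_class;

        lockdep_set_class(&txq->lock, &iwl_txq_cmd_queue_lock_class);
    }

    __skb_queue_head_init(&txq->overflow_q);
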
972 void iwl_txq_log_scd_error(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_log_scd_error() argument
974 u32 txq_id = txq->id; in iwl_txq_log_scd_error()
981 txq->read_ptr, txq->write_ptr); in iwl_txq_log_scd_error()
993 jiffies_to_msecs(txq->wd_timeout), in iwl_txq_log_scd_error()
994 txq->read_ptr, txq->write_ptr, in iwl_txq_log_scd_error()
1004 struct iwl_txq *txq = from_timer(txq, t, stuck_timer); in iwl_txq_stuck_timer() local
1005 struct iwl_trans *trans = txq->trans; in iwl_txq_stuck_timer()
1007 spin_lock(&txq->lock); in iwl_txq_stuck_timer()
1009 if (txq->read_ptr == txq->write_ptr) { in iwl_txq_stuck_timer()
1010 spin_unlock(&txq->lock); in iwl_txq_stuck_timer()
1013 spin_unlock(&txq->lock); in iwl_txq_stuck_timer()
1015 iwl_txq_log_scd_error(trans, txq); in iwl_txq_stuck_timer()
1020 int iwl_txq_alloc(struct iwl_trans *trans, struct iwl_txq *txq, int slots_num, in iwl_txq_alloc() argument
1028 if (WARN_ON(txq->entries || txq->tfds)) in iwl_txq_alloc()
1034 timer_setup(&txq->stuck_timer, iwl_txq_stuck_timer, 0); in iwl_txq_alloc()
1035 txq->trans = trans; in iwl_txq_alloc()
1037 txq->n_window = slots_num; in iwl_txq_alloc()
1039 txq->entries = kcalloc(slots_num, in iwl_txq_alloc()
1043 if (!txq->entries) in iwl_txq_alloc()
1048 txq->entries[i].cmd = in iwl_txq_alloc()
1051 if (!txq->entries[i].cmd) in iwl_txq_alloc()
1057 txq->tfds = dma_alloc_coherent(trans->dev, tfd_sz, in iwl_txq_alloc()
1058 &txq->dma_addr, GFP_KERNEL); in iwl_txq_alloc()
1059 if (!txq->tfds) in iwl_txq_alloc()
1062 BUILD_BUG_ON(sizeof(*txq->first_tb_bufs) != IWL_FIRST_TB_SIZE_ALIGN); in iwl_txq_alloc()
1064 tb0_buf_sz = sizeof(*txq->first_tb_bufs) * slots_num; in iwl_txq_alloc()
1066 txq->first_tb_bufs = dma_alloc_coherent(trans->dev, tb0_buf_sz, in iwl_txq_alloc()
1067 &txq->first_tb_dma, in iwl_txq_alloc()
1069 if (!txq->first_tb_bufs) in iwl_txq_alloc()
1074 dma_free_coherent(trans->dev, tfd_sz, txq->tfds, txq->dma_addr); in iwl_txq_alloc()
1076 if (txq->entries && cmd_queue) in iwl_txq_alloc()
1078 kfree(txq->entries[i].cmd); in iwl_txq_alloc()
1079 kfree(txq->entries); in iwl_txq_alloc()
1080 txq->entries = NULL; in iwl_txq_alloc()
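
iwl_txq_alloc() follows the usual allocate-then-unwind pattern: set up the watchdog timer and window size, allocate the entries array (plus per-slot command buffers for the command queue), then the TFD ring and the first-TB bounce buffers from coherent DMA; any failure unwinds whatever was already allocated. Sketch with assumed label names and an assumed size for the per-slot command allocation:

    if (WARN_ON(txq->entries || txq->tfds))
        return -EINVAL;

    timer_setup(&txq->stuck_timer, iwl_txq_stuck_timer, 0);
    txq->trans = trans;
    txq->n_window = slots_num;

    txq->entries = kcalloc(slots_num, sizeof(txq->entries[0]), GFP_KERNEL);
    if (!txq->entries)
        goto error;

    if (cmd_queue)
        for (i = 0; i < slots_num; i++) {
            txq->entries[i].cmd =
                kmalloc(sizeof(struct iwl_device_cmd), GFP_KERNEL);    /* assumption */
            if (!txq->entries[i].cmd)
                goto error;
        }

    txq->tfds = dma_alloc_coherent(trans->dev, tfd_sz,
                                   &txq->dma_addr, GFP_KERNEL);
    if (!txq->tfds)
        goto error;

    BUILD_BUG_ON(sizeof(*txq->first_tb_bufs) != IWL_FIRST_TB_SIZE_ALIGN);

    tb0_buf_sz = sizeof(*txq->first_tb_bufs) * slots_num;

    txq->first_tb_bufs = dma_alloc_coherent(trans->dev, tb0_buf_sz,
                                            &txq->first_tb_dma, GFP_KERNEL);
    if (!txq->first_tb_bufs)
        goto err_free_tfds;

    return 0;

    err_free_tfds:
        dma_free_coherent(trans->dev, tfd_sz, txq->tfds, txq->dma_addr);
    error:
        if (txq->entries && cmd_queue)
            for (i = 0; i < slots_num; i++)
                kfree(txq->entries[i].cmd);
        kfree(txq->entries);
        txq->entries = NULL;
        return -ENOMEM;
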
1090 struct iwl_txq *txq; in iwl_txq_dyn_alloc_dma() local
1101 txq = kzalloc(sizeof(*txq), GFP_KERNEL); in iwl_txq_dyn_alloc_dma()
1102 if (!txq) in iwl_txq_dyn_alloc_dma()
1105 txq->bc_tbl.addr = dma_pool_alloc(trans->txqs.bc_pool, GFP_KERNEL, in iwl_txq_dyn_alloc_dma()
1106 &txq->bc_tbl.dma); in iwl_txq_dyn_alloc_dma()
1107 if (!txq->bc_tbl.addr) { in iwl_txq_dyn_alloc_dma()
1109 kfree(txq); in iwl_txq_dyn_alloc_dma()
1113 ret = iwl_txq_alloc(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
1118 ret = iwl_txq_init(trans, txq, size, false); in iwl_txq_dyn_alloc_dma()
1124 txq->wd_timeout = msecs_to_jiffies(timeout); in iwl_txq_dyn_alloc_dma()
1126 *intxq = txq; in iwl_txq_dyn_alloc_dma()
1130 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc_dma()
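
iwl_txq_dyn_alloc_dma() builds a queue on demand: the byte-count table comes from the shared DMA pool first, then the generic alloc/init pair is run and the watchdog timeout recorded; any failure after the pool allocation funnels into iwl_txq_gen2_free_memory(). Sketch following the fragments above (error labels assumed):

    txq = kzalloc(sizeof(*txq), GFP_KERNEL);
    if (!txq)
        return -ENOMEM;

    txq->bc_tbl.addr = dma_pool_alloc(trans->txqs.bc_pool, GFP_KERNEL,
                                      &txq->bc_tbl.dma);
    if (!txq->bc_tbl.addr) {
        kfree(txq);
        return -ENOMEM;
    }

    ret = iwl_txq_alloc(trans, txq, size, false);
    if (ret)
        goto error;
    ret = iwl_txq_init(trans, txq, size, false);
    if (ret)
        goto error;

    txq->wd_timeout = msecs_to_jiffies(timeout);

    *intxq = txq;
    return 0;

    error:
        iwl_txq_gen2_free_memory(trans, txq);
        return ret;
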
1134 static int iwl_txq_alloc_response(struct iwl_trans *trans, struct iwl_txq *txq, in iwl_txq_alloc_response() argument
1151 if (qid >= ARRAY_SIZE(trans->txqs.txq)) { in iwl_txq_alloc_response()
1163 if (WARN_ONCE(trans->txqs.txq[qid], in iwl_txq_alloc_response()
1169 txq->id = qid; in iwl_txq_alloc_response()
1170 trans->txqs.txq[qid] = txq; in iwl_txq_alloc_response()
1174 txq->read_ptr = wr_ptr; in iwl_txq_alloc_response()
1175 txq->write_ptr = wr_ptr; in iwl_txq_alloc_response()
1184 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_alloc_response()
1191 struct iwl_txq *txq = NULL; in iwl_txq_dyn_alloc() local
1205 ret = iwl_txq_dyn_alloc_dma(trans, &txq, size, timeout); in iwl_txq_dyn_alloc()
1209 cmd.tfdq_addr = cpu_to_le64(txq->dma_addr); in iwl_txq_dyn_alloc()
1210 cmd.byte_cnt_addr = cpu_to_le64(txq->bc_tbl.dma); in iwl_txq_dyn_alloc()
1217 return iwl_txq_alloc_response(trans, txq, &hcmd); in iwl_txq_dyn_alloc()
1220 iwl_txq_gen2_free_memory(trans, txq); in iwl_txq_dyn_alloc()
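
iwl_txq_dyn_alloc() ties the pieces together: allocate the ring, advertise its TFD ring and byte-count table DMA addresses to firmware in the queue-config command, and hand the response to iwl_txq_alloc_response(), freeing everything on error. Sketch; the command setup and the send helper are assumptions, only the two address assignments and the call order come from the listing:

    ret = iwl_txq_dyn_alloc_dma(trans, &txq, size, timeout);
    if (ret)
        return ret;

    /* assumption: cmd is the queue-config payload carried by hcmd */
    cmd.tfdq_addr = cpu_to_le64(txq->dma_addr);
    cmd.byte_cnt_addr = cpu_to_le64(txq->bc_tbl.dma);

    ret = iwl_trans_send_cmd(trans, &hcmd);    /* assumption: send helper not shown above */
    if (ret)
        goto error;

    return iwl_txq_alloc_response(trans, txq, &hcmd);

    error:
        iwl_txq_gen2_free_memory(trans, txq);
        return ret;
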
1254 for (i = 0; i < ARRAY_SIZE(trans->txqs.txq); i++) { in iwl_txq_gen2_tx_free()
1255 if (!trans->txqs.txq[i]) in iwl_txq_gen2_tx_free()
1268 if (!trans->txqs.txq[txq_id]) { in iwl_txq_gen2_init()
1274 trans->txqs.txq[txq_id] = queue; in iwl_txq_gen2_init()
1281 queue = trans->txqs.txq[txq_id]; in iwl_txq_gen2_init()
1290 trans->txqs.txq[txq_id]->id = txq_id; in iwl_txq_gen2_init()
1334 struct iwl_txq *txq, int index) in iwl_txq_gen1_tfd_unmap() argument
1337 void *tfd = iwl_txq_get_tfd(trans, txq, index); in iwl_txq_gen1_tfd_unmap()
1387 struct iwl_txq *txq, u16 byte_cnt, in iwl_txq_gen1_update_byte_cnt_tbl() argument
1391 int write_ptr = txq->write_ptr; in iwl_txq_gen1_update_byte_cnt_tbl()
1392 int txq_id = txq->id; in iwl_txq_gen1_update_byte_cnt_tbl()
1396 struct iwl_device_tx_cmd *dev_cmd = txq->entries[txq->write_ptr].cmd; in iwl_txq_gen1_update_byte_cnt_tbl()
1431 struct iwl_txq *txq) in iwl_txq_gen1_inval_byte_cnt_tbl() argument
1434 int txq_id = txq->id; in iwl_txq_gen1_inval_byte_cnt_tbl()
1435 int read_ptr = txq->read_ptr; in iwl_txq_gen1_inval_byte_cnt_tbl()
1438 struct iwl_device_tx_cmd *dev_cmd = txq->entries[read_ptr].cmd; in iwl_txq_gen1_inval_byte_cnt_tbl()
1464 void iwl_txq_free_tfd(struct iwl_trans *trans, struct iwl_txq *txq) in iwl_txq_free_tfd() argument
1469 int rd_ptr = txq->read_ptr; in iwl_txq_free_tfd()
1470 int idx = iwl_txq_get_cmd_index(txq, rd_ptr); in iwl_txq_free_tfd()
1473 lockdep_assert_held(&txq->lock); in iwl_txq_free_tfd()
1475 if (!txq->entries) in iwl_txq_free_tfd()
1481 iwl_txq_gen1_tfd_unmap(trans, &txq->entries[idx].meta, txq, rd_ptr); in iwl_txq_free_tfd()
1484 skb = txq->entries[idx].skb; in iwl_txq_free_tfd()
1492 txq->entries[idx].skb = NULL; in iwl_txq_free_tfd()
1496 void iwl_txq_progress(struct iwl_txq *txq) in iwl_txq_progress() argument
1498 lockdep_assert_held(&txq->lock); in iwl_txq_progress()
1500 if (!txq->wd_timeout) in iwl_txq_progress()
1507 if (txq->frozen) in iwl_txq_progress()
1514 if (txq->read_ptr == txq->write_ptr) in iwl_txq_progress()
1515 del_timer(&txq->stuck_timer); in iwl_txq_progress()
1517 mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout); in iwl_txq_progress()
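
iwl_txq_progress() is the watchdog bookkeeping run on every bit of progress: nothing to do without a timeout or while the queue is frozen; otherwise the stuck timer is cancelled once the queue drains and re-armed while work remains. Reassembled from the fragments above:

    lockdep_assert_held(&txq->lock);

    if (!txq->wd_timeout)
        return;

    /* frozen queues have their timer handled by the freeze path below */
    if (txq->frozen)
        return;

    if (txq->read_ptr == txq->write_ptr)
        del_timer(&txq->stuck_timer);
    else
        mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);
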
1524 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_reclaim() local
1525 int tfd_num = iwl_txq_get_cmd_index(txq, ssn); in iwl_txq_reclaim()
1526 int read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr); in iwl_txq_reclaim()
1533 spin_lock_bh(&txq->lock); in iwl_txq_reclaim()
1545 txq_id, txq->read_ptr, tfd_num, ssn); in iwl_txq_reclaim()
1551 if (!iwl_txq_used(txq, last_to_free)) { in iwl_txq_reclaim()
1556 txq->write_ptr, txq->read_ptr); in iwl_txq_reclaim()
1569 txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr), in iwl_txq_reclaim()
1570 read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr)) { in iwl_txq_reclaim()
1571 struct sk_buff *skb = txq->entries[read_ptr].skb; in iwl_txq_reclaim()
1580 txq->entries[read_ptr].skb = NULL; in iwl_txq_reclaim()
1583 iwl_txq_gen1_inval_byte_cnt_tbl(trans, txq); in iwl_txq_reclaim()
1585 iwl_txq_free_tfd(trans, txq); in iwl_txq_reclaim()
1588 iwl_txq_progress(txq); in iwl_txq_reclaim()
1590 if (iwl_txq_space(trans, txq) > txq->low_mark && in iwl_txq_reclaim()
1595 skb_queue_splice_init(&txq->overflow_q, &overflow_skbs); in iwl_txq_reclaim()
1604 txq->overflow_tx = true; in iwl_txq_reclaim()
1613 spin_unlock_bh(&txq->lock); in iwl_txq_reclaim()
1630 if (iwl_txq_space(trans, txq) > txq->low_mark) in iwl_txq_reclaim()
1631 iwl_wake_queue(trans, txq); in iwl_txq_reclaim()
1633 spin_lock_bh(&txq->lock); in iwl_txq_reclaim()
1634 txq->overflow_tx = false; in iwl_txq_reclaim()
1638 spin_unlock_bh(&txq->lock); in iwl_txq_reclaim()
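
The iwl_txq_reclaim() fragments show the completion path: validate that the reported SSN falls inside the used window, walk read_ptr forward freeing each skb and TFD (clearing the gen1 byte-count entry along the way), update the watchdog via iwl_txq_progress(), and, once space climbs back above the low mark, re-transmit the overflow queue outside the lock before waking the queue. A condensed sketch of the central loop; the skb destination list is an assumption:

    for (;
         read_ptr != tfd_num;
         txq->read_ptr = iwl_txq_inc_wrap(trans, txq->read_ptr),
         read_ptr = iwl_txq_get_cmd_index(txq, txq->read_ptr)) {
        struct sk_buff *skb = txq->entries[read_ptr].skb;

        if (WARN_ON_ONCE(!skb))
            continue;

        __skb_queue_tail(skbs, skb);    /* assumption: caller-provided completion list */
        txq->entries[read_ptr].skb = NULL;

        /* the listing shows this call; any guarding condition is not visible */
        iwl_txq_gen1_inval_byte_cnt_tbl(trans, txq);

        iwl_txq_free_tfd(trans, txq);
    }

    iwl_txq_progress(txq);

    if (iwl_txq_space(trans, txq) > txq->low_mark) {
        /* splice overflow_q, set txq->overflow_tx, re-TX outside the
         * lock, then iwl_wake_queue() and clear overflow_tx (see the
         * fragments above) */
    }
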
1644 struct iwl_txq *txq = trans->txqs.txq[txq_id]; in iwl_txq_set_q_ptrs() local
1646 spin_lock_bh(&txq->lock); in iwl_txq_set_q_ptrs()
1648 txq->write_ptr = ptr; in iwl_txq_set_q_ptrs()
1649 txq->read_ptr = txq->write_ptr; in iwl_txq_set_q_ptrs()
1651 spin_unlock_bh(&txq->lock); in iwl_txq_set_q_ptrs()
1660 struct iwl_txq *txq = trans->txqs.txq[queue]; in iwl_trans_txq_freeze_timer() local
1663 spin_lock_bh(&txq->lock); in iwl_trans_txq_freeze_timer()
1667 if (txq->frozen == freeze) in iwl_trans_txq_freeze_timer()
1673 txq->frozen = freeze; in iwl_trans_txq_freeze_timer()
1675 if (txq->read_ptr == txq->write_ptr) in iwl_trans_txq_freeze_timer()
1680 txq->stuck_timer.expires))) { in iwl_trans_txq_freeze_timer()
1688 txq->frozen_expiry_remainder = in iwl_trans_txq_freeze_timer()
1689 txq->stuck_timer.expires - now; in iwl_trans_txq_freeze_timer()
1690 del_timer(&txq->stuck_timer); in iwl_trans_txq_freeze_timer()
1698 mod_timer(&txq->stuck_timer, in iwl_trans_txq_freeze_timer()
1699 now + txq->frozen_expiry_remainder); in iwl_trans_txq_freeze_timer()
1702 spin_unlock_bh(&txq->lock); in iwl_trans_txq_freeze_timer()
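
iwl_trans_txq_freeze_timer() saves and restores the watchdog across a freeze: when freezing a non-empty queue the remaining expiry time is stashed in frozen_expiry_remainder and the timer stopped; on unfreeze it is re-armed with that remainder. Sketch; the already-expired case and the label name are assumptions, and `now` is assumed to hold a jiffies snapshot as in the fragments above:

    spin_lock_bh(&txq->lock);

    if (txq->frozen == freeze)
        goto next_queue;    /* assumption: nothing to change */

    txq->frozen = freeze;

    if (txq->read_ptr == txq->write_ptr)
        goto next_queue;    /* empty queue, no timer in flight */

    if (freeze) {
        if (unlikely(time_after(now, txq->stuck_timer.expires)))
            goto next_queue;    /* assumption: already expired, handled elsewhere */
        txq->frozen_expiry_remainder = txq->stuck_timer.expires - now;
        del_timer(&txq->stuck_timer);
    } else {
        mod_timer(&txq->stuck_timer,
                  now + txq->frozen_expiry_remainder);
    }

    next_queue:
        spin_unlock_bh(&txq->lock);
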
1712 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id]; in iwl_trans_txq_send_hcmd_sync() local
1743 txq->read_ptr, txq->write_ptr); in iwl_trans_txq_send_hcmd_sync()
1784 txq->entries[cmd_idx].meta.flags &= ~CMD_WANT_SKB; in iwl_trans_txq_send_hcmd_sync()