/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/

gm20b.c
  183  nvkm_falcon_cmdq_init(pmu->hpq, msg.queue_info[0].index,   in gm20b_pmu_initmsg()
  184  msg.queue_info[0].offset,   in gm20b_pmu_initmsg()
  185  msg.queue_info[0].size);   in gm20b_pmu_initmsg()
  186  nvkm_falcon_cmdq_init(pmu->lpq, msg.queue_info[1].index,   in gm20b_pmu_initmsg()
  187  msg.queue_info[1].offset,   in gm20b_pmu_initmsg()
  188  msg.queue_info[1].size);   in gm20b_pmu_initmsg()
  189  nvkm_falcon_msgq_init(pmu->msgq, msg.queue_info[4].index,   in gm20b_pmu_initmsg()
  190  msg.queue_info[4].offset,   in gm20b_pmu_initmsg()
  191  msg.queue_info[4].size);   in gm20b_pmu_initmsg()
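The gm20b hits read fixed slots of the PMU INIT message's queue_info[] array (index, offset, size per queue) and feed them to the falcon command/message queue setup. A minimal standalone sketch of that pattern; the struct layout, slot numbering, and the cmdq_init()/msgq_init() helpers here are illustrative stand-ins, not the real nvfw/nvkm definitions:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the firmware INIT message layout: each
 * descriptor says where a queue lives in the falcon's DMEM and which
 * hardware index it uses. */
struct queue_desc {
	uint32_t index;
	uint32_t offset;
	uint32_t size;
};

struct init_msg {
	struct queue_desc queue_info[5];	/* HPQ, LPQ, ..., MSGQ */
};

/* Illustrative setup helpers; the real driver calls
 * nvkm_falcon_cmdq_init()/nvkm_falcon_msgq_init() here. */
static void cmdq_init(const char *name, const struct queue_desc *q)
{
	printf("%s: index=%u offset=0x%x size=%u\n", name,
	       (unsigned)q->index, (unsigned)q->offset, (unsigned)q->size);
}

static void msgq_init(const char *name, const struct queue_desc *q)
{
	cmdq_init(name, q);
}

int main(void)
{
	/* Pretend this arrived from the PMU firmware. */
	struct init_msg msg = {
		.queue_info = {
			[0] = { .index = 0, .offset = 0x1000, .size = 0x100 },
			[1] = { .index = 1, .offset = 0x1100, .size = 0x100 },
			[4] = { .index = 4, .offset = 0x1800, .size = 0x100 },
		},
	};

	/* Fixed slots: 0 = high-priority cmdq, 1 = low-priority cmdq,
	 * 4 = message queue, mirroring the gm20b usage above. */
	cmdq_init("hpq",  &msg.queue_info[0]);
	cmdq_init("lpq",  &msg.queue_info[1]);
	msgq_init("msgq", &msg.queue_info[4]);
	return 0;
}
```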
/linux/drivers/net/wireless/intel/iwlwifi/mvm/

sta.c
  348  mvm->queue_info[queue].tid_bitmap);   in iwl_mvm_disable_txq()
  358  WARN(mvm->queue_info[queue].tid_bitmap,   in iwl_mvm_disable_txq()
  363  mvm->queue_info[queue].tid_bitmap = 0;   in iwl_mvm_disable_txq()
  373  mvm->queue_info[queue].reserved = false;   in iwl_mvm_disable_txq()
  501  tid = mvm->queue_info[queue].txq_tid;   in iwl_mvm_free_inactive_queue()
  664  mvm->queue_info[queue].txq_tid = tid;   in iwl_mvm_redirect_queue()
  798  if (mvm->queue_info[queue].tid_bitmap)   in iwl_mvm_update_txq_mapping()
  806  mvm->queue_info[queue].mac80211_ac =   in iwl_mvm_update_txq_mapping()
  811  mvm->queue_info[queue].txq_tid = tid;   in iwl_mvm_update_txq_mapping()
  895  mvm->queue_info[queue].txq_tid = tid;   in iwl_mvm_change_queue_tid()
  [all …]
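All of the sta.c hits manipulate per-hardware-queue bookkeeping (tid_bitmap, txq_tid, reserved) held in mvm->queue_info[]. A self-contained sketch of that bookkeeping pattern, using a hypothetical txq_info struct in place of iwl_mvm_dqa_txq_info:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_HW_QUEUES 32

/* Hypothetical per-queue state, loosely modelled on the fields the
 * hits above touch: which TIDs are mapped here, the primary TID, and
 * whether the queue is reserved for a station. */
struct txq_info {
	uint16_t tid_bitmap;
	uint8_t txq_tid;
	bool reserved;
};

static struct txq_info queue_info[MAX_HW_QUEUES];

/* Map a TID onto a queue: set its bit and remember it as primary. */
static void map_tid(int queue, int tid)
{
	queue_info[queue].tid_bitmap |= 1u << tid;
	queue_info[queue].txq_tid = (uint8_t)tid;
}

/* Disable a queue: it should have no TIDs left, then clear the slot. */
static void disable_queue(int queue)
{
	if (queue_info[queue].tid_bitmap)
		fprintf(stderr, "queue %d still has TIDs mapped!\n", queue);
	queue_info[queue].tid_bitmap = 0;
	queue_info[queue].reserved = false;
}

int main(void)
{
	map_tid(5, 3);
	queue_info[5].tid_bitmap &= ~(1u << 3);	/* unmap before disabling */
	disable_queue(5);
	return 0;
}
```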
tx.c
  958   unsigned long queue_tid_bitmap = mvm->queue_info[txq_id].tid_bitmap;   in iwl_mvm_txq_should_update()
  966   if (time_before(mvm->queue_info[txq_id].last_frame_time[tid] +   in iwl_mvm_txq_should_update()
  1107  mvm->queue_info[txq_id].last_frame_time[tid] = jiffies;   in iwl_mvm_tx_mpdu()
  1119  if (unlikely(mvm->queue_info[txq_id].status ==   in iwl_mvm_tx_mpdu()
  2143  struct iwl_flush_queue_info *queue_info = &rsp->queues[i];   in iwl_mvm_flush_sta_tids()  (local)
  2144  int tid = le16_to_cpu(queue_info->tid);   in iwl_mvm_flush_sta_tids()
  2145  int read_before = le16_to_cpu(queue_info->read_before_flush);   in iwl_mvm_flush_sta_tids()
  2146  int read_after = le16_to_cpu(queue_info->read_after_flush);   in iwl_mvm_flush_sta_tids()
  2147  int queue_num = le16_to_cpu(queue_info->queue_num);   in iwl_mvm_flush_sta_tids()
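The tx.c hits stamp last_frame_time[tid] with jiffies on transmit and later decide whether a TID on a queue has gone idle with a time_before() comparison. A standalone sketch of that inactivity check, using a plain 32-bit tick counter and a hypothetical INACTIVITY_TICKS timeout in place of jiffies and the driver's real timeout:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Wrap-safe "a is before b" comparison in the style of the kernel's
 * time_before(), here on a plain 32-bit tick counter. */
static bool tick_before(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) < 0;
}

#define MAX_TIDS 8
#define INACTIVITY_TICKS 1000	/* hypothetical timeout */

struct txq_info {
	uint32_t last_frame_time[MAX_TIDS];
};

/* A TID on this queue is considered idle if its last frame was sent
 * more than INACTIVITY_TICKS ago, mirroring the check in
 * iwl_mvm_txq_should_update() above. */
static bool tid_inactive(const struct txq_info *q, int tid, uint32_t now)
{
	return tick_before(q->last_frame_time[tid] + INACTIVITY_TICKS, now);
}

int main(void)
{
	struct txq_info q = { { 0 } };
	uint32_t now = 5000;

	q.last_frame_time[2] = 4500;	/* recently used */
	q.last_frame_time[3] = 1000;	/* long idle */

	printf("tid 2 inactive: %d\n", tid_inactive(&q, 2, now));
	printf("tid 3 inactive: %d\n", tid_inactive(&q, 3, now));
	return 0;
}
```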
fw.c
  361  memset(&mvm->queue_info, 0, sizeof(mvm->queue_info));   in iwl_mvm_load_ucode_wait_alive()
  368  mvm->queue_info[IWL_MVM_DQA_CMD_QUEUE].tid_bitmap =   in iwl_mvm_load_ucode_wait_alive()
ops.c
  1445  mvm->queue_info[hw_queue].ra_sta_id;   in iwl_mvm_queue_state_change()
  1471  tid_bitmap = mvm->queue_info[hw_queue].tid_bitmap;   in iwl_mvm_queue_state_change()
utils.c
  263  if (WARN(mvm->queue_info[queue].tid_bitmap == 0,   in iwl_mvm_reconfig_scd()
mvm.h
  826  struct iwl_mvm_dqa_txq_info queue_info[IWL_MAX_HW_QUEUES];   (member)
/linux/drivers/gpu/drm/nouveau/nvkm/engine/sec2/

gp102.c
  146  for (i = 0; i < ARRAY_SIZE(msg.queue_info); i++) {   in gp102_sec2_initmsg()
  147  if (msg.queue_info[i].id == NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ) {   in gp102_sec2_initmsg()
  149  msg.queue_info[i].index,   in gp102_sec2_initmsg()
  150  msg.queue_info[i].offset,   in gp102_sec2_initmsg()
  151  msg.queue_info[i].size);   in gp102_sec2_initmsg()
  154  msg.queue_info[i].index,   in gp102_sec2_initmsg()
  155  msg.queue_info[i].offset,   in gp102_sec2_initmsg()
  156  msg.queue_info[i].size);   in gp102_sec2_initmsg()
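gp102_sec2_initmsg() walks every descriptor in the INIT message's queue_info[] array and routes each one to either the message queue or a command queue based on its id field. A minimal sketch of that dispatch loop; the ids, types, and setup_queue() helper are hypothetical:

```c
#include <stdint.h>
#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Hypothetical queue ids; the real driver compares against
 * NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ. */
enum { QUEUE_ID_CMDQ = 0, QUEUE_ID_MSGQ = 1 };

struct queue_desc {
	uint32_t id;
	uint32_t index;
	uint32_t offset;
	uint32_t size;
};

struct init_msg {
	struct queue_desc queue_info[2];
};

static void setup_queue(const char *kind, const struct queue_desc *q)
{
	printf("%s: index=%u offset=0x%x size=%u\n", kind,
	       (unsigned)q->index, (unsigned)q->offset, (unsigned)q->size);
}

int main(void)
{
	struct init_msg msg = {
		.queue_info = {
			{ .id = QUEUE_ID_CMDQ, .index = 0, .offset = 0x800, .size = 0x80 },
			{ .id = QUEUE_ID_MSGQ, .index = 1, .offset = 0x880, .size = 0x80 },
		},
	};
	size_t i;

	/* Walk every descriptor and route it to the message queue or a
	 * command queue based on its id, as gp102_sec2_initmsg() does. */
	for (i = 0; i < ARRAY_SIZE(msg.queue_info); i++) {
		if (msg.queue_info[i].id == QUEUE_ID_MSGQ)
			setup_queue("msgq", &msg.queue_info[i]);
		else
			setup_queue("cmdq", &msg.queue_info[i]);
	}
	return 0;
}
```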
/linux/drivers/net/ethernet/huawei/hinic/

hinic_tx.c
  274  static int offload_tso(struct hinic_sq_task *task, u32 *queue_info,   in offload_tso()  (argument)
  348  hinic_set_tso_inner_l4(task, queue_info, l4_offload, l4_len, offset,   in offload_tso()
  354  static int offload_csum(struct hinic_sq_task *task, u32 *queue_info,   in offload_csum()  (argument)
  446  *queue_info |= HINIC_SQ_CTRL_SET(vlan_pri, QUEUE_INFO_PRI);   in offload_vlan()
  450  u32 *queue_info)   in hinic_tx_offload()  (argument)
  456  enabled = offload_tso(task, queue_info, skb);   in hinic_tx_offload()
  460  enabled = offload_csum(task, queue_info, skb);   in hinic_tx_offload()
  469  offload_vlan(task, queue_info, vlan_tag,   in hinic_tx_offload()
  478  if (HINIC_SQ_CTRL_GET(*queue_info, QUEUE_INFO_PLDOFF) >   in hinic_tx_offload()
  485  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MSS);   in hinic_tx_offload()
  [all …]
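In hinic_tx.c, the offload helpers all accumulate their hints into the same 32-bit queue_info word: TSO is attempted first, plain checksum offload only when the packet is not GSO, and VLAN tagging independently of both. A simplified, self-contained sketch of that dispatch order with made-up flag bits (QI_TSO, QI_CSUM, QI_VLAN) rather than the real HINIC fields:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical packet summary; the real driver inspects the skb. */
struct pkt {
	bool gso;	/* wants TSO */
	bool csum;	/* wants checksum offload */
	bool vlan;	/* carries a VLAN tag */
};

#define QI_TSO  (1u << 0)
#define QI_CSUM (1u << 1)
#define QI_VLAN (1u << 2)

/* Mirror the hinic_tx_offload() flow: try TSO first; only if the
 * packet is not GSO fall back to checksum offload; VLAN is handled
 * independently of both. */
static uint32_t build_queue_info(const struct pkt *p)
{
	uint32_t queue_info = 0;

	if (p->gso)
		queue_info |= QI_TSO;
	else if (p->csum)
		queue_info |= QI_CSUM;

	if (p->vlan)
		queue_info |= QI_VLAN;

	return queue_info;
}

int main(void)
{
	struct pkt p = { .gso = true, .vlan = true };

	printf("queue_info = 0x%x\n", (unsigned)build_queue_info(&p));
	return 0;
}
```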
hinic_hw_qp.c
  490  ctrl->queue_info = HINIC_SQ_CTRL_SET(HINIC_MSS_DEFAULT,   in sq_prepare_ctrl()
  537  void hinic_set_cs_inner_l4(struct hinic_sq_task *task, u32 *queue_info,   in hinic_set_cs_inner_l4()  (argument)
  553  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |   in hinic_set_cs_inner_l4()
  557  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MSS);   in hinic_set_cs_inner_l4()
  558  *queue_info |= HINIC_SQ_CTRL_SET(mss, QUEUE_INFO_MSS);   in hinic_set_cs_inner_l4()
  561  void hinic_set_tso_inner_l4(struct hinic_sq_task *task, u32 *queue_info,   in hinic_set_tso_inner_l4()  (argument)
  578  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |   in hinic_set_tso_inner_l4()
  584  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MSS);   in hinic_set_tso_inner_l4()
  585  *queue_info |= HINIC_SQ_CTRL_SET(mss, QUEUE_INFO_MSS);   in hinic_set_tso_inner_l4()
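The hinic_hw_qp.c hits show the shift-and-mask style of the HINIC_SQ_CTRL_SET/CLEAR macros, including the clear-then-set sequence used to update the MSS field of queue_info. A standalone sketch of that bit-field packing; the field widths and offsets here are invented, not the real hinic_hw_wqe.h layout:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical field layout inside the 32-bit queue_info word; the
 * real offsets and masks live in hinic_hw_wqe.h. */
#define QI_PLDOFF_SHIFT 0
#define QI_PLDOFF_MASK  0xffu
#define QI_MSS_SHIFT    16
#define QI_MSS_MASK     0x3fffu

/* Shift-and-mask helpers in the spirit of HINIC_SQ_CTRL_SET/CLEAR. */
#define QI_SET(val, field) \
	(((uint32_t)(val) & QI_##field##_MASK) << QI_##field##_SHIFT)
#define QI_CLEAR(word, field) \
	((word) & ~(QI_##field##_MASK << QI_##field##_SHIFT))

int main(void)
{
	uint32_t queue_info = 0;

	/* Pack the payload offset, then update the MSS field with the
	 * clear-then-set sequence used by hinic_set_tso_inner_l4(). */
	queue_info |= QI_SET(0x2a, PLDOFF);
	queue_info = QI_CLEAR(queue_info, MSS);
	queue_info |= QI_SET(1460, MSS);

	printf("queue_info = 0x%08x\n", (unsigned)queue_info);
	return 0;
}
```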
hinic_hw_qp.h
  168  u32 *queue_info,
  173  u32 *queue_info,
hinic_hw_wqe.h
  390  u32 queue_info;   (member)
/linux/drivers/net/wireless/ath/ath5k/

qcu.c
  139  struct ath5k_txq_info *queue_info)   in ath5k_hw_get_tx_queueprops()  (argument)
  141  memcpy(queue_info, &ah->ah_txq[queue], sizeof(struct ath5k_txq_info));   in ath5k_hw_get_tx_queueprops()
  204  struct ath5k_txq_info *queue_info)   in ath5k_hw_setup_tx_queue()  (argument)
  228  queue = queue_info->tqi_subtype;   in ath5k_hw_setup_tx_queue()
  250  if (queue_info != NULL) {   in ath5k_hw_setup_tx_queue()
  251  queue_info->tqi_type = queue_type;   in ath5k_hw_setup_tx_queue()
  252  ret = ath5k_hw_set_tx_queueprops(ah, queue, queue_info);   in ath5k_hw_setup_tx_queue()
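ath5k keeps a cached ath5k_txq_info per TX queue and exposes get/set accessors that memcpy() the whole property block in and out. A small sketch of that copy-in/copy-out accessor pattern, with a hypothetical txq_props struct standing in for ath5k_txq_info:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NUM_TX_QUEUES 10

/* Hypothetical queue-property block, in place of ath5k_txq_info. */
struct txq_props {
	int type;
	int subtype;
	uint32_t cw_min;
	uint32_t cw_max;
};

/* Driver-private cache of every queue's properties. */
static struct txq_props hw_txq[NUM_TX_QUEUES];

/* get: copy the cached properties out to the caller, as
 * ath5k_hw_get_tx_queueprops() does with memcpy(). */
static int get_tx_queueprops(unsigned int queue, struct txq_props *out)
{
	if (queue >= NUM_TX_QUEUES)
		return -1;
	memcpy(out, &hw_txq[queue], sizeof(*out));
	return 0;
}

/* set: copy the caller's properties into the cache. */
static int set_tx_queueprops(unsigned int queue, const struct txq_props *in)
{
	if (queue >= NUM_TX_QUEUES)
		return -1;
	memcpy(&hw_txq[queue], in, sizeof(*in));
	return 0;
}

int main(void)
{
	struct txq_props props = { .type = 1, .cw_min = 15, .cw_max = 1023 };
	struct txq_props check;

	set_tx_queueprops(2, &props);
	get_tx_queueprops(2, &check);
	printf("queue 2: cw_min=%u cw_max=%u\n",
	       (unsigned)check.cw_min, (unsigned)check.cw_max);
	return 0;
}
```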
ath5k.h
  1563  struct ath5k_txq_info *queue_info);
  1565  const struct ath5k_txq_info *queue_info);
  1568  struct ath5k_txq_info *queue_info);
/linux/drivers/gpu/drm/nouveau/include/nvfw/

sec2.h
  30  } queue_info[2];   (member)
pmu.h
  40  } queue_info[5];   (member)
/linux/sound/core/seq/

seq_clientmgr.c
  2137  struct snd_seq_queue_info queue_info;   in snd_seq_ioctl()  (member)
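In seq_clientmgr.c, queue_info is one member of a local union inside snd_seq_ioctl() that can hold any ioctl argument type; the handler copies the user buffer into that union once and then dispatches on the command number. A rough, self-contained sketch of that pattern with invented argument structs and command ids:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical argument structs; the real code uses the snd_seq_*
 * uapi types, snd_seq_queue_info among them. */
struct queue_info_arg  { int queue; char name[32]; };
struct client_info_arg { int client; };

/* One union big enough for any ioctl argument, mirroring the local
 * union inside snd_seq_ioctl() that queue_info belongs to. */
union ioctl_arg {
	struct queue_info_arg  queue_info;
	struct client_info_arg client_info;
};

enum cmd { CMD_CREATE_QUEUE, CMD_GET_CLIENT };

/* Decode the buffer once into the union, then hand the right member
 * to the handler selected by the command number. */
static int seq_ioctl(enum cmd cmd, const void *buf, size_t len)
{
	union ioctl_arg arg;

	if (len > sizeof(arg))
		return -1;
	memset(&arg, 0, sizeof(arg));
	memcpy(&arg, buf, len);

	switch (cmd) {
	case CMD_CREATE_QUEUE:
		printf("create queue '%s'\n", arg.queue_info.name);
		return 0;
	case CMD_GET_CLIENT:
		printf("client %d\n", arg.client_info.client);
		return 0;
	}
	return -1;
}

int main(void)
{
	struct queue_info_arg q = { .queue = 0, .name = "demo" };

	return seq_ioctl(CMD_CREATE_QUEUE, &q, sizeof(q));
}
```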