// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/*
 * Copyright (C) 2018-2020, 2023 Intel Corporation
 */

#include "iwl-debug.h"
#include "iwl-csr.h"
#include "iwl-io.h"
/*
 * iwl_pcie_gen2_enqueue_hcmd - enqueue a uCode command
 *
 * Returns the index (>= 0) of the enqueued command on success,
 * or a negative error code on failure.
 */
static int iwl_pcie_gen2_enqueue_hcmd(struct iwl_trans *trans,
				      struct iwl_host_cmd *cmd)
{
	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
	struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];
	struct iwl_device_cmd *out_cmd;
	struct iwl_cmd_meta *out_meta;
	void *dup_buf = NULL;
	dma_addr_t phys_addr;
	int i, cmd_pos, idx;
	u16 copy_size, cmd_size, tb0_size;
	bool had_nocopy = false;
	u8 group_id = iwl_cmd_groupid(cmd->id);
	const u8 *cmddata[IWL_MAX_CMD_TBS_PER_TFD];
	u16 cmdlen[IWL_MAX_CMD_TBS_PER_TFD];
	struct iwl_tfh_tfd *tfd;
	unsigned long flags;

	if (WARN_ON(cmd->flags & CMD_BLOCK_TXQS))
		return -EINVAL;

	copy_size = sizeof(struct iwl_cmd_header_wide);
	cmd_size = sizeof(struct iwl_cmd_header_wide);
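
	/*
	 * First pass over the fragments: work out how much data is copied
	 * inline into the command buffer (copy_size) versus mapped as
	 * separate TBs, and compute the total command size (cmd_size).
	 */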
	for (i = 0; i < IWL_MAX_CMD_TBS_PER_TFD; i++) {
		cmddata[i] = cmd->data[i];
		cmdlen[i] = cmd->len[i];

		if (!cmd->len[i])
			continue;

		/* need at least IWL_FIRST_TB_SIZE copied */
		if (copy_size < IWL_FIRST_TB_SIZE) {
			int copy = IWL_FIRST_TB_SIZE - copy_size;

			if (copy > cmdlen[i])
				copy = cmdlen[i];
			cmdlen[i] -= copy;
			cmddata[i] += copy;
			copy_size += copy;
		}

		if (cmd->dataflags[i] & IWL_HCMD_DFL_NOCOPY) {
			had_nocopy = true;
			if (WARN_ON(cmd->dataflags[i] & IWL_HCMD_DFL_DUP)) {
				idx = -EINVAL;
				goto free_dup_buf;
			}
		} else if (cmd->dataflags[i] & IWL_HCMD_DFL_DUP) {
			/* a DUP chunk is not copied inline either */
			had_nocopy = true;

			/* only allowed once */
			if (WARN_ON(dup_buf)) {
				idx = -EINVAL;
				goto free_dup_buf;
			}

			dup_buf = kmemdup(cmddata[i], cmdlen[i],
					  GFP_ATOMIC);
			if (!dup_buf)
				return -ENOMEM;
		} else {
			/* NOCOPY must not be followed by normal! */
			if (WARN_ON(had_nocopy)) {
				idx = -EINVAL;
				goto free_dup_buf;
			}
			copy_size += cmdlen[i];
		}
		cmd_size += cmd->len[i];
	}
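
	/*
	 * If any command ends up larger than TFD_MAX_PAYLOAD_SIZE without
	 * being mapped as a separate fragment, the static buffers would
	 * have to grow, so warn and fail here.
	 */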
	if (WARN(copy_size > TFD_MAX_PAYLOAD_SIZE,
		 "Command %s (%#x) is too large (%d bytes)\n",
		 iwl_get_cmd_string(trans, cmd->id), cmd->id, copy_size)) {
		idx = -EINVAL;
		goto free_dup_buf;
	}

	spin_lock_irqsave(&txq->lock, flags);

	idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);
	tfd = iwl_txq_get_tfd(trans, txq, txq->write_ptr);
	memset(tfd, 0, sizeof(*tfd));
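
	/*
	 * Note: an async command requires two free slots while a sync
	 * command needs only one; presumably this keeps headroom so the
	 * queue cannot be exhausted by async commands alone.
	 */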
	if (iwl_txq_space(trans, txq) < ((cmd->flags & CMD_ASYNC) ? 2 : 1)) {
		spin_unlock_irqrestore(&txq->lock, flags);

		IWL_ERR(trans, "No space in command queue\n");
		iwl_op_mode_cmd_queue_full(trans->op_mode);
		idx = -ENOSPC;
		goto free_dup_buf;
	}

	out_cmd = txq->entries[idx].cmd;
	out_meta = &txq->entries[idx].meta;

	/* re-initialize to NULL */
	memset(out_meta, 0, sizeof(*out_meta));
	if (cmd->flags & CMD_WANT_SKB)
		out_meta->source = cmd;
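
	/* set up the wide command header */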
	out_cmd->hdr_wide.cmd = iwl_cmd_opcode(cmd->id);
	out_cmd->hdr_wide.group_id = group_id;
	out_cmd->hdr_wide.version = iwl_cmd_version(cmd->id);
	out_cmd->hdr_wide.length =
		cpu_to_le16(cmd_size - sizeof(struct iwl_cmd_header_wide));
	out_cmd->hdr_wide.reserved = 0;
	out_cmd->hdr_wide.sequence =
		cpu_to_le16(QUEUE_TO_SEQ(trans->txqs.cmd.q_id) |
			    INDEX_TO_SEQ(txq->write_ptr));

	cmd_pos = sizeof(struct iwl_cmd_header_wide);
	copy_size = sizeof(struct iwl_cmd_header_wide);
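
	/* second pass: copy the chunks that are copied inline */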
	for (i = 0; i < IWL_MAX_CMD_TBS_PER_TFD; i++) {
		int copy;

		if (!cmd->len[i])
			continue;

		/* copy everything if not nocopy/dup */
		if (!(cmd->dataflags[i] & (IWL_HCMD_DFL_NOCOPY |
					   IWL_HCMD_DFL_DUP))) {
			copy = cmd->len[i];

			memcpy((u8 *)out_cmd + cmd_pos, cmd->data[i], copy);
			cmd_pos += copy;
			copy_size += copy;
			continue;
		}

		/*
		 * Otherwise we need at least IWL_FIRST_TB_SIZE copied
		 * in total (for bi-directional DMA), but copy up to what
		 * we can fit into the payload for debug dump purposes.
		 */
		copy = min_t(int, TFD_MAX_PAYLOAD_SIZE - cmd_pos, cmd->len[i]);

		memcpy((u8 *)out_cmd + cmd_pos, cmd->data[i], copy);
		cmd_pos += copy;

		/* however, treat copy_size the proper way, it is needed below */
		if (copy_size < IWL_FIRST_TB_SIZE) {
			copy = IWL_FIRST_TB_SIZE - copy_size;

			if (copy > cmd->len[i])
				copy = cmd->len[i];
			copy_size += copy;
		}
	}

	IWL_DEBUG_HC(trans,
		     "Sending command %s (%.2x.%.2x), seq: 0x%04X, %d bytes at %d[%d]:%d\n",
		     iwl_get_cmd_string(trans, cmd->id), group_id,
		     out_cmd->hdr.cmd, le16_to_cpu(out_cmd->hdr.sequence),
		     cmd_size, txq->write_ptr, idx, trans->txqs.cmd.q_id);
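
	/* start the TFD with the minimum copy bytes */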
	tb0_size = min_t(int, copy_size, IWL_FIRST_TB_SIZE);
	memcpy(&txq->first_tb_bufs[idx], out_cmd, tb0_size);
	iwl_txq_gen2_set_tb(trans, tfd, iwl_txq_get_first_tb_dma(txq, idx),
			    tb0_size);
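
	/* map whatever remains of the copied portion beyond TB0 */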
	if (copy_size > tb0_size) {
		phys_addr = dma_map_single(trans->dev,
					   (u8 *)out_cmd + tb0_size,
					   copy_size - tb0_size,
					   DMA_TO_DEVICE);
		if (dma_mapping_error(trans->dev, phys_addr)) {
			idx = -ENOMEM;
			iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd);
			goto out;
		}
		iwl_txq_gen2_set_tb(trans, tfd, phys_addr,
				    copy_size - tb0_size);
	}
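
	/* map the remaining (adjusted) nocopy/dup fragments */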
	for (i = 0; i < IWL_MAX_CMD_TBS_PER_TFD; i++) {
		void *data = (void *)cmddata[i];

		if (!cmdlen[i])
			continue;
		if (!(cmd->dataflags[i] & (IWL_HCMD_DFL_NOCOPY |
					   IWL_HCMD_DFL_DUP)))
			continue;
		if (cmd->dataflags[i] & IWL_HCMD_DFL_DUP)
			data = dup_buf;
		phys_addr = dma_map_single(trans->dev, data,
					   cmdlen[i], DMA_TO_DEVICE);
		if (dma_mapping_error(trans->dev, phys_addr)) {
			idx = -ENOMEM;
			iwl_txq_gen2_tfd_unmap(trans, out_meta, tfd);
			goto out;
		}
		iwl_txq_gen2_set_tb(trans, tfd, phys_addr, cmdlen[i]);
	}

	BUILD_BUG_ON(IWL_TFH_NUM_TBS > sizeof(out_meta->tbs) * BITS_PER_BYTE);
	out_meta->flags = cmd->flags;
	if (WARN_ON_ONCE(txq->entries[idx].free_buf))
		kfree_sensitive(txq->entries[idx].free_buf);
	txq->entries[idx].free_buf = dup_buf;

	trace_iwlwifi_dev_hcmd(trans->dev, cmd, cmd_size, &out_cmd->hdr_wide);
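
	/* start timer if queue currently empty */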
	if (txq->read_ptr == txq->write_ptr && txq->wd_timeout)
		mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);

	spin_lock(&trans_pcie->reg_lock);
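	/* Increment and update queue's write index */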
	txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
	iwl_txq_inc_wr_ptr(trans, txq);
	spin_unlock(&trans_pcie->reg_lock);

out:
	spin_unlock_irqrestore(&txq->lock, flags);
free_dup_buf:
	if (idx < 0)
		kfree(dup_buf);
	return idx;
}
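
/*
 * Caller-side sketch (illustrative only, not part of this file): a host
 * command combining a small copied fragment with a large NOCOPY fragment,
 * exercising both the inline-copy and the separately-mapped paths above.
 * The group/opcode and the fixed header struct are hypothetical; the
 * iwl_host_cmd fields, WIDE_ID() and IWL_HCMD_DFL_NOCOPY are real. A
 * NOCOPY buffer is DMA-mapped directly, so it must remain valid until
 * the command completes.
 */
static int example_send_big_cmd(struct iwl_trans *trans,
				const void *big_buf, u16 big_len)
{
	struct {
		__le32 flags;			/* hypothetical payload header */
	} hdr = { .flags = cpu_to_le32(1) };
	struct iwl_host_cmd hcmd = {
		.id = WIDE_ID(LONG_GROUP, 0x42),	/* hypothetical command ID */
		.data = { &hdr, big_buf },
		.len = { sizeof(hdr), big_len },
		.dataflags = { 0, IWL_HCMD_DFL_NOCOPY },
	};

	return iwl_trans_send_cmd(trans, &hcmd);
}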