Lines matching "bi-directional" in the iwlwifi PCIe gen2 host-command TX code

9  * Copyright(c) 2018 - 2020 Intel Corporation
56 #include "iwl-debug.h"
57 #include "iwl-csr.h"
58 #include "iwl-io.h"
66 * iwl_pcie_gen2_enqueue_hcmd - enqueue a uCode command
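 * (The remainder of this kernel-doc comment is not captured by the match
 *  listing; judging from the error paths below, the function returns a
 *  negative errno on failure and the command's index in the queue on success.)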

In iwl_pcie_gen2_enqueue_hcmd():

78 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];
87 u8 group_id = iwl_cmd_groupid(cmd->id);
96 cmddata[i] = cmd->data[i];
97 cmdlen[i] = cmd->len[i];
99 if (!cmd->len[i])
104 int copy = IWL_FIRST_TB_SIZE - copy_size;
108 cmdlen[i] -= copy;
113 if (cmd->dataflags[i] & IWL_HCMD_DFL_NOCOPY) {
115 if (WARN_ON(cmd->dataflags[i] & IWL_HCMD_DFL_DUP)) {
116 idx = -EINVAL;
119 } else if (cmd->dataflags[i] & IWL_HCMD_DFL_DUP) {
128 idx = -EINVAL;
135 return -ENOMEM;
139 idx = -EINVAL;
144 cmd_size += cmd->len[i];
154 iwl_get_cmd_string(trans, cmd->id), cmd->id, copy_size)) {
155 idx = -EINVAL;
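	/*
	 * Note on the bookkeeping above (much of the loop is elided by this
	 * listing): copy_size counts only the bytes that get memcpy'd into
	 * the command buffer, i.e. the wide header plus fragments that are
	 * not marked IWL_HCMD_DFL_NOCOPY/_DUP, while cmd_size counts every
	 * fragment; the first IWL_FIRST_TB_SIZE bytes of the copied data are
	 * special-cased because they end up in the small first TB (TB0).
	 */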
159 spin_lock_bh(&txq->lock);
161 idx = iwl_txq_get_cmd_index(txq, txq->write_ptr);
162 tfd = iwl_txq_get_tfd(trans, txq, txq->write_ptr);
165 if (iwl_txq_space(trans, txq) < ((cmd->flags & CMD_ASYNC) ? 2 : 1)) {
166 spin_unlock_bh(&txq->lock);
169 iwl_op_mode_cmd_queue_full(trans->op_mode);
170 idx = -ENOSPC;
174 out_cmd = txq->entries[idx].cmd;
175 out_meta = &txq->entries[idx].meta;
177 /* re-initialize to NULL */
179 if (cmd->flags & CMD_WANT_SKB)
180 out_meta->source = cmd;
183 out_cmd->hdr_wide.cmd = iwl_cmd_opcode(cmd->id);
184 out_cmd->hdr_wide.group_id = group_id;
185 out_cmd->hdr_wide.version = iwl_cmd_version(cmd->id);
186 out_cmd->hdr_wide.length =
187 cpu_to_le16(cmd_size - sizeof(struct iwl_cmd_header_wide));
188 out_cmd->hdr_wide.reserved = 0;
189 out_cmd->hdr_wide.sequence =
190 cpu_to_le16(QUEUE_TO_SEQ(trans->txqs.cmd.q_id) |
191 INDEX_TO_SEQ(txq->write_ptr));
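	/*
	 * QUEUE_TO_SEQ()/INDEX_TO_SEQ() pack the command queue id and the
	 * write index into the 16-bit sequence field of the wide header so
	 * the completion can be matched back to this slot. In this driver
	 * they are typically of the form (q << 8) and (i & 0xff); treat the
	 * exact bit layout as an assumption, not something shown here.
	 */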
200 if (!cmd->len[i])
204 if (!(cmd->dataflags[i] & (IWL_HCMD_DFL_NOCOPY |
206 copy = cmd->len[i];
208 memcpy((u8 *)out_cmd + cmd_pos, cmd->data[i], copy);
216 * in total (for bi-directional DMA), but copy up to what
219 copy = min_t(int, TFD_MAX_PAYLOAD_SIZE - cmd_pos, cmd->len[i]);
221 memcpy((u8 *)out_cmd + cmd_pos, cmd->data[i], copy);
226 copy = IWL_FIRST_TB_SIZE - copy_size;
228 if (copy > cmd->len[i])
229 copy = cmd->len[i];
236 iwl_get_cmd_string(trans, cmd->id), group_id,
237 out_cmd->hdr.cmd, le16_to_cpu(out_cmd->hdr.sequence),
238 cmd_size, txq->write_ptr, idx, trans->txqs.cmd.q_id);
242 memcpy(&txq->first_tb_bufs[idx], out_cmd, tb0_size);
248 phys_addr = dma_map_single(trans->dev,
250 copy_size - tb0_size,
252 if (dma_mapping_error(trans->dev, phys_addr)) {
253 idx = -ENOMEM;
258 copy_size - tb0_size);
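	/*
	 * The first IWL_FIRST_TB_SIZE bytes of the built command (wide
	 * header plus the start of the copied payload) sit in
	 * txq->first_tb_bufs[idx] and become TB0; the remainder of the
	 * copied payload is DMA-mapped just above and added as the next TB,
	 * and the loop below gives every NOCOPY/DUP fragment a TB of its own.
	 */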
267 if (!(cmd->dataflags[i] & (IWL_HCMD_DFL_NOCOPY |
270 if (cmd->dataflags[i] & IWL_HCMD_DFL_DUP)
272 phys_addr = dma_map_single(trans->dev, (void *)data,
274 if (dma_mapping_error(trans->dev, phys_addr)) {
275 idx = -ENOMEM;
282 BUILD_BUG_ON(IWL_TFH_NUM_TBS > sizeof(out_meta->tbs) * BITS_PER_BYTE);
283 out_meta->flags = cmd->flags;
284 if (WARN_ON_ONCE(txq->entries[idx].free_buf))
285 kfree_sensitive(txq->entries[idx].free_buf);
286 txq->entries[idx].free_buf = dup_buf;
288 trace_iwlwifi_dev_hcmd(trans->dev, cmd, cmd_size, &out_cmd->hdr_wide);
291 if (txq->read_ptr == txq->write_ptr && txq->wd_timeout)
292 mod_timer(&txq->stuck_timer, jiffies + txq->wd_timeout);
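	/*
	 * read_ptr == write_ptr means the queue was empty and this command
	 * is its first pending entry, so the stuck-queue watchdog is armed
	 * here; it fires if the firmware does not consume the command within
	 * wd_timeout.
	 */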
294 spin_lock_irqsave(&trans_pcie->reg_lock, flags);
296 txq->write_ptr = iwl_txq_inc_wrap(trans, txq->write_ptr);
298 spin_unlock_irqrestore(&trans_pcie->reg_lock, flags);
301 spin_unlock_bh(&txq->lock);
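For orientation, a minimal caller-side sketch of the interface this enqueue
path serves, assuming the in-tree struct iwl_host_cmd and iwl_trans_send_cmd()
declarations from iwl-trans.h; the function name, the opcode 0x77 and the hdr
field are made up for illustration and do not come from this listing:

static int example_send_big_cmd(struct iwl_trans *trans,
				const void *payload, u16 payload_len)
{
	__le32 hdr = cpu_to_le32(1);			/* hypothetical small first fragment */
	struct iwl_host_cmd hcmd = {
		.id = WIDE_ID(LONG_GROUP, 0x77),	/* hypothetical opcode */
		/* flags left at 0: synchronous send, so the NOCOPY buffer
		 * only has to stay valid until iwl_trans_send_cmd() returns */
		.len = { sizeof(hdr), payload_len },
		.data = { &hdr, payload },
		/* the large fragment is not copied into the command buffer;
		 * the enqueue path above DMA-maps it as a TB of its own */
		.dataflags = { 0, IWL_HCMD_DFL_NOCOPY },
	};

	return iwl_trans_send_cmd(trans, &hcmd);
}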

In iwl_pcie_gen2_send_hcmd_sync():

314 const char *cmd_str = iwl_get_cmd_string(trans, cmd->id);
315 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id];
322 &trans->status),
324 return -EIO;
331 clear_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status);
337 ret = wait_event_timeout(trans_pcie->wait_command_queue,
339 &trans->status),
346 txq->read_ptr, txq->write_ptr);
348 clear_bit(STATUS_SYNC_HCMD_ACTIVE, &trans->status);
351 ret = -ETIMEDOUT;
357 if (test_bit(STATUS_FW_ERROR, &trans->status)) {
360 ret = -EIO;
364 if (!(cmd->flags & CMD_SEND_IN_RFKILL) &&
365 test_bit(STATUS_RFKILL_OPMODE, &trans->status)) {
367 ret = -ERFKILL;
371 if ((cmd->flags & CMD_WANT_SKB) && !cmd->resp_pkt) {
373 ret = -EIO;
380 if (cmd->flags & CMD_WANT_SKB) {
385 * address (cmd->meta.source).
387 txq->entries[cmd_idx].meta.flags &= ~CMD_WANT_SKB;
390 if (cmd->resp_pkt) {
392 cmd->resp_pkt = NULL;
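The synchronous path above is what makes the common CMD_WANT_SKB calling
pattern work. A sketch of that pattern, assuming the iwl-trans.h API
(iwl_trans_send_cmd(), iwl_free_resp(), struct iwl_rx_packet); the function
name and opcode 0x88 are hypothetical and the parsing is omitted:

static int example_query_fw(struct iwl_trans *trans)
{
	struct iwl_host_cmd hcmd = {
		.id = WIDE_ID(LONG_GROUP, 0x88),	/* hypothetical opcode */
		.flags = CMD_WANT_SKB,			/* sync send, keep the response packet */
	};
	struct iwl_rx_packet *pkt;
	int ret;

	ret = iwl_trans_send_cmd(trans, &hcmd);
	if (ret)
		return ret;	/* -ERFKILL/-EIO/-ETIMEDOUT etc., as set in the sync path */

	pkt = hcmd.resp_pkt;
	/* ... parse pkt->data here ... */

	iwl_free_resp(&hcmd);	/* release the response the sync path kept for us */
	return 0;
}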

In iwl_trans_pcie_gen2_send_hcmd():

401 if (!(cmd->flags & CMD_SEND_IN_RFKILL) &&
402 test_bit(STATUS_RFKILL_OPMODE, &trans->status)) {
404 cmd->id);
405 return -ERFKILL;
408 if (cmd->flags & CMD_ASYNC) {
412 if (WARN_ON(cmd->flags & CMD_WANT_SKB))
413 return -EINVAL;
419 iwl_get_cmd_string(trans, cmd->id), ret);
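	/*
	 * Note: the WARN_ON above is why CMD_ASYNC and CMD_WANT_SKB are
	 * mutually exclusive: keeping the response packet only works when
	 * the caller blocks in iwl_pcie_gen2_send_hcmd_sync() until the
	 * completion has filled in cmd->resp_pkt.
	 */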