
Searched defs:wqe (Results 1 – 25 of 84) sorted by relevance

/linux/drivers/infiniband/sw/rxe/
rxe_req.c
17 struct rxe_send_wqe *wqe, int npsn) in retry_first_write_send() argument
39 struct rxe_send_wqe *wqe; req_retry() local
120 struct rxe_send_wqe *wqe; req_check_sq_drain_done() local
176 struct rxe_send_wqe *wqe; req_next_wqe() local
205 rxe_wqe_is_fenced(struct rxe_qp * qp,struct rxe_send_wqe * wqe) rxe_wqe_is_fenced() argument
353 next_opcode(struct rxe_qp * qp,struct rxe_send_wqe * wqe,u32 opcode) next_opcode() argument
383 check_init_depth(struct rxe_qp * qp,struct rxe_send_wqe * wqe) check_init_depth() argument
415 init_req_packet(struct rxe_qp * qp,struct rxe_av * av,struct rxe_send_wqe * wqe,int opcode,u32 payload,struct rxe_pkt_info * pkt) init_req_packet() argument
499 finish_packet(struct rxe_qp * qp,struct rxe_av * av,struct rxe_send_wqe * wqe,struct rxe_pkt_info * pkt,struct sk_buff * skb,u32 payload) finish_packet() argument
542 update_wqe_state(struct rxe_qp * qp,struct rxe_send_wqe * wqe,struct rxe_pkt_info * pkt) update_wqe_state() argument
556 update_wqe_psn(struct rxe_qp * qp,struct rxe_send_wqe * wqe,struct rxe_pkt_info * pkt,u32 payload) update_wqe_psn() argument
593 rxe_do_local_ops(struct rxe_qp * qp,struct rxe_send_wqe * wqe) rxe_do_local_ops() argument
644 struct rxe_send_wqe *wqe; rxe_requester() local
[all...]
rxe_mw.c
50 static int rxe_check_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe, in rxe_check_bind_mw() argument
135 static void rxe_do_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe, in rxe_do_bind_mw() argument
164 rxe_bind_mw(struct rxe_qp * qp,struct rxe_send_wqe * wqe) rxe_bind_mw() argument
[all...]
rxe_resp.c
263 struct rxe_recv_wqe *wqe; in get_srq_wqe() local
1075 struct rxe_recv_wqe *wqe = qp->resp.wqe; do_complete() local
1444 flush_recv_wqe(struct rxe_qp * qp,struct rxe_recv_wqe * wqe) flush_recv_wqe() argument
1475 struct rxe_recv_wqe *wqe; flush_recv_queue() local
[all...]
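
The rxe entries above all operate on fixed-size send/receive descriptors pulled off a queue by functions such as req_next_wqe() and rxe_requester(). A minimal sketch of that general pattern, using invented names and fields rather than the actual rxe layout:

#include <stdint.h>
#include <stddef.h>

/* Demo descriptor, not struct rxe_send_wqe. */
struct demo_send_wqe {
	uint64_t wr_id;      /* caller cookie, echoed back in the completion */
	uint32_t opcode;     /* send / RDMA write / RDMA read / ...          */
	uint32_t psn;        /* first packet sequence number of this WQE     */
	uint64_t laddr;      /* local buffer address                         */
	uint32_t length;
	uint32_t lkey;
};

/* Demo send queue: a ring of descriptors walked with two indices. */
struct demo_sq {
	struct demo_send_wqe *ring;  /* num_wqe contiguous descriptors       */
	uint32_t num_wqe;            /* power of two, so masking wraps       */
	uint32_t producer;           /* next slot the post-send path fills   */
	uint32_t consumer;           /* next slot the requester processes    */
};

/* Roughly the role req_next_wqe() appears to play: hand back the oldest
 * unprocessed WQE, or NULL when the queue is empty. */
static struct demo_send_wqe *demo_next_wqe(struct demo_sq *sq)
{
	if (sq->consumer == sq->producer)
		return NULL;
	return &sq->ring[sq->consumer & (sq->num_wqe - 1)];
}
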
/linux/drivers/infiniband/hw/irdma/
ctrl.c
186 __le64 *wqe; in irdma_sc_add_arp_cache_entry() local
222 __le64 *wqe; in irdma_sc_del_arp_cache_entry() local
256 __le64 *wqe; irdma_sc_manage_apbvt_entry() local
304 __le64 *wqe; irdma_sc_manage_qhash_table_entry() local
452 __le64 *wqe; irdma_sc_qp_create() local
502 __le64 *wqe; irdma_sc_qp_modify() local
572 __le64 *wqe; irdma_sc_qp_destroy() local
764 __le64 *wqe; irdma_sc_alloc_local_mac_entry() local
799 __le64 *wqe; irdma_sc_add_local_mac_entry() local
837 __le64 *wqe; irdma_sc_del_local_mac_entry() local
1059 __le64 *wqe; irdma_sc_alloc_stag() local
1123 __le64 *wqe; irdma_sc_mr_reg_non_shared() local
1215 __le64 *wqe; irdma_sc_dealloc_stag() local
1256 __le64 *wqe; irdma_sc_mw_alloc() local
1296 __le64 *wqe; irdma_sc_mr_fast_register() local
1366 __le64 *wqe; irdma_sc_gen_rts_ae() local
1403 __le64 *wqe; irdma_sc_send_lsmm() local
1445 __le64 *wqe; irdma_sc_send_rtt() local
2051 __le64 *wqe; irdma_sc_gather_stats() local
2098 __le64 *wqe; irdma_sc_manage_stats_inst() local
2134 __le64 *wqe; irdma_sc_set_up_map() local
2177 __le64 *wqe; irdma_sc_manage_ws_node() local
2220 __le64 *wqe; irdma_sc_qp_flush_wqes() local
2294 __le64 *wqe; irdma_sc_gen_ae() local
2333 __le64 *wqe; irdma_sc_qp_upload_context() local
2373 __le64 *wqe; irdma_sc_manage_push_page() local
2412 __le64 *wqe; irdma_sc_suspend_qp() local
2442 __le64 *wqe; irdma_sc_resume_qp() local
2519 __le64 *wqe; irdma_sc_cq_create() local
2592 __le64 *wqe; irdma_sc_cq_destroy() local
2660 __le64 *wqe; irdma_sc_cq_modify() local
3263 __le64 *wqe = NULL; irdma_sc_cqp_get_next_send_wqe_idx() local
3460 __le64 *wqe; irdma_sc_manage_hmc_pm_func_table() local
3518 __le64 *wqe; irdma_sc_commit_fpm_val() local
3579 __le64 *wqe; irdma_sc_query_fpm_val() local
3669 __le64 *wqe; irdma_sc_ceq_create() local
3767 __le64 *wqe; irdma_sc_ceq_destroy() local
3932 __le64 *wqe; irdma_sc_aeq_create() local
3971 __le64 *wqe; irdma_sc_aeq_destroy() local
4261 __le64 *wqe; irdma_sc_ccq_destroy() local
4418 __le64 *wqe; cqp_sds_wqe_fill() local
4541 __le64 *wqe; irdma_sc_static_hmc_pages_allocated() local
4637 __le64 *wqe; irdma_sc_query_rdma_features() local
[all...]
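
Every ctrl.c entry above declares wqe as __le64 *, i.e. a control-path WQE is treated as an array of little-endian 64-bit words filled at fixed byte offsets. A hedged sketch of that fill pattern; the helper name, offsets and bit positions are invented for the demo, and a real __le64 buffer would be written with cpu_to_le64():

#include <stdint.h>
#include <stddef.h>

/* Demo stand-in for a set_64bit_val()-style helper; assumes a
 * little-endian host instead of doing an explicit byte-order conversion. */
static void demo_set_64bit_val(uint64_t *wqe, size_t byte_off, uint64_t val)
{
	wqe[byte_off / sizeof(*wqe)] = val;
}

/* Build a hypothetical "suspend QP" request: payload words first, the
 * header word carrying the valid bit last, so hardware never observes a
 * half-written WQE. */
static void demo_build_suspend_wqe(uint64_t *wqe, uint32_t qp_id, int valid)
{
	demo_set_64bit_val(wqe, 8, qp_id);
	demo_set_64bit_val(wqe, 16, 0);
	demo_set_64bit_val(wqe, 24, ((uint64_t)valid << 63) | 0x1);
}
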
uk.c
15 static void irdma_set_fragment(__le64 *wqe, u32 offset, struct ib_sge *sge, in irdma_set_fragment() argument
39 irdma_set_fragment_gen_1(__le64 * wqe,u32 offset,struct ib_sge * sge,u8 valid) irdma_set_fragment_gen_1() argument
61 __le64 *wqe; irdma_nop_1() local
158 __le64 *wqe; irdma_qp_get_next_send_wqe() local
208 __le64 *wqe; irdma_qp_get_next_recv_wqe() local
236 __le64 *wqe; irdma_uk_rdma_write() local
335 __le64 *wqe; irdma_uk_rdma_read() local
410 __le64 *wqe; irdma_uk_send() local
501 irdma_set_mw_bind_wqe_gen_1(__le64 * wqe,struct irdma_bind_window * op_info) irdma_set_mw_bind_wqe_gen_1() argument
518 irdma_copy_inline_data_gen_1(u8 * wqe,struct ib_sge * sge_list,u32 num_sges,u8 polarity) irdma_copy_inline_data_gen_1() argument
563 irdma_set_mw_bind_wqe(__le64 * wqe,struct irdma_bind_window * op_info) irdma_set_mw_bind_wqe() argument
580 irdma_copy_inline_data(u8 * wqe,struct ib_sge * sge_list,u32 num_sges,u8 polarity) irdma_copy_inline_data() argument
657 __le64 *wqe; irdma_uk_inline_rdma_write() local
725 __le64 *wqe; irdma_uk_inline_send() local
798 __le64 *wqe; irdma_uk_stag_local_invalidate() local
846 __le64 *wqe; irdma_uk_post_receive() local
1533 __le64 *wqe; irdma_nop() local
[all...]
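
The uk.c entries include irdma_set_fragment() and irdma_copy_inline_data(), both of which place data descriptors or raw bytes into a WQE at a caller-supplied offset. A hedged sketch of writing one scatter/gather element into a word-addressed WQE; the word layout is invented, not the hardware's:

#include <stdint.h>
#include <stddef.h>

struct demo_sge {
	uint64_t addr;     /* registered buffer address */
	uint32_t length;
	uint32_t lkey;     /* local protection key      */
};

/* Copy one fragment (address, length, key, valid bit) into the WQE
 * starting at byte_off. */
static void demo_set_fragment(uint64_t *wqe, size_t byte_off,
			      const struct demo_sge *sge, uint8_t valid)
{
	size_t w = byte_off / sizeof(*wqe);

	wqe[w]     = sge->addr;
	wqe[w + 1] = ((uint64_t)valid << 63) |
		     ((uint64_t)sge->lkey << 32) | sge->length;
}
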
uda.c
23 __le64 *wqe; in irdma_sc_access_ah() local
119 __le64 *wqe; irdma_access_mcast_grp() local
[all...]
puda.c
82 __le64 *wqe; in irdma_puda_post_recvbuf() local
427 __le64 *wqe; irdma_puda_send() local
608 __le64 *wqe; irdma_puda_qp_wqe() local
725 __le64 *wqe; irdma_puda_cq_wqe() local
1118 __le64 *wqe; irdma_ilq_putback_rcvbuf() local
[all...]
/linux/drivers/infiniband/sw/siw/
siw_qp_tx.c
51 struct siw_wqe *wqe = &c_tx->wqe_active; in siw_try_1seg() local
119 struct siw_wqe *wqe = &c_tx->wqe_active; in siw_qp_prepare_tx() local
440 struct siw_wqe *wqe = &c_tx->wqe_active; siw_tx_hdt() local
706 siw_prepare_fpdu(struct siw_qp * qp,struct siw_wqe * wqe) siw_prepare_fpdu() argument
763 siw_check_sgl_tx(struct ib_pd * pd,struct siw_wqe * wqe,enum ib_access_flags perms) siw_check_sgl_tx() argument
793 siw_qp_sq_proc_tx(struct siw_qp * qp,struct siw_wqe * wqe) siw_qp_sq_proc_tx() argument
978 siw_qp_sq_proc_local(struct siw_qp * qp,struct siw_wqe * wqe) siw_qp_sq_proc_local() argument
1025 struct siw_wqe *wqe = tx_wqe(qp); siw_qp_sq_process() local
[all...]
siw_qp_rx.c
169 struct siw_wqe *wqe = &frx->wqe_active; in siw_rresp_check_ntoh() local
281 struct siw_wqe *wqe = &frx->wqe_active; in siw_send_check_ntoh() local
334 struct siw_wqe *wqe = NULL; siw_rqe_get() local
439 struct siw_wqe *wqe; siw_proc_send() local
565 struct siw_wqe *wqe = rx_wqe(frx); siw_proc_write() local
742 struct siw_wqe *wqe = NULL; siw_orqe_start_rx() local
786 struct siw_wqe *wqe = rx_wqe(frx); siw_proc_rresp() local
1201 struct siw_wqe *wqe = rx_wqe(qp->rx_fpdu); siw_rdmap_complete() local
[all...]
siw_qp.c
238 struct siw_wqe *wqe = tx_wqe(qp); in siw_qp_mpa_rts() local
438 struct siw_wqe *wqe = tx_wqe(qp); siw_send_terminate() local
855 struct siw_wqe *wqe = tx_wqe(qp); siw_activate_tx_from_sq() local
951 struct siw_wqe *wqe = tx_wqe(qp); siw_activate_tx() local
1162 struct siw_wqe *wqe = tx_wqe(qp); siw_sq_flush() local
1237 struct siw_wqe *wqe = &qp->rx_untagged.wqe_active; siw_rq_flush() local
[all...]
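
Unlike the hardware drivers in this list, siw (software iWARP) keeps one "active" WQE per transmit or receive context: the entries above repeatedly take wqe from wqe_active, tx_wqe(qp) or rx_wqe(frx). A hedged sketch of that single-in-flight idea, with invented types and states:

#include <stdint.h>
#include <string.h>

enum demo_wqe_state { DEMO_WQE_IDLE, DEMO_WQE_INPROGRESS };

struct demo_sqe { uint64_t id; uint32_t opcode; uint32_t flags; };

struct demo_tx_ctx {
	struct demo_sqe wqe_active;   /* the one WQE currently being sent  */
	enum demo_wqe_state state;
	uint32_t bytes_sent;          /* resume point after a partial send */
};

/* Activate the next submitted SQE only when nothing is in flight. */
static int demo_activate_tx(struct demo_tx_ctx *c, const struct demo_sqe *sqe)
{
	if (c->state == DEMO_WQE_INPROGRESS)
		return -1;
	memcpy(&c->wqe_active, sqe, sizeof(*sqe));
	c->bytes_sent = 0;
	c->state = DEMO_WQE_INPROGRESS;
	return 0;
}
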
/linux/drivers/infiniband/hw/cxgb4/
qp.c
489 static int build_rdma_send(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_send() argument
556 build_rdma_write(struct t4_sq * sq,union t4_wr * wqe,const struct ib_send_wr * wr,u8 * len16) build_rdma_write() argument
660 build_rdma_read(union t4_wr * wqe,const struct ib_send_wr * wr,u8 * len16) build_rdma_read() argument
697 union t4_wr *wqe; post_write_cmpl() local
759 build_rdma_recv(struct c4iw_qp * qhp,union t4_recv_wr * wqe,const struct ib_recv_wr * wr,u8 * len16) build_rdma_recv() argument
774 build_srq_recv(union t4_recv_wr * wqe,const struct ib_recv_wr * wr,u8 * len16) build_srq_recv() argument
820 build_memreg(struct t4_sq * sq,union t4_wr * wqe,const struct ib_reg_wr * wr,struct c4iw_mr * mhp,u8 * len16,bool dsgl_supported) build_memreg() argument
884 build_inv_stag(union t4_wr * wqe,const struct ib_send_wr * wr,u8 * len16) build_inv_stag() argument
1086 union t4_wr *wqe = NULL; c4iw_post_send() local
1266 union t4_recv_wr *wqe = NULL; c4iw_post_receive() local
1341 defer_srq_wr(struct t4_srq * srq,union t4_recv_wr * wqe,u64 wr_id,u8 len16) defer_srq_wr() argument
1360 union t4_recv_wr *wqe, lwqe; c4iw_post_srq_recv() local
1563 struct fw_ri_wr *wqe; post_terminate() local
1703 struct fw_ri_wr *wqe; rdma_fini() local
1759 struct fw_ri_wr *wqe; rdma_init() local
2672 c4iw_copy_wr_to_srq(struct t4_srq * srq,union t4_recv_wr * wqe,u8 len16) c4iw_copy_wr_to_srq() argument
[all...]
/linux/drivers/infiniband/hw/mlx5/
umr.c
238 struct mlx5r_umr_wqe *wqe, bool with_data) in mlx5r_umr_post_send() argument
285 struct mlx5r_umr_wqe *wqe, bool with_data) in mlx5r_umr_recover() argument
354 struct mlx5r_umr_wqe *wqe, bool with_data) in mlx5r_umr_post_send_wait() argument
424 struct mlx5r_umr_wqe wqe = {}; in mlx5r_umr_revoke_mr() local
464 struct mlx5r_umr_wqe wqe = {}; mlx5r_umr_rereg_pd_access() local
637 mlx5r_umr_final_update_xlt(struct mlx5_ib_dev * dev,struct mlx5r_umr_wqe * wqe,struct mlx5_ib_mr * mr,struct ib_sge * sg,unsigned int flags) mlx5r_umr_final_update_xlt() argument
673 _mlx5r_umr_init_wqe(struct mlx5_ib_mr * mr,struct mlx5r_umr_wqe * wqe,struct ib_sge * sg,unsigned int flags,unsigned int page_shift,bool dd) _mlx5r_umr_init_wqe() argument
693 struct mlx5r_umr_wqe wqe = {}; _mlx5r_umr_update_mr_pas() local
852 struct mlx5r_umr_wqe wqe = {}; mlx5r_umr_update_xlt() local
939 struct mlx5r_umr_wqe wqe = {}; mlx5r_umr_update_mr_page_shift() local
[all...]
/linux/drivers/infiniband/hw/hfi1/
rc.c
394 struct rvt_swqe *wqe; in hfi1_make_rc_req() local
1418 update_num_rd_atomic(struct rvt_qp * qp,u32 psn,struct rvt_swqe * wqe) update_num_rd_atomic() argument
1461 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n); reset_psn() local
1567 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked); hfi1_restart_rc() local
1637 struct rvt_swqe *wqe; reset_sending_psn() local
1697 struct rvt_swqe *wqe; hfi1_rc_send_complete() local
1825 do_rc_completion(struct rvt_qp * qp,struct rvt_swqe * wqe,struct hfi1_ibport * ibp) do_rc_completion() argument
1970 struct rvt_swqe *wqe; do_rc_ack() local
2266 struct rvt_swqe *wqe; rdma_seq_err() local
2310 struct rvt_swqe *wqe; rc_rcv_resp() local
[all...]
tid_rdma.c
378 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, i); in hfi1_qp_priv_init() local
416 struct rvt_swqe *wqe; in hfi1_qp_priv_tid_free() local
1621 void __trdma_clean_swqe(struct rvt_qp *qp, struct rvt_swqe *wqe) in __trdma_clean_swqe() argument
1703 hfi1_build_tid_rdma_read_packet(struct rvt_swqe * wqe,struct ib_other_headers * ohdr,u32 * bth1,u32 * bth2,u32 * len) hfi1_build_tid_rdma_read_packet() argument
1787 hfi1_build_tid_rdma_read_req(struct rvt_qp * qp,struct rvt_swqe * wqe,struct ib_other_headers * ohdr,u32 * bth1,u32 * bth2,u32 * len) hfi1_build_tid_rdma_read_req() argument
2418 struct rvt_swqe *wqe; find_tid_request() local
2582 struct rvt_swqe *wqe; hfi1_kern_read_tid_flow_free() local
2630 restart_tid_rdma_read_req(struct hfi1_ctxtdata * rcd,struct rvt_qp * qp,struct rvt_swqe * wqe) restart_tid_rdma_read_req() argument
2663 struct rvt_swqe *wqe; handle_read_kdeth_eflags() local
3046 hfi1_tid_rdma_restart_req(struct rvt_qp * qp,struct rvt_swqe * wqe,u32 * bth2) hfi1_tid_rdma_restart_req() argument
3181 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, i); hfi1_qp_kern_exp_rcv_clear_all() local
3210 hfi1_tid_rdma_wqe_interlock(struct rvt_qp * qp,struct rvt_swqe * wqe) hfi1_tid_rdma_wqe_interlock() argument
3282 setup_tid_rdma_wqe(struct rvt_qp * qp,struct rvt_swqe * wqe) setup_tid_rdma_wqe() argument
3366 hfi1_build_tid_rdma_write_req(struct rvt_qp * qp,struct rvt_swqe * wqe,struct ib_other_headers * ohdr,u32 * bth1,u32 * bth2,u32 * len) hfi1_build_tid_rdma_write_req() argument
4044 struct rvt_swqe *wqe; hfi1_rc_rcv_tid_rdma_write_resp() local
4205 hfi1_build_tid_rdma_packet(struct rvt_swqe * wqe,struct ib_other_headers * ohdr,u32 * bth1,u32 * bth2,u32 * len) hfi1_build_tid_rdma_packet() argument
4502 struct rvt_swqe *wqe; hfi1_rc_rcv_tid_rdma_ack() local
4803 struct rvt_swqe *wqe; hfi1_tid_retry_timeout() local
4844 hfi1_build_tid_rdma_resync(struct rvt_qp * qp,struct rvt_swqe * wqe,struct ib_other_headers * ohdr,u32 * bth1,u32 * bth2,u16 fidx) hfi1_build_tid_rdma_resync() argument
4989 struct rvt_swqe *wqe; update_tid_tail() local
5013 struct rvt_swqe *wqe; hfi1_make_tid_rdma_pkt() local
[all...]
ud.c
224 static void hfi1_make_bth_deth(struct rvt_qp *qp, struct rvt_swqe *wqe, in hfi1_make_bth_deth() argument
262 struct rvt_swqe *wqe) in hfi1_make_ud_req_9B() argument
333 hfi1_make_ud_req_16B(struct rvt_qp * qp,struct hfi1_pkt_state * ps,struct rvt_swqe * wqe) hfi1_make_ud_req_16B() argument
441 struct rvt_swqe *wqe; hfi1_make_ud_req() local
[all...]
rc.h
35 static inline u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe, in restart_sge() argument
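
restart_sge() looks like the RC retransmission helper; the arithmetic it implies is that the resume offset within a WQE's data is the PSN delta times the path MTU. A hedged sketch of just that calculation (IB packet sequence numbers are 24-bit, so the delta is masked):

#include <stdint.h>

#define DEMO_PSN_MASK 0xffffffu   /* IB packet sequence numbers are 24-bit */

/* Byte offset at which to resume sending a WQE whose first packet carried
 * wqe_psn, when retransmission restarts at psn. */
static uint32_t demo_restart_offset(uint32_t psn, uint32_t wqe_psn, uint32_t pmtu)
{
	return ((psn - wqe_psn) & DEMO_PSN_MASK) * pmtu;
}
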
/linux/drivers/infiniband/sw/rdmavt/
qp.c
591 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_last); in rvt_clear_mr_refs() local
621 static bool rvt_swqe_has_lkey(struct rvt_swqe *wqe, u32 lkey) in rvt_swqe_has_lkey() argument
644 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, s_last); in rvt_qp_sends_has_lkey() local
979 struct rvt_swqe *wqe; in free_ud_wq_attr() local
999 struct rvt_swqe *wqe; alloc_ud_wq_attr() local
1796 struct rvt_rwqe *wqe; rvt_post_recv() local
1963 struct rvt_swqe *wqe; rvt_post_one_wr() local
2211 struct rvt_rwqe *wqe; rvt_post_srq_recv() local
2266 init_sge(struct rvt_qp * qp,struct rvt_rwqe * wqe) init_sge() argument
2349 struct rvt_rwqe *wqe; rvt_get_rwqe() local
2777 rvt_send_complete(struct rvt_qp * qp,struct rvt_swqe * wqe,enum ib_wc_status status) rvt_send_complete() argument
2906 struct rvt_swqe *wqe; rvt_ruc_loopback() local
[all...]
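
rvt_get_swqe_ptr() shows up throughout the rdmavt and hfi1 entries; rdmavt send WQEs are variable-sized (a header followed by up to the QP's maximum number of SGEs), so slot n is found by multiplying by a per-QP stride rather than by plain array indexing. A hedged sketch with stand-in types:

#include <stdint.h>
#include <stddef.h>

struct demo_sge  { uint64_t addr; uint32_t length; uint32_t lkey; };

struct demo_swqe {
	uint64_t wr_id;
	uint32_t opcode;
	uint32_t num_sge;
	struct demo_sge sg_list[];   /* up to s_max_sge entries follow */
};

/* Index into a queue of variable-sized WQEs: stride = header + max SGEs. */
static struct demo_swqe *demo_get_swqe_ptr(void *s_wq, uint32_t s_max_sge,
					   uint32_t n)
{
	size_t stride = sizeof(struct demo_swqe) +
			s_max_sge * sizeof(struct demo_sge);

	return (struct demo_swqe *)((char *)s_wq + (size_t)n * stride);
}
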
/linux/drivers/net/ethernet/huawei/hinic/
hinic_hw_cmdq.c
54 #define CMDQ_WQE_HEADER(wqe) ((struct hinic_cmdq_header *)(wqe)) argument
177 static void cmdq_prepare_wqe_ctrl(struct hinic_cmdq_wqe *wqe, int wrapped, in cmdq_prepare_wqe_ctrl() argument
234 static void cmdq_set_direct_wqe_data(struct hinic_cmdq_direct_wqe *wqe, in cmdq_set_direct_wqe_data() argument
243 cmdq_set_lcmd_wqe(struct hinic_cmdq_wqe * wqe,enum cmdq_cmd_type cmd_type,struct hinic_cmdq_buf * buf_in,struct hinic_cmdq_buf * buf_out,int wrapped,enum hinic_cmd_ack_type ack_type,enum hinic_mod_type mod,u8 cmd,u16 prod_idx) cmdq_set_lcmd_wqe() argument
271 cmdq_set_direct_wqe(struct hinic_cmdq_wqe * wqe,enum cmdq_cmd_type cmd_type,void * buf_in,u16 in_size,struct hinic_cmdq_buf * buf_out,int wrapped,enum hinic_cmd_ack_type ack_type,enum hinic_mod_type mod,u8 cmd,u16 prod_idx) cmdq_set_direct_wqe() argument
529 clear_wqe_complete_bit(struct hinic_cmdq * cmdq,struct hinic_cmdq_wqe * wqe) clear_wqe_complete_bit() argument
563 cmdq_arm_ceq_handler(struct hinic_cmdq * cmdq,struct hinic_cmdq_wqe * wqe) cmdq_arm_ceq_handler() argument
[all...]
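
The hinic command-queue helpers above pass a "wrapped" flag alongside the WQE. The usual scheme, sketched here with invented fields, is an ownership value that flips every time the producer wraps the ring, so the consumer can tell a freshly written WQE from a stale one; whether hinic encodes it exactly this way is an assumption:

#include <stdint.h>

struct demo_cmdq {
	uint32_t depth;      /* number of WQE slots in the ring            */
	uint32_t prod_idx;   /* running producer counter, not yet masked   */
	int wrapped;         /* ownership value stamped into new WQEs      */
};

/* Claim the next slot; flip the ownership value on ring wrap-around. */
static uint32_t demo_cmdq_advance(struct demo_cmdq *q)
{
	uint32_t slot = q->prod_idx % q->depth;

	q->prod_idx++;
	if (q->prod_idx % q->depth == 0)
		q->wrapped = !q->wrapped;
	return slot;
}
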
hinic_hw_wq.c
73 #define WQE_IN_RANGE(wqe, start, end) \ argument
77 #define WQE_SHADOW_PAGE(wq, wqe) \ argument
874 wqe_shadow(struct hinic_wq * wq,struct hinic_hw_wqe * wqe) wqe_shadow() argument
888 hinic_write_wqe(struct hinic_wq * wq,struct hinic_hw_wqe * wqe,unsigned int wqe_size) hinic_write_wqe() argument
[all...]
/linux/drivers/infiniband/hw/mthca/
mthca_srq.c
92 static inline int *wqe_to_link(void *wqe) in wqe_to_link() argument
158 void *wqe; in mthca_alloc_srq_buf() local
495 void *wqe; in mthca_tavor_post_srq_recv() local
588 void *wqe; mthca_arbel_post_srq_recv() local
[all...]
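
mthca's wqe_to_link() hints at a classic descriptor-ring trick: SRQ WQEs that are not currently posted form a free list whose "next" index is stored inside the idle WQE itself, so the list costs no extra memory. A hedged sketch; the offset and WQE size below are invented:

#define DEMO_WQE_SIZE 64

/* The free-list link lives inside the otherwise unused descriptor. */
static int *demo_wqe_to_link(void *wqe)
{
	return (int *)((char *)wqe + 8);
}

/* Pop the free-list head: buf is the SRQ buffer, *head the index of the
 * first free WQE (or -1); returns that index and advances *head. */
static int demo_srq_alloc_wqe(void *buf, int *head)
{
	int idx = *head;

	if (idx >= 0)
		*head = *demo_wqe_to_link((char *)buf + idx * DEMO_WQE_SIZE);
	return idx;
}
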
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_txrx.c
74 mlx5e_ktls_build_static_params(struct mlx5e_set_tls_static_params_wqe *wqe, in mlx5e_ktls_build_static_params() argument
117 mlx5e_ktls_build_progress_params(struct mlx5e_set_tls_progress_params_wqe *wqe, in mlx5e_ktls_build_progress_params() argument
/linux/drivers/scsi/lpfc/
lpfc_nvme.c
399 union lpfc_wqe128 *wqe; in lpfc_nvme_gen_req() local
711 struct lpfc_iocbq *wqe, *next_wqe; __lpfc_nvme_ls_abort() local
831 union lpfc_wqe128 *wqe; lpfc_nvme_adj_fcp_sgls() local
1216 union lpfc_wqe128 *wqe = &pwqeq->wqe; lpfc_nvme_prep_io_cmd() local
1342 union lpfc_wqe128 *wqe = &lpfc_ncmd->cur_iocbq.wqe; lpfc_nvme_prep_io_dma() local
2043 union lpfc_wqe128 *wqe; lpfc_get_nvme_buf() local
[all...]
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
183 struct mlx5e_rx_wqe_cyc *wqe; in mlx5e_xsk_alloc_rx_wqes_batched() local
206 struct mlx5e_rx_wqe_cyc *wqe; in mlx5e_xsk_alloc_rx_wqes() local
[all...]
/linux/drivers/net/ethernet/mellanox/mlx5/core/
en_tx.c
357 struct mlx5e_tx_wqe *wqe; in mlx5e_tx_flush() local
429 struct mlx5e_tx_wqe *wqe, u16 pi, bool xmit_more) in mlx5e_sq_xmit_wqe() argument
521 struct mlx5e_tx_wqe *wqe; in mlx5e_tx_mpwqe_session_start() local
676 struct mlx5e_tx_wqe *wqe; mlx5e_xmit() local
977 struct mlx5i_tx_wqe *wqe; mlx5i_sq_xmit() local
[all...]
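
In the mlx5 Ethernet transmit path above, wqe is fetched at a producer index (pi) and sized in 64-byte basic blocks built from 16-byte segments; the producer index then advances by the number of basic blocks the descriptor occupies. A hedged sketch of just that sizing step (constants are the commonly documented mlx5 values; the helper itself is a demo):

#include <stdint.h>

#define DEMO_SEG_SIZE    16u                       /* one WQE segment     */
#define DEMO_BB_SIZE     64u                       /* one WQE basic block */
#define DEMO_SEGS_PER_BB (DEMO_BB_SIZE / DEMO_SEG_SIZE)

/* Number of basic blocks (and thus producer-index steps) a WQE with
 * num_segments segments occupies. */
static uint16_t demo_wqebb_count(uint16_t num_segments)
{
	return (num_segments + DEMO_SEGS_PER_BB - 1) / DEMO_SEGS_PER_BB;
}
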
/linux/drivers/scsi/bnx2fc/
bnx2fc_hwi.c
626 static void bnx2fc_process_unsol_compl(struct bnx2fc_rport *tgt, u16 wqe) in bnx2fc_process_unsol_compl() argument
859 void bnx2fc_process_cq_compl(struct bnx2fc_rport *tgt, u16 wqe, in bnx2fc_process_cq_compl() argument
970 static struct bnx2fc_work *bnx2fc_alloc_work(struct bnx2fc_rport *tgt, u16 wqe, in bnx2fc_alloc_work() argument
991 bnx2fc_pending_work(struct bnx2fc_rport * tgt,unsigned int wqe) bnx2fc_pending_work() argument
1071 u16 wqe; bnx2fc_process_new_cqes() local
[all...]
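
Unlike the other drivers here, bnx2fc passes wqe around as a bare u16 completion word rather than a pointer, and the functions above decode an index plus flag bits out of it. A hedged sketch of that kind of decode; the mask and flag position are invented for the demo, not the driver's layout:

#include <stdint.h>
#include <stdbool.h>

#define DEMO_CQE_INDEX_MASK 0x7fffu
#define DEMO_CQE_TOGGLE_BIT 0x8000u

static uint16_t demo_cqe_index(uint16_t wqe)  { return wqe & DEMO_CQE_INDEX_MASK; }
static bool     demo_cqe_toggle(uint16_t wqe) { return wqe & DEMO_CQE_TOGGLE_BIT; }
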
