Lines Matching full:cqe
656 struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_set_gro_params() argument
658 u16 parsing_flags = le16_to_cpu(cqe->pars_flags.flags); in qede_set_gro_params()
666 skb_shinfo(skb)->gso_size = __le16_to_cpu(cqe->len_on_first_bd) - in qede_set_gro_params()
667 cqe->header_len; in qede_set_gro_params()
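
The two gso_size lines above subtract the parsed header length from the little-endian length carried by the first BD of the aggregation. A minimal userspace sketch of that arithmetic, assuming a simplified stand-in struct (the real layout comes from the qed/qede firmware headers) and using le16toh() in place of the kernel's __le16_to_cpu():

/* Hedged sketch: userspace model of the gso_size arithmetic above.
 * The struct is a simplified stand-in for the firmware's TPA-start CQE,
 * and le16toh() stands in for the kernel's __le16_to_cpu().
 */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

struct tpa_start_cqe_model {
	uint16_t len_on_first_bd;	/* little-endian, as written by the device */
	uint8_t  header_len;		/* parsed header length */
};

static unsigned int gso_size_from_start_cqe(const struct tpa_start_cqe_model *cqe)
{
	return le16toh(cqe->len_on_first_bd) - cqe->header_len;
}

int main(void)
{
	struct tpa_start_cqe_model cqe = {
		.len_on_first_bd = htole16(1514),
		.header_len = 66,	/* e.g. Ethernet + IPv4 + TCP with options */
	};

	printf("gso_size = %u\n", gso_size_from_start_cqe(&cqe));
	return 0;
}
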
836 struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_tpa_start() argument
838 struct qede_agg_info *tpa_info = &rxq->tpa_info[cqe->tpa_agg_index]; in qede_tpa_start()
843 pad = cqe->placement_offset + rxq->rx_headroom; in qede_tpa_start()
846 le16_to_cpu(cqe->len_on_first_bd), in qede_tpa_start()
867 if ((le16_to_cpu(cqe->pars_flags.flags) >> in qede_tpa_start()
870 tpa_info->vlan_tag = le16_to_cpu(cqe->vlan_tag); in qede_tpa_start()
874 qede_get_rxhash(tpa_info->skb, cqe->bitfields, cqe->rss_hash); in qede_tpa_start()
877 qede_set_gro_params(edev, tpa_info->skb, cqe); in qede_tpa_start()
880 if (likely(cqe->bw_ext_bd_len_list[0])) in qede_tpa_start()
881 qede_fill_frag_skb(edev, rxq, cqe->tpa_agg_index, in qede_tpa_start()
882 le16_to_cpu(cqe->bw_ext_bd_len_list[0])); in qede_tpa_start()
884 if (unlikely(cqe->bw_ext_bd_len_list[1])) { in qede_tpa_start()
962 struct eth_fast_path_rx_tpa_cont_cqe *cqe) in qede_tpa_cont() argument
966 for (i = 0; cqe->len_list[i]; i++) in qede_tpa_cont()
967 qede_fill_frag_skb(edev, rxq, cqe->tpa_agg_index, in qede_tpa_cont()
968 le16_to_cpu(cqe->len_list[i])); in qede_tpa_cont()
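
qede_tpa_cont() walks a zero-terminated list of little-endian buffer lengths and appends one fragment per entry via qede_fill_frag_skb(). A small sketch of that loop, assuming an illustrative list size and a hypothetical add_frag() helper in place of the driver's real symbols:

/* Hedged sketch: walking a zero-terminated, little-endian length list the way
 * the TPA-continuation handler above does. The list size and add_frag() are
 * illustrative stand-ins, not the driver's real symbols.
 */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

#define LEN_LIST_SIZE 6		/* assumed list size for the sketch */

static void add_frag(unsigned int agg_index, unsigned int len)
{
	printf("agg %u: appended fragment of %u bytes\n", agg_index, len);
}

static void handle_tpa_cont(unsigned int agg_index,
			    const uint16_t len_list[LEN_LIST_SIZE])
{
	/* A zero entry terminates the list early, as in the driver's loop. */
	for (int i = 0; i < LEN_LIST_SIZE && len_list[i]; i++)
		add_frag(agg_index, le16toh(len_list[i]));
}

int main(void)
{
	uint16_t len_list[LEN_LIST_SIZE] = { htole16(4096), htole16(4096), 0 };

	handle_tpa_cont(3, len_list);
	return 0;
}
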
977 struct eth_fast_path_rx_tpa_end_cqe *cqe) in qede_tpa_end() argument
984 tpa_info = &rxq->tpa_info[cqe->tpa_agg_index]; in qede_tpa_end()
991 for (i = 0; cqe->len_list[i]; i++) in qede_tpa_end()
992 qede_fill_frag_skb(edev, rxq, cqe->tpa_agg_index, in qede_tpa_end()
993 le16_to_cpu(cqe->len_list[i])); in qede_tpa_end()
1002 if (unlikely(cqe->num_of_bds != tpa_info->frag_id + 1)) in qede_tpa_end()
1005 cqe->num_of_bds, tpa_info->frag_id); in qede_tpa_end()
1006 if (unlikely(skb->len != le16_to_cpu(cqe->total_packet_len))) in qede_tpa_end()
1008 "Strange - total packet len [cqe] is %4x but SKB has len %04x\n", in qede_tpa_end()
1009 le16_to_cpu(cqe->total_packet_len), skb->len); in qede_tpa_end()
1018 NAPI_GRO_CB(skb)->count = le16_to_cpu(cqe->num_of_coalesced_segs); in qede_tpa_end()
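
At the end of an aggregation the driver replays any remaining length-list entries, then cross-checks the CQE against the skb it built: num_of_bds must equal the fragment count plus the first BD, total_packet_len must equal skb->len, and num_of_coalesced_segs is handed to NAPI_GRO_CB()->count. A sketch of those consistency checks, with stand-in field and function names rather than the driver's real API:

/* Hedged sketch: the end-of-aggregation consistency checks in plain C.
 * Field and function names are illustrative, not the driver's real API.
 */
#include <endian.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct tpa_end_model {
	uint8_t  num_of_bds;		/* BDs used, including the first one */
	uint16_t total_packet_len;	/* little-endian */
	uint16_t num_of_coalesced_segs;	/* little-endian, handed to GRO */
};

static bool tpa_end_sane(const struct tpa_end_model *cqe,
			 unsigned int frag_id, unsigned int skb_len)
{
	/* frag_id counts fragments appended after the first BD */
	if (cqe->num_of_bds != frag_id + 1) {
		fprintf(stderr, "BD count %u != frags %u + 1\n",
			(unsigned int)cqe->num_of_bds, frag_id);
		return false;
	}
	if (skb_len != le16toh(cqe->total_packet_len)) {
		fprintf(stderr, "CQE total len %u != skb len %u\n",
			(unsigned int)le16toh(cqe->total_packet_len), skb_len);
		return false;
	}
	return true;
}

int main(void)
{
	struct tpa_end_model cqe = {
		.num_of_bds = 3,
		.total_packet_len = htole16(9000),
		.num_of_coalesced_segs = htole16(7),
	};

	printf("sane: %d, coalesced segs for GRO: %u\n",
	       tpa_end_sane(&cqe, 2, 9000),
	       (unsigned int)le16toh(cqe.num_of_coalesced_segs));
	return 0;
}
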
1067 static bool qede_pkt_is_ip_fragmented(struct eth_fast_path_rx_reg_cqe *cqe, in qede_pkt_is_ip_fragmented() argument
1070 u8 tun_pars_flg = cqe->tunnel_pars_flags.flags; in qede_pkt_is_ip_fragmented()
1087 struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_xdp() argument
1175 qede_recycle_rx_bd_ring(rxq, cqe->bd_num); in qede_rx_xdp()
1184 struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_build_jumbo() argument
1187 u16 pkt_len = le16_to_cpu(cqe->pkt_len); in qede_rx_build_jumbo()
1195 for (num_frags = cqe->bd_num - 1; num_frags > 0; num_frags--) { in qede_rx_build_jumbo()
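
qede_rx_build_jumbo() takes the total pkt_len from a regular CQE and consumes the remaining bd_num - 1 buffers that belong to the same packet. The sketch below models only the length accounting; the per-BD buffer size and helper are assumptions for illustration, not the driver's real values:

/* Hedged sketch: length accounting for a jumbo packet described by one CQE.
 * RX_BUF_SIZE and the printed "frag" bookkeeping are assumptions.
 */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

#define RX_BUF_SIZE 4096u	/* assumed per-BD buffer size */

static void build_jumbo(unsigned int bd_num, uint16_t pkt_len_le,
			unsigned int first_bd_len)
{
	unsigned int remaining = le16toh(pkt_len_le) - first_bd_len;

	/* The first BD was already consumed; account for the other bd_num - 1. */
	for (unsigned int frag = 0; frag < bd_num - 1; frag++) {
		unsigned int cur = remaining < RX_BUF_SIZE ? remaining : RX_BUF_SIZE;

		printf("frag %u: %u bytes\n", frag, cur);
		remaining -= cur;
	}
	printf("bytes left unaccounted: %u\n", remaining);
}

int main(void)
{
	build_jumbo(3, htole16(9014), 4096);
	return 0;
}
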
1241 union eth_rx_cqe *cqe, in qede_rx_process_tpa_cqe() argument
1246 qede_tpa_start(edev, rxq, &cqe->fast_path_tpa_start); in qede_rx_process_tpa_cqe()
1249 qede_tpa_cont(edev, rxq, &cqe->fast_path_tpa_cont); in qede_rx_process_tpa_cqe()
1252 return qede_tpa_end(edev, fp, &cqe->fast_path_tpa_end); in qede_rx_process_tpa_cqe()
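
qede_rx_process_tpa_cqe() is a pure dispatcher: it routes the CQE to the start, continuation or end handler according to the CQE type, and only the TPA-end line returns the handler's packet count. A compact sketch of that dispatch, with model enum values in place of the real HSI constants:

/* Hedged sketch: dispatching on the TPA CQE type, as the helper above does.
 * The enum values and messages are stand-ins for the real HSI constants.
 */
#include <stdio.h>

enum rx_cqe_type_model { CQE_REGULAR, CQE_TPA_START, CQE_TPA_CONT, CQE_TPA_END };

static int process_tpa_cqe(enum rx_cqe_type_model type)
{
	switch (type) {
	case CQE_TPA_START:
		puts("open a new aggregation");
		return 0;		/* nothing delivered yet */
	case CQE_TPA_CONT:
		puts("append buffers to the open aggregation");
		return 0;
	case CQE_TPA_END:
		puts("close the aggregation, hand the skb to GRO");
		return 1;		/* one packet delivered */
	default:
		return 0;
	}
}

int main(void)
{
	int rx_pkt = 0;

	rx_pkt += process_tpa_cqe(CQE_TPA_START);
	rx_pkt += process_tpa_cqe(CQE_TPA_CONT);
	rx_pkt += process_tpa_cqe(CQE_TPA_END);
	printf("packets delivered: %d\n", rx_pkt);
	return 0;
}
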
1266 union eth_rx_cqe *cqe; in qede_rx_process_cqe() local
1272 /* Get the CQE from the completion ring */ in qede_rx_process_cqe()
1273 cqe = (union eth_rx_cqe *)qed_chain_consume(&rxq->rx_comp_ring); in qede_rx_process_cqe()
1274 cqe_type = cqe->fast_path_regular.type; in qede_rx_process_cqe()
1280 sp_cqe = (struct eth_slow_path_rx_cqe *)cqe; in qede_rx_process_cqe()
1287 return qede_rx_process_tpa_cqe(edev, fp, rxq, cqe, cqe_type); in qede_rx_process_cqe()
1295 fp_cqe = &cqe->fast_path_regular; in qede_rx_process_cqe()
1306 flags = cqe->fast_path_regular.pars_flags.flags; in qede_rx_process_cqe()
1328 * by a single cqe. in qede_rx_process_cqe()
1346 qede_ptp_record_rx_ts(edev, cqe, skb); in qede_rx_process_cqe()
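
qede_rx_process_cqe() consumes one entry from the completion ring, reads the type through the fast_path_regular view of the union, and branches: slow-path CQEs are handed back to the core driver, TPA CQEs go to the aggregation handlers above, and regular CQEs are parsed (pars_flags, optional PTP timestamp) and turned into an skb. The skeleton below is a userspace model; the ring, union layout and type values are simplified stand-ins, not the qed chain API or the firmware CQE format:

/* Hedged sketch: consume one ring entry, read its type, then branch. */
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8

enum cqe_type_model { CQE_REG, CQE_SLOW, CQE_TPA };

union rx_cqe_model {
	struct { uint8_t type; uint16_t pkt_len; } fast_path_regular;
	struct { uint8_t type; uint8_t ramrod_id; } slow_path;
};

struct comp_ring_model {
	union rx_cqe_model entries[RING_SIZE];
	unsigned int cons;
};

static union rx_cqe_model *ring_consume(struct comp_ring_model *ring)
{
	return &ring->entries[ring->cons++ % RING_SIZE];
}

static int process_one_cqe(struct comp_ring_model *ring)
{
	union rx_cqe_model *cqe = ring_consume(ring);

	switch (cqe->fast_path_regular.type) {
	case CQE_SLOW:
		puts("slow path: hand the CQE back to the core driver");
		return 0;
	case CQE_TPA:
		puts("TPA: route to the start/cont/end handlers");
		return 0;
	default:
		printf("regular: build an skb for %u bytes\n",
		       (unsigned int)cqe->fast_path_regular.pkt_len);
		return 1;
	}
}

int main(void)
{
	struct comp_ring_model ring = {
		.entries = { { .fast_path_regular = { CQE_REG, 1514 } } },
	};

	return process_one_cqe(&ring) == 1 ? 0 : 1;
}
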
1364 /* Memory barrier to prevent the CPU from doing speculative reads of CQE in qede_rx_int()
1365 * / BD in the while-loop before reading hw_comp_cons. If the CQE is in qede_rx_int()
1366 * read before it is written by FW, then FW writes CQE and SB, and then in qede_rx_int()
1367 * the CPU reads the hw_comp_cons, it will use an old CQE. in qede_rx_int()
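
The comment above gives the reasoning for the read barrier in qede_rx_int(): if the CPU reads a CQE before the firmware has written it, and only afterwards reads the updated hw_comp_cons, it will pair the new index with stale CQE contents. The sketch below models the same "index first, fence, then entries" rule with C11 atomics in userspace; the acquire fence stands in for the kernel's rmb(), and everything else is a stand-in:

/* Hedged sketch: read the completion index, fence, then read ring entries.
 * atomic_thread_fence(memory_order_acquire) plays the role of rmb().
 */
#include <stdatomic.h>
#include <stdio.h>

#define RING_SIZE 8

static unsigned int ring[RING_SIZE];		/* producer writes CQE payloads here */
static _Atomic unsigned int hw_comp_cons;	/* producer publishes this last */

static void consumer_poll(unsigned int *sw_comp_cons)
{
	/* 1. Snapshot the completion index the producer published. */
	unsigned int hw_cons = atomic_load_explicit(&hw_comp_cons,
						    memory_order_relaxed);

	/* 2. Fence: reads of ring entries below must be ordered after the
	 *    index load above. This is the job rmb() does in qede_rx_int().
	 */
	atomic_thread_fence(memory_order_acquire);

	/* 3. Only now walk the entries the index says are valid. */
	while (*sw_comp_cons != hw_cons) {
		printf("cqe[%u] = %u\n", *sw_comp_cons,
		       ring[*sw_comp_cons % RING_SIZE]);
		(*sw_comp_cons)++;
	}
}

int main(void)
{
	unsigned int sw_comp_cons = 0;

	/* Producer side (the firmware, in the real driver): data first, index last. */
	ring[0] = 1514;
	atomic_thread_fence(memory_order_release);
	atomic_store_explicit(&hw_comp_cons, 1, memory_order_relaxed);

	consumer_poll(&sw_comp_cons);
	return 0;
}
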