Home
last modified time | relevance | path

Searched refs: sc_dev (Results 1 – 10 of 10) sorted by relevance

/linux/drivers/infiniband/hw/irdma/
H A Dhw.c79 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_puda_ce_handler()
176 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_process_ceq()
259 irdma_sc_cqp_def_cmpl_ae_handler(&rf->sc_dev, info, true, in irdma_process_ae_def_cmpl()
266 irdma_sc_cqp_def_cmpl_ae_handler(&rf->sc_dev, info, false, in irdma_process_ae_def_cmpl()
277 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_process_aeq()
543 irdma_ena_intr(&rf->sc_dev, rf->iw_msixtbl[0].idx); in irdma_dpc()
556 irdma_ena_intr(&rf->sc_dev, iwceq->msix_idx); in irdma_ceq_dpc()
639 ibdev_err(to_ibdev(&iwceq->rf->sc_dev), "expected irq = %d received irq = %d\n", in irdma_ceq_handler()
657 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_destroy_irq()
680 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_destroy_cqp()
[all …]
H A Dig3rdma_if.c15 rf->sc_dev.vchnl_up = false; in ig3rdma_idc_core_event_handler()
49 mutex_init(&rf->sc_dev.vchnl_mutex); in ig3rdma_vchnl_init()
55 ret = irdma_sc_vchnl_init(&rf->sc_dev, &virt_info); in ig3rdma_vchnl_init()
58 mutex_destroy(&rf->sc_dev.vchnl_mutex); in ig3rdma_vchnl_init()
62 *rdma_ver = rf->sc_dev.hw_attrs.uk_attrs.hw_rev; in ig3rdma_vchnl_init()
129 mutex_destroy(&rf->sc_dev.vchnl_mutex); in ig3rdma_decfg_rf()
140 rf->sc_dev.hw = &rf->hw; in ig3rdma_cfg_rf()
154 mutex_destroy(&rf->sc_dev.vchnl_mutex); in ig3rdma_cfg_rf()
H A Dicrdma_if.c55 irdma_log_invalid_mtu(l2params.mtu, &iwdev->rf->sc_dev); in icrdma_iidc_event_handler()
83 pe_criterr = readl(iwdev->rf->sc_dev.hw_regs[IRDMA_GLPE_CRITERR]); in icrdma_iidc_event_handler()
207 rf->sc_dev.hw = &rf->hw; in icrdma_fill_device_info()
215 rf->sc_dev.hw_attrs.uk_attrs.hw_rev = IRDMA_GEN_2; in icrdma_fill_device_info()
216 rf->sc_dev.is_pf = true; in icrdma_fill_device_info()
217 rf->sc_dev.privileged = true; in icrdma_fill_device_info()
H A Dverbs.c18 struct irdma_hw_attrs *hw_attrs = &rf->sc_dev.hw_attrs; in irdma_query_device()
26 props->fw_ver = (u64)irdma_fw_major_ver(&rf->sc_dev) << 32 | in irdma_query_device()
27 irdma_fw_minor_ver(&rf->sc_dev); in irdma_query_device()
120 props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size; in irdma_query_port()
142 pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] + in irdma_mmap_legacy()
268 iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) { in irdma_alloc_push_page()
294 struct irdma_uk_attrs *uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs; in irdma_alloc_ucontext()
323 uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds; in irdma_alloc_ucontext()
324 uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2; in irdma_alloc_ucontext()
330 u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_alloc_ucontext()
[all …]
H A Di40iw_if.c78 rf->sc_dev.hw = &rf->hw; in i40iw_fill_device_info()
79 rf->sc_dev.hw_attrs.uk_attrs.hw_rev = IRDMA_GEN_1; in i40iw_fill_device_info()
80 rf->sc_dev.privileged = true; in i40iw_fill_device_info()
H A Dutils.c549 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_cleanup_pending_cqp_op()
605 int timeout_threshold = irdma_get_timeout_threshold(&rf->sc_dev); in irdma_wait_event()
609 cqp_timeout.compl_cqp_cmds = atomic64_read(&rf->sc_dev.cqp->completed_ops); in irdma_wait_event()
623 irdma_get_def_timeout_threshold(&rf->sc_dev); in irdma_wait_event()
625 irdma_check_cqp_progress(&cqp_timeout, &rf->sc_dev); in irdma_wait_event()
749 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_handle_cqp_op()
841 return &(container_of(dev, struct irdma_pci_f, sc_dev))->iwdev->ibdev; in to_ibdev()
1127 if (rf->sc_dev.hw_attrs.uk_attrs.hw_rev < IRDMA_GEN_3) in irdma_free_gsi_qp_rsrc()
1130 irdma_vchnl_req_del_vport(&rf->sc_dev, iwdev->vport_id, qp_num); in irdma_free_gsi_qp_rsrc()
1165 dma_free_coherent(rf->sc_dev.hw->device, iwqp->q2_ctx_mem.size, in irdma_free_qp_rsrc()
[all …]
H A Dmain.c61 irdma_log_invalid_mtu(l2params.mtu, &iwdev->rf->sc_dev); in ig3rdma_idc_vport_event_handler()
H A Dmain.h312 struct irdma_sc_dev sc_dev; member
426 return container_of(dev, struct irdma_pci_f, sc_dev); in dev_to_rf()
H A Dcm.c2206 if (irdma_puda_create_ah(&iwdev->rf->sc_dev, &ah_info, wait, in irdma_cm_create_ah()
2224 irdma_puda_free_ah(&iwdev->rf->sc_dev, cm_node->ah); in irdma_cm_free_ah()
2287 cm_node->dev = &iwdev->rf->sc_dev; in irdma_make_cm_node()
3158 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) { in irdma_receive_ilq()
3270 cm_core->dev = &iwdev->rf->sc_dev; in irdma_setup_cm_core()
3628 dma_free_coherent(iwdev->rf->sc_dev.hw->device, in irdma_free_lsmm_rsrc()
3663 dev = &iwdev->rf->sc_dev; in irdma_accept()
4153 dev = &iwdev->rf->sc_dev; in irdma_cm_event_connected()
/linux/drivers/platform/x86/lenovo/
H A Dwmi-capdata.c421 static int lwmi_cd_sub_component_bind(struct device *sc_dev, in lwmi_cd_sub_component_bind() argument
424 struct lwmi_cd_priv *priv = dev_get_drvdata(sc_dev); in lwmi_cd_sub_component_bind()