/linux/drivers/net/ethernet/microsoft/mana/
  hw_channel.c
    284  enum gdma_queue_type type, u64 queue_size,    (in mana_hwc_create_gdma_wq(), argument)
    294  spec.queue_size = queue_size;    (in mana_hwc_create_gdma_wq())
    300  u64 queue_size,    (in mana_hwc_create_gdma_cq(), argument)
    309  spec.queue_size = queue_size;    (in mana_hwc_create_gdma_cq())
    318  u64 queue_size,    (in mana_hwc_create_gdma_eq(), argument)
    326  spec.queue_size = queue_size;    (in mana_hwc_create_gdma_eq())
    517  u32 queue_size;    (in mana_hwc_create_wq(), local)
    523  queue_size = roundup_pow_of_two(GDMA_MAX_RQE_SIZE * q_depth);    (in mana_hwc_create_wq())
    525  queue_size = roundup_pow_of_two(GDMA_MAX_SQE_SIZE * q_depth);    (in mana_hwc_create_wq())
    527  if (queue_size < MANA_MIN_QSIZE)    (in mana_hwc_create_wq())
    [all …]
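
A minimal sketch of the sizing pattern these hw_channel.c hits show: the work-queue size is
derived from the queue depth and a per-WQE footprint, rounded up to the next power of two,
and clamped to a minimum. The constants and helpers below are illustrative stand-ins, not
the driver's definitions.

    #include <stdint.h>

    #define EXAMPLE_MAX_WQE_SIZE 512u   /* stand-in for GDMA_MAX_SQE_SIZE / GDMA_MAX_RQE_SIZE */
    #define EXAMPLE_MIN_QSIZE    4096u  /* stand-in for MANA_MIN_QSIZE */

    /* Round v up to the next power of two (v > 0), like roundup_pow_of_two(). */
    static uint32_t example_roundup_pow_of_two(uint32_t v)
    {
            v--;
            v |= v >> 1;
            v |= v >> 2;
            v |= v >> 4;
            v |= v >> 8;
            v |= v >> 16;
            return v + 1;
    }

    /* Size a queue for q_depth entries, mirroring mana_hwc_create_wq(). */
    static uint32_t example_hwc_wq_size(uint32_t q_depth)
    {
            uint32_t queue_size = example_roundup_pow_of_two(EXAMPLE_MAX_WQE_SIZE * q_depth);

            if (queue_size < EXAMPLE_MIN_QSIZE)
                    queue_size = EXAMPLE_MIN_QSIZE;
            return queue_size;
    }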

  gdma_main.c
    265  req.queue_size = queue->queue_size;    (in mana_gd_create_hw_eq())
    379  u32 num_cqe = cq->queue_size / GDMA_CQE_SIZE;    (in mana_gd_ring_cq())
    491  u32 head = eq->head % (eq->queue_size / GDMA_EQE_SIZE);    (in mana_gd_process_eqe())
    586  num_eqe = eq->queue_size / GDMA_EQE_SIZE;    (in mana_gd_process_eq_events())
    772  log2_num_entries = ilog2(queue->queue_size / GDMA_EQE_SIZE);    (in mana_gd_create_eq())
    811  u32 log2_num_entries = ilog2(spec->queue_size / GDMA_CQE_SIZE);    (in mana_gd_create_cq())
    847  err = mana_gd_alloc_memory(gc, spec->queue_size, gmi);    (in mana_gd_create_hwc_queue())
    850  spec->type, spec->queue_size, err);    (in mana_gd_create_hwc_queue())
    857  queue->queue_size = spec->queue_size;    (in mana_gd_create_hwc_queue())
    874  spec->type, spec->queue_size, err);    (in mana_gd_create_hwc_queue())
    [all …]
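
The gdma_main.c hits treat queue_size as a byte count: dividing by the fixed element size
gives the number of entries, ilog2 of that count is what gets reported to hardware, and the
running head counter is wrapped onto a ring slot with a modulo. A hedged sketch of that
arithmetic with an illustrative element size:

    #include <stdint.h>

    #define EXAMPLE_EQE_SIZE 16u        /* illustrative stand-in for GDMA_EQE_SIZE */

    /* floor(log2(v)) for v > 0, standing in for the kernel's ilog2(). */
    static unsigned int example_ilog2(uint32_t v)
    {
            unsigned int r = 0;

            while (v >>= 1)
                    r++;
            return r;
    }

    /* log2 of the entry count derived from the byte size, as in mana_gd_create_eq(). */
    static unsigned int example_log2_num_entries(uint32_t queue_size)
    {
            return example_ilog2(queue_size / EXAMPLE_EQE_SIZE);
    }

    /* Wrap a running head counter onto a ring slot, as mana_gd_process_eqe() does. */
    static uint32_t example_eq_slot(uint32_t head, uint32_t queue_size)
    {
            return head % (queue_size / EXAMPLE_EQE_SIZE);
    }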

/linux/drivers/vdpa/ifcvf/
  ifcvf_base.c
    74   u16 queue_size;    (in ifcvf_get_vq_size(), local)
    80   queue_size = vp_ioread16(&hw->common_cfg->queue_size);    (in ifcvf_get_vq_size())
    82   return queue_size;    (in ifcvf_get_vq_size())
    87   u16 queue_size, max_size, qid;    (in ifcvf_get_max_vq_size(), local)
    91   queue_size = ifcvf_get_vq_size(hw, qid);    (in ifcvf_get_max_vq_size())
    93   if (!queue_size)    (in ifcvf_get_max_vq_size())
    96   max_size = max(queue_size, max_size);    (in ifcvf_get_max_vq_size())
    352  vp_iowrite16(num, &cfg->queue_size);    (in ifcvf_set_vq_num())
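
The ifcvf hits read a 16-bit queue_size out of the virtio-pci common configuration for each
virtqueue and keep the largest value seen. A sketch of that scan, with a hypothetical
callback standing in for selecting the queue and reading common_cfg->queue_size (the
driver's handling of a zero size is elided in the hits, so skipping such queues is an
assumption here):

    #include <stdint.h>

    /* Hypothetical accessor: size of virtqueue qid, 0 if it does not exist. */
    typedef uint16_t (*example_vq_size_fn)(void *dev, uint16_t qid);

    static uint16_t example_max_vq_size(void *dev, uint16_t nr_vring,
                                        example_vq_size_fn get_size)
    {
            uint16_t max_size = 0;
            uint16_t qid;

            for (qid = 0; qid < nr_vring; qid++) {
                    uint16_t queue_size = get_size(dev, qid);

                    if (!queue_size)
                            continue;   /* assumption: ignore absent queues */
                    if (queue_size > max_size)
                            max_size = queue_size;
            }
            return max_size;
    }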

/linux/drivers/staging/media/ipu7/
  ipu7-boot.c
    252  u32 queue_size = qconfigs[i].max_capacity *    (in ipu7_boot_init_boot_config(), local)
    255  queue_size = ALIGN(queue_size, 64U);    (in ipu7_boot_init_boot_config())
    256  total_queue_size_aligned += queue_size;    (in ipu7_boot_init_boot_config())
    257  qconfigs[i].queue_size = queue_size;    (in ipu7_boot_init_boot_config())
    279  queue_mem_dma_ptr += qconfigs[i].queue_size;    (in ipu7_boot_init_boot_config())
    280  queue_mem_ptr += qconfigs[i].queue_size;    (in ipu7_boot_init_boot_config())
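
The ipu7-boot.c hits size each firmware queue from its capacity and token size, align the
result to 64 bytes, and accumulate a total before carving the individual queues out of one
DMA buffer. A simplified sketch (structure and field names are illustrative):

    #include <stddef.h>
    #include <stdint.h>

    #define EXAMPLE_QUEUE_ALIGN 64u

    struct example_qconfig {
            uint32_t max_capacity;
            uint32_t token_size;
            uint32_t queue_size;        /* filled in below */
    };

    /* Align v up to the power-of-two boundary a, like the kernel's ALIGN(). */
    static uint32_t example_align(uint32_t v, uint32_t a)
    {
            return (v + a - 1) & ~(a - 1);
    }

    static uint32_t example_total_queue_mem(struct example_qconfig *q, size_t nr)
    {
            uint32_t total = 0;
            size_t i;

            for (i = 0; i < nr; i++) {
                    uint32_t queue_size = q[i].max_capacity * q[i].token_size;

                    queue_size = example_align(queue_size, EXAMPLE_QUEUE_ALIGN);
                    q[i].queue_size = queue_size;
                    total += queue_size;
            }
            return total;
    }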

  ipu7-syscom.h
    16  u32 queue_size;    (member)

/linux/drivers/media/platform/qcom/iris/
  iris_hfi_queue.c
    250  u32 queue_size;    (in iris_hfi_queues_init(), local)
    253  queue_size = ALIGN((sizeof(*q_tbl_hdr) + (IFACEQ_QUEUE_SIZE * IFACEQ_NUMQ)), SZ_4K);    (in iris_hfi_queues_init())
    254  core->iface_q_table_vaddr = dma_alloc_attrs(core->dev, queue_size,    (in iris_hfi_queues_init())
    295  u32 queue_size;    (in iris_hfi_queues_deinit(), local)
    310  queue_size = ALIGN(sizeof(struct iris_hfi_queue_table_header) +    (in iris_hfi_queues_deinit())
    313  dma_free_attrs(core->dev, queue_size, core->iface_q_table_vaddr,    (in iris_hfi_queues_deinit())

  iris_vpu_common.c
    86  u32 queue_size, value;    (in iris_vpu_setup_ucregion_memory_map(), local)
    89  queue_size = ALIGN(sizeof(struct iris_hfi_queue_table_header) +    (in iris_vpu_setup_ucregion_memory_map())
    96  value = ALIGN(SFR_SIZE + queue_size, SZ_1M);    (in iris_vpu_setup_ucregion_memory_map())

/linux/drivers/firmware/tegra/
  ivc.c
    562  unsigned tegra_ivc_total_queue_size(unsigned queue_size)    (in tegra_ivc_total_queue_size(), argument)
    564  if (!IS_ALIGNED(queue_size, TEGRA_IVC_ALIGN)) {    (in tegra_ivc_total_queue_size())
    566  __func__, queue_size, TEGRA_IVC_ALIGN);    (in tegra_ivc_total_queue_size())
    570  return queue_size + sizeof(struct tegra_ivc_header);    (in tegra_ivc_total_queue_size())
    652  size_t queue_size;    (in tegra_ivc_init(), local)
    670  queue_size = tegra_ivc_total_queue_size(num_frames * frame_size);    (in tegra_ivc_init())
    673  ivc->rx.phys = dma_map_single(peer, iosys_map_get_vaddr(rx), queue_size,    (in tegra_ivc_init())
    678  ivc->tx.phys = dma_map_single(peer, iosys_map_get_vaddr(tx), queue_size,    (in tegra_ivc_init())
    681  dma_unmap_single(peer, ivc->rx.phys, queue_size,    (in tegra_ivc_init())
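
tegra_ivc_total_queue_size() adds the fixed IVC header to the frame payload area and logs an
error if the payload size is not aligned; the result is then used to DMA-map both the RX and
TX queues. A minimal sketch with placeholder header size and alignment (the real values come
from struct tegra_ivc_header and TEGRA_IVC_ALIGN):

    #include <stddef.h>

    #define EXAMPLE_IVC_ALIGN       64u     /* stand-in for TEGRA_IVC_ALIGN */
    #define EXAMPLE_IVC_HEADER_SIZE 128u    /* stand-in for sizeof(struct tegra_ivc_header) */

    static size_t example_total_queue_size(size_t queue_size)
    {
            /* The driver prints an error on misalignment; returning 0 here is an
             * illustrative choice, the error path is truncated in the listing. */
            if (queue_size & (EXAMPLE_IVC_ALIGN - 1))
                    return 0;
            return queue_size + EXAMPLE_IVC_HEADER_SIZE;
    }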

  bpmp-tegra186.c
    112  size_t message_size, queue_size;    (in tegra186_bpmp_channel_init(), local)
    123  queue_size = tegra_ivc_total_queue_size(message_size);    (in tegra186_bpmp_channel_init())
    124  offset = queue_size * index;    (in tegra186_bpmp_channel_init())

/linux/sound/firewire/
  amdtp-stream.c
    668   if (++s->packet_index >= s->queue_size)    (in queue_packet())
    928   unsigned int queue_size)    (in compute_ohci_it_cycle(), argument)
    931   return increment_ohci_cycle_count(cycle, queue_size);    (in compute_ohci_it_cycle())
    941   unsigned int queue_size = s->queue_size;    (in generate_tx_packet_descs(), local)
    1010  packet_index = (packet_index + 1) % queue_size;    (in generate_tx_packet_descs())
    1043  unsigned int index = (s->packet_index + i) % s->queue_size;    (in generate_rx_packet_descs())
    1046  desc->cycle = compute_ohci_it_cycle(*ctx_header, s->queue_size);    (in generate_rx_packet_descs())
    1257  cycle = compute_ohci_it_cycle(ctx_header[packets - 1], s->queue_size);    (in skip_rx_packets())
    1283  const unsigned int queue_size = s->queue_size;    (in process_rx_packets_intermediately(), local)
    1294  unsigned int cycle = compute_ohci_it_cycle(ctx_header[offset], queue_size);    (in process_rx_packets_intermediately())
    [all …]
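
The amdtp-stream.c hits step packet and descriptor indices around a ring of queue_size
packets (a packet count here, not a byte count), wrapping with a modulo. A small sketch of
that index arithmetic; the OHCI cycle bookkeeping done by compute_ohci_it_cycle() is left
out:

    /* Advance a ring index by one slot, wrapping at queue_size packets. */
    static unsigned int example_next_packet_index(unsigned int index,
                                                  unsigned int queue_size)
    {
            return (index + 1) % queue_size;
    }

    /* Offset a base index by i slots, as the RX descriptor loop does. */
    static unsigned int example_packet_index_at(unsigned int base, unsigned int i,
                                                unsigned int queue_size)
    {
            return (base + i) % queue_size;
    }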

/linux/drivers/gpu/drm/amd/amdgpu/
  mes_userqueue.c
    142  queue_input.queue_size = userq_props->queue_size >> 2;    (in mes_userq_map())
    219  !mqd_user->queue_va || mqd_user->queue_size == 0) {    (in mes_userq_mqd_create())
    234  userq_props->queue_size = mqd_user->queue_size;    (in mes_userq_mqd_create())

  amdgpu_amdkfd_gc_9_4_3.c
    328  uint32_t queue_size =    (in kgd_gfx_v9_4_3_hqd_load(), local)
    331  uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1);    (in kgd_gfx_v9_4_3_hqd_load())
    333  if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr)    (in kgd_gfx_v9_4_3_hqd_load())
    334  guessed_wptr += queue_size;    (in kgd_gfx_v9_4_3_hqd_load())
    335  guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1);    (in kgd_gfx_v9_4_3_hqd_load())

  amdgpu_amdkfd_gfx_v10_3.c
    239  uint32_t queue_size =    (in hqd_load_v10_3(), local)
    242  uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1);    (in hqd_load_v10_3())
    244  if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr)    (in hqd_load_v10_3())
    245  guessed_wptr += queue_size;    (in hqd_load_v10_3())
    246  guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1);    (in hqd_load_v10_3())
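
Both hqd_load hits above (GC 9.4.3 and GFX 10.3) rebuild a 64-bit write-pointer guess from
the saved 32-bit low word and the read pointer: the guess starts from the read pointer's
offset within the queue, is bumped by one full queue if the saved wptr's in-queue offset
sits below that, and then takes the saved wptr's bits above the queue offset. The mask
arithmetic only works when queue_size is a power of two. A self-contained mirror of the
calculation shown in the hits:

    #include <stdint.h>

    /* queue_size must be a power of two for the masks below to be meaningful. */
    static uint64_t example_guess_wptr(uint32_t rptr, uint32_t wptr_lo,
                                       uint32_t queue_size)
    {
            uint64_t guessed_wptr = rptr & (queue_size - 1);

            if ((wptr_lo & (queue_size - 1)) < guessed_wptr)
                    guessed_wptr += queue_size;
            guessed_wptr += wptr_lo & ~(uint64_t)(queue_size - 1);
            return guessed_wptr;
    }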

/linux/drivers/infiniband/hw/mana/
  qp.c
    187  wq_spec.queue_size = wq->wq_buf_size;    (in mana_ib_create_qp_rss())
    190  cq_spec.queue_size = cq->cqe * COMP_ENTRY_SIZE;    (in mana_ib_create_qp_rss())
    338  wq_spec.queue_size = ucmd.sq_buf_size;    (in mana_ib_create_qp_raw())
    341  cq_spec.queue_size = send_cq->cqe * COMP_ENTRY_SIZE;    (in mana_ib_create_qp_raw())
    410  u32 queue_size;    (in mana_ib_queue_size(), local)
    416  queue_size = attr->cap.max_send_wr *    (in mana_ib_queue_size())
    419  queue_size = attr->cap.max_recv_wr *    (in mana_ib_queue_size())
    426  return MANA_PAGE_ALIGN(roundup_pow_of_two(queue_size));    (in mana_ib_queue_size())
    558  err = mana_ib_create_queue(mdev, ucmd.queue_buf[j], ucmd.queue_size[j],    (in mana_ib_create_rc_qp())
    638  u32 doorbell, queue_size;    (in mana_ib_create_ud_qp(), local)
    [all …]
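
mana_ib_queue_size() in the hits above multiplies the requested work-request count by a
per-WQE footprint (the exact factor is truncated in the listing), then rounds the result up
to a power of two and to a page boundary. A hedged sketch with illustrative constants:

    #include <stdint.h>

    #define EXAMPLE_WQE_SIZE  64u       /* illustrative per-WQE footprint */
    #define EXAMPLE_PAGE_SIZE 4096u     /* stand-in for the MANA page size */

    static uint32_t example_ib_queue_size(uint32_t max_wr)
    {
            uint64_t queue_size = (uint64_t)max_wr * EXAMPLE_WQE_SIZE;
            uint64_t rounded = 1;

            /* roundup_pow_of_two() */
            while (rounded < queue_size)
                    rounded <<= 1;

            /* MANA_PAGE_ALIGN() */
            return (uint32_t)((rounded + EXAMPLE_PAGE_SIZE - 1) &
                              ~(uint64_t)(EXAMPLE_PAGE_SIZE - 1));
    }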

/linux/drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/
  rx.c
    123   WARN_ON(rxq->queue_size & (rxq->queue_size - 1));    (in iwl_rxq_space())
    131   return (rxq->read - rxq->write - 1) & (rxq->queue_size - 1);    (in iwl_rxq_space())
    273   rxq->write = (rxq->write + 1) & (rxq->queue_size - 1);    (in iwl_pcie_rxmq_restock())
    686   free_size * rxq->queue_size,    (in iwl_pcie_free_rxq_dma())
    697   rxq->queue_size,    (in iwl_pcie_free_rxq_dma())
    725   rxq->queue_size = iwl_trans_get_num_rbds(trans);    (in iwl_pcie_alloc_rxq_dma())
    727   rxq->queue_size = RX_QUEUE_SIZE;    (in iwl_pcie_alloc_rxq_dma())
    735   rxq->bd = dma_alloc_coherent(dev, free_size * rxq->queue_size,    (in iwl_pcie_alloc_rxq_dma())
    743   rxq->queue_size,    (in iwl_pcie_alloc_rxq_dma())
    1088  int i, err, queue_size, allocator_pool_size, num_alloc;    (in _iwl_pcie_rx_init(), local)
    [all …]
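
The rx.c hits depend on queue_size being a power of two (the WARN_ON at line 123 checks
exactly that), so free-space computation and write-index advancement reduce to mask
operations, with one slot kept empty so that read == write means the ring is empty. A
minimal sketch of the two expressions:

    #include <assert.h>
    #include <stdint.h>

    /* Free slots in a power-of-two ring, as iwl_rxq_space() computes it. */
    static uint32_t example_rxq_space(uint32_t read, uint32_t write, uint32_t queue_size)
    {
            assert((queue_size & (queue_size - 1)) == 0);
            return (read - write - 1) & (queue_size - 1);
    }

    /* Advance the write index by one, wrapping with the same mask. */
    static uint32_t example_rxq_advance_write(uint32_t write, uint32_t queue_size)
    {
            return (write + 1) & (queue_size - 1);
    }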

/linux/kernel/bpf/
  queue_stack_maps.c
    69  u64 size, queue_size;    (in queue_stack_map_alloc(), local)
    72  queue_size = sizeof(*qs) + size * attr->value_size;    (in queue_stack_map_alloc())
    74  qs = bpf_map_area_alloc(queue_size, numa_node);    (in queue_stack_map_alloc())
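
queue_stack_map_alloc() computes the backing allocation as the fixed map header plus
size * value_size, doing the multiplication in u64 so it cannot wrap in 32 bits. A sketch of
that size calculation; the struct layout is illustrative, and the extra slot on top of
max_entries is an assumption based on the usual ring convention of keeping one slot free:

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative stand-in for the fixed part of struct bpf_queue_stack. */
    struct example_queue_stack {
            uint32_t head;
            uint32_t tail;
            uint32_t size;          /* number of slots */
            char elements[];        /* size * value_size bytes follow */
    };

    static uint64_t example_queue_map_alloc_size(uint32_t max_entries, uint32_t value_size)
    {
            uint64_t size = (uint64_t)max_entries + 1;      /* assumed spare slot */

            return sizeof(struct example_queue_stack) + size * value_size;
    }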

/linux/drivers/media/pci/intel/ipu6/
  ipu6-fw-isys.c
    287  input_queue_cfg[i].queue_size = IPU6_ISYS_SIZE_PROXY_SEND_QUEUE;    (in ipu6_isys_fwcom_cfg_init())
    293  input_queue_cfg[base_dev_send + i].queue_size = max_devq_size;    (in ipu6_isys_fwcom_cfg_init())
    299  input_queue_cfg[base_msg_send + i].queue_size =    (in ipu6_isys_fwcom_cfg_init())
    306  output_queue_cfg[i].queue_size =    (in ipu6_isys_fwcom_cfg_init())
    313  output_queue_cfg[base_msg_recv + i].queue_size =    (in ipu6_isys_fwcom_cfg_init())

  ipu6-fw-com.c
    199  sizeinput += size_mul(cfg->input[i].queue_size + 1,    (in ipu6_fw_com_prepare())
    203  sizeoutput += size_mul(cfg->output[i].queue_size + 1,    (in ipu6_fw_com_prepare())
    249  cfg->input[i].queue_size,    (in ipu6_fw_com_prepare())
    258  cfg->output[i].queue_size,    (in ipu6_fw_com_prepare())

/linux/net/xfrm/
  trace_iptfs.h
    76  __field(u32, queue_size)
    84  __entry->queue_size =
    85  xtfs->cfg.max_queue_size - xtfs->queue_size;
    93  __entry->queue_size, __entry->proto, __entry->proto_seq,

/linux/include/linux/
  vmw_vmci_defs.h
    864  u64 queue_size)    (in vmci_q_header_add_producer_tail(), argument)
    866  vmci_qp_add_pointer(&q_header->producer_tail, add, queue_size);    (in vmci_q_header_add_producer_tail())
    876  u64 queue_size)    (in vmci_q_header_add_consumer_head(), argument)
    878  vmci_qp_add_pointer(&q_header->consumer_head, add, queue_size);    (in vmci_q_header_add_consumer_head())
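
Both vmw_vmci_defs.h hits funnel into vmci_qp_add_pointer(), which advances a producer or
consumer offset by a byte count and wraps it at queue_size. A minimal sketch of that
wrap-around add (the real helper updates the header field in place; operating on a plain
value here is a simplification, and it assumes add is smaller than queue_size):

    #include <stddef.h>
    #include <stdint.h>

    static uint64_t example_qp_add_pointer(uint64_t pointer, size_t add, uint64_t queue_size)
    {
            pointer += add;
            if (pointer >= queue_size)
                    pointer -= queue_size;
            return pointer;
    }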

/linux/drivers/nvme/host/
  rdma.c
    87   int queue_size;    (member)
    266  init_attr.cap.max_send_wr = factor * queue->queue_size + 1;    (in nvme_rdma_create_qp())
    268  init_attr.cap.max_recv_wr = queue->queue_size + 1;    (in nvme_rdma_create_qp())
    444  nvme_rdma_free_ring(ibdev, queue->rsp_ring, queue->queue_size,    (in nvme_rdma_destroy_queue_ib())
    505  queue->cq_size = cq_factor * queue->queue_size + 1;    (in nvme_rdma_create_queue_ib())
    515  queue->rsp_ring = nvme_rdma_alloc_ring(ibdev, queue->queue_size,    (in nvme_rdma_create_queue_ib())
    529  queue->queue_size,    (in nvme_rdma_create_queue_ib())
    535  queue->queue_size, nvme_rdma_queue_idx(queue));    (in nvme_rdma_create_queue_ib())
    541  queue->queue_size, IB_MR_TYPE_INTEGRITY,    (in nvme_rdma_create_queue_ib())
    546  queue->queue_size, nvme_rdma_queue_idx(queue));    (in nvme_rdma_create_queue_ib())
    [all …]

/linux/drivers/gpu/drm/amd/amdkfd/
  kfd_mqd_manager_cik.c
    165  uint32_t wptr_mask = (uint32_t)((p->queue_size / 4) - 1);    (in load_mqd())
    191  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    (in __update_mqd())
    230  m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4)    (in update_mqd_sdma())
    351  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    (in update_mqd_hiq())

  kfd_queue.c
    37   pr_debug("Queue Size: %llu\n", q->queue_size);    (in print_queue_properties())
    54   pr_debug("Queue Size: %llu\n", q->properties.queue_size);    (in print_queue())
    250  expected_queue_size = properties->queue_size / 2;    (in kfd_queue_acquire_buffers())
    252  expected_queue_size = properties->queue_size;    (in kfd_queue_acquire_buffers())

/linux/sound/core/
  timer.c
    79    int queue_size;    (member)
    1305  prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1;    (in snd_timer_user_interrupt())
    1312  if (tu->qused >= tu->queue_size) {    (in snd_timer_user_interrupt())
    1316  tu->qtail %= tu->queue_size;    (in snd_timer_user_interrupt())
    1329  if (tu->qused >= tu->queue_size) {    (in snd_timer_user_append_to_tqueue())
    1333  tu->qtail %= tu->queue_size;    (in snd_timer_user_append_to_tqueue())
    1406  prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1;    (in snd_timer_user_tinterrupt())
    1447  tu->queue_size = size;    (in realloc_user_queue())
    1816  if (params.queue_size > 0 &&    (in snd_timer_user_params())
    1817  (params.queue_size < 32 || params.queue_size > 1024)) {    (in snd_timer_user_params())
    [all …]
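
snd_timer's user instance keeps a fixed-capacity event queue: entries are appended at qtail,
the tail wraps modulo queue_size, a qused counter guards against overruns, the interrupt
paths look one slot back from qtail at the most recent entry, and snd_timer_user_params()
rejects user-requested sizes outside 32..1024. A simplified sketch of the append path and
the previous-slot computation (struct and function names are illustrative):

    #include <stdint.h>

    struct example_tqueue {
            unsigned int queue_size;    /* capacity, in events */
            unsigned int qtail;         /* next slot to write */
            unsigned int qused;         /* events currently queued */
            uint64_t *events;           /* illustrative payload */
    };

    /* Append one event; returns -1 when the queue is full (the caller drops
     * the event in that case). */
    static int example_append_event(struct example_tqueue *q, uint64_t ev)
    {
            if (q->qused >= q->queue_size)
                    return -1;
            q->events[q->qtail] = ev;
            q->qtail = (q->qtail + 1) % q->queue_size;
            q->qused++;
            return 0;
    }

    /* Most recently queued slot: one before qtail, wrapping to queue_size - 1. */
    static unsigned int example_prev_index(const struct example_tqueue *q)
    {
            return q->qtail == 0 ? q->queue_size - 1 : q->qtail - 1;
    }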

/linux/drivers/misc/genwqe/
  card_ddcb.c
    1026  unsigned int queue_size;    (in setup_ddcb_queue(), local)
    1032  queue_size = roundup(GENWQE_DDCB_MAX * sizeof(struct ddcb), PAGE_SIZE);    (in setup_ddcb_queue())
    1042  queue->ddcb_vaddr = __genwqe_alloc_consistent(cd, queue_size,    (in setup_ddcb_queue())
    1090  __genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr,    (in setup_ddcb_queue())
    1105  unsigned int queue_size;    (in free_ddcb_queue(), local)
    1107  queue_size = roundup(queue->ddcb_max * sizeof(struct ddcb), PAGE_SIZE);    (in free_ddcb_queue())
    1113  __genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr,    (in free_ddcb_queue())
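
card_ddcb.c sizes the DDCB queue as an element count times sizeof(struct ddcb), rounded up
to a whole page before the coherent DMA allocation, and recomputes the same value when
freeing. A small sketch of that rounding with an illustrative page size:

    #include <stddef.h>

    #define EXAMPLE_PAGE_SIZE 4096u     /* illustrative; the kernel uses PAGE_SIZE */

    /* Round nmemb * elem_size up to a multiple of the page size, as
     * setup_ddcb_queue() does before __genwqe_alloc_consistent(). */
    static size_t example_ddcb_queue_size(size_t nmemb, size_t elem_size)
    {
            size_t bytes = nmemb * elem_size;

            return (bytes + EXAMPLE_PAGE_SIZE - 1) / EXAMPLE_PAGE_SIZE * EXAMPLE_PAGE_SIZE;
    }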