/linux/drivers/gpu/drm/amd/display/dc/spl/ |
dc_spl_scl_easf_filters.c
    2383  unsigned int num_entries)  in spl_easf_get_scale_ratio_to_reg_value() argument
    2389  lookup_table_index_ptr = (lookup_table_base_ptr + num_entries - 1);  in spl_easf_get_scale_ratio_to_reg_value()
    2392  while (count < num_entries) {  in spl_easf_get_scale_ratio_to_reg_value()
    2412  unsigned int num_entries = sizeof(easf_v_bf3_mode_lookup) /  in spl_get_v_bf3_mode() local
    2415  easf_v_bf3_mode_lookup, num_entries);  in spl_get_v_bf3_mode()
    2421  unsigned int num_entries = sizeof(easf_h_bf3_mode_lookup) /  in spl_get_h_bf3_mode() local
    2424  easf_h_bf3_mode_lookup, num_entries);  in spl_get_h_bf3_mode()
    2430  unsigned int num_entries;  in spl_get_reducer_gain6() local
    2433  num_entries = sizeof(easf_reducer_gain6_4tap_lookup) /  in spl_get_reducer_gain6()
    2436  easf_reducer_gain6_4tap_lookup, num_entries);  in spl_get_reducer_gain6()
    2449  unsigned int num_entries;  spl_get_reducer_gain4() local
    2468  unsigned int num_entries;  spl_get_gainRing6() local
    2487  unsigned int num_entries;  spl_get_gainRing4() local
    2506  unsigned int num_entries;  spl_get_3tap_dntilt_uptilt_offset() local
    2520  unsigned int num_entries;  spl_get_3tap_uptilt_maxval() local
    2534  unsigned int num_entries;  spl_get_3tap_dntilt_slope() local
    2548  unsigned int num_entries;  spl_get_3tap_uptilt1_slope() local
    2562  unsigned int num_entries;  spl_get_3tap_uptilt2_slope() local
    2576  unsigned int num_entries;  spl_get_3tap_uptilt2_offset() local
    [all...]
/linux/drivers/gpu/drm/i915/display/ |
intel_ddi_buf_trans.c
    32    .num_entries = ARRAY_SIZE(_hsw_trans_dp),
    49    .num_entries = ARRAY_SIZE(_hsw_trans_fdi),
    70    .num_entries = ARRAY_SIZE(_hsw_trans_hdmi),
    88    .num_entries = ARRAY_SIZE(_bdw_trans_edp),
    105   .num_entries = ARRAY_SIZE(_bdw_trans_dp),
    122   .num_entries = ARRAY_SIZE(_bdw_trans_fdi),
    141   .num_entries = ARRAY_SIZE(_bdw_trans_hdmi),
    160   .num_entries = ARRAY_SIZE(_skl_trans_dp),
    178   .num_entries = ARRAY_SIZE(_skl_u_trans_dp),
    196   .num_entries
    1140  intel_get_buf_trans(const struct intel_ddi_buf_trans *trans, int *num_entries)  intel_get_buf_trans() argument
    [all...]
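The i915 hits above all follow the same descriptor-table idiom: a static array of entries plus a wrapper whose num_entries is filled in with ARRAY_SIZE(), so the count can never drift out of sync with the table itself. A minimal, self-contained sketch of that idiom in plain userspace C (the names trans_entry, trans_table and _demo_trans_dp are made up for illustration, not the real intel_ddi_buf_trans definitions):

#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

struct trans_entry { int vswing; int preemph; };

struct trans_table {
	const struct trans_entry *entries;
	int num_entries;
};

static const struct trans_entry _demo_trans_dp[] = {
	{ 400, 0 }, { 600, 3 }, { 800, 6 },
};

/* num_entries is derived from the array itself, never hand-counted. */
static const struct trans_table demo_trans_dp = {
	.entries     = _demo_trans_dp,
	.num_entries = ARRAY_SIZE(_demo_trans_dp),
};

int main(void)
{
	for (int i = 0; i < demo_trans_dp.num_entries; i++)
		printf("level %d: vswing=%d preemph=%d\n", i,
		       demo_trans_dp.entries[i].vswing,
		       demo_trans_dp.entries[i].preemph);
	return 0;
}

Adding or removing an entry in the array automatically updates num_entries, which is why the pattern recurs in every one of these tables.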
/linux/include/net/tc_act/ |
tc_gate.h
    33   size_t num_entries;  member
    92   u32 num_entries;  in tcf_gate_num_entries() local
    94   num_entries = to_gate(a)->param.num_entries;  in tcf_gate_num_entries()
    96   return num_entries;  in tcf_gate_num_entries()
    105  u32 num_entries;  in tcf_gate_get_list() local
    109  num_entries = p->num_entries;  in tcf_gate_get_list()
    114  if (i != num_entries)  in tcf_gate_get_list()
    117  oe = kcalloc(num_entries, sizeo  in tcf_gate_get_list()
    [all...]
/linux/drivers/char/agp/ |
generic.c
    321   int num_entries;  in agp_num_entries() local
    328   num_entries = A_SIZE_8(temp)->num_entries;  in agp_num_entries()
    331   num_entries = A_SIZE_16(temp)->num_entries;  in agp_num_entries()
    334   num_entries = A_SIZE_32(temp)->num_entries;  in agp_num_entries()
    337   num_entries = A_SIZE_LVL2(temp)->num_entries;  in agp_num_entries()
    340   num_entries  in agp_num_entries()
    855   int num_entries;  agp_generic_create_gatt_table() local
    1027  int num_entries;  agp_generic_insert_memory() local
    1111  int mask_type, num_entries;  agp_generic_remove_memory() local
    [all...]
efficeon-agp.c
    198  int num_entries, l1_pages;  in efficeon_create_gatt_table() local
    200  num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;  in efficeon_create_gatt_table()
    202  printk(KERN_DEBUG PFX "efficeon_create_gatt_table(%d)\n", num_entries);  in efficeon_create_gatt_table()
    205  BUG_ON(num_entries & 0x3ff);  in efficeon_create_gatt_table()
    206  l1_pages = num_entries >> 10;  in efficeon_create_gatt_table()
    238  int i, count = mem->page_count, num_entries;  in efficeon_insert_memory() local
    245  num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;  in efficeon_insert_memory()
    246  if ((pg_start + mem->page_count) > num_entries)  in efficeon_insert_memory()
    287  int i, count = mem->page_count, num_entries;  efficeon_remove_memory() local
    [all...]
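efficeon_insert_memory() (line 246 above) shows the usual guard before writing a run of entries into a fixed-size table: reject the request if start + count would run past num_entries. A rough sketch of that check with hypothetical names, unrelated to the real AGP structures; the comparison is phrased against the remaining space so it cannot overflow even for very large inputs:

#include <errno.h>
#include <stddef.h>

/* Return 0 if [pg_start, pg_start + page_count) fits inside a table of
 * num_entries slots, -EINVAL otherwise.
 */
int check_range_fits(size_t pg_start, size_t page_count, size_t num_entries)
{
	if (pg_start > num_entries || page_count > num_entries - pg_start)
		return -EINVAL;
	return 0;
}

int main(void)
{
	/* 1024-entry table: inserting 8 pages at 1020 must be rejected. */
	return check_range_fits(1020, 8, 1024) == -EINVAL ? 0 : 1;
}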
/linux/arch/loongarch/kernel/ |
module-sections.c
    15   int i = got_sec->num_entries;  in module_emit_got_entry()
    25   got_sec->num_entries++;  in module_emit_got_entry()
    26   if (got_sec->num_entries > got_sec->max_entries) {  in module_emit_got_entry()
    49   nr = plt_sec->num_entries;  in module_emit_plt_entry()
    57   plt_sec->num_entries++;  in module_emit_plt_entry()
    58   plt_idx_sec->num_entries++;  in module_emit_plt_entry()
    59   BUG_ON(plt_sec->num_entries > plt_sec->max_entries);  in module_emit_plt_entry()
    158  mod->arch.got.num_entries = 0;  in module_frob_arch_sections()
    166  mod->arch.plt.num_entries = 0;  in module_frob_arch_sections()
    174  mod->arch.plt_idx.num_entries  in module_frob_arch_sections()
    [all...]
unwind_orc.c
    60   unsigned int num_entries, unsigned long ip)  in __orc_find() argument
    64   int *last = ip_table + num_entries - 1;  in __orc_find()
    66   if (!num_entries)  in __orc_find()
    248  unsigned int num_entries = orc_ip_size / sizeof(int);  in unwind_module_init() local
    252  num_entries != orc_size / sizeof(*orc));  in unwind_module_init()
    262  sort(orc_ip, num_entries, sizeof(int), orc_sort_cmp, orc_sort_swap);  in unwind_module_init()
    267  mod->arch.num_orcs = num_entries;  in unwind_module_init()
    276  size_t num_entries = orc_ip_size / sizeof(int);  in unwind_init()
    279  if (!num_entries || orc_ip_size % sizeof(int) != 0 ||  in unwind_init()
    281  num_entries !  in unwind_init()
    [all...]
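The __orc_find() hits are an ORC unwinder lookup: a sorted IP table of num_entries elements is binary-searched for the entry covering a given instruction pointer. The real table stores section-relative int offsets; the sketch below is only an illustration of the search shape, using absolute addresses and a hypothetical find_covering_entry() helper:

#include <stddef.h>

/* Find the index of the last entry whose start address is <= ip, assuming
 * starts[] is sorted ascending.  Returns -1 if the table is empty or ip
 * precedes the first entry.
 */
long find_covering_entry(const unsigned long *starts, size_t num_entries,
			 unsigned long ip)
{
	size_t lo = 0, hi = num_entries;

	if (!num_entries || ip < starts[0])
		return -1;

	while (hi - lo > 1) {		/* invariant: starts[lo] <= ip */
		size_t mid = lo + (hi - lo) / 2;

		if (starts[mid] <= ip)
			lo = mid;
		else
			hi = mid;
	}
	return (long)lo;
}

int main(void)
{
	unsigned long starts[] = { 0x1000, 0x1040, 0x10a0 };

	/* 0x1050 falls in the range starting at 0x1040 -> index 1. */
	return find_covering_entry(starts, 3, 0x1050) == 1 ? 0 : 1;
}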
/linux/drivers/gpu/drm/amd/amdgpu/ |
amdgpu_bo_list.h
    53  unsigned num_entries;  member
    59  struct amdgpu_bo_list_entry entries[] __counted_by(num_entries);
    71  size_t num_entries,
    76  e != &list->entries[list->num_entries]; \
    81  e != &list->entries[list->num_entries]; \
amdgpu_bo_list.c
    72   size_t num_entries, struct amdgpu_bo_list **result)  in amdgpu_bo_list_create() argument
    74   unsigned last_entry = 0, first_userptr = num_entries;  in amdgpu_bo_list_create()
    81   list = kvzalloc(struct_size(list, entries, num_entries), GFP_KERNEL);  in amdgpu_bo_list_create()
    87   list->num_entries = num_entries;  in amdgpu_bo_list_create()
    90   for (i = 0; i < num_entries; ++i) {  in amdgpu_bo_list_create()
    136  trace_amdgpu_cs_bo_status(list->num_entries, total_size);  in amdgpu_bo_list_create()
    145  for (i = first_userptr; i < num_entries; ++i)  in amdgpu_bo_list_create()
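amdgpu_bo_list pairs the num_entries count with a trailing flexible array annotated __counted_by(num_entries), and allocates the whole object in one shot with struct_size(list, entries, num_entries). A stripped-down userspace sketch of the same allocation pattern; bo_entry, bo_list and bo_list_create() are invented names, and struct_size() is approximated here with an explicit overflow guard:

#include <stdlib.h>
#include <stdint.h>

struct bo_entry { uint64_t addr; uint32_t flags; };

struct bo_list {
	unsigned int num_entries;
	struct bo_entry entries[];	/* flexible array, counted by num_entries */
};

struct bo_list *bo_list_create(unsigned int num_entries)
{
	struct bo_list *list;
	size_t size;

	/* struct_size()-style computation with a manual overflow check. */
	if (num_entries > (SIZE_MAX - sizeof(*list)) / sizeof(list->entries[0]))
		return NULL;
	size = sizeof(*list) + (size_t)num_entries * sizeof(list->entries[0]);

	list = calloc(1, size);
	if (!list)
		return NULL;

	list->num_entries = num_entries;
	return list;
}

Keeping the count and the flexible array in the same object is what lets the iteration macros at lines 76 and 81 above bound their walk with &list->entries[list->num_entries].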
/linux/arch/riscv/kernel/ |
module-sections.c
    17   int i = got_sec->num_entries;  in module_emit_got_entry()
    27   got_sec->num_entries++;  in module_emit_got_entry()
    28   BUG_ON(got_sec->num_entries > got_sec->max_entries);  in module_emit_got_entry()
    39   int i = plt_sec->num_entries;  in module_emit_plt_entry()
    52   plt_sec->num_entries++;  in module_emit_plt_entry()
    53   got_plt_sec->num_entries++;  in module_emit_plt_entry()
    54   BUG_ON(plt_sec->num_entries > plt_sec->max_entries);  in module_emit_plt_entry()
    192  mod->arch.plt.num_entries = 0;  in module_frob_arch_sections()
    199  mod->arch.got.num_entries = 0;  in module_frob_arch_sections()
    206  mod->arch.got_plt.num_entries  in module_frob_arch_sections()
    [all...]
/linux/drivers/net/wwan/iosm/ |
iosm_ipc_coredump.c
    66   u32 byte_read, num_entries, file_size;  in ipc_coredump_get_list() local
    95   num_entries = le32_to_cpu(cd_table->list.num_entries);  in ipc_coredump_get_list()
    96   if (num_entries == 0 || num_entries > IOSM_NOF_CD_REGION) {  in ipc_coredump_get_list()
    101  for (i = 0; i < num_entries; i++) {  in ipc_coredump_get_list()
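ipc_coredump_get_list() reads num_entries out of a device-provided table with le32_to_cpu() and refuses to use it unless 0 < num_entries <= IOSM_NOF_CD_REGION before looping. The same "convert endianness, then bound-check before trusting" pattern, sketched with hypothetical names (region_table, MAX_REGIONS, le32_to_host() standing in for le32_to_cpu()):

#include <errno.h>
#include <stdint.h>

#define MAX_REGIONS 16		/* stand-in for a fixed upper bound */

struct region_desc { uint32_t offset_le; uint32_t size_le; };

struct region_table {
	uint32_t num_entries_le;	/* little-endian, as written by the device */
	struct region_desc entry[MAX_REGIONS];
};

/* Decode a little-endian u32 regardless of host byte order. */
static uint32_t le32_to_host(uint32_t v)
{
	const uint8_t *b = (const uint8_t *)&v;

	return (uint32_t)b[0] | (uint32_t)b[1] << 8 |
	       (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24;
}

int parse_region_table(const struct region_table *t)
{
	uint32_t num_entries = le32_to_host(t->num_entries_le);

	/* Never index with a count the hardware could have corrupted. */
	if (num_entries == 0 || num_entries > MAX_REGIONS)
		return -EINVAL;

	for (uint32_t i = 0; i < num_entries; i++) {
		/* ... consume t->entry[i] ... */
	}
	return 0;
}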
/linux/tools/perf/pmu-events/ |
empty-pmu-events.c
    17   uint32_t num_entries;  member
    129  .num_entries = ARRAY_SIZE(pmu_events__common_software),
    134  .num_entries = ARRAY_SIZE(pmu_events__common_tool),
    169  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_default_core),
    174  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_hisi_sccl_ddrc),
    179  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_hisi_sccl_l3c),
    184  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_uncore_cbox),
    189  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_uncore_imc),
    194  .num_entries = ARRAY_SIZE(pmu_events__test_soc_cpu_uncore_imc_free_running),
    221  .num_entries
    [all...]
/linux/drivers/net/ethernet/netronome/nfp/ |
nfp_shared_buf.c
    81   unsigned int i, num_entries, entry_sz;  in nfp_shared_buf_register() local
    92   num_entries = n;  in nfp_shared_buf_register()
    95   num_entries * sizeof(pf->shared_bufs[0]),  in nfp_shared_buf_register()
    100  entry_sz = nfp_cpp_area_size(sb_desc_area) / num_entries;  in nfp_shared_buf_register()
    102  pf->shared_bufs = kmalloc_array(num_entries, sizeof(pf->shared_bufs[0]),  in nfp_shared_buf_register()
    109  for (i = 0; i < num_entries; i++) {  in nfp_shared_buf_register()
    125  pf->num_shared_bufs = num_entries;  in nfp_shared_buf_register()
/linux/drivers/net/ethernet/intel/ice/ |
ice_irq.c
    18   pf->irq_tracker.num_entries = max_vectors;  in ice_init_irq_tracker()
    24   ice_init_virt_irq_tracker(struct ice_pf *pf, u32 base, u32 num_entries)  in ice_init_virt_irq_tracker() argument
    26   pf->virt_irq_tracker.bm = bitmap_zalloc(num_entries, GFP_KERNEL);  in ice_init_virt_irq_tracker()
    30   pf->virt_irq_tracker.num_entries = num_entries;  in ice_init_virt_irq_tracker()
    77   struct xa_limit limit = { .max = pf->irq_tracker.num_entries - 1,  in ice_get_irq_res()
    253  pf->virt_irq_tracker.num_entries,  in ice_virt_get_irqs()
    256  if (res >= pf->virt_irq_tracker.num_entries)  in ice_virt_get_irqs()
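ice_init_virt_irq_tracker() sizes a bitmap with bitmap_zalloc(num_entries, ...) and the allocation path later treats an index at or beyond num_entries as "nothing free". A toy userspace version of that allocate-from-bitmap pattern; irq_tracker, tracker_init() and tracker_get() are hypothetical, and raw uint64_t words stand in for the kernel bitmap API:

#include <stdint.h>
#include <stdlib.h>

struct irq_tracker {
	uint64_t *bm;			/* one bit per trackable entry */
	unsigned int num_entries;
};

int tracker_init(struct irq_tracker *t, unsigned int num_entries)
{
	t->bm = calloc((num_entries + 63) / 64, sizeof(*t->bm));
	if (!t->bm)
		return -1;
	t->num_entries = num_entries;
	return 0;
}

/* Find, claim and return the first free index, or -1 if none is left. */
int tracker_get(struct irq_tracker *t)
{
	for (unsigned int i = 0; i < t->num_entries; i++) {
		if (!(t->bm[i / 64] & (1ULL << (i % 64)))) {
			t->bm[i / 64] |= 1ULL << (i % 64);
			return (int)i;
		}
	}
	return -1;	/* analogous to res >= num_entries in the hits above */
}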
/linux/drivers/gpu/drm/amd/display/dc/dml/dcn351/ |
dcn351_fpu.c
    278  ASSERT(clk_table->num_entries);  in dcn351_update_bw_bounding_box_fpu()
    281  for (i = 0; i < clk_table->num_entries; ++i) {  in dcn351_update_bw_bounding_box_fpu()
    288  for (i = 0; i < clk_table->num_entries; i++) {  in dcn351_update_bw_bounding_box_fpu()
    298  if (clk_table->num_entries == 1) {  in dcn351_update_bw_bounding_box_fpu()
    307  if (clk_table->num_entries == 1 &&  in dcn351_update_bw_bounding_box_fpu()
    350  if (clk_table->num_entries)  in dcn351_update_bw_bounding_box_fpu()
    351  dcn3_51_soc.num_states = clk_table->num_entries;  in dcn351_update_bw_bounding_box_fpu()
    387  if (clk_table->num_entries > 2) {  in dcn351_update_bw_bounding_box_fpu()
    389  for (i = 0; i < clk_table->num_entries; i++) {  in dcn351_update_bw_bounding_box_fpu()
    391  clk_table->num_entries;  in dcn351_update_bw_bounding_box_fpu()
    [all...]
/linux/drivers/gpu/drm/amd/display/dc/dml/dcn35/ |
dcn35_fpu.c
    244  ASSERT(clk_table->num_entries);  in dcn35_update_bw_bounding_box_fpu()
    247  for (i = 0; i < clk_table->num_entries; ++i) {  in dcn35_update_bw_bounding_box_fpu()
    254  for (i = 0; i < clk_table->num_entries; i++) {  in dcn35_update_bw_bounding_box_fpu()
    264  if (clk_table->num_entries == 1) {  in dcn35_update_bw_bounding_box_fpu()
    273  if (clk_table->num_entries == 1 &&  in dcn35_update_bw_bounding_box_fpu()
    316  if (clk_table->num_entries)  in dcn35_update_bw_bounding_box_fpu()
    317  dcn3_5_soc.num_states = clk_table->num_entries;  in dcn35_update_bw_bounding_box_fpu()
    353  if (clk_table->num_entries > 2) {  in dcn35_update_bw_bounding_box_fpu()
    355  for (i = 0; i < clk_table->num_entries; i++) {  in dcn35_update_bw_bounding_box_fpu()
    357  clk_table->num_entries;  in dcn35_update_bw_bounding_box_fpu()
    [all...]
/linux/drivers/net/dsa/sja1105/ |
sja1105_tas.c
    172  int num_entries = 0;  in sja1105_init_scheduling() local
    213  num_entries += tas_data->offload[port]->num_entries;  in sja1105_init_scheduling()
    219  num_entries += gating_cfg->num_entries;  in sja1105_init_scheduling()
    231  table->entries = kcalloc(num_entries, table->ops->unpacked_entry_size,  in sja1105_init_scheduling()
    235  table->entry_count = num_entries;  in sja1105_init_scheduling()
    283  schedule_end_idx = k + offload->num_entries - 1;  in sja1105_init_scheduling()
    310  for (i = 0; i < offload->num_entries; i++, k++) {  in sja1105_init_scheduling()
    329  schedule_end_idx = k + gating_cfg->num_entries  in sja1105_init_scheduling()
    470  size_t num_entries = gating_cfg->num_entries;  sja1105_gating_check_conflicts() local
    [all...]
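sja1105_init_scheduling() first sums num_entries across every per-port offload plus the gating config, allocates the combined table once with kcalloc(num_entries, ...), and only then copies the entries in. A compressed sketch of that two-pass "count, allocate, fill" pattern; sched_entry, sched_source and build_schedule() are invented names and calloc stands in for kcalloc:

#include <stdlib.h>
#include <stddef.h>

struct sched_entry { unsigned int interval; unsigned int gate_mask; };

struct sched_source {
	size_t num_entries;
	const struct sched_entry *entries;
};

struct sched_entry *build_schedule(const struct sched_source *src,
				   size_t num_sources, size_t *total)
{
	struct sched_entry *table;
	size_t num_entries = 0, k = 0;

	/* Pass 1: count everything that will land in the combined table. */
	for (size_t i = 0; i < num_sources; i++)
		num_entries += src[i].num_entries;

	table = calloc(num_entries, sizeof(*table));
	if (!table)
		return NULL;

	/* Pass 2: copy each source's entries into its slice of the table. */
	for (size_t i = 0; i < num_sources; i++)
		for (size_t j = 0; j < src[i].num_entries; j++)
			table[k++] = src[i].entries[j];

	*total = num_entries;
	return table;
}

Counting up front avoids reallocating while filling and keeps entry_count consistent with what was actually copied.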
/linux/drivers/net/ethernet/engleder/ |
tsnep_selftests.c
    381  qopt->num_entries = 7;  in tsnep_test_taprio()
    405  qopt->num_entries = 8;  in tsnep_test_taprio()
    434  qopt->num_entries = 10;  in tsnep_test_taprio()
    468  qopt->num_entries = 2;  in tsnep_test_taprio_change()
    501  qopt->num_entries = 3;  in tsnep_test_taprio_change()
    513  qopt->num_entries = 2;  in tsnep_test_taprio_change()
    527  qopt->num_entries = 4;  in tsnep_test_taprio_change()
    539  qopt->num_entries = 2;  in tsnep_test_taprio_change()
    551  qopt->num_entries = 3;  in tsnep_test_taprio_change()
    567  qopt->num_entries  in tsnep_test_taprio_change()
    [all...]
/linux/fs/exfat/ |
dir.c
    49   for (i = ES_IDX_FIRST_FILENAME; i < es.num_entries; i++) {  in exfat_get_uniname_from_ext_entry()
    484  void exfat_init_ext_entry(struct exfat_entry_set_cache *es, int num_entries,  in exfat_init_ext_entry() argument
    492  ep->dentry.file.num_ext = (unsigned char)(num_entries - 1);  in exfat_init_ext_entry()
    498  for (i = ES_IDX_FIRST_FILENAME; i < num_entries; i++) {  in exfat_init_ext_entry()
    513  for (i = order; i < es->num_entries; i++) {  in exfat_remove_entries()
    522  if (order < es->num_entries)  in exfat_remove_entries()
    532  for (i = ES_IDX_FILE; i < es->num_entries; i++) {  in exfat_update_dir_chksum()
    742  * num_entries: specifies how many dentries should be included.
    743  * It will be set to es->num_entries if it is not 0.
    744  * If num_entries i
    754  __exfat_get_dentry_set(struct exfat_entry_set_cache *es, struct super_block *sb, struct exfat_chain *p_dir, int entry, unsigned int num_entries)  __exfat_get_dentry_set() argument
    835  exfat_get_dentry_set(struct exfat_entry_set_cache *es, struct super_block *sb, struct exfat_chain *p_dir, int entry, unsigned int num_entries)  exfat_get_dentry_set() argument
    922  exfat_get_empty_dentry_set(struct exfat_entry_set_cache *es, struct super_block *sb, struct exfat_chain *p_dir, int entry, unsigned int num_entries)  exfat_get_empty_dentry_set() argument
    945  exfat_set_empty_hint(struct exfat_inode_info *ei, struct exfat_hint_femp *candi_empty, struct exfat_chain *clu, int dentry, int num_entries, int entry_type)  exfat_set_empty_hint() argument
    998  int num_entries = exfat_calc_num_entries(p_uniname);  exfat_find_dir_entry() local
    [all...]
/linux/drivers/fwctl/pds/ |
main.c
    114  num_endpoints = le32_to_cpu(pdsfc->endpoints->num_entries);  in pdsfc_free_endpoints()
    131  num_endpoints = le32_to_cpu(pdsfc->endpoints->num_entries);  in pdsfc_free_operations()
    193  num_endpoints = le32_to_cpu(pdsfc->endpoints->num_entries);  in pdsfc_init_endpoints()
    219  u32 num_entries;  in pdsfc_get_operations() local
    252  num_entries = le32_to_cpu(data->num_entries);  in pdsfc_get_operations()
    253  dev_dbg(dev, "num_entries %d\n", num_entries);  in pdsfc_get_operations()
    254  for (i = 0; i < num_entries; i++) {  in pdsfc_get_operations()
    287  u32 num_entries;  in pdsfc_validate_rpc() local
    [all...]
/linux/drivers/iommu/ |
io-pgtable-dart.c
    112  dart_iopte prot, int num_entries,  in dart_init_pte() argument
    119  for (i = 0; i < num_entries; i++)  in dart_init_pte()
    132  for (i = 0; i < num_entries; i++)  in dart_init_pte()
    230  int ret = 0, tbl, num_entries, max_entries, map_idx_start;  in dart_map_pages() local
    269  num_entries = min_t(int, pgcount, max_entries);  in dart_map_pages()
    271  ret = dart_init_pte(data, iova, paddr, prot, num_entries, ptep);  in dart_map_pages()
    273  *mapped += num_entries * pgsize;  in dart_map_pages()
    290  int i = 0, num_entries, max_entries, unmap_idx_start;  in dart_unmap_pages() local
    306  num_entries = min_t(int, pgcount, max_entries);  in dart_unmap_pages()
    308  while (i < num_entries) {  in dart_unmap_pages()
    [all...]
io-pgtable-arm-v7s.c
    314  static void __arm_v7s_pte_sync(arm_v7s_iopte *ptep, int num_entries,  in __arm_v7s_pte_sync() argument
    321  num_entries * sizeof(*ptep), DMA_TO_DEVICE);  in __arm_v7s_pte_sync()
    324  int num_entries, struct io_pgtable_cfg *cfg)  in __arm_v7s_set_pte() argument
    328  for (i = 0; i < num_entries; i++)  in __arm_v7s_set_pte()
    331  __arm_v7s_pte_sync(ptep, num_entries, cfg);  in __arm_v7s_set_pte()
    396  int lvl, int num_entries, arm_v7s_iopte *ptep)  in arm_v7s_init_pte() argument
    402  for (i = 0; i < num_entries; i++)  in arm_v7s_init_pte()
    422  if (num_entries > 1)  in arm_v7s_init_pte()
    427  __arm_v7s_set_pte(ptep, pte, num_entries, cfg);  in arm_v7s_init_pte()
    466  int num_entries  in __arm_v7s_map() local
    564  int idx, i = 0, num_entries = size >> ARM_V7S_LVL_SHIFT(lvl);  __arm_v7s_unmap() local
    [all...]
/linux/drivers/gpu/drm/amd/display/dc/dml2/dml21/src/dml2_mcg/ |
dml2_mcg_dcn4.c
    67   min_table->dram_bw_table.num_entries = soc_bb->clk_table.uclk.num_clk_values;  in build_min_clk_table_fine_grained()
    70   for (i = min_table->dram_bw_table.num_entries - 1; i > 0; i--) {  in build_min_clk_table_fine_grained()
    82   for (i = 0; i < (int)min_table->dram_bw_table.num_entries; i++) {  in build_min_clk_table_fine_grained()
    106  for (i = 0; i < (int)min_table->dram_bw_table.num_entries; i++) {  in build_min_clk_table_fine_grained()
    109  min_table->dram_bw_table.num_entries = i;  in build_min_clk_table_fine_grained()
    115  for (i = 0; i < (int)min_table->dram_bw_table.num_entries - 1; i++) {  in build_min_clk_table_fine_grained()
    121  for (j = i + 1; j < min_table->dram_bw_table.num_entries; j++) {  in build_min_clk_table_fine_grained()
    126  min_table->dram_bw_table.num_entries--;  in build_min_clk_table_fine_grained()
    142  min_table->dram_bw_table.num_entries = soc_bb->clk_table.uclk.num_clk_values;  in build_min_clk_table_coarse_grained()
/linux/drivers/pci/ |
tph.c
    428  int num_entries, i, offset;  in pci_restore_tph_state() local
    447  num_entries = pcie_tph_get_st_table_size(pdev);  in pci_restore_tph_state()
    448  for (i = 0; i < num_entries; i++) {  in pci_restore_tph_state()
    458  int num_entries, i, offset;  in pci_save_tph_state() local
    479  num_entries = pcie_tph_get_st_table_size(pdev);  in pci_save_tph_state()
    480  for (i = 0; i < num_entries; i++) {  in pci_save_tph_state()
    496  int num_entries;  in pci_tph_init() local
    503  num_entries = pcie_tph_get_st_table_size(pdev);  in pci_tph_init()
    504  save_size = sizeof(u32) + num_entries * sizeof(u16);  in pci_tph_init()
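pci_tph_init() sizes its save buffer as sizeof(u32) + num_entries * sizeof(u16): one 32-bit control word followed by one 16-bit steering-tag entry per table slot reported by pcie_tph_get_st_table_size(). The same arithmetic as a tiny worked example, with a hypothetical entry count of 64:

#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

int main(void)
{
	unsigned int num_entries = 64;	/* hypothetical ST table size */

	/* One u32 control word plus num_entries u16 table entries. */
	size_t save_size = sizeof(uint32_t) + num_entries * sizeof(uint16_t);

	printf("save buffer for %u entries: %zu bytes\n", num_entries, save_size);
	return 0;	/* 64 entries -> 4 + 128 = 132 bytes */
}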
/linux/drivers/gpu/drm/xe/ |
xe_pt.c
    678   * @num_entries: On output contains the number of @entries used.
    694   u32 *num_entries, bool clear_pt)  in xe_pt_stage_bind() argument
    777   *num_entries = xe_walk.wupd.num_used_entries;  in xe_pt_stage_bind()
    985   u32 num_entries)  in xe_pt_cancel_bind() argument
    989   for (i = 0; i < num_entries; i++) {  in xe_pt_cancel_bind()
    1041  u32 num_entries, struct llist_head *deferred)  in xe_pt_commit() argument
    1047  for (i = 0; i < num_entries; i++) {  in xe_pt_commit()
    1068  u32 num_entries, bool rebind)  in xe_pt_abort_bind() argument
    1074  for (i = num_entries - 1; i >= 0; --i) {  in xe_pt_abort_bind()
    1098  u32 num_entries, boo  in xe_pt_commit_prepare_bind() argument
    1130  xe_pt_free_bind(struct xe_vm_pgtable_update *entries, u32 num_entries)  xe_pt_free_bind() argument
    1142  xe_pt_prepare_bind(struct xe_tile *tile, struct xe_vma *vma, struct xe_svm_range *range, struct xe_vm_pgtable_update *entries, u32 *num_entries, bool invalidate_on_bind)  xe_pt_prepare_bind() argument
    1157  xe_vm_dbg_print_entries(struct xe_device *xe, const struct xe_vm_pgtable_update *entries, unsigned int num_entries, bool bind)  xe_vm_dbg_print_entries() argument
    1760  xe_pt_abort_unbind(struct xe_vma *vma, struct xe_vm_pgtable_update *entries, u32 num_entries)  xe_pt_abort_unbind() argument
    1786  xe_pt_commit_prepare_unbind(struct xe_vma *vma, struct xe_vm_pgtable_update *entries, u32 num_entries)  xe_pt_commit_prepare_unbind() argument
    [all...]