
Searched refs:range_start (Results 1 – 25 of 59) sorted by relevance


/linux/drivers/firmware/efi/
unaccepted_memory.c
36 unsigned long range_start, range_end; in accept_memory() local
126 range_start = range.start; in accept_memory()
127 for_each_set_bitrange_from(range_start, range_end, unaccepted->bitmap, in accept_memory()
130 unsigned long len = range_end - range_start; in accept_memory()
132 phys_start = range_start * unit_size + unaccepted->phys_base; in accept_memory()
150 bitmap_clear(unaccepted->bitmap, range_start, len); in accept_memory()
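The accept_memory() hits above show the common bitmap pattern of walking runs of set bits between range_start and range_end, converting each run to a physical range, and then clearing it. Below is a minimal, self-contained userspace sketch of that pattern; find_next_set_range() is a hypothetical stand-in for the kernel's for_each_set_bitrange_from() iterator, and the unit size and base address are made-up illustrative values.

#include <stdio.h>

#define NBITS 64

/* Hypothetical stand-in for for_each_set_bitrange_from(): starting at *start,
 * find the next run of set bits and return it as [*start, *end).
 * Returns 0 when no set bit remains. */
static int find_next_set_range(const unsigned long long *bitmap,
                               unsigned long nbits,
                               unsigned long *start, unsigned long *end)
{
    unsigned long i = *start;

    while (i < nbits && !(bitmap[i / 64] & (1ULL << (i % 64))))
        i++;
    if (i >= nbits)
        return 0;
    *start = i;
    while (i < nbits && (bitmap[i / 64] & (1ULL << (i % 64))))
        i++;
    *end = i;
    return 1;
}

int main(void)
{
    /* bits 3..5 and 10..11 set: two pending ("unaccepted") ranges */
    unsigned long long bitmap[1] = { 0x0c38 };
    unsigned long long unit_size = 2 * 1024 * 1024;  /* illustrative 2 MiB units */
    unsigned long long phys_base = 0x100000000ULL;   /* illustrative base */
    unsigned long range_start = 0, range_end;

    while (find_next_set_range(bitmap, NBITS, &range_start, &range_end)) {
        unsigned long len = range_end - range_start;
        unsigned long long phys_start = range_start * unit_size + phys_base;

        printf("accept 0x%llx + %lu units\n", phys_start, len);

        /* the kernel code accepts the memory, then bitmap_clear()s the run */
        for (unsigned long i = range_start; i < range_end; i++)
            bitmap[i / 64] &= ~(1ULL << (i % 64));

        range_start = range_end;
    }
    return 0;
}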
/linux/drivers/firmware/efi/libstub/
unaccepted_memory.c
182 unsigned long range_start, range_end; in accept_memory() local
209 range_start = start / unit_size; in accept_memory()
212 for_each_set_bitrange_from(range_start, range_end, in accept_memory()
216 phys_start = range_start * unit_size + unaccepted_table->phys_base; in accept_memory()
221 range_start, range_end - range_start); in accept_memory()
/linux/drivers/infiniband/hw/hfi1/
fault.c
111 unsigned long range_start, range_end, i; in fault_opcodes_write() local
125 if (kstrtoul(token, 0, &range_start)) in fault_opcodes_write()
132 range_end = range_start; in fault_opcodes_write()
134 if (range_start == range_end && range_start == -1UL) { in fault_opcodes_write()
140 if (range_start >= bound || range_end >= bound) in fault_opcodes_write()
143 for (i = range_start; i <= range_end; i++) { in fault_opcodes_write()
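The fault_opcodes_write() hits show opcode ranges being parsed from text as either a single value or a start-end pair and then iterated from range_start to range_end. A self-contained userspace sketch of that parse-and-iterate pattern follows; OPCODE_BOUND and mark_opcode_range() are hypothetical names used only for illustration.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define OPCODE_BOUND 256   /* hypothetical limit, standing in for the driver's bound */

/* Parse a token of the form "N" or "N-M" and mark every opcode in the range.
 * Returns -1 on a malformed token or an out-of-bounds range. */
static int mark_opcode_range(char *token, unsigned char *mask)
{
    unsigned long range_start, range_end, i;
    char *dash = strchr(token, '-');
    char *end;

    range_start = strtoul(token, &end, 0);
    if (end == token)
        return -1;

    if (dash) {
        range_end = strtoul(dash + 1, &end, 0);
        if (end == dash + 1)
            return -1;
    } else {
        range_end = range_start;        /* single value: start == end */
    }

    if (range_start >= OPCODE_BOUND || range_end >= OPCODE_BOUND ||
        range_start > range_end)
        return -1;

    for (i = range_start; i <= range_end; i++)
        mask[i] = 1;
    return 0;
}

int main(void)
{
    unsigned char mask[OPCODE_BOUND] = { 0 };
    char input[] = "0x10,0x20-0x24,7";
    char *tok = strtok(input, ",");

    while (tok) {
        if (mark_opcode_range(tok, mask))
            fprintf(stderr, "bad token: %s\n", tok);
        tok = strtok(NULL, ",");
    }

    for (int i = 0; i < OPCODE_BOUND; i++)
        if (mask[i])
            printf("fault on opcode 0x%x\n", i);
    return 0;
}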
/linux/drivers/gpu/drm/xe/
xe_reg_whitelist.c
159 u32 range_start, range_end; in xe_reg_whitelist_print_entry() local
176 range_start = reg & REG_GENMASK(25, range_bit); in xe_reg_whitelist_print_entry()
177 range_end = range_start | REG_GENMASK(range_bit, 0); in xe_reg_whitelist_print_entry()
192 range_start, range_end, in xe_reg_whitelist_print_entry()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_doorbell.c
212 int range_start = dev->shared_resources.non_cp_doorbells_start; in init_doorbell_bitmap() local
219 pr_debug("reserved doorbell 0x%03x - 0x%03x\n", range_start, range_end); in init_doorbell_bitmap()
221 range_start + KFD_QUEUE_DOORBELL_MIRROR_OFFSET, in init_doorbell_bitmap()
225 if (i >= range_start && i <= range_end) { in init_doorbell_bitmap()
kfd_device.c
1323 (*mem_obj)->range_start = found; in kfd_gtt_sa_allocate()
1375 (*mem_obj)->range_start, (*mem_obj)->range_end); in kfd_gtt_sa_allocate()
1378 bitmap_set(kfd->gtt_sa_bitmap, (*mem_obj)->range_start, in kfd_gtt_sa_allocate()
1379 (*mem_obj)->range_end - (*mem_obj)->range_start + 1); in kfd_gtt_sa_allocate()
1401 mem_obj, mem_obj->range_start, mem_obj->range_end); in kfd_gtt_sa_free()
1406 bitmap_clear(kfd->gtt_sa_bitmap, mem_obj->range_start, in kfd_gtt_sa_free()
1407 mem_obj->range_end - mem_obj->range_start + 1); in kfd_gtt_sa_free()
/linux/drivers/base/regmap/
regcache-maple.c
348 int range_start; in regcache_maple_init() local
363 range_start = 0; in regcache_maple_init()
369 ret = regcache_maple_insert_block(map, range_start, in regcache_maple_init()
374 range_start = i; in regcache_maple_init()
379 ret = regcache_maple_insert_block(map, range_start, in regcache_maple_init()
/linux/drivers/gpu/drm/
drm_mm.c
518 u64 range_start, u64 range_end, in drm_mm_insert_node_in_range() argument
525 DRM_MM_BUG_ON(range_start > range_end); in drm_mm_insert_node_in_range()
527 if (unlikely(size == 0 || range_end - range_start < size)) in drm_mm_insert_node_in_range()
540 for (hole = first_hole(mm, range_start, range_end, size, mode); in drm_mm_insert_node_in_range()
551 if (mode == DRM_MM_INSERT_HIGH && hole_end <= range_start) in drm_mm_insert_node_in_range()
559 adj_start = max(col_start, range_start); in drm_mm_insert_node_in_range()
580 if (adj_start < max(col_start, range_start) || in drm_mm_insert_node_in_range()
726 scan->range_start = start; in drm_mm_scan_init_with_range()
777 adj_start = max(col_start, scan->range_start); in drm_mm_scan_add_block()
796 if (adj_start < max(col_start, scan->range_start) || in drm_mm_scan_add_block()
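In the drm_mm.c hits, range_start and range_end bound the window a node may be placed in. The kernel-style fragment below is only an illustrative sketch of how a caller passes such a window to drm_mm_insert_node_in_range(); it is not a standalone program, and the alignment/color/mode choices are arbitrary examples.

#include <drm/drm_mm.h>

/* Illustrative only: allocate 'size' bytes somewhere inside
 * [range_start, range_end) of an already-initialized drm_mm. */
static int alloc_in_range(struct drm_mm *mm, struct drm_mm_node *node,
                          u64 size, u64 range_start, u64 range_end)
{
    return drm_mm_insert_node_in_range(mm, node, size,
                                       0,               /* no alignment */
                                       0,               /* no color */
                                       range_start, range_end,
                                       DRM_MM_INSERT_BEST);
}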
/linux/net/bridge/
br_vlan.c
1990 struct net_bridge_vlan *v, *range_start = NULL, *range_end = NULL; in br_vlan_dump_dev() local
2043 if (!range_start) { in br_vlan_dump_dev()
2044 range_start = v; in br_vlan_dump_dev()
2052 if (!br_vlan_global_opts_fill(skb, range_start->vid, in br_vlan_dump_dev()
2054 range_start)) { in br_vlan_dump_dev()
2059 idx += range_end->vid - range_start->vid + 1; in br_vlan_dump_dev()
2061 range_start = v; in br_vlan_dump_dev()
2064 u16 vlan_flags = br_vlan_flags(range_start, pvid); in br_vlan_dump_dev()
2066 if (!br_vlan_fill_vids(skb, range_start->vid, in br_vlan_dump_dev()
2067 range_end->vid, range_start, in br_vlan_dump_dev()
[all …]
br_vlan_options.c
265 struct net_bridge_vlan *range_start, in br_vlan_process_options() argument
280 if (!range_start || !br_vlan_should_use(range_start)) { in br_vlan_process_options()
290 for (vid = range_start->vid; vid <= range_end->vid; vid++) { in br_vlan_process_options()
/linux/fs/btrfs/
fiemap.c
642 u64 range_start; in extent_fiemap() local
660 range_start = round_down(start, sectorsize); in extent_fiemap()
662 prev_extent_end = range_start; in extent_fiemap()
664 btrfs_lock_extent(&inode->io_tree, range_start, range_end, &cached_state); in extent_fiemap()
672 ret = fiemap_search_slot(inode, path, range_start); in extent_fiemap()
707 if (extent_end <= range_start) in extent_fiemap()
844 btrfs_unlock_extent(&inode->io_tree, range_start, range_end, &cached_state); in extent_fiemap()
ordered-data.c
760 u64 range_start, range_len; in btrfs_wait_ordered_extents() local
764 range_start = bg->start; in btrfs_wait_ordered_extents()
767 range_start = 0; in btrfs_wait_ordered_extents()
770 range_end = range_start + range_len; in btrfs_wait_ordered_extents()
780 ordered->disk_bytenr + ordered->disk_num_bytes <= range_start) { in btrfs_wait_ordered_extents()
/linux/arch/arm64/kvm/hyp/nvhe/
page_alloc.c
45 if (addr < pool->range_start || addr >= pool->range_end) in __find_buddy_nocheck()
103 if (phys < pool->range_start || phys >= pool->range_end) in __hyp_attach_page()
235 pool->range_start = phys; in hyp_pool_init()
/linux/include/uapi/linux/
sed-opal.h
84 __u64 range_start; member
93 __u64 range_start; member
/linux/arch/arm64/kvm/hyp/include/nvhe/
gfp.h
20 phys_addr_t range_start; member
/linux/drivers/mmc/host/
dw_mmc-k3.c
327 unsigned int range_start = 0; in dw_mci_get_best_clksmpl() local
350 range_start = i; in dw_mci_get_best_clksmpl()
360 middle_range = range_start + range_length / 2; in dw_mci_get_best_clksmpl()
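In dw_mci_get_best_clksmpl(), range_start marks where the current run of working sample points begins, and the chosen sample is the middle of the longest such run. A self-contained sketch of that "longest run, take the midpoint" idea; the 32-bit candidate mask in main() is made up, and the real driver additionally handles wrap-around that this sketch ignores.

#include <stdio.h>

/* Return the midpoint of the longest run of set bits in 'candidates',
 * or -1 if no bit is set. */
static int best_sample(unsigned int candidates)
{
    unsigned int range_start = 0, range_length = 0;
    unsigned int best_start = 0, best_length = 0;

    for (unsigned int i = 0; i < 32; i++) {
        if (candidates & (1u << i)) {
            if (range_length == 0)
                range_start = i;        /* a new run begins here */
            range_length++;
            if (range_length > best_length) {
                best_length = range_length;
                best_start = range_start;
            }
        } else {
            range_length = 0;
        }
    }

    if (!best_length)
        return -1;
    return best_start + best_length / 2;
}

int main(void)
{
    /* bits 4..9 and 20..22 pass: the longer run wins, midpoint is 7 */
    printf("best sample: %d\n", best_sample(0x007003f0));
    return 0;
}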
/linux/drivers/gpu/drm/msm/
msm_gem.c
460 struct drm_gpuvm *vm, u64 range_start, in get_vma_locked() argument
470 vma = msm_gem_vma_new(vm, obj, 0, range_start, range_end); in get_vma_locked()
472 GEM_WARN_ON(vma->va.addr < range_start); in get_vma_locked()
548 u64 range_start, u64 range_end) in get_and_pin_iova_range_locked() argument
558 vma = get_vma_locked(obj, vm, range_start, range_end); in get_and_pin_iova_range_locked()
577 u64 range_start, u64 range_end) in msm_gem_get_and_pin_iova_range() argument
583 ret = get_and_pin_iova_range_locked(obj, vm, iova, range_start, range_end); in msm_gem_get_and_pin_iova_range()
msm_gem_vma.c
372 u64 offset, u64 range_start, u64 range_end) in msm_gem_vma_new() argument
390 range_start, range_end, 0); in msm_gem_vma_new()
395 range_start = vma->node.start; in msm_gem_vma_new()
396 range_end = range_start + obj->size; in msm_gem_vma_new()
400 GEM_WARN_ON((range_end - range_start) > obj->size); in msm_gem_vma_new()
402 drm_gpuva_init(&vma->base, range_start, range_end - range_start, obj, offset); in msm_gem_vma_new()
msm_gem.h
190 u64 offset, u64 range_start, u64 range_end);
278 u64 range_start, u64 range_end);
/linux/drivers/media/i2c/
imx274.c
636 int range_start = -1; in imx274_write_table() local
642 if ((next->addr != range_start + range_count) || in imx274_write_table()
648 range_start, range_vals[0]); in imx274_write_table()
650 err = regmap_bulk_write(regmap, range_start, in imx274_write_table()
659 range_start = -1; in imx274_write_table()
674 if (range_start == -1) in imx274_write_table()
675 range_start = next->addr; in imx274_write_table()
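The imx274_write_table() hits show range_start anchoring a run of consecutive register addresses that is flushed as a single bulk write once the next address breaks the sequence. A self-contained sketch of that coalescing pattern; bulk_write() is a hypothetical stand-in for regmap_bulk_write(), and the table contents are invented for illustration.

#include <stdio.h>

struct reg_entry {
    unsigned int addr;
    unsigned char val;
};

/* Hypothetical stand-in for regmap_bulk_write(): just report the burst. */
static void bulk_write(unsigned int start, const unsigned char *vals,
                       unsigned int count)
{
    (void)vals;
    printf("bulk write %u bytes at 0x%04x\n", count, start);
}

static void write_table(const struct reg_entry *table, unsigned int n)
{
    unsigned char range_vals[16];
    int range_start = -1;           /* -1 means no open range */
    unsigned int range_count = 0;

    for (unsigned int i = 0; i < n; i++) {
        const struct reg_entry *next = &table[i];

        /* Flush the open range if the next address is not contiguous
         * or the staging buffer is full. */
        if (range_start >= 0 &&
            (next->addr != (unsigned int)range_start + range_count ||
             range_count == sizeof(range_vals))) {
            bulk_write(range_start, range_vals, range_count);
            range_start = -1;
            range_count = 0;
        }

        if (range_start == -1)
            range_start = next->addr;
        range_vals[range_count++] = next->val;
    }

    if (range_start >= 0)
        bulk_write(range_start, range_vals, range_count);
}

int main(void)
{
    const struct reg_entry table[] = {
        { 0x3000, 0x01 }, { 0x3001, 0x02 }, { 0x3002, 0x03 },
        { 0x3100, 0xaa }, { 0x3101, 0xbb },
    };

    write_table(table, sizeof(table) / sizeof(table[0]));
    return 0;
}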
/linux/drivers/gpu/drm/amd/display/dc/dml2/dml21/src/dml2_top/
dml2_top_soc15.c
337 int range_start; in calculate_first_second_splitting() local
349 range_start = 0; in calculate_first_second_splitting()
353 if (range_start <= pipe_h_vp_start && pipe_h_vp_start <= range_end) in calculate_first_second_splitting()
356 range_start = range_end + 1; in calculate_first_second_splitting()
362 range_start = mcache_boundaries[right_cache_id] - shift; in calculate_first_second_splitting()
364 range_start = 0; in calculate_first_second_splitting()
366 if (range_start <= pipe_h_vp_end && pipe_h_vp_end <= range_end) { in calculate_first_second_splitting()
369 range_end = range_start - 1; in calculate_first_second_splitting()
/linux/drivers/net/ethernet/netronome/nfp/bpf/
jit.c
2733 s16 range_start = meta->pkt_cache.range_start; in mem_ldx_data_init_pktcache() local
2739 off = re_load_imm_any(nfp_prog, range_start, imm_b(nfp_prog)); in mem_ldx_data_init_pktcache()
2741 len = range_end - range_start; in mem_ldx_data_init_pktcache()
2760 s16 range_start = meta->pkt_cache.range_start; in mem_ldx_data_from_pktcache_unaligned() local
2761 s16 insn_off = meta->insn.off - range_start; in mem_ldx_data_from_pktcache_unaligned()
2817 idx = (meta->insn.off - meta->pkt_cache.range_start) / REG_WIDTH; in mem_ldx_data_from_pktcache_aligned()
2843 u8 off = meta->insn.off - meta->pkt_cache.range_start; in mem_ldx_data_from_pktcache()
4279 s16 range_start = 0, range_end = 0; in nfp_bpf_opt_pkt_cache() local
4330 s16 new_start = range_start; in nfp_bpf_opt_pkt_cache()
4335 if (off < range_start) { in nfp_bpf_opt_pkt_cache()
[all …]
/linux/include/trace/events/
writeback.h
463 __field(long, range_start)
476 __entry->range_start = (long)wbc->range_start;
490 __entry->range_start,
/linux/fs/iomap/
buffered-io.c
88 struct iomap_folio_state *ifs, u64 *range_start, u64 range_end) in ifs_find_dirty_range() argument
92 offset_in_folio(folio, *range_start) >> inode->i_blkbits; in ifs_find_dirty_range()
108 *range_start = folio_pos(folio) + (start_blk << inode->i_blkbits); in ifs_find_dirty_range()
112 static unsigned iomap_find_dirty_range(struct folio *folio, u64 *range_start, in iomap_find_dirty_range() argument
117 if (*range_start >= range_end) in iomap_find_dirty_range()
121 return ifs_find_dirty_range(folio, ifs, range_start, range_end); in iomap_find_dirty_range()
122 return range_end - *range_start; in iomap_find_dirty_range()
/linux/fs/nfs/
nfstrace.h
282 loff_t range_start,
286 TP_ARGS(inode, range_start, range_end),
293 __field(loff_t, range_start)
304 __entry->range_start = range_start;
314 __entry->range_start, __entry->range_end
322 loff_t range_start, \
325 TP_ARGS(inode, range_start, range_end))
