Searched refs:mas (Results 1 – 25 of 36) sorted by relevance

/linux/lib/
maple_tree.c
221 static unsigned int mas_mt_height(struct ma_state *mas) in mas_mt_height()
223 return mt_height(mas->tree); in mas_mt_height()
263 static __always_inline void mas_set_err(struct ma_state *mas, long err) in mas_set_err()
265 mas->node = MA_ERROR(err); in mas_set_err()
266 mas->status = ma_error; in mas_set_err()
269 static __always_inline bool mas_is_ptr(const struct ma_state *mas) in mas_is_ptr()
271 return mas->status == ma_root; in mas_is_ptr()
274 static __always_inline bool mas_is_start(const struct ma_state *mas) in mas_is_start()
276 return mas->status == ma_start; in mas_is_start()
279 static __always_inline bool mas_is_none(const struct ma_state *mas) in mas_is_none()
[all …]
test_maple_tree.c
22 #define mas_dump(mas) do {} while (0)
23 #define mas_wr_dump(mas) do {} while (0)
352 MA_STATE(mas, mt, 0, 0); in check_rev_find()
359 mas_set(&mas, 1000); in check_rev_find()
360 val = mas_find_rev(&mas, 1000); in check_rev_find()
362 val = mas_find_rev(&mas, 1000); in check_rev_find()
365 mas_set(&mas, 999); in check_rev_find()
366 val = mas_find_rev(&mas, 997); in check_rev_find()
369 mas_set(&mas, 1000); in check_rev_find()
370 val = mas_find_rev(&mas, 900); in check_rev_find()
[all …]
alloc_tag.c
368 MA_STATE(mas, &mod_area_mt, 0, module_tags.size); in clean_unused_module_areas_locked()
371 mas_for_each(&mas, val, module_tags.size) { in clean_unused_module_areas_locked()
379 start_tag = (struct alloc_tag *)(module_tags.start_addr + mas.index); in clean_unused_module_areas_locked()
380 end_tag = (struct alloc_tag *)(module_tags.start_addr + mas.last); in clean_unused_module_areas_locked()
382 mas_erase(&mas); in clean_unused_module_areas_locked()
387 static bool find_aligned_area(struct ma_state *mas, unsigned long section_size, in find_aligned_area()
394 if (!mas_empty_area(mas, 0, section_size - 1, prepend + size)) { in find_aligned_area()
395 if (IS_ALIGNED(mas->index + prepend, align)) in find_aligned_area()
399 mas_reset(mas); in find_aligned_area()
400 if (!mas_empty_area(mas, 0, section_size - 1, in find_aligned_area()
[all …]
interval_tree_test.c
202 static void mas_cur_span(struct ma_state *mas, struct interval_tree_span_iter *state) in mas_cur_span()
208 if (mas->status == ma_overflow) in mas_cur_span()
212 state->is_hole = mas_walk(mas) ? 0 : 1; in mas_cur_span()
214 cur_start = mas->index < state->first_index ? in mas_cur_span()
215 state->first_index : mas->index; in mas_cur_span()
220 cur_last = mas->last > state->last_index ? in mas_cur_span()
221 state->last_index : mas->last; in mas_cur_span()
223 is_hole = mas_next_range(mas, state->last_index) ? 0 : 1; in mas_cur_span()
225 } while (mas->status != ma_overflow && is_hole == state->is_hole); in mas_cur_span()
236 if (mas->status != ma_overflow) in mas_cur_span()
[all …]
/linux/drivers/spi/
spi-geni-qcom.c
106 static void spi_slv_setup(struct spi_geni_master *mas) in spi_slv_setup()
108 struct geni_se *se = &mas->se; in spi_slv_setup()
113 dev_dbg(mas->dev, "spi slave setup done\n"); in spi_slv_setup()
117 struct spi_geni_master *mas, in get_spi_clk_cfg()
125 ret = geni_se_clk_freq_match(&mas->se, in get_spi_clk_cfg()
126 speed_hz * mas->oversampling, in get_spi_clk_cfg()
129 dev_err(mas->dev, "Failed(%d) to find src clk for %dHz\n", in get_spi_clk_cfg()
134 *clk_div = DIV_ROUND_UP(sclk_freq, mas->oversampling * speed_hz); in get_spi_clk_cfg()
135 actual_hz = sclk_freq / (mas->oversampling * *clk_div); in get_spi_clk_cfg()
137 dev_dbg(mas->dev, "req %u=>%u sclk %lu, idx %d, div %d\n", speed_hz, in get_spi_clk_cfg()
[all …]
/linux/drivers/base/regmap/
regcache-maple.c
20 MA_STATE(mas, mt, reg, reg); in regcache_maple_read()
25 entry = mas_walk(&mas); in regcache_maple_read()
31 *value = entry[reg - mas.index]; in regcache_maple_read()
42 MA_STATE(mas, mt, reg, reg); in regcache_maple_write()
50 entry = mas_walk(&mas); in regcache_maple_write()
52 entry[reg - mas.index] = val; in regcache_maple_write()
58 mas_set_range(&mas, reg - 1, reg + 1); in regcache_maple_write()
62 lower = mas_find(&mas, reg - 1); in regcache_maple_write()
64 index = mas.index; in regcache_maple_write()
65 lower_sz = (mas.last - mas.index + 1) * sizeof(unsigned long); in regcache_maple_write()
[all …]
/linux/include/linux/
maple_tree.h
272 #define mtree_lock_nested(mas, subclass) \
457 struct ma_state *mas;
472 #define mas_lock(mas) spin_lock(&((mas)->tree->ma_lock))
473 #define mas_lock_nested(mas, subclass) \
474 spin_lock_nested(&((mas)->tree->ma_lock), subclass)
475 #define mas_unlock(mas) spin_unlock(&((mas)->tree->ma_lock))
507 .mas = ma_state, \
521 void *mas_walk(struct ma_state *mas);
522 void *mas_store(struct ma_state *mas, void *entry);
523 void *mas_erase(struct ma_state *mas);
[all …]
mm.h
922 return mas_find(&vmi->mas, max - 1); in vma_find()
931 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
937 return mas_next_range(&vmi->mas, ULONG_MAX); in vma_iter_next_range()
943 return mas_prev(&vmi->mas, 0); in vma_prev()
949 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
950 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
951 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
960 mas_destroy(&vmi->mas); in vma_iter_free()
966 vmi->mas.index = vma->vm_start; in vma_iter_bulk_store()
967 vmi->mas.last = vma->vm_end - 1; in vma_iter_bulk_store()
[all …]
mm_types.h
1353 struct ma_state mas;
1358 .mas = { \
1369 mas_init(&vmi->mas, &mm->mm_mt, addr); in vma_iter_init()
/linux/include/trace/events/
maple_tree.h
15 TP_PROTO(const char *fn, struct ma_state *mas),
17 TP_ARGS(fn, mas),
30 __entry->min = mas->min;
31 __entry->max = mas->max;
32 __entry->index = mas->index;
33 __entry->last = mas->last;
34 __entry->node = mas->node;
48 TP_PROTO(const char *fn, struct ma_state *mas),
50 TP_ARGS(fn, mas),
63 __entry->min = mas->min;
[all …]
/linux/mm/
vma.h
212 if (vmi->mas.status != ma_start && in vma_iter_store_gfp()
213 ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start))) in vma_iter_store_gfp()
216 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
217 mas_store_gfp(&vmi->mas, vma, gfp); in vma_iter_store_gfp()
218 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_store_gfp()
263 void unmap_region(struct ma_state *mas, struct vm_area_struct *vma,
366 return mas_prev(&vmi->mas, min); in vma_prev_limit()
404 __mas_set_range(&vmi->mas, index, last - 1); in vma_iter_config()
409 mas_reset(&vmi->mas); in vma_iter_reset()
415 return mas_prev_range(&vmi->mas, min); in vma_iter_prev_range_limit()
[all …]
execmem.c
110 static inline unsigned long mas_range_len(struct ma_state *mas) in mas_range_len()
112 return mas->last - mas->index + 1; in mas_range_len()
162 MA_STATE(mas, free_areas, 0, ULONG_MAX); in execmem_cache_clean()
166 mas_for_each(&mas, area, ULONG_MAX) { in execmem_cache_clean()
167 size_t size = mas_range_len(&mas); in execmem_cache_clean()
170 IS_ALIGNED(mas.index, PMD_SIZE)) { in execmem_cache_clean()
174 mas_store_gfp(&mas, NULL, GFP_KERNEL); in execmem_cache_clean()
187 MA_STATE(mas, free_areas, addr - 1, addr + 1); in execmem_cache_add_locked()
194 area = mas_walk(&mas); in execmem_cache_add_locked()
195 if (area && mas.last == addr - 1) in execmem_cache_add_locked()
[all …]
mmap_lock.c
227 MA_STATE(mas, &mm->mm_mt, address, address); in lock_vma_under_rcu()
232 vma = mas_walk(&mas); in lock_vma_under_rcu()
debug.c
359 mas_dump(&vmi->mas); in vma_iter_dump_tree()
360 mt_dump(vmi->mas.tree, mt_dump_hex); in vma_iter_dump_tree()
vma.c
476 void unmap_region(struct ma_state *mas, struct vm_area_struct *vma, in unmap_region()
484 unmap_vmas(&tlb, mas, vma, vma->vm_start, vma->vm_end, vma->vm_end, in unmap_region()
486 mas_set(mas, vma->vm_end); in unmap_region()
487 free_pgtables(&tlb, mas, vma, prev ? prev->vm_end : FIRST_USER_ADDRESS, in unmap_region()
1521 mt_init_flags(&mt_detach, vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK); in do_vmi_align_munmap()
2307 struct ma_state *mas = &vms->vmi->mas; in vms_abort_munmap_vmas()
2320 mas_set_range(mas, vms->start, vms->end - 1); in vms_abort_munmap_vmas()
2321 mas_store_gfp(mas, NULL, GFP_KERNEL|__GFP_NOFAIL); in vms_abort_munmap_vmas()
2355 vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK); in __mmap_prepare()
2420 unmap_region(&vmi->mas, vma, map->prev, map->next); in __mmap_new_file_vma()
mmap.c
1280 unmap_vmas(&tlb, &vmi.mas, vma, 0, ULONG_MAX, ULONG_MAX, false); in exit_mmap()
1291 free_pgtables(&tlb, &vmi.mas, vma, FIRST_USER_ADDRESS, in exit_mmap()
1749 mt_clear_in_rcu(vmi.mas.tree); in dup_mmap()
1846 mt_set_in_rcu(vmi.mas.tree); in dup_mmap()
1859 mas_set_range(&vmi.mas, mpnt->vm_start, mpnt->vm_end - 1); in dup_mmap()
1860 mas_store(&vmi.mas, XA_ZERO_ENTRY); in dup_mmap()
/linux/scripts/gdb/linux/
mapletree.py
198 def mtree_lookup_walk(mas):
200 n = mas.node
209 if pivots[offset] >= mas.index:
216 n = mt_slot(mas.tree, slots, offset)
218 mas.reset()
230 mas = Mas(mt, index, index)
234 entry = mas.start()
235 if mas.is_none():
238 if mas.is_ptr():
243 entry = mtree_lookup_walk(mas)
[all …]
/linux/tools/testing/radix-tree/
maple.c
414 static inline void mas_node_walk(struct ma_state *mas, struct maple_node *node, in mas_node_walk()
426 (*range_max) = (*range_min) = mas->index; in mas_node_walk()
430 mas->offset = mas->index = mas->min; in mas_node_walk()
440 prev = mas->min; in mas_node_walk()
441 index = mas->index; in mas_node_walk()
460 max = mas->max; in mas_node_walk()
464 mas->offset = offset; in mas_node_walk()
469 mas->max = max; in mas_node_walk()
470 mas->min = prev; in mas_node_walk()
486 static inline bool mas_descend_walk(struct ma_state *mas, in mas_descend_walk()
[all …]
/linux/tools/testing/vma/
vma_internal.h
228 struct ma_state mas;
233 .mas = { \
533 mas_pause(&vmi->mas); in vma_iter_invalidate()
563 return mas_find(&vmi->mas, ULONG_MAX); in vma_next()
671 return mas_find(&vmi->mas, max - 1); in vma_find()
677 __mas_set_range(&vmi->mas, start, end - 1); in vma_iter_clear_gfp()
678 mas_store_gfp(&vmi->mas, NULL, gfp); in vma_iter_clear_gfp()
679 if (unlikely(mas_is_err(&vmi->mas))) in vma_iter_clear_gfp()
704 return mas_prev(&vmi->mas, 0); in vma_prev()
709 mas_set(&vmi->mas, addr); in vma_iter_set()
[all …]
/linux/mm/damon/tests/
vaddr-kunit.h
21 MA_STATE(mas, mt, 0, 0); in __link_vmas()
26 mas_lock(&mas); in __link_vmas()
28 mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1); in __link_vmas()
29 if (mas_store_gfp(&mas, &vmas[i], GFP_KERNEL)) in __link_vmas()
35 mas_unlock(&mas); in __link_vmas()
/linux/kernel/irq/
irqdesc.c
176 MA_STATE(mas, &sparse_irqs, 0, 0); in irq_find_free_area()
178 if (mas_empty_area(&mas, from, MAX_SPARSE_IRQS, cnt)) in irq_find_free_area()
180 return mas.index; in irq_find_free_area()
196 MA_STATE(mas, &sparse_irqs, irq, irq); in irq_insert_desc()
197 WARN_ON(mas_store_gfp(&mas, desc, GFP_KERNEL) != 0); in irq_insert_desc()
202 MA_STATE(mas, &sparse_irqs, irq, irq); in delete_irq_desc()
203 mas_erase(&mas); in delete_irq_desc()
/linux/drivers/gpu/drm/nouveau/
nouveau_uvmm.c
282 MA_STATE(mas, &uvmm->region_mt, addr, addr); in __nouveau_uvma_region_insert()
284 if (unlikely(mas_walk(&mas))) in __nouveau_uvma_region_insert()
287 if (unlikely(mas.last < last)) in __nouveau_uvma_region_insert()
290 mas.index = addr; in __nouveau_uvma_region_insert()
291 mas.last = last; in __nouveau_uvma_region_insert()
293 mas_store_gfp(&mas, reg, GFP_KERNEL); in __nouveau_uvma_region_insert()
322 MA_STATE(mas, &uvmm->region_mt, reg->va.addr, 0); in nouveau_uvma_region_remove()
324 mas_erase(&mas); in nouveau_uvma_region_remove()
362 MA_STATE(mas, &uvmm->region_mt, addr, 0); in nouveau_uvma_region_find_first()
364 return mas_find(&mas, addr + range - 1); in nouveau_uvma_region_find_first()
[all …]
nouveau_debugfs.c
209 MA_STATE(mas, &uvmm->region_mt, 0, 0); in nouveau_debugfs_gpuva_regions()
214 mas_for_each(&mas, reg, ULONG_MAX) in nouveau_debugfs_gpuva_regions()
/linux/Documentation/core-api/
maple_tree.rst
145 The advanced API is based around the ma_state, this is where the 'mas'
152 The maple state keeps track of the range start and end in mas->index and
153 mas->last, respectively.
155 mas_walk() will walk the tree to the location of mas->index and set the
156 mas->index and mas->last according to the range for the entry.
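
A minimal sketch tying the maple_tree.rst description to the calls seen in these results; the helper name lookup_with_range() is a hypothetical illustration, while MA_STATE(), mas_walk(), mas_lock()/mas_unlock(), and the mas.index/mas.last semantics come from the maple_tree.h and maple_tree.rst entries above:

#include <linux/maple_tree.h>

/*
 * Walk to @index and report the range occupied by whatever is stored
 * there. Per the documentation excerpt above, mas_walk() walks the tree
 * to mas.index and sets mas.index/mas.last to the range of the entry it
 * lands on. Returns the entry, or NULL if @index falls in a hole.
 */
static void *lookup_with_range(struct maple_tree *mt, unsigned long index,
			       unsigned long *first, unsigned long *last)
{
	MA_STATE(mas, mt, index, index);	/* mas.index = mas.last = index */
	void *entry;

	mas_lock(&mas);
	entry = mas_walk(&mas);
	if (entry) {
		*first = mas.index;
		*last = mas.last;
	}
	mas_unlock(&mas);

	return entry;
}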
/linux/drivers/media/i2c/
max9286.c
749 struct max9286_asd *mas; in max9286_v4l2_notifier_register()
751 mas = v4l2_async_nf_add_fwnode(&priv->notifier, source->fwnode, in max9286_v4l2_notifier_register()
753 if (IS_ERR(mas)) { in max9286_v4l2_notifier_register()
755 i, PTR_ERR(mas)); in max9286_v4l2_notifier_register()
757 return PTR_ERR(mas); in max9286_v4l2_notifier_register()
760 mas->source = source; in max9286_v4l2_notifier_register()
