Lines matching full:object (KASAN slab hooks)
156 void __kasan_unpoison_new_object(struct kmem_cache *cache, void *object) in __kasan_unpoison_new_object() argument
158 kasan_unpoison(object, cache->object_size, false); in __kasan_unpoison_new_object()
161 void __kasan_poison_new_object(struct kmem_cache *cache, void *object) in __kasan_poison_new_object() argument
163 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE), in __kasan_poison_new_object()
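The two hooks above (these fragments appear to come from the KASAN slab hooks in mm/kasan/common.c) unpoison and poison a slab object's payload; the poison side rounds cache->object_size up to KASAN_GRANULE_SIZE because the shadow bookkeeping works in whole granules. Below is a minimal userspace sketch of that granule bookkeeping; GRANULE_SIZE, shadow[], poison_range() and the shadow byte values are made-up stand-ins for illustration, not the kernel's kasan_poison()/kasan_unpoison().

#include <stdio.h>
#include <string.h>

#define GRANULE_SIZE      8UL   /* stand-in for KASAN_GRANULE_SIZE */
#define POOL_SIZE         256UL
#define SHADOW_ACCESSIBLE 0x00
#define SHADOW_FREED      0xFB  /* any non-zero marker works for this model */

/* One shadow byte tracks one granule of the (pretend) object pool. */
static unsigned char shadow[POOL_SIZE / GRANULE_SIZE];

static unsigned long round_up_granule(unsigned long size)
{
        return (size + GRANULE_SIZE - 1) & ~(GRANULE_SIZE - 1);
}

/* Mark every granule covering [offset, offset + size) with the given value. */
static void poison_range(unsigned long offset, unsigned long size, unsigned char value)
{
        memset(&shadow[offset / GRANULE_SIZE], value,
               round_up_granule(size) / GRANULE_SIZE);
}

int main(void)
{
        unsigned long object_size = 20;  /* deliberately not granule-aligned */

        /* Object handed out: make its payload accessible. */
        poison_range(0, object_size, SHADOW_ACCESSIBLE);
        /* Object retired: the whole rounded-up size becomes inaccessible again. */
        poison_range(0, object_size, SHADOW_FREED);

        printf("a %lu-byte object covers %lu shadow byte(s)\n",
               object_size, round_up_granule(object_size) / GRANULE_SIZE);
        return 0;
}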
168 * This function assigns a tag to an object considering the following:
170 * object somewhere (e.g. in the object itself). We preassign a tag for
171 * each object in caches with constructors during slab creation and reuse
172 * the same tag each time a particular object is allocated.
178 const void *object, bool init) in assign_tag() argument
185 * set, assign a tag when the object is being allocated (init == false). in assign_tag()
195 return init ? kasan_random_tag() : get_tag(object); in assign_tag()
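The comment fragments and the return statement above describe the tag-assignment policy: a cache with a constructor (and, in the kernel proper, a SLAB_TYPESAFE_BY_RCU cache) gets a tag preassigned once at slab creation (init == true) and reuses it on every later allocation, while other caches get a fresh random tag only when an object is allocated (init == false). A small model of that policy, assuming hypothetical pick_random_tag() and model_assign_tag() helpers in place of kasan_random_tag() and get_tag():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

static uint8_t pick_random_tag(void)
{
        return (uint8_t)(rand() & 0xff);
}

/*
 * init == true:  called while the slab itself is being created.
 * init == false: called while an individual object is being allocated.
 */
static uint8_t model_assign_tag(bool cache_has_ctor, bool init, uint8_t stored_tag)
{
        if (!cache_has_ctor)
                /* No constructor: real tags are only picked per allocation. */
                return init ? 0xff : pick_random_tag();

        /* Constructor present: pick once at slab creation, reuse afterwards. */
        return init ? pick_random_tag() : stored_tag;
}

int main(void)
{
        uint8_t preassigned = model_assign_tag(true, true, 0);

        printf("tag at slab creation: 0x%02x\n", (unsigned)preassigned);
        printf("tag at allocation:    0x%02x\n",
               (unsigned)model_assign_tag(true, false, preassigned));
        return 0;
}

The 0xff at slab-creation time in the no-constructor branch is just a placeholder meaning "no per-object tag yet"; the interesting branch is the last one, which matches the return init ? kasan_random_tag() : get_tag(object) fragment above.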
199 const void *object) in __kasan_init_slab_obj() argument
201 /* Initialize per-object metadata if it is present. */ in __kasan_init_slab_obj()
203 kasan_init_object_meta(cache, object); in __kasan_init_slab_obj()
206 object = set_tag(object, assign_tag(cache, object, true)); in __kasan_init_slab_obj()
208 return (void *)object; in __kasan_init_slab_obj()
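__kasan_init_slab_obj() above returns the object pointer with the freshly assigned tag folded into it via set_tag(). A userspace model of that top-byte pointer tagging follows; TAG_SHIFT, TAG_MASK and the model_* helpers are assumptions (the real get_tag()/set_tag()/kasan_reset_tag() are arch-specific), and the tagged pointer below is only compared and printed, never dereferenced.

#include <stdint.h>
#include <stdio.h>

#define TAG_SHIFT 56
#define TAG_MASK  (0xffUL << TAG_SHIFT)

/* Fold an 8-bit tag into the (otherwise unused) top byte of a pointer. */
static void *model_set_tag(const void *ptr, uint8_t tag)
{
        return (void *)(((uintptr_t)ptr & ~TAG_MASK) | ((uintptr_t)tag << TAG_SHIFT));
}

static uint8_t model_get_tag(const void *ptr)
{
        return (uint8_t)((uintptr_t)ptr >> TAG_SHIFT);
}

/* Drop the tag again, yielding a plain address. */
static void *model_reset_tag(const void *ptr)
{
        return (void *)((uintptr_t)ptr & ~TAG_MASK);
}

int main(void)
{
        int x;
        void *tagged = model_set_tag(&x, 0xab);

        printf("tag: 0x%02x\n", (unsigned)model_get_tag(tagged));
        printf("reset matches original: %d\n", model_reset_tag(tagged) == (void *)&x);
        return 0;
}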
211 static inline bool poison_slab_object(struct kmem_cache *cache, void *object, in poison_slab_object() argument
219 tagged_object = object; in poison_slab_object()
220 object = kasan_reset_tag(object); in poison_slab_object()
222 if (unlikely(nearest_obj(cache, virt_to_slab(object), object) != object)) { in poison_slab_object()
236 kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE), in poison_slab_object()
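poison_slab_object() above first checks that the pointer being freed actually is the start of a slab object: nearest_obj() maps the address to the nearest object boundary in its slab, and a mismatch is treated as an invalid free rather than poisoned normally. A toy version of that check, assuming a contiguous slab of fixed-size objects and a hypothetical nearest_start() helper in place of nearest_obj()/virt_to_slab():

#include <stdint.h>
#include <stdio.h>

#define OBJ_SIZE 64UL

static char slab_base[OBJ_SIZE * 8];   /* toy slab holding 8 objects */

/* Round an arbitrary pointer down to the start of the object containing it. */
static void *nearest_start(void *ptr)
{
        uintptr_t off = (uintptr_t)ptr - (uintptr_t)slab_base;

        return slab_base + (off / OBJ_SIZE) * OBJ_SIZE;
}

int main(void)
{
        void *valid = slab_base + 2 * OBJ_SIZE;       /* start of the third object */
        void *bogus = slab_base + 2 * OBJ_SIZE + 8;   /* pointer into its middle */

        printf("freeing object start: %s\n",
               nearest_start(valid) == valid ? "ok" : "invalid-free");
        printf("freeing interior ptr: %s\n",
               nearest_start(bogus) == bogus ? "ok" : "invalid-free");
        return 0;
}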
245 bool __kasan_slab_free(struct kmem_cache *cache, void *object, in __kasan_slab_free() argument
248 if (is_kfence_address(object)) in __kasan_slab_free()
252 * If the object is buggy, do not let slab put the object onto the in __kasan_slab_free()
253 * freelist. The object will thus never be allocated again and its in __kasan_slab_free()
256 if (poison_slab_object(cache, object, ip, init)) in __kasan_slab_free()
260 * If the object is put into quarantine, do not let slab put the object in __kasan_slab_free()
261 * onto the freelist for now. The object's metadata is kept until the in __kasan_slab_free()
262 * object gets evicted from quarantine. in __kasan_slab_free()
264 if (kasan_quarantine_put(cache, object)) in __kasan_slab_free()
268 * Note: Keep per-object metadata to allow KASAN print stack traces for in __kasan_slab_free()
272 /* Let slab put the object onto the freelist. */ in __kasan_slab_free()
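The comments above spell out the free-path contract: a buggy free keeps the object off the freelist for good, a quarantined object stays off the freelist until quarantine evicts it, and only otherwise may slab recycle the memory. The control-flow sketch below mirrors that contract with hypothetical model_* stubs in place of poison_slab_object() and kasan_quarantine_put(); a true result means "do not put the object on the freelist".

#include <stdbool.h>
#include <stdio.h>

static bool model_poison_object(void *object)
{
        (void)object;
        return false;   /* pretend the free was valid and poisoning succeeded */
}

static bool model_quarantine_put(void *object)
{
        (void)object;
        return true;    /* pretend quarantine accepted the object */
}

/* Returns true if slab must NOT put the object on the freelist (yet). */
static bool model_slab_free_hook(void *object)
{
        if (model_poison_object(object))
                return true;    /* buggy free: never hand this object out again */
        if (model_quarantine_put(object))
                return true;    /* delayed free: metadata kept until eviction */
        return false;           /* normal case: slab may reuse the object */
}

int main(void)
{
        int dummy;

        printf("keep off freelist: %s\n", model_slab_free_hook(&dummy) ? "yes" : "no");
        return 0;
}

Holding freed objects in quarantine delays their reuse, which is what allows the "metadata is kept until the object gets evicted from quarantine" behaviour noted above and makes use-after-free accesses on recently freed objects easier to catch.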
298 /* The object will be poisoned by kasan_poison_pages(). */ in __kasan_kfree_large()
301 static inline void unpoison_slab_object(struct kmem_cache *cache, void *object, in unpoison_slab_object() argument
305 * Unpoison the whole object. For kmalloc() allocations, in unpoison_slab_object()
308 kasan_unpoison(object, cache->object_size, init); in unpoison_slab_object()
312 kasan_save_alloc_info(cache, object, flags); in unpoison_slab_object()
316 void *object, gfp_t flags, bool init) in __kasan_slab_alloc() argument
324 if (unlikely(object == NULL)) in __kasan_slab_alloc()
327 if (is_kfence_address(object)) in __kasan_slab_alloc()
328 return (void *)object; in __kasan_slab_alloc()
334 tag = assign_tag(cache, object, false); in __kasan_slab_alloc()
335 tagged_object = set_tag(object, tag); in __kasan_slab_alloc()
337 /* Unpoison the object and save alloc info for non-kmalloc() allocations. */ in __kasan_slab_alloc()
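__kasan_slab_alloc() above handles two special cases before doing any real work: a NULL object (failed allocation) is returned unchanged, and a KFENCE-managed object is returned untouched because KFENCE applies its own checking. Every other object gets a tag (assign_tag() with init == false) and an unpoisoned payload. An order-of-operations sketch, with a hypothetical is_kfence_backed() stub in place of is_kfence_address():

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

static bool is_kfence_backed(const void *object)
{
        (void)object;
        return false;   /* pretend this is an ordinary slab object */
}

static void *model_slab_alloc_hook(void *object, size_t object_size)
{
        if (object == NULL)
                return NULL;            /* failed allocation: nothing to track */
        if (is_kfence_backed(object))
                return object;          /* KFENCE object: leave it alone */

        /* Real code: assign/reuse a tag, then unpoison object_size bytes. */
        printf("tag the pointer, unpoison %zu bytes\n", object_size);
        return object;                  /* the kernel returns the tagged pointer */
}

int main(void)
{
        char storage[64];

        model_slab_alloc_hook(storage, sizeof(storage));
        return 0;
}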
344 const void *object, size_t size, gfp_t flags) in poison_kmalloc_redzone() argument
351 * Partially poison the last object granule to cover the unaligned in poison_kmalloc_redzone()
355 kasan_poison_last_granule((void *)object, size); in poison_kmalloc_redzone()
358 redzone_start = round_up((unsigned long)(object + size), in poison_kmalloc_redzone()
360 redzone_end = round_up((unsigned long)(object + cache->object_size), in poison_kmalloc_redzone()
370 kasan_save_alloc_info(cache, (void *)object, flags); in poison_kmalloc_redzone()
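poison_kmalloc_redzone() above turns the unused tail of a kmalloc() object into a redzone in two steps: kasan_poison_last_granule() covers the unaligned remainder of the granule containing object + size, and the redzone proper then runs from round_up(object + size, KASAN_GRANULE_SIZE) to the rounded-up end of cache->object_size. A worked example of that arithmetic, assuming an 8-byte granule and a local round_up_to() helper:

#include <stdio.h>

#define GRANULE 8UL     /* stand-in for KASAN_GRANULE_SIZE */

static unsigned long round_up_to(unsigned long x, unsigned long align)
{
        return (x + align - 1) & ~(align - 1);
}

int main(void)
{
        unsigned long object = 0;       /* pretend the object starts at offset 0 */
        unsigned long size = 52;        /* bytes actually requested from kmalloc() */
        unsigned long object_size = 64; /* full size of the backing slab object */

        unsigned long redzone_start = round_up_to(object + size, GRANULE);
        unsigned long redzone_end = round_up_to(object + object_size, GRANULE);

        printf("partial-granule poison covers bytes %lu..%lu\n", size, redzone_start - 1);
        printf("whole-granule redzone covers bytes %lu..%lu\n", redzone_start, redzone_end - 1);
        return 0;
}

With size = 52 and object_size = 64 this prints bytes 52..55 for the partial granule and 56..63 for the redzone, so any access past the requested size lands in poisoned shadow.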
374 void * __must_check __kasan_kmalloc(struct kmem_cache *cache, const void *object, in __kasan_kmalloc() argument
380 if (unlikely(object == NULL)) in __kasan_kmalloc()
383 if (is_kfence_address(object)) in __kasan_kmalloc()
384 return (void *)object; in __kasan_kmalloc()
386 /* The object has already been unpoisoned by kasan_slab_alloc(). */ in __kasan_kmalloc()
387 poison_kmalloc_redzone(cache, object, size, flags); in __kasan_kmalloc()
390 return (void *)object; in __kasan_kmalloc()
402 * Partially poison the last object granule to cover the unaligned in poison_kmalloc_large_redzone()
424 /* The object has already been unpoisoned by kasan_unpoison_pages(). */ in __kasan_kmalloc_large()
431 void * __must_check __kasan_krealloc(const void *object, size_t size, gfp_t flags) in __kasan_krealloc() argument
438 if (unlikely(object == ZERO_SIZE_PTR)) in __kasan_krealloc()
439 return (void *)object; in __kasan_krealloc()
441 if (is_kfence_address(object)) in __kasan_krealloc()
442 return (void *)object; in __kasan_krealloc()
445 * Unpoison the object's data. in __kasan_krealloc()
449 kasan_unpoison(object, size, false); in __kasan_krealloc()
451 slab = virt_to_slab(object); in __kasan_krealloc()
455 poison_kmalloc_large_redzone(object, size, flags); in __kasan_krealloc()
457 poison_kmalloc_redzone(slab->slab_cache, object, size, flags); in __kasan_krealloc()
459 return (void *)object; in __kasan_krealloc()
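__kasan_krealloc() above first unpoisons the object's data up to the new size and only then rebuilds the redzone behind it, picking the large-allocation path when virt_to_slab() finds no slab (a page-backed allocation) and the slab-cache path otherwise. A sketch of that two-step repoisoning, using printf() stand-ins for poison_kmalloc_large_redzone() and poison_kmalloc_redzone() and a page_backed flag in place of the slab lookup:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

static void model_krealloc_repoison(size_t size, bool page_backed)
{
        /* Step 1: the first `size` bytes of the object become accessible again. */
        printf("unpoison %zu bytes of object data\n", size);

        /* Step 2: rebuild the redzone behind the newly accessible area. */
        if (page_backed)
                printf("repoison page-level (large allocation) redzone past byte %zu\n", size);
        else
                printf("repoison slab redzone past byte %zu\n", size);
}

int main(void)
{
        model_krealloc_repoison(100, false);    /* krealloc() of a slab-backed object */
        model_krealloc_repoison(100, true);     /* krealloc() of a page-backed object */
        return 0;
}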
534 /* Unpoison the object and save alloc info for non-kmalloc() allocations. */ in __kasan_mempool_unpoison_object()