Lines matching "fine-tune" (excerpts from include/linux/sched/mm.h)

/* SPDX-License-Identifier: GPL-2.0 */

/*
 * mmgrab() - Pin a &struct mm_struct.
 * See also <Documentation/mm/active_mm.rst> for an in-depth explanation
 */
        atomic_inc(&mm->mm_count);                      /* in mmgrab() */
        /* ... user-space, after storing to rq->curr. */        /* in mmdrop() */
        if (unlikely(atomic_dec_and_test(&mm->mm_count)))
                __mmdrop(mm);

        if (atomic_dec_and_test(&mm->mm_count))                 /* in mmdrop_sched() */
                call_rcu(&mm->delayed_drop, __mmdrop_delayed);
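The pairing these fragments document is mmgrab() to pin the mm_struct itself (the mm_count reference) and mmdrop() to release it. Below is a minimal sketch of that pattern for a deferred-work user; struct mm_work, queue_mm_work() and mm_work_fn() are hypothetical names, and only mmgrab()/mmdrop() come from the API above.

#include <linux/errno.h>
#include <linux/sched/mm.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

struct mm_work {
        struct work_struct work;
        struct mm_struct *mm;
};

static void mm_work_fn(struct work_struct *work)
{
        struct mm_work *mw = container_of(work, struct mm_work, work);

        /* Inspect fields of mw->mm here; do not touch its mappings. */
        mmdrop(mw->mm);                 /* release the mm_count reference */
        kfree(mw);
}

static int queue_mm_work(struct mm_struct *mm)
{
        struct mm_work *mw = kzalloc(sizeof(*mw), GFP_KERNEL);

        if (!mw)
                return -ENOMEM;
        mmgrab(mm);                     /* keep the mm_struct alive until the work runs */
        mw->mm = mm;
        INIT_WORK(&mw->work, mm_work_fn);
        schedule_work(&mw->work);
        return 0;
}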
/*
 * mmget() - Pin the address space associated with a &struct mm_struct.
 * See also <Documentation/mm/active_mm.rst> for an in-depth explanation
 */
        atomic_inc(&mm->mm_users);                      /* in mmget() */

        return atomic_inc_not_zero(&mm->mm_users);      /* in mmget_not_zero() */

/* mmput gets rid of the mappings and all user-space */
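mmget()/mmget_not_zero() pin the address space itself (mm_users), not just the mm_struct, and mmput() drops that reference. A small sketch of the usual pattern, assuming the caller already holds a valid mm pointer; the function name is made up.

#include <linux/errno.h>
#include <linux/sched/mm.h>

static int with_live_address_space(struct mm_struct *mm)
{
        if (!mmget_not_zero(mm))        /* address space is already being torn down */
                return -ESRCH;

        /* Access VMAs / page tables here, under the appropriate mm locks. */

        mmput(mm);                      /* last user drops the mappings */
        return 0;
}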
        /* in in_vfork(): */
        /*
         * need RCU to access ->real_parent if CLONE_VM was used along with ...
         * We check real_parent->mm == tsk->mm because CLONE_VFORK does not ...
         * ->real_parent is not necessarily the task doing vfork(), so in ...
         * And in this case we can't trust the real_parent->mm == tsk->mm ...
         * another oom-unkillable task does this it should blame itself.
         */
        ret = tsk->vfork_done &&
                rcu_dereference(tsk->real_parent)->mm == tsk->mm;
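The two statements above sit inside an RCU read-side section, which is what makes the rcu_dereference() of ->real_parent legal. A sketch of that bracketing, assuming the usual rcu_read_lock()/rcu_read_unlock() pattern; the surrounding code in in_vfork() may differ in detail.

#include <linux/rcupdate.h>
#include <linux/sched.h>

static bool in_vfork_sketch(struct task_struct *tsk)
{
        bool ret;

        rcu_read_lock();
        ret = tsk->vfork_done &&
                rcu_dereference(tsk->real_parent)->mm == tsk->mm;
        rcu_read_unlock();

        return ret;
}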
/*
 * Applies per-task gfp context to the given allocation flags.
 */
        unsigned int pflags = READ_ONCE(current->flags);        /* in current_gfp_context() */
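current_gfp_context() narrows the caller's gfp mask according to the PF_MEMALLOC_* bits it reads from current->flags. The following is a simplified sketch of that masking, not the kernel's exact code; gfp_context_sketch() is a made-up name.

#include <linux/gfp.h>
#include <linux/sched.h>

static inline gfp_t gfp_context_sketch(gfp_t flags)
{
        unsigned int pflags = READ_ONCE(current->flags);

        if (pflags & PF_MEMALLOC_NOIO)
                flags &= ~(__GFP_IO | __GFP_FS);        /* NOIO scope also forbids fs recursion */
        else if (pflags & PF_MEMALLOC_NOFS)
                flags &= ~__GFP_FS;
        if (pflags & PF_MEMALLOC_PIN)
                flags &= ~__GFP_MOVABLE;                /* keep pinned pages out of movable zones */

        return flags;
}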
/* Any memory-allocation retry loop should use ...
 * ... and a central place to fine tune the waiting as the MM ...
 */
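This comment appears to document memalloc_retry_wait(). Assuming that helper, a retry loop passes the gfp flags it allocated with so the waiting can be tuned in one central place rather than open-coding a sleep; alloc_page_persistently() below is a hypothetical caller.

#include <linux/gfp.h>
#include <linux/sched/mm.h>

static struct page *alloc_page_persistently(gfp_t gfp)
{
        struct page *page;

        while (!(page = alloc_page(gfp)))
                memalloc_retry_wait(gfp);       /* central, MM-tuned back-off */

        return page;
}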
/* might_alloc - Mark possible allocation sites */
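might_alloc() lets a function that only sometimes allocates trip the sleep and reclaim debugging checks on every call, not just on the allocating ones. A hedged sketch; the cache-then-allocate helper and its names are invented.

#include <linux/sched/mm.h>
#include <linux/slab.h>

static void *get_buffer(struct kmem_cache *cache, void **cached, gfp_t gfp)
{
        void *buf;

        might_alloc(gfp);               /* warn here even when the cached path is taken */

        buf = *cached;
        if (buf) {
                *cached = NULL;
                return buf;
        }
        return kmem_cache_alloc(cache, gfp);
}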
/*
 * memalloc_noio_save - Marks implicit GFP_NOIO allocation scope.
 */
        unsigned int flags = current->flags & PF_MEMALLOC_NOIO;         /* in memalloc_noio_save() */
        current->flags |= PF_MEMALLOC_NOIO;

/*
 * memalloc_noio_restore - Ends the implicit GFP_NOIO scope.
 */
        current->flags = (current->flags & ~PF_MEMALLOC_NOIO) | flags;  /* in memalloc_noio_restore() */
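The save/restore pair brackets a scope in which every allocation, even a nominal GFP_KERNEL one in a callee, behaves as GFP_NOIO. A sketch of a driver-side use; struct my_dev and its reclaim_buf field are hypothetical.

#include <linux/sched/mm.h>
#include <linux/slab.h>

static int preallocate_reclaim_buffer(struct my_dev *dev, size_t bytes)
{
        unsigned int noio_flags;
        void *buf;

        noio_flags = memalloc_noio_save();
        buf = kvzalloc(bytes, GFP_KERNEL);      /* treated as GFP_NOIO inside the scope */
        memalloc_noio_restore(noio_flags);

        if (!buf)
                return -ENOMEM;
        dev->reclaim_buf = buf;                 /* hypothetical field */
        return 0;
}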
/*
 * memalloc_nofs_save - Marks implicit GFP_NOFS allocation scope.
 */
        unsigned int flags = current->flags & PF_MEMALLOC_NOFS;         /* in memalloc_nofs_save() */
        current->flags |= PF_MEMALLOC_NOFS;

/*
 * memalloc_nofs_restore - Ends the implicit GFP_NOFS scope.
 */
        current->flags = (current->flags & ~PF_MEMALLOC_NOFS) | flags;  /* in memalloc_nofs_restore() */
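The same pattern applies to filesystems: while holding fs-internal locks that reclaim might need, the scope is marked NOFS so that reclaim triggered by this allocation cannot re-enter the filesystem. A sketch with a hypothetical my_fs_info structure and trans_lock mutex.

#include <linux/mutex.h>
#include <linux/sched/mm.h>
#include <linux/slab.h>

static void *fs_alloc_record(struct my_fs_info *fsi, size_t len)
{
        unsigned int nofs_flags;
        void *rec;

        mutex_lock(&fsi->trans_lock);           /* hypothetical fs-internal lock */
        nofs_flags = memalloc_nofs_save();
        rec = kmalloc(len, GFP_KERNEL);         /* effectively GFP_NOFS inside the scope */
        memalloc_nofs_restore(nofs_flags);
        mutex_unlock(&fsi->trans_lock);

        return rec;
}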
        unsigned int flags = current->flags & PF_MEMALLOC;              /* in memalloc_noreclaim_save() */
        current->flags |= PF_MEMALLOC;

        current->flags = (current->flags & ~PF_MEMALLOC) | flags;       /* in memalloc_noreclaim_restore() */

        unsigned int flags = current->flags & PF_MEMALLOC_PIN;          /* in memalloc_pin_save() */
        current->flags |= PF_MEMALLOC_PIN;

        current->flags = (current->flags & ~PF_MEMALLOC_PIN) | flags;   /* in memalloc_pin_restore() */
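The noreclaim and pin helpers follow the same save/restore shape; only the task flag, and therefore the meaning, changes: PF_MEMALLOC marks code acting on behalf of reclaim (and may dip into reserves), PF_MEMALLOC_PIN keeps allocations out of movable zones ahead of long-term pinning. The call sites below are illustrative only.

#include <linux/sched/mm.h>

static void scoped_flag_sketch(void)
{
        unsigned int f;

        /* Only for code running on behalf of memory reclaim. */
        f = memalloc_noreclaim_save();
        /* ... small, short-lived allocations needed to make reclaim progress ... */
        memalloc_noreclaim_restore(f);

        /* Before allocating pages that are about to be pinned long-term. */
        f = memalloc_pin_save();
        /* ... allocations that must stay out of ZONE_MOVABLE ... */
        memalloc_pin_restore(f);
}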
/*
 * set_active_memcg - Starts the remote memcg charging scope.
 */
        old = current->active_memcg;                    /* in set_active_memcg() */
        current->active_memcg = memcg;
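set_active_memcg() returns the previously active memcg so the caller can restore it when the remote-charging scope ends. A sketch charging an accounted allocation to another cgroup; obtaining @memcg is left to the caller, and alloc_on_behalf_of() is an invented name.

#include <linux/memcontrol.h>
#include <linux/sched/mm.h>
#include <linux/slab.h>

static void *alloc_on_behalf_of(struct mem_cgroup *memcg, size_t size)
{
        struct mem_cgroup *old;
        void *p;

        old = set_active_memcg(memcg);
        p = kmalloc(size, GFP_KERNEL_ACCOUNT);  /* charged to @memcg, not to current's memcg */
        set_active_memcg(old);

        return p;
}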
        if (current->mm != mm)                          /* in membarrier_mm_sync_core_before_usermode() */
                return;
        if (likely(!(atomic_read(&mm->membarrier_state) & ...