Searched refs:LRU_REFS_WIDTH (Results 1 – 6 of 6) sorted by relevance
112 #define LRU_REFS_WIDTH min(__LRU_REFS_WIDTH, BITS_PER_LONG - NR_PAGEFLAGS - \   macro
118 LRU_GEN_WIDTH + LRU_REFS_WIDTH)
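A minimal userspace sketch of the clamping seen in hit 112, with assumed example widths rather than the real CONFIG-dependent values: the refs counter only gets whatever bits remain in the flags word after the page flags and the other packed fields.

#include <stdio.h>

#define BITS_PER_LONG    64
#define NR_PAGEFLAGS     24	/* assumed */
#define OTHER_WIDTHS     26	/* assumed: zone, node, section, last_cpupid, ... */
#define LRU_GEN_WIDTH     3	/* assumed */
#define __LRU_REFS_WIDTH  2	/* assumed requested width */

#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* clamp the refs counter to whatever bits are actually left over */
#define LRU_REFS_WIDTH \
	MIN(__LRU_REFS_WIDTH, \
	    BITS_PER_LONG - NR_PAGEFLAGS - OTHER_WIDTHS - LRU_GEN_WIDTH)

int main(void)
{
	printf("bits left over: %d\n",
	       BITS_PER_LONG - NR_PAGEFLAGS - OTHER_WIDTHS - LRU_GEN_WIDTH);
	printf("LRU_REFS_WIDTH: %d\n", LRU_REFS_WIDTH);
	return 0;
}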
138 VM_WARN_ON_ONCE(refs > BIT(LRU_REFS_WIDTH)); in lru_tier_from_refs()
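The warning at line 138 bounds refs by BIT(LRU_REFS_WIDTH). Below is a hedged userspace approximation of the tier mapping behind that bound, assuming LRU_REFS_WIDTH is 2 and ignoring the PG_workingset special case (the exact formula varies across kernel versions): accumulated references are compressed logarithmically into a small tier index.

#include <assert.h>
#include <stdio.h>

#define LRU_REFS_WIDTH  2		/* assumed */
#define BIT(n)          (1UL << (n))

/* smallest order such that 2^order >= v, i.e. kernel-style order_base_2() */
static int order_base_2(unsigned long v)
{
	int order = 0;

	while ((1UL << order) < v)
		order++;
	return order;
}

/* simplified model: refs may not exceed BIT(LRU_REFS_WIDTH) */
static int lru_tier_from_refs(int refs)
{
	assert(refs <= (int)BIT(LRU_REFS_WIDTH));
	return order_base_2(refs + 1);	/* 0->0, 1->1, 2..3->2, 4->3 */
}

int main(void)
{
	for (int refs = 0; refs <= (int)BIT(LRU_REFS_WIDTH); refs++)
		printf("refs=%d -> tier %d\n", refs, lru_tier_from_refs(refs));
	return 0;
}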
392 * is marked by additional bits of LRU_REFS_WIDTH in folio->flags.
411 #define LRU_REFS_MASK ((BIT(LRU_REFS_WIDTH) - 1) << LRU_REFS_PGOFF)
415 * lru_gen_inc_refs() sets additional bits of LRU_REFS_WIDTH in folio->flags
416 * after PG_referenced, then PG_workingset after LRU_REFS_WIDTH. After all its
1139 #define LRU_REFS_PGOFF (LRU_GEN_PGOFF - LRU_REFS_WIDTH)
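A standalone sketch of the layout arithmetic in hits 411 and 1139, using assumed widths and an assumed LRU_GEN_PGOFF (real offsets depend on the kernel configuration): the refs field is carved out of folio->flags directly below the generation counter.

#include <stdio.h>

#define BIT(n)          (1UL << (n))
#define LRU_GEN_WIDTH   3			/* assumed */
#define LRU_REFS_WIDTH  2			/* assumed */
#define LRU_GEN_PGOFF   40			/* assumed offset of the gen field */
#define LRU_REFS_PGOFF  (LRU_GEN_PGOFF - LRU_REFS_WIDTH)

#define LRU_GEN_MASK    ((BIT(LRU_GEN_WIDTH) - 1) << LRU_GEN_PGOFF)
#define LRU_REFS_MASK   ((BIT(LRU_REFS_WIDTH) - 1) << LRU_REFS_PGOFF)

int main(void)
{
	printf("gen  mask %#018lx (bits %d..%d)\n", LRU_GEN_MASK,
	       LRU_GEN_PGOFF, LRU_GEN_PGOFF + LRU_GEN_WIDTH - 1);
	printf("refs mask %#018lx (bits %d..%d)\n", LRU_REFS_MASK,
	       LRU_REFS_PGOFF, LRU_REFS_PGOFF + LRU_REFS_WIDTH - 1);
	return 0;
}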
247 BUILD_BUG_ON(LRU_GEN_WIDTH + LRU_REFS_WIDTH > BITS_PER_LONG - EVICTION_SHIFT); in lru_gen_eviction()
252 token = (min_seq << LRU_REFS_WIDTH) | max(refs - 1, 0); in lru_gen_eviction()
278 max_seq &= EVICTION_MASK >> LRU_REFS_WIDTH; in lru_gen_test_recent()
280 return abs_diff(max_seq, *token >> LRU_REFS_WIDTH) < MAX_NR_GENS; in lru_gen_test_recent()
308 refs = (token & (BIT(LRU_REFS_WIDTH) - 1)) + 1; in lru_gen_refault()
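The workingset hits pack and unpack a shadow-entry token. Here is a hedged sketch of that arithmetic, with an assumed LRU_REFS_WIDTH and without the surrounding eviction bookkeeping: eviction folds min_seq and a saturating refs count into one value, and refault recovers refs from the low LRU_REFS_WIDTH bits.

#include <stdio.h>

#define LRU_REFS_WIDTH  2		/* assumed */
#define BIT(n)          (1UL << (n))
#define MAX(a, b)       ((a) > (b) ? (a) : (b))

/* eviction side, cf. line 252: fold min_seq and refs into one token */
static unsigned long pack_token(unsigned long min_seq, int refs)
{
	return (min_seq << LRU_REFS_WIDTH) | MAX(refs - 1, 0);
}

/* refault side, cf. line 308: recover the pieces from the token */
static void unpack_token(unsigned long token,
			 unsigned long *min_seq, int *refs)
{
	*min_seq = token >> LRU_REFS_WIDTH;
	*refs = (token & (BIT(LRU_REFS_WIDTH) - 1)) + 1;
}

int main(void)
{
	unsigned long min_seq, token = pack_token(7, 3);
	int refs;

	unpack_token(token, &min_seq, &refs);
	printf("token=%#lx -> min_seq=%lu refs=%d\n", token, min_seq, refs);
	return 0;
}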
112 LRU_REFS_WIDTH, in mminit_verify_pageflags_layout()
3907 if (refs + workingset != BIT(LRU_REFS_WIDTH) + 1) { in inc_min_seq()
4495 if (tier > tier_idx || refs + workingset == BIT(LRU_REFS_WIDTH) + 1) { in sort_folio()
4500 if (refs + workingset != BIT(LRU_REFS_WIDTH) + 1) { in sort_folio()
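A hedged sketch of the saturation test repeated in these vmscan hits, assuming LRU_REFS_WIDTH is 2: a folio counts as fully referenced only once its refs counter has hit BIT(LRU_REFS_WIDTH) and PG_workingset is also set, so the sum reaches BIT(LRU_REFS_WIDTH) + 1.

#include <stdbool.h>
#include <stdio.h>

#define LRU_REFS_WIDTH  2		/* assumed */
#define BIT(n)          (1 << (n))

/* refs counter at its ceiling and PG_workingset set */
static bool fully_referenced(int refs, bool workingset)
{
	return refs + workingset == BIT(LRU_REFS_WIDTH) + 1;
}

int main(void)
{
	printf("refs=%d workingset=1 -> %d\n", BIT(LRU_REFS_WIDTH),
	       fully_referenced(BIT(LRU_REFS_WIDTH), true));
	printf("refs=%d workingset=0 -> %d\n", BIT(LRU_REFS_WIDTH),
	       fully_referenced(BIT(LRU_REFS_WIDTH), false));
	return 0;
}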