Lines Matching refs:folio (mm/swap.c)

83 struct folio *folio = page_folio(page); in __page_cache_release() local
87 lruvec = folio_lruvec_lock_irqsave(folio, &flags); in __page_cache_release()
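The two hits above (lines 83 and 87) sit in __page_cache_release() in the kernel's mm/swap.c. A minimal sketch of the pattern they suggest follows; only the page_folio() conversion and the folio_lruvec_lock_irqsave() call come from the match list, the surrounding LRU-removal and unlock calls are assumptions about the usual shape of this helper.

static void __page_cache_release(struct page *page)
{
	if (PageLRU(page)) {
		/* Matched line 83: get the folio containing this page. */
		struct folio *folio = page_folio(page);
		struct lruvec *lruvec;
		unsigned long flags;

		/* Matched line 87: lock the lruvec this folio belongs to. */
		lruvec = folio_lruvec_lock_irqsave(folio, &flags);
		/* Assumed body: drop the page from its LRU list, then unlock. */
		del_page_from_lru_list(page, lruvec);
		__clear_page_lru_flags(page);
		unlock_page_lruvec_irqrestore(lruvec, flags);
	}
}
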
202 struct folio *folio = page_folio(page); in pagevec_lru_move_fn() local
208 lruvec = folio_lruvec_relock_irqsave(folio, lruvec, &flags); in pagevec_lru_move_fn()
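Lines 202 and 208 are inside pagevec_lru_move_fn(), the helper that walks a pagevec and applies a move callback under the correct lruvec lock. A hedged sketch of that loop is below; only the page_folio() and folio_lruvec_relock_irqsave() lines come from the match list, the callback signature, the loop bounds and the unlock/release epilogue are assumptions, and some per-page flag handling is omitted.

static void pagevec_lru_move_fn(struct pagevec *pvec,
		void (*move_fn)(struct page *page, struct lruvec *lruvec))
{
	int i;
	struct lruvec *lruvec = NULL;
	unsigned long flags = 0;

	for (i = 0; i < pagevec_count(pvec); i++) {
		struct page *page = pvec->pages[i];
		/* Matched line 202: operate on the folio, not the raw page. */
		struct folio *folio = page_folio(page);

		/* Matched line 208: take (or re-take) the right lruvec lock;
		 * the lock is only switched when the folio's lruvec changes. */
		lruvec = folio_lruvec_relock_irqsave(folio, lruvec, &flags);
		(*move_fn)(page, lruvec);
	}
	/* Assumed epilogue: drop the last lock and the pagevec's references. */
	if (lruvec)
		unlock_page_lruvec_irqrestore(lruvec, flags);
	release_pages(pvec->pages, pvec->nr);
	pagevec_reinit(pvec);
}
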
221 struct folio *folio = page_folio(page); in pagevec_move_tail_fn() local
223 if (!folio_test_unevictable(folio)) { in pagevec_move_tail_fn()
224 lruvec_del_folio(lruvec, folio); in pagevec_move_tail_fn()
225 folio_clear_active(folio); in pagevec_move_tail_fn()
226 lruvec_add_folio_tail(lruvec, folio); in pagevec_move_tail_fn()
227 __count_vm_events(PGROTATED, folio_nr_pages(folio)); in pagevec_move_tail_fn()
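Lines 221-227 are essentially the whole body of pagevec_move_tail_fn(): move an evictable folio to the tail of its list so reclaim finds it again soon, and count the rotation. A sketch, with the signature guessed from line 221 (the folio is derived from a page argument):

static void pagevec_move_tail_fn(struct page *page, struct lruvec *lruvec)
{
	struct folio *folio = page_folio(page);		/* matched line 221 */

	if (!folio_test_unevictable(folio)) {		/* matched line 223 */
		lruvec_del_folio(lruvec, folio);	/* matched line 224 */
		folio_clear_active(folio);		/* matched line 225 */
		lruvec_add_folio_tail(lruvec, folio);	/* matched line 226 */
		/* Matched line 227: one PGROTATED event per base page. */
		__count_vm_events(PGROTATED, folio_nr_pages(folio));
	}
}
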
250 void folio_rotate_reclaimable(struct folio *folio) in folio_rotate_reclaimable() argument
252 if (!folio_test_locked(folio) && !folio_test_dirty(folio) && in folio_rotate_reclaimable()
253 !folio_test_unevictable(folio) && folio_test_lru(folio)) { in folio_rotate_reclaimable()
257 folio_get(folio); in folio_rotate_reclaimable()
260 if (pagevec_add_and_need_flush(pvec, &folio->page)) in folio_rotate_reclaimable()
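folio_rotate_reclaimable() (line 250) is the entry point used after writeback to put a clean, unlocked, evictable folio at the tail of its LRU. The match list only shows the guard tests, folio_get() and the pagevec_add_and_need_flush() call; the per-CPU pagevec and locking in the sketch below are assumptions about how the batching is plumbed, and the lru_rotate names are illustrative.

void folio_rotate_reclaimable(struct folio *folio)
{
	if (!folio_test_locked(folio) && !folio_test_dirty(folio) &&
	    !folio_test_unevictable(folio) && folio_test_lru(folio)) {
		struct pagevec *pvec;
		unsigned long flags;

		/* Matched line 257: hold a reference while the folio waits
		 * in the per-CPU pagevec to be drained. */
		folio_get(folio);
		/* Assumed plumbing: per-CPU rotation pagevec under a local lock. */
		local_lock_irqsave(&lru_rotate.lock, flags);
		pvec = this_cpu_ptr(&lru_rotate.pvec);
		/* Matched line 260: queue the page; drain when the pagevec fills. */
		if (pagevec_add_and_need_flush(pvec, &folio->page))
			pagevec_lru_move_fn(pvec, pagevec_move_tail_fn);
		local_unlock_irqrestore(&lru_rotate.lock, flags);
	}
}
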
306 void lru_note_cost_folio(struct folio *folio) in lru_note_cost_folio() argument
308 lru_note_cost(folio_lruvec(folio), folio_is_file_lru(folio), in lru_note_cost_folio()
309 folio_nr_pages(folio)); in lru_note_cost_folio()
312 static void __folio_activate(struct folio *folio, struct lruvec *lruvec) in __folio_activate() argument
314 if (!folio_test_active(folio) && !folio_test_unevictable(folio)) { in __folio_activate()
315 long nr_pages = folio_nr_pages(folio); in __folio_activate()
317 lruvec_del_folio(lruvec, folio); in __folio_activate()
318 folio_set_active(folio); in __folio_activate()
319 lruvec_add_folio(lruvec, folio); in __folio_activate()
320 trace_mm_lru_activate(folio); in __folio_activate()
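__folio_activate() (lines 312-320) does the actual move to the active list once the lruvec lock is held. The matched lines cover the test, the del/set/add sequence and the tracepoint; the PGACTIVATE accounting using nr_pages (line 315) is an assumption about what that local is for.

static void __folio_activate(struct folio *folio, struct lruvec *lruvec)
{
	if (!folio_test_active(folio) && !folio_test_unevictable(folio)) {
		long nr_pages = folio_nr_pages(folio);	/* matched line 315 */

		lruvec_del_folio(lruvec, folio);	/* matched line 317 */
		folio_set_active(folio);		/* matched line 318 */
		lruvec_add_folio(lruvec, folio);	/* matched line 319 */
		trace_mm_lru_activate(folio);		/* matched line 320 */

		/* Assumed use of nr_pages: vm event accounting. */
		__count_vm_events(PGACTIVATE, nr_pages);
	}
}
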
347 static void folio_activate(struct folio *folio) in folio_activate() argument
349 if (folio_test_lru(folio) && !folio_test_active(folio) && in folio_activate()
350 !folio_test_unevictable(folio)) { in folio_activate()
353 folio_get(folio); in folio_activate()
356 if (pagevec_add_and_need_flush(pvec, &folio->page)) in folio_activate()
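The first folio_activate() (line 347) is the batching variant: it takes a reference and parks the folio in a per-CPU "activate" pagevec instead of touching the LRU directly. Only the tests, folio_get() and pagevec_add_and_need_flush() appear in the match list; the local-lock/per-CPU plumbing and the drain callback below are assumptions mirroring the rotate path.

static void folio_activate(struct folio *folio)
{
	if (folio_test_lru(folio) && !folio_test_active(folio) &&
	    !folio_test_unevictable(folio)) {
		struct pagevec *pvec;

		folio_get(folio);		/* matched line 353 */
		/* Assumed batching plumbing; names illustrative. */
		local_lock(&lru_pvecs.lock);
		pvec = this_cpu_ptr(&lru_pvecs.activate_page);
		if (pagevec_add_and_need_flush(pvec, &folio->page))
			pagevec_lru_move_fn(pvec, __activate_page); /* assumed drain hook */
		local_unlock(&lru_pvecs.lock);
	}
}
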
367 static void folio_activate(struct folio *folio) in folio_activate() argument
371 if (folio_test_clear_lru(folio)) { in folio_activate()
372 lruvec = folio_lruvec_lock_irq(folio); in folio_activate()
373 __folio_activate(folio, lruvec); in folio_activate()
375 folio_set_lru(folio); in folio_activate()
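The second folio_activate() (line 367) is the non-batching build: with no per-CPU pagevec to drain, it activates immediately under the lruvec lock. The matched lines give the test, the lock and the call into __folio_activate(); the unlock is an assumed counterpart of the lock taken at line 372.

static void folio_activate(struct folio *folio)
{
	struct lruvec *lruvec;

	if (folio_test_clear_lru(folio)) {		/* matched line 371 */
		lruvec = folio_lruvec_lock_irq(folio);	/* matched line 372 */
		__folio_activate(folio, lruvec);	/* matched line 373 */
		unlock_page_lruvec_irq(lruvec);		/* assumed unlock */
		folio_set_lru(folio);			/* matched line 375 */
	}
}
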
380 static void __lru_cache_activate_folio(struct folio *folio) in __lru_cache_activate_folio() argument
401 if (pagevec_page == &folio->page) { in __lru_cache_activate_folio()
402 folio_set_active(folio); in __lru_cache_activate_folio()
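__lru_cache_activate_folio() (line 380) handles a folio that is still sitting in a pending lru_add pagevec and so has no LRU flag yet: it searches the pagevec for &folio->page (line 401) and, if found, just sets PG_active so the folio lands on the active list when the pagevec drains. In the sketch below only the comparison and folio_set_active() come from the match list; the per-CPU pagevec name, the locking and the backwards scan are assumptions.

static void __lru_cache_activate_folio(struct folio *folio)
{
	struct pagevec *pvec;
	int i;

	/* Assumed: the pending-add pagevec is per CPU, under a local lock. */
	local_lock(&lru_pvecs.lock);
	pvec = this_cpu_ptr(&lru_pvecs.lru_add);

	/* Assumed: scan newest-to-oldest, since the folio was added recently. */
	for (i = pagevec_count(pvec) - 1; i >= 0; i--) {
		struct page *pagevec_page = pvec->pages[i];

		if (pagevec_page == &folio->page) {	/* matched line 401 */
			folio_set_active(folio);	/* matched line 402 */
			break;
		}
	}

	local_unlock(&lru_pvecs.lock);
}
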
420 void folio_mark_accessed(struct folio *folio) in folio_mark_accessed() argument
422 if (!folio_test_referenced(folio)) { in folio_mark_accessed()
423 folio_set_referenced(folio); in folio_mark_accessed()
424 } else if (folio_test_unevictable(folio)) { in folio_mark_accessed()
430 } else if (!folio_test_active(folio)) { in folio_mark_accessed()
437 if (folio_test_lru(folio)) in folio_mark_accessed()
438 folio_activate(folio); in folio_mark_accessed()
440 __lru_cache_activate_folio(folio); in folio_mark_accessed()
441 folio_clear_referenced(folio); in folio_mark_accessed()
442 workingset_activation(folio); in folio_mark_accessed()
444 if (folio_test_idle(folio)) in folio_mark_accessed()
445 folio_clear_idle(folio); in folio_mark_accessed()
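folio_mark_accessed() (lines 420-445) is the two-step promotion ladder: a first access sets PG_referenced, a second access on an evictable folio activates it (directly if it is already on the LRU, otherwise by flagging it in the pending pagevec), and PG_idle is cleared either way. The sketch keeps the matched branches; the empty unevictable branch and its rationale are assumptions.

void folio_mark_accessed(struct folio *folio)
{
	if (!folio_test_referenced(folio)) {
		/* First access: just remember it. */
		folio_set_referenced(folio);		/* matched line 423 */
	} else if (folio_test_unevictable(folio)) {
		/* Assumed rationale: unevictable folios are never rotated,
		 * so activating them would have no effect. */
	} else if (!folio_test_active(folio)) {
		/* Repeated access: promote to the active list. */
		if (folio_test_lru(folio))		/* matched line 437 */
			folio_activate(folio);		/* matched line 438 */
		else
			__lru_cache_activate_folio(folio); /* matched line 440 */
		folio_clear_referenced(folio);		/* matched line 441 */
		workingset_activation(folio);		/* matched line 442 */
	}
	if (folio_test_idle(folio))			/* matched line 444 */
		folio_clear_idle(folio);		/* matched line 445 */
}
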
458 void folio_add_lru(struct folio *folio) in folio_add_lru() argument
462 VM_BUG_ON_FOLIO(folio_test_active(folio) && folio_test_unevictable(folio), folio); in folio_add_lru()
463 VM_BUG_ON_FOLIO(folio_test_lru(folio), folio); in folio_add_lru()
465 folio_get(folio); in folio_add_lru()
468 if (pagevec_add_and_need_flush(pvec, &folio->page)) in folio_add_lru()
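folio_add_lru() (line 458) is how a newly allocated folio reaches the LRU: the match list shows the sanity checks, the reference grab and the pagevec add. The per-CPU lru_add pagevec and its drain via __pagevec_lru_add() (which appears at lines 1068-1071 further down) are assumptions about the plumbing.

void folio_add_lru(struct folio *folio)
{
	struct pagevec *pvec;

	VM_BUG_ON_FOLIO(folio_test_active(folio) &&
			folio_test_unevictable(folio), folio);	/* matched line 462 */
	VM_BUG_ON_FOLIO(folio_test_lru(folio), folio);		/* matched line 463 */

	folio_get(folio);					/* matched line 465 */
	/* Assumed batching plumbing, mirroring the other paths above. */
	local_lock(&lru_pvecs.lock);
	pvec = this_cpu_ptr(&lru_pvecs.lru_add);
	if (pagevec_add_and_need_flush(pvec, &folio->page))	/* matched line 468 */
		__pagevec_lru_add(pvec);
	local_unlock(&lru_pvecs.lock);
}
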
912 struct folio *folio = page_folio(page); in release_pages() local
924 page = &folio->page; in release_pages()
963 lruvec = folio_lruvec_relock_irqsave(folio, lruvec, in release_pages()
1005 static void __pagevec_lru_add_fn(struct folio *folio, struct lruvec *lruvec) in __pagevec_lru_add_fn() argument
1007 int was_unevictable = folio_test_clear_unevictable(folio); in __pagevec_lru_add_fn()
1008 long nr_pages = folio_nr_pages(folio); in __pagevec_lru_add_fn()
1010 VM_BUG_ON_FOLIO(folio_test_lru(folio), folio); in __pagevec_lru_add_fn()
1040 folio_set_lru(folio); in __pagevec_lru_add_fn()
1043 if (folio_evictable(folio)) { in __pagevec_lru_add_fn()
1047 folio_clear_active(folio); in __pagevec_lru_add_fn()
1048 folio_set_unevictable(folio); in __pagevec_lru_add_fn()
1053 lruvec_add_folio(lruvec, folio); in __pagevec_lru_add_fn()
1054 trace_mm_lru_insertion(folio); in __pagevec_lru_add_fn()
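__pagevec_lru_add_fn() (lines 1005-1054) is the per-folio half of the drain: it sets PG_lru, routes the folio to the evictable or unevictable side, and inserts it into the lruvec. The matched lines give the skeleton; the UNEVICTABLE_PGRESCUED/PGCULLED accounting shown below is an assumption about what was_unevictable and nr_pages are used for, and the memory-barrier/comment block between lines 1010 and 1040 is omitted.

static void __pagevec_lru_add_fn(struct folio *folio, struct lruvec *lruvec)
{
	int was_unevictable = folio_test_clear_unevictable(folio); /* matched 1007 */
	long nr_pages = folio_nr_pages(folio);			    /* matched 1008 */

	VM_BUG_ON_FOLIO(folio_test_lru(folio), folio);		    /* matched 1010 */

	folio_set_lru(folio);					    /* matched 1040 */

	if (folio_evictable(folio)) {				    /* matched 1043 */
		/* Assumed accounting: folio escaped the unevictable list. */
		if (was_unevictable)
			__count_vm_events(UNEVICTABLE_PGRESCUED, nr_pages);
	} else {
		folio_clear_active(folio);			    /* matched 1047 */
		folio_set_unevictable(folio);			    /* matched 1048 */
		/* Assumed accounting: folio newly culled as unevictable. */
		if (!was_unevictable)
			__count_vm_events(UNEVICTABLE_PGCULLED, nr_pages);
	}

	lruvec_add_folio(lruvec, folio);			    /* matched 1053 */
	trace_mm_lru_insertion(folio);				    /* matched 1054 */
}
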
1068 struct folio *folio = page_folio(pvec->pages[i]); in __pagevec_lru_add() local
1070 lruvec = folio_lruvec_relock_irqsave(folio, lruvec, &flags); in __pagevec_lru_add()
1071 __pagevec_lru_add_fn(folio, lruvec); in __pagevec_lru_add()
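Finally, __pagevec_lru_add() (lines 1068-1071) drains the lru_add pagevec: for each queued page it re-derives the folio, relocks the right lruvec if needed, and hands off to __pagevec_lru_add_fn(). In the sketch below the loop bounds and the unlock/release epilogue are assumptions; the folio conversion and the relock/insert pair are the matched lines.

void __pagevec_lru_add(struct pagevec *pvec)
{
	int i;
	struct lruvec *lruvec = NULL;
	unsigned long flags = 0;

	for (i = 0; i < pagevec_count(pvec); i++) {
		struct folio *folio = page_folio(pvec->pages[i]);	/* matched 1068 */

		lruvec = folio_lruvec_relock_irqsave(folio, lruvec, &flags); /* matched 1070 */
		__pagevec_lru_add_fn(folio, lruvec);			/* matched 1071 */
	}
	/* Assumed epilogue: drop the last lruvec lock and the pagevec's references. */
	if (lruvec)
		unlock_page_lruvec_irqrestore(lruvec, flags);
	release_pages(pvec->pages, pvec->nr);
	pagevec_reinit(pvec);
}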