/linux/drivers/gpu/drm/i915/gvt/

sched_policy.c
      54  struct list_head lru_list;    member
     108  vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);    in gvt_balance_timeslice()
     113  vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);    in gvt_balance_timeslice()
     122  vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);    in gvt_balance_timeslice()
     186  vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);    in find_busy_vgpu()
     228  list_del_init(&vgpu_data->lru_list);    in tbs_sched_func()
     229  list_add_tail(&vgpu_data->lru_list,    in tbs_sched_func()
     321  INIT_LIST_HEAD(&data->lru_list);    in tbs_sched_init_vgpu()
     347  if (!list_empty(&vgpu_data->lru_list))    in tbs_sched_start_schedule()
     355  list_add(&vgpu_data->lru_list, &sched_data->lru_runq_head);    in tbs_sched_start_schedule()
        [all …]

gtt.h
     171  struct list_head lru_list;    member

gtt.c
    1905  INIT_LIST_HEAD(&mm->ppgtt_mm.lru_list);    in intel_vgpu_create_ppgtt_mm()
    1924  list_add_tail(&mm->ppgtt_mm.lru_list, &gvt->gtt.ppgtt_mm_lru_list_head);    in intel_vgpu_create_ppgtt_mm()
    1986  list_del(&mm->ppgtt_mm.lru_list);    in _intel_vgpu_mm_release()
    2033  list_move_tail(&mm->ppgtt_mm.lru_list,    in intel_vgpu_pin_mm()
    2049  mm = container_of(pos, struct intel_vgpu_mm, ppgtt_mm.lru_list);    in reclaim_one_ppgtt_mm()
    2054  list_del_init(&mm->ppgtt_mm.lru_list);    in reclaim_one_ppgtt_mm()
    2805  list_del_init(&mm->ppgtt_mm.lru_list);    in intel_vgpu_invalidate_ppgtt()
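
The gtt.c hits above trace the usual kernel LRU idiom for the shadow PPGTT MMs: the list node is initialised at creation, queued on a global LRU, moved to the tail when the MM is pinned (most recently used), and the reclaimer apparently takes victims from the head. The following is a minimal sketch of that pattern using the generic <linux/list.h> API; the struct name, lock, and `pinned` field are placeholders for illustration, not the actual i915/gvt definitions.

#include <linux/list.h>
#include <linux/spinlock.h>

/* Hypothetical stand-in for the real i915/gvt shadow MM structure. */
struct shadow_mm {
        struct list_head lru_list;      /* linked on mm_lru_head when reclaimable */
        bool pinned;
};

static LIST_HEAD(mm_lru_head);          /* head = least recently used */
static DEFINE_SPINLOCK(mm_lru_lock);

/* Creation: start at the MRU end of the global LRU. */
static void shadow_mm_init(struct shadow_mm *mm)
{
        INIT_LIST_HEAD(&mm->lru_list);
        spin_lock(&mm_lru_lock);
        list_add_tail(&mm->lru_list, &mm_lru_head);
        spin_unlock(&mm_lru_lock);
}

/* Use: mark most recently used by moving to the tail. */
static void shadow_mm_touch(struct shadow_mm *mm)
{
        spin_lock(&mm_lru_lock);
        list_move_tail(&mm->lru_list, &mm_lru_head);
        spin_unlock(&mm_lru_lock);
}

/* Reclaim: take the oldest unpinned entry off the list. */
static struct shadow_mm *shadow_mm_reclaim_one(void)
{
        struct shadow_mm *mm, *victim = NULL;

        spin_lock(&mm_lru_lock);
        list_for_each_entry(mm, &mm_lru_head, lru_list) {
                if (!mm->pinned) {
                        list_del_init(&mm->lru_list);
                        victim = mm;
                        break;
                }
        }
        spin_unlock(&mm_lru_lock);
        return victim;
}

Note the use of list_del_init() rather than plain list_del(): it leaves the unlinked node in a state where list_empty() on it is true, which is what allows checks like the one at sched_policy.c line 347 to tell whether an entry is still queued.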
/linux/include/linux/

mm_inline.h
      34  enum lru_list lru, enum zone_type zid,    in update_lru_size()
      77  static __always_inline enum lru_list folio_lru_list(struct folio *folio)    in folio_lru_list()
      79  enum lru_list lru;    in folio_lru_list()
      96  enum lru_list lru = folio_lru_list(folio);    in lruvec_add_folio()
     112  enum lru_list lru = folio_lru_list(folio);    in lruvec_add_folio_tail()

mmzone.h
     267  enum lru_list {    enum
     288  static inline bool is_file_lru(enum lru_list lru)    in is_file_lru()
     293  static inline bool is_active_lru(enum lru_list lru)    in is_active_lru()

memcontrol.h
     926  void mem_cgroup_update_lru_size(struct lruvec *lruvec, enum lru_list lru,
     931  enum lru_list lru, int zone_idx)    in mem_cgroup_get_zone_lru_size()
    1364  enum lru_list lru, int zone_idx)    in mem_cgroup_get_zone_lru_size()

vmstat.h
     520  static inline const char *lru_list_name(enum lru_list lru)    in lru_list_name()
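
mmzone.h is where `enum lru_list` itself is defined: one value per per-node LRU list (inactive/active x anon/file, plus unevictable), laid out so that simple arithmetic distinguishes the list types. The sketch below shows the rough shape of that layout together with helpers in the spirit of is_file_lru()/is_active_lru(); exact names and values should be checked against the kernel version at hand.

#include <linux/types.h>

/* Rough shape of the enum in include/linux/mmzone.h (may vary by version). */
#define LRU_BASE        0
#define LRU_ACTIVE      1
#define LRU_FILE        2

enum lru_list {
        LRU_INACTIVE_ANON = LRU_BASE,
        LRU_ACTIVE_ANON   = LRU_BASE + LRU_ACTIVE,
        LRU_INACTIVE_FILE = LRU_BASE + LRU_FILE,
        LRU_ACTIVE_FILE   = LRU_BASE + LRU_FILE + LRU_ACTIVE,
        LRU_UNEVICTABLE,
        NR_LRU_LISTS
};

/* Helpers in the spirit of is_file_lru()/is_active_lru(). */
static inline bool is_file_lru(enum lru_list lru)
{
        return lru == LRU_INACTIVE_FILE || lru == LRU_ACTIVE_FILE;
}

static inline bool is_active_lru(enum lru_list lru)
{
        return lru == LRU_ACTIVE_ANON || lru == LRU_ACTIVE_FILE;
}

Because each active list sits exactly LRU_ACTIVE above its inactive counterpart, code such as inactive_is_low() in mm/vmscan.c (line 2695 below) can derive the active list as `inactive_lru + LRU_ACTIVE`.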
/linux/fs/btrfs/

zstd.c
      50  struct list_head lru_list;    member
      78  struct list_head lru_list;    member
     110  if (list_empty(&wsm.lru_list)) {    in zstd_reclaim_timer_fn()
     117  lru_list);    in zstd_reclaim_timer_fn()
     128  list_del(&victim->lru_list);    in zstd_reclaim_timer_fn()
     137  if (!list_empty(&wsm.lru_list))    in zstd_reclaim_timer_fn()
     182  INIT_LIST_HEAD(&wsm.lru_list);    in zstd_init_workspace_manager()
     207  list_del(&workspace->lru_list);    in zstd_cleanup_workspace_manager()
     242  list_del(&workspace->lru_list);    in zstd_find_workspace()
     317  list_add(&workspace->lru_list, &wsm.lru_list);    in zstd_put_workspace()
        [all …]
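
These hits outline btrfs's zstd workspace cache: zstd_put_workspace() pushes a freed workspace onto the front of wsm.lru_list, and zstd_reclaim_timer_fn() periodically frees idle workspaces, re-arming itself only while the list is non-empty (line 137). Below is a minimal, hypothetical version of that timer-driven reclaim; the workspace type, idle threshold, and function names are placeholders rather than the btrfs code.

#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/timer.h>

/* Placeholder workspace; the btrfs one carries zstd buffers and a level. */
struct ws_sketch {
        struct list_head lru_list;
        unsigned long last_used;        /* jiffies when the workspace was put back */
};

static LIST_HEAD(ws_lru);               /* head = most recently returned */
static DEFINE_SPINLOCK(ws_lock);
static struct timer_list ws_reclaim_timer;
#define WS_IDLE_PERIOD  (30 * HZ)       /* hypothetical idle threshold */

/* Timer callback: free workspaces that have sat idle past the threshold. */
static void ws_reclaim_timer_fn(struct timer_list *timer)
{
        struct ws_sketch *ws, *next;

        spin_lock(&ws_lock);
        list_for_each_entry_safe_reverse(ws, next, &ws_lru, lru_list) {
                if (time_before(jiffies, ws->last_used + WS_IDLE_PERIOD))
                        break;          /* entries nearer the head are newer */
                list_del(&ws->lru_list);
                kfree(ws);
        }
        if (!list_empty(&ws_lru))
                mod_timer(timer, jiffies + WS_IDLE_PERIOD);
        spin_unlock(&ws_lock);
}

static void ws_reclaim_init(void)
{
        timer_setup(&ws_reclaim_timer, ws_reclaim_timer_fn, 0);
}

/* Returning a workspace: stamp it, put it at the head, keep the timer armed. */
static void put_workspace_sketch(struct ws_sketch *ws)
{
        spin_lock(&ws_lock);
        ws->last_used = jiffies;
        list_add(&ws->lru_list, &ws_lru);
        spin_unlock(&ws_lock);
        mod_timer(&ws_reclaim_timer, jiffies + WS_IDLE_PERIOD);
}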
/linux/fs/xfs/

xfs_mru_cache.c
     132  struct list_head *lru_list;    in _xfs_mru_cache_migrate() local
     145  lru_list = mru->lists + mru->lru_grp;    in _xfs_mru_cache_migrate()
     146  if (!list_empty(lru_list))    in _xfs_mru_cache_migrate()
     147  list_splice_init(lru_list, mru->reap_list.prev);    in _xfs_mru_cache_migrate()
     171  lru_list = mru->lists + ((mru->lru_grp + grp) % mru->grp_count);    in _xfs_mru_cache_migrate()
     172  if (!list_empty(lru_list))    in _xfs_mru_cache_migrate()
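
_xfs_mru_cache_migrate() shows a different twist: instead of ordering individual elements, the MRU cache keeps an array of list heads (`mru->lists`), one per time group, and expires a whole group at once by splicing it onto a reap list. A hedged sketch of that group-rotation idea follows; the structure layout, group count, and function names are illustrative, not the XFS ones.

#include <linux/list.h>

#define GRP_COUNT 4             /* hypothetical number of time groups */

struct mru_cache {
        struct list_head lists[GRP_COUNT];      /* one list head per time group */
        struct list_head reap_list;             /* expired elements awaiting teardown */
        unsigned int lru_grp;                   /* index of the oldest group */
};

static void mru_cache_init(struct mru_cache *mru)
{
        unsigned int i;

        for (i = 0; i < GRP_COUNT; i++)
                INIT_LIST_HEAD(&mru->lists[i]);
        INIT_LIST_HEAD(&mru->reap_list);
        mru->lru_grp = 0;
}

/*
 * Advance by one time period: the oldest group has expired as a whole, so
 * splice it onto the tail of the reap list and reuse its slot as the newest.
 */
static void mru_cache_migrate(struct mru_cache *mru)
{
        struct list_head *lru_list = mru->lists + mru->lru_grp;

        if (!list_empty(lru_list))
                list_splice_init(lru_list, mru->reap_list.prev);

        mru->lru_grp = (mru->lru_grp + 1) % GRP_COUNT;
}

/* New or freshly touched elements always join the newest group. */
static void mru_cache_insert(struct mru_cache *mru, struct list_head *elem)
{
        unsigned int newest = (mru->lru_grp + GRP_COUNT - 1) % GRP_COUNT;

        list_add_tail(elem, mru->lists + newest);
}

Expiring whole groups this way trades exact LRU ordering for cheap expiry: no per-element timestamps need to be compared when a period elapses.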
/linux/net/netfilter/

xt_recent.c
      68  struct list_head lru_list;    member
      84  struct list_head lru_list;    member
     147  list_del(&e->lru_list);    in recent_entry_remove()
     163  e = list_entry(t->lru_list.next, struct recent_entry, lru_list);    in recent_entry_reap()
     186  e = list_entry(t->lru_list.next, struct recent_entry, lru_list);    in recent_entry_init()
     204  list_add_tail(&e->lru_list, &t->lru_list);    in recent_entry_init()
     215  list_move_tail(&e->lru_list, &t->lru_list);    in recent_entry_update()
     404  INIT_LIST_HEAD(&t->lru_list);    in recent_mt_check()
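
xt_recent keeps a bounded table of recently seen addresses: recent_entry_init() takes the oldest entry from the head of t->lru_list (line 186), evidently to make room, and adds the new entry at the tail (line 204), while recent_entry_update() refreshes an entry by moving it back to the tail. A small sketch of that bounded-LRU-table pattern follows; the struct layouts are trimmed-down placeholders, not the real xt_recent definitions.

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>

struct recent_entry {           /* placeholder: real entry holds address, stamps, ttl */
        struct list_head lru_list;
        __be32 addr;
};

struct recent_table {           /* placeholder: real table also has a hash index */
        struct list_head lru_list;      /* head = oldest, tail = newest */
        unsigned int entries;
        unsigned int max_entries;
};

/* Insert a new address, evicting the oldest entry when the table is full. */
static struct recent_entry *recent_entry_add(struct recent_table *t, __be32 addr)
{
        struct recent_entry *e;

        if (t->entries >= t->max_entries) {
                /* Reap the least recently seen entry. */
                e = list_entry(t->lru_list.next, struct recent_entry, lru_list);
                list_del(&e->lru_list);
                kfree(e);
                t->entries--;
        }

        e = kmalloc(sizeof(*e), GFP_ATOMIC);
        if (!e)
                return NULL;
        e->addr = addr;
        list_add_tail(&e->lru_list, &t->lru_list);
        t->entries++;
        return e;
}

/* A hit on an existing entry makes it the most recently seen again. */
static void recent_entry_touch(struct recent_table *t, struct recent_entry *e)
{
        list_move_tail(&e->lru_list, &t->lru_list);
}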
/linux/drivers/md/

dm-bufio.c
     138  struct list_head lru_list;    member
     510  list_add(&b->lru_list, &c->lru[dirty]);    in __link_buffer()
     528  list_del(&b->lru_list);    in __unlink_buffer()
     547  list_move(&b->lru_list, &c->lru[dirty]);    in __relink_lru()
     902  struct dm_buffer, lru_list);    in __alloc_buffer_wait_no_callback()
     903  list_del(&b->lru_list);    in __alloc_buffer_wait_no_callback()
     940  list_add(&b->lru_list, &c->reserved_buffers);    in __free_buffer_wake()
    1825  struct dm_buffer, lru_list);    in dm_bufio_client_create()
    1826  list_del(&b->lru_list);    in dm_bufio_client_create()
    1866  struct dm_buffer, lru_list);    in dm_bufio_client_destroy()
        [all …]
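
dm-bufio's hits show a small variation: each client keeps an array of LRU lists indexed by buffer state (`c->lru[dirty]`), and __relink_lru() migrates a buffer between the clean and dirty lists when its state changes, alongside a separate reserved_buffers list. The sketch below captures just the clean/dirty relink part; the two-state index, names, and layout are assumptions for illustration.

#include <linux/list.h>

enum buf_state { BUF_CLEAN = 0, BUF_DIRTY = 1, BUF_STATES };

struct bufio_client_sketch {
        struct list_head lru[BUF_STATES];       /* one LRU per buffer state */
};

struct buffer_sketch {
        struct list_head lru_list;
        enum buf_state state;
};

/* Link a buffer onto the LRU matching its current state. */
static void link_buffer(struct bufio_client_sketch *c, struct buffer_sketch *b,
                        enum buf_state state)
{
        b->state = state;
        list_add(&b->lru_list, &c->lru[state]);
}

/* State change (buffer dirtied or written back): move it between the LRUs. */
static void relink_lru(struct bufio_client_sketch *c, struct buffer_sketch *b,
                       enum buf_state state)
{
        b->state = state;
        list_move(&b->lru_list, &c->lru[state]);
}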
/linux/drivers/gpu/drm/vmwgfx/

vmwgfx_resource.c
     706  struct list_head *lru_list = &dev_priv->res_lru[res->func->res_type];    in vmw_resource_validate() local
     723  if (list_empty(lru_list) || !res->func->may_evict) {    in vmw_resource_validate()
     732  (list_first_entry(lru_list, struct vmw_resource,    in vmw_resource_validate()
     742  list_add_tail(&evict_res->lru_head, lru_list);    in vmw_resource_validate()
     919  struct list_head *lru_list = &dev_priv->res_lru[type];    in vmw_resource_evict_type() local
     928  if (list_empty(lru_list))    in vmw_resource_evict_type()
     932  list_first_entry(lru_list, struct vmw_resource,    in vmw_resource_evict_type()
     941  list_add_tail(&evict_res->lru_head, lru_list);    in vmw_resource_evict_type()
/linux/drivers/infiniband/hw/hfi1/

mmu_rb.c
      63  INIT_LIST_HEAD(&h->lru_list);    in hfi1_mmu_rb_register()
     127  list_add(&mnode->list, &handler->lru_list);    in hfi1_mmu_rb_insert()
     203  list_for_each_entry_safe_reverse(rbnode, ptr, &handler->lru_list,    in hfi1_mmu_rb_evict()

mmu_rb.h
      41  struct list_head lru_list;    member
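
The hfi1 MMU notifier cache inserts new nodes at the head of handler->lru_list and, when asked to shrink, walks from the tail with list_for_each_entry_safe_reverse(), so the oldest nodes are considered first and can be unlinked while iterating. A hedged sketch of that eviction walk follows; the node type, the `in_use` test, and the caller-supplied target are placeholders.

#include <linux/list.h>

struct mmu_rb_node_sketch {
        struct list_head list;
        bool in_use;            /* nodes currently mapped cannot be evicted */
};

/*
 * Evict up to @target idle nodes, oldest first.  The _safe_reverse iterator
 * starts at the tail (least recently inserted) and tolerates list_del() on
 * the node currently being visited.
 */
static unsigned int mmu_rb_evict_sketch(struct list_head *lru_list,
                                        unsigned int target,
                                        struct list_head *evict_out)
{
        struct mmu_rb_node_sketch *node, *tmp;
        unsigned int evicted = 0;

        list_for_each_entry_safe_reverse(node, tmp, lru_list, list) {
                if (evicted >= target)
                        break;
                if (node->in_use)
                        continue;
                list_del(&node->list);
                list_add(&node->list, evict_out);       /* hand back to the caller */
                evicted++;
        }
        return evicted;
}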
/linux/kernel/bpf/

bpf_lru_list.c
     321  struct bpf_lru_list *l = &lru->common_lru.lru_list;    in bpf_lru_list_pop_free_to_local()
     533  bpf_lru_list_push_free(&lru->common_lru.lru_list, node);    in bpf_common_lru_push_free()
     563  struct bpf_lru_list *l = &lru->common_lru.lru_list;    in bpf_common_lru_populate()
     677  bpf_lru_list_init(&clru->lru_list);    in bpf_lru_init()

bpf_lru_list.h
      46  struct bpf_lru_list lru_list;    member
/linux/mm/

mmzone.c
      77  enum lru_list lru;    in lruvec_init()

vmscan.c
     591  static unsigned long lruvec_lru_size(struct lruvec *lruvec, enum lru_list lru,    in lruvec_lru_size()
    2082  enum lru_list lru, unsigned long *nr_zone_taken)    in update_lru_sizes()
    2119  enum lru_list lru)    in isolate_lru_pages()
    2391  struct scan_control *sc, enum lru_list lru)    in shrink_inactive_list()
    2500  enum lru_list lru)    in shrink_active_list()
    2651  static unsigned long shrink_list(enum lru_list lru, unsigned long nr_to_scan,    in shrink_list()
    2693  static bool inactive_is_low(struct lruvec *lruvec, enum lru_list inactive_lru)    in inactive_is_low()
    2695  enum lru_list active_lru = inactive_lru + LRU_ACTIVE;    in inactive_is_low()
    2739  enum lru_list lru;    in get_scan_count()
    2945  enum lru_list lru;    in shrink_lruvec()

memcontrol.c
    1257  void mem_cgroup_update_lru_size(struct lruvec *lruvec, enum lru_list lru,    in mem_cgroup_update_lru_size()
    3841  enum lru_list lru;    in mem_cgroup_node_nr_lru_pages()
    3861  enum lru_list lru;    in mem_cgroup_nr_lru_pages()
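
On the mm/ side the enum is mostly used as an index: lruvec_init() in mmzone.c declares an `enum lru_list` iterator, presumably to set up one list head per value, and the vmscan paths above loop over the evictable lists, pairing each inactive list with its active counterpart via the `+ LRU_ACTIVE` offset noted earlier. The minimal sketch below shows that indexing and iteration; the lruvec layout and helper names are assumptions for illustration, restating the enum layout from the earlier sketch.

#include <linux/list.h>
#include <linux/types.h>

/* Compact restatement of the enum layout sketched earlier. */
#define LRU_ACTIVE      1
enum lru_list { LRU_INACTIVE_ANON, LRU_ACTIVE_ANON, LRU_INACTIVE_FILE,
                LRU_ACTIVE_FILE, LRU_UNEVICTABLE, NR_LRU_LISTS };

struct lruvec_sketch {
        struct list_head lists[NR_LRU_LISTS];   /* one LRU per enum value */
};

/* lruvec_init()-style setup: every list starts empty (cf. for_each_lru()). */
static void lruvec_sketch_init(struct lruvec_sketch *lruvec)
{
        enum lru_list lru;

        for (lru = 0; lru < NR_LRU_LISTS; lru++)
                INIT_LIST_HEAD(&lruvec->lists[lru]);
}

/* vmscan-style walk over the evictable lists (cf. for_each_evictable_lru()). */
static unsigned int count_nonempty_evictable(struct lruvec_sketch *lruvec)
{
        enum lru_list lru;
        unsigned int nr = 0;

        for (lru = 0; lru <= LRU_ACTIVE_FILE; lru++)
                if (!list_empty(&lruvec->lists[lru]))
                        nr++;
        return nr;
}

/* inactive_is_low()-style pairing: the active list sits LRU_ACTIVE above. */
static bool active_partner_empty(struct lruvec_sketch *lruvec,
                                 enum lru_list inactive_lru)
{
        enum lru_list active_lru = inactive_lru + LRU_ACTIVE;

        return list_empty(&lruvec->lists[active_lru]);
}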
/linux/include/trace/events/

pagemap.h
      37  __field(enum lru_list, lru )
/linux/fs/gfs2/

glock.c
      66  static LIST_HEAD(lru_list);
     222  list_move_tail(&gl->gl_lru, &lru_list);    in gfs2_glock_add_to_lru()
    2010  list_add(&gl->gl_lru, &lru_list);    in gfs2_dispose_glock_lru()
    2046  while ((nr-- >= 0) && !list_empty(&lru_list)) {    in gfs2_scan_glock_lru()
    2047  gl = list_first_entry(&lru_list, struct gfs2_glock, gl_lru);    in gfs2_scan_glock_lru()
    2059  list_splice(&skipped, &lru_list);    in gfs2_scan_glock_lru()
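
gfs2 keeps a single file-scope `LIST_HEAD(lru_list)` of cached glocks. The scan in gfs2_scan_glock_lru() takes candidates from the head, parks the ones it cannot dispose of on a local `skipped` list, and splices that list back when done so the busy entries keep their place near the front. Below is a hedged sketch of that skip-and-splice-back scan; the glock type and the `can_demote()` test are placeholders.

#include <linux/list.h>

struct glock_sketch {
        struct list_head gl_lru;
        bool held;              /* stand-in for "still in use, cannot be demoted" */
};

static LIST_HEAD(lru_list);     /* head = oldest cached glock */

static bool can_demote(struct glock_sketch *gl)
{
        return !gl->held;       /* placeholder test */
}

/* Try to dispose of up to @nr glocks, keeping busy ones in LRU order. */
static long scan_glock_lru(long nr, struct list_head *dispose)
{
        struct glock_sketch *gl;
        LIST_HEAD(skipped);
        long freed = 0;

        while (nr-- > 0 && !list_empty(&lru_list)) {
                gl = list_first_entry(&lru_list, struct glock_sketch, gl_lru);

                if (!can_demote(gl)) {
                        /* Busy: set it aside and look at the next candidate. */
                        list_move(&gl->gl_lru, &skipped);
                        continue;
                }
                list_move(&gl->gl_lru, dispose);
                freed++;
        }
        /* Put the skipped glocks back so they stay near the front of the LRU. */
        list_splice(&skipped, &lru_list);
        return freed;
}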
/linux/Documentation/vm/

unevictable-lru.rst
      96  lru_list enum.
     100  lru_list enum element). The memory controller tracks the movement of pages to