Cross-reference listing — lines matching references to `lru` (generated index into the kernel's list_lru implementation; each entry shows its original line number and enclosing function)

22 static inline bool list_lru_memcg_aware(struct list_lru *lru)  in list_lru_memcg_aware()  argument
24 return lru->memcg_aware; in list_lru_memcg_aware()
27 static void list_lru_register(struct list_lru *lru) in list_lru_register() argument
29 if (!list_lru_memcg_aware(lru)) in list_lru_register()
33 list_add(&lru->list, &memcg_list_lrus); in list_lru_register()
37 static void list_lru_unregister(struct list_lru *lru) in list_lru_unregister() argument
39 if (!list_lru_memcg_aware(lru)) in list_lru_unregister()
43 list_del(&lru->list); in list_lru_unregister()
47 static int lru_shrinker_id(struct list_lru *lru) in lru_shrinker_id() argument
49 return lru->shrinker_id; in lru_shrinker_id()
53 list_lru_from_memcg_idx(struct list_lru *lru, int nid, int idx) in list_lru_from_memcg_idx() argument
55 if (list_lru_memcg_aware(lru) && idx >= 0) { in list_lru_from_memcg_idx()
56 struct list_lru_memcg *mlru = xa_load(&lru->xa, idx); in list_lru_from_memcg_idx()
60 return &lru->node[nid].lru; in list_lru_from_memcg_idx()
64 lock_list_lru_of_memcg(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in lock_list_lru_of_memcg() argument
72 l = list_lru_from_memcg_idx(lru, nid, memcg_kmem_id(memcg)); in lock_list_lru_of_memcg()
109 static void list_lru_register(struct list_lru *lru) in list_lru_register() argument
113 static void list_lru_unregister(struct list_lru *lru) in list_lru_unregister() argument
117 static int lru_shrinker_id(struct list_lru *lru) in lru_shrinker_id() argument
122 static inline bool list_lru_memcg_aware(struct list_lru *lru) in list_lru_memcg_aware() argument
128 list_lru_from_memcg_idx(struct list_lru *lru, int nid, int idx) in list_lru_from_memcg_idx() argument
130 return &lru->node[nid].lru; in list_lru_from_memcg_idx()
134 lock_list_lru_of_memcg(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in lock_list_lru_of_memcg() argument
137 struct list_lru_one *l = &lru->node[nid].lru; in lock_list_lru_of_memcg()
157 bool list_lru_add(struct list_lru *lru, struct list_head *item, int nid, in list_lru_add() argument
160 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_add()
163 l = lock_list_lru_of_memcg(lru, nid, memcg, false, false); in list_lru_add()
170 set_shrinker_bit(memcg, nid, lru_shrinker_id(lru)); in list_lru_add()
179 bool list_lru_add_obj(struct list_lru *lru, struct list_head *item) in list_lru_add_obj() argument
184 if (list_lru_memcg_aware(lru)) { in list_lru_add_obj()
186 ret = list_lru_add(lru, item, nid, mem_cgroup_from_slab_obj(item)); in list_lru_add_obj()
189 ret = list_lru_add(lru, item, nid, NULL); in list_lru_add_obj()
197 bool list_lru_del(struct list_lru *lru, struct list_head *item, int nid, in list_lru_del() argument
200 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_del()
202 l = lock_list_lru_of_memcg(lru, nid, memcg, false, false); in list_lru_del()
216 bool list_lru_del_obj(struct list_lru *lru, struct list_head *item) in list_lru_del_obj() argument
221 if (list_lru_memcg_aware(lru)) { in list_lru_del_obj()
223 ret = list_lru_del(lru, item, nid, mem_cgroup_from_slab_obj(item)); in list_lru_del_obj()
226 ret = list_lru_del(lru, item, nid, NULL); in list_lru_del_obj()
248 unsigned long list_lru_count_one(struct list_lru *lru, in list_lru_count_one() argument
255 l = list_lru_from_memcg_idx(lru, nid, memcg_kmem_id(memcg)); in list_lru_count_one()
266 unsigned long list_lru_count_node(struct list_lru *lru, int nid) in list_lru_count_node() argument
270 nlru = &lru->node[nid]; in list_lru_count_node()
276 __list_lru_walk_one(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in __list_lru_walk_one() argument
280 struct list_lru_node *nlru = &lru->node[nid]; in __list_lru_walk_one()
286 l = lock_list_lru_of_memcg(lru, nid, memcg, irq_off, true); in __list_lru_walk_one()
333 list_lru_walk_one(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in list_lru_walk_one() argument
337 return __list_lru_walk_one(lru, nid, memcg, isolate, in list_lru_walk_one()
343 list_lru_walk_one_irq(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in list_lru_walk_one_irq() argument
347 return __list_lru_walk_one(lru, nid, memcg, isolate, in list_lru_walk_one_irq()
351 unsigned long list_lru_walk_node(struct list_lru *lru, int nid, in list_lru_walk_node() argument
357 isolated += list_lru_walk_one(lru, nid, NULL, isolate, cb_arg, in list_lru_walk_node()
361 if (*nr_to_walk > 0 && list_lru_memcg_aware(lru)) { in list_lru_walk_node()
366 xa_for_each(&lru->xa, index, mlru) { in list_lru_walk_node()
374 isolated += __list_lru_walk_one(lru, nid, memcg, in list_lru_walk_node()
389 static void init_one_lru(struct list_lru *lru, struct list_lru_one *l) in init_one_lru() argument
395 if (lru->key) in init_one_lru()
396 lockdep_set_class(&l->lock, lru->key); in init_one_lru()
401 static struct list_lru_memcg *memcg_init_list_lru_one(struct list_lru *lru, gfp_t gfp) in memcg_init_list_lru_one() argument
411 init_one_lru(lru, &mlru->node[nid]); in memcg_init_list_lru_one()
416 static inline void memcg_init_list_lru(struct list_lru *lru, bool memcg_aware) in memcg_init_list_lru() argument
419 xa_init_flags(&lru->xa, XA_FLAGS_LOCK_IRQ); in memcg_init_list_lru()
420 lru->memcg_aware = memcg_aware; in memcg_init_list_lru()
423 static void memcg_destroy_list_lru(struct list_lru *lru) in memcg_destroy_list_lru() argument
425 XA_STATE(xas, &lru->xa, 0); in memcg_destroy_list_lru()
428 if (!list_lru_memcg_aware(lru)) in memcg_destroy_list_lru()
439 static void memcg_reparent_list_lru_one(struct list_lru *lru, int nid, in memcg_reparent_list_lru_one() argument
447 dst = list_lru_from_memcg_idx(lru, nid, dst_idx); in memcg_reparent_list_lru_one()
454 set_shrinker_bit(dst_memcg, nid, lru_shrinker_id(lru)); in memcg_reparent_list_lru_one()
465 struct list_lru *lru; in memcg_reparent_list_lrus() local
469 list_for_each_entry(lru, &memcg_list_lrus, list) { in memcg_reparent_list_lrus()
471 XA_STATE(xas, &lru->xa, memcg->kmemcg_id); in memcg_reparent_list_lrus()
489 memcg_reparent_list_lru_one(lru, i, &mlru->node[i], parent); in memcg_reparent_list_lrus()
502 struct list_lru *lru) in memcg_list_lru_allocated() argument
506 return idx < 0 || xa_load(&lru->xa, idx); in memcg_list_lru_allocated()
509 int memcg_list_lru_alloc(struct mem_cgroup *memcg, struct list_lru *lru, in memcg_list_lru_alloc() argument
515 XA_STATE(xas, &lru->xa, 0); in memcg_list_lru_alloc()
517 if (!list_lru_memcg_aware(lru) || memcg_list_lru_allocated(memcg, lru)) in memcg_list_lru_alloc()
533 while (!memcg_list_lru_allocated(parent, lru)) { in memcg_list_lru_alloc()
538 mlru = memcg_init_list_lru_one(lru, gfp); in memcg_list_lru_alloc()
558 static inline void memcg_init_list_lru(struct list_lru *lru, bool memcg_aware) in memcg_init_list_lru() argument
562 static void memcg_destroy_list_lru(struct list_lru *lru) in memcg_destroy_list_lru() argument
567 int __list_lru_init(struct list_lru *lru, bool memcg_aware, struct shrinker *shrinker) in __list_lru_init() argument
573 lru->shrinker_id = shrinker->id; in __list_lru_init()
575 lru->shrinker_id = -1; in __list_lru_init()
581 lru->node = kcalloc(nr_node_ids, sizeof(*lru->node), GFP_KERNEL); in __list_lru_init()
582 if (!lru->node) in __list_lru_init()
586 init_one_lru(lru, &lru->node[i].lru); in __list_lru_init()
588 memcg_init_list_lru(lru, memcg_aware); in __list_lru_init()
589 list_lru_register(lru); in __list_lru_init()
595 void list_lru_destroy(struct list_lru *lru) in list_lru_destroy() argument
598 if (!lru->node) in list_lru_destroy()
601 list_lru_unregister(lru); in list_lru_destroy()
603 memcg_destroy_list_lru(lru); in list_lru_destroy()
604 kfree(lru->node); in list_lru_destroy()
605 lru->node = NULL; in list_lru_destroy()
608 lru->shrinker_id = -1; in list_lru_destroy()