mm/list_lru.c: move locking from __list_lru_walk_one() to its caller
Move the locking from inside __list_lru_walk_one() to its caller. This is
a preparation step in order to introduce list_lru_walk_one_irq(), which
does spin_lock_irq() instead of spin_lock() for the locking.

Link: http://lkml.kernel.org/r/20180716111921.5365-3-bigeasy@linutronix.de
Signed-off-by: Sebastian Andrzej Siewior <bigeasy@linutronix.de>
Reviewed-by: Vladimir Davydov <vdavydov.dev@gmail.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Parent: 87a5ffc163
Commit: 6cfe57a96b
--- a/mm/list_lru.c
+++ b/mm/list_lru.c
@@ -219,7 +219,6 @@ __list_lru_walk_one(struct list_lru *lru, int nid, int memcg_idx,
         struct list_head *item, *n;
         unsigned long isolated = 0;
 
-        spin_lock(&nlru->lock);
         l = list_lru_from_memcg_idx(nlru, memcg_idx);
 restart:
         list_for_each_safe(item, n, &l->list) {
@@ -265,8 +264,6 @@ restart:
                         BUG();
                 }
         }
-
-        spin_unlock(&nlru->lock);
         return isolated;
 }
 
@@ -275,8 +272,14 @@ list_lru_walk_one(struct list_lru *lru, int nid, struct mem_cgroup *memcg,
                   list_lru_walk_cb isolate, void *cb_arg,
                   unsigned long *nr_to_walk)
 {
-        return __list_lru_walk_one(lru, nid, memcg_cache_id(memcg),
-                                   isolate, cb_arg, nr_to_walk);
+        struct list_lru_node *nlru = &lru->node[nid];
+        unsigned long ret;
+
+        spin_lock(&nlru->lock);
+        ret = __list_lru_walk_one(lru, nid, memcg_cache_id(memcg),
+                                  isolate, cb_arg, nr_to_walk);
+        spin_unlock(&nlru->lock);
+        return ret;
 }
 EXPORT_SYMBOL_GPL(list_lru_walk_one);
 
@@ -291,8 +294,13 @@ unsigned long list_lru_walk_node(struct list_lru *lru, int nid,
                                               nr_to_walk);
         if (*nr_to_walk > 0 && list_lru_memcg_aware(lru)) {
                 for_each_memcg_cache_index(memcg_idx) {
+                        struct list_lru_node *nlru = &lru->node[nid];
+
+                        spin_lock(&nlru->lock);
                         isolated += __list_lru_walk_one(lru, nid, memcg_idx,
                                                         isolate, cb_arg, nr_to_walk);
+                        spin_unlock(&nlru->lock);
+
                         if (*nr_to_walk <= 0)
                                 break;
                 }
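With the lock now taken by the callers, the list_lru_walk_one_irq() helper announced in the commit message only has to wrap the unlocked walker with spin_lock_irq()/spin_unlock_irq(). Below is a minimal sketch of that helper, assuming it mirrors the signature of list_lru_walk_one(); the actual follow-up patch may differ in detail:

/*
 * Sketch only: mirrors list_lru_walk_one() above but disables interrupts
 * around the walk. The name comes from the commit message; the exact
 * signature and placement are assumptions until the follow-up patch lands.
 */
unsigned long
list_lru_walk_one_irq(struct list_lru *lru, int nid, struct mem_cgroup *memcg,
                      list_lru_walk_cb isolate, void *cb_arg,
                      unsigned long *nr_to_walk)
{
        struct list_lru_node *nlru = &lru->node[nid];
        unsigned long ret;

        spin_lock_irq(&nlru->lock);
        ret = __list_lru_walk_one(lru, nid, memcg_cache_id(memcg),
                                  isolate, cb_arg, nr_to_walk);
        spin_unlock_irq(&nlru->lock);
        return ret;
}

The point of this commit is exactly to make such a variant possible: __list_lru_walk_one() stays agnostic to how its caller acquires nlru->lock.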