Commit 57d437d2 authored by Christoph Lameter, committed by Pekka Enberg

slub: pass page to node_match() instead of kmem_cache_cpu structure

Avoid passing the kmem_cache_cpu pointer to node_match. This makes the
node_match function more generic and easier to understand.
Acked-by: David Rientjes <rientjes@google.com>
Signed-off-by: Christoph Lameter <cl@linux.com>
Signed-off-by: Pekka Enberg <penberg@kernel.org>
parent f6e7def7
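
The diff below is the change itself. As a rough illustration of the calling pattern it introduces, here is a minimal userspace sketch (the struct definitions and the main() driver are simplified stand-ins invented for this example, not kernel code): node_match() receives only the page it inspects, and the caller snapshots c->page into a local before the check.

#include <stdio.h>

/* Simplified stand-ins for the kernel's struct page and kmem_cache_cpu. */
struct page { int nid; };                      /* NUMA node the page sits on */
struct kmem_cache_cpu { struct page *page; };  /* per-cpu slab state */

#define NUMA_NO_NODE (-1)

/*
 * After the patch: the helper takes the page directly, so it no longer
 * needs to know anything about kmem_cache_cpu.
 */
static inline int node_match(struct page *page, int node)
{
	if (node != NUMA_NO_NODE && page->nid != node)
		return 0;
	return 1;
}

int main(void)
{
	struct page p = { .nid = 1 };
	struct kmem_cache_cpu c = { .page = &p };

	/* Caller pattern from the fast path: read c->page once into a
	 * local and hand that snapshot to node_match(). */
	struct page *page = c.page;

	printf("any node: %d\n", node_match(page, NUMA_NO_NODE)); /* prints 1 */
	printf("node 1:   %d\n", node_match(page, 1));            /* prints 1 */
	printf("node 0:   %d\n", node_match(page, 0));            /* prints 0 */
	return 0;
}

The point of the narrower signature is that any caller holding a page pointer can perform the check, whether or not a kmem_cache_cpu is in scope.
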
@@ -2050,10 +2050,10 @@ static void flush_all(struct kmem_cache *s)
  * Check if the objects in a per cpu structure fit numa
  * locality expectations.
  */
-static inline int node_match(struct kmem_cache_cpu *c, int node)
+static inline int node_match(struct page *page, int node)
 {
 #ifdef CONFIG_NUMA
-	if (node != NUMA_NO_NODE && page_to_nid(c->page) != node)
+	if (node != NUMA_NO_NODE && page_to_nid(page) != node)
 		return 0;
 #endif
 	return 1;
@@ -2226,7 +2226,7 @@ static void *__slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node,
 		goto new_slab;
 redo:
-	if (unlikely(!node_match(c, node))) {
+	if (unlikely(!node_match(page, node))) {
 		stat(s, ALLOC_NODE_MISMATCH);
 		deactivate_slab(s, page, c->freelist);
 		c->page = NULL;
@@ -2313,6 +2313,7 @@ static __always_inline void *slab_alloc(struct kmem_cache *s,
 {
 	void **object;
 	struct kmem_cache_cpu *c;
+	struct page *page;
 	unsigned long tid;
 
 	if (slab_pre_alloc_hook(s, gfpflags))
@@ -2338,7 +2339,8 @@ static __always_inline void *slab_alloc(struct kmem_cache *s,
 	barrier();
 
 	object = c->freelist;
-	if (unlikely(!object || !node_match(c, node)))
+	page = c->page;
+	if (unlikely(!object || !node_match(page, node)))
 		object = __slab_alloc(s, gfpflags, node, addr, c);