Commit b194d16c authored by Matthew Wilcox, committed by Linus Torvalds

radix-tree: rename radix_tree_is_indirect_ptr()

As with indirect_to_ptr(), ptr_to_indirect() and
RADIX_TREE_INDIRECT_PTR, change radix_tree_is_indirect_ptr() to
radix_tree_is_internal_node().
Signed-off-by: Matthew Wilcox <willy@linux.intel.com>
Cc: Konstantin Khlebnikov <koct9i@gmail.com>
Cc: Kirill Shutemov <kirill.shutemov@linux.intel.com>
Cc: Jan Kara <jack@suse.com>
Cc: Neil Brown <neilb@suse.de>
Cc: Ross Zwisler <ross.zwisler@linux.intel.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 4dd6c098
...@@ -57,7 +57,7 @@ ...@@ -57,7 +57,7 @@
#define RADIX_DAX_ENTRY(sector, pmd) ((void *)((unsigned long)sector << \ #define RADIX_DAX_ENTRY(sector, pmd) ((void *)((unsigned long)sector << \
RADIX_DAX_SHIFT | (pmd ? RADIX_DAX_PMD : RADIX_DAX_PTE))) RADIX_DAX_SHIFT | (pmd ? RADIX_DAX_PMD : RADIX_DAX_PTE)))
static inline int radix_tree_is_indirect_ptr(void *ptr) static inline int radix_tree_is_internal_node(void *ptr)
{ {
return (int)((unsigned long)ptr & RADIX_TREE_INTERNAL_NODE); return (int)((unsigned long)ptr & RADIX_TREE_INTERNAL_NODE);
} }
...@@ -224,7 +224,7 @@ static inline void *radix_tree_deref_slot_protected(void **pslot, ...@@ -224,7 +224,7 @@ static inline void *radix_tree_deref_slot_protected(void **pslot,
*/ */
static inline int radix_tree_deref_retry(void *arg) static inline int radix_tree_deref_retry(void *arg)
{ {
return unlikely(radix_tree_is_indirect_ptr(arg)); return unlikely(radix_tree_is_internal_node(arg));
} }
/** /**
...@@ -259,7 +259,7 @@ static inline int radix_tree_exception(void *arg) ...@@ -259,7 +259,7 @@ static inline int radix_tree_exception(void *arg)
*/ */
static inline void radix_tree_replace_slot(void **pslot, void *item) static inline void radix_tree_replace_slot(void **pslot, void *item)
{ {
BUG_ON(radix_tree_is_indirect_ptr(item)); BUG_ON(radix_tree_is_internal_node(item));
rcu_assign_pointer(*pslot, item); rcu_assign_pointer(*pslot, item);
} }
...@@ -468,7 +468,7 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags) ...@@ -468,7 +468,7 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
if (unlikely(!iter->tags)) if (unlikely(!iter->tags))
return NULL; return NULL;
while (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) && while (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
radix_tree_is_indirect_ptr(slot[1])) { radix_tree_is_internal_node(slot[1])) {
if (entry_to_node(slot[1]) == canon) { if (entry_to_node(slot[1]) == canon) {
iter->tags >>= 1; iter->tags >>= 1;
iter->index = __radix_tree_iter_add(iter, 1); iter->index = __radix_tree_iter_add(iter, 1);
...@@ -498,7 +498,7 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags) ...@@ -498,7 +498,7 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
iter->index = __radix_tree_iter_add(iter, 1); iter->index = __radix_tree_iter_add(iter, 1);
if (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) && if (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
radix_tree_is_indirect_ptr(*slot)) { radix_tree_is_internal_node(*slot)) {
if (entry_to_node(*slot) == canon) if (entry_to_node(*slot) == canon)
continue; continue;
iter->next_index = iter->index; iter->next_index = iter->index;
......
...@@ -100,7 +100,7 @@ static unsigned radix_tree_descend(struct radix_tree_node *parent, ...@@ -100,7 +100,7 @@ static unsigned radix_tree_descend(struct radix_tree_node *parent,
void **entry = rcu_dereference_raw(parent->slots[offset]); void **entry = rcu_dereference_raw(parent->slots[offset]);
#ifdef CONFIG_RADIX_TREE_MULTIORDER #ifdef CONFIG_RADIX_TREE_MULTIORDER
if (radix_tree_is_indirect_ptr(entry)) { if (radix_tree_is_internal_node(entry)) {
unsigned long siboff = get_slot_offset(parent, entry); unsigned long siboff = get_slot_offset(parent, entry);
if (siboff < RADIX_TREE_MAP_SIZE) { if (siboff < RADIX_TREE_MAP_SIZE) {
offset = siboff; offset = siboff;
...@@ -232,7 +232,7 @@ static void dump_node(struct radix_tree_node *node, unsigned long index) ...@@ -232,7 +232,7 @@ static void dump_node(struct radix_tree_node *node, unsigned long index)
entry, i, entry, i,
*(void **)entry_to_node(entry), *(void **)entry_to_node(entry),
first, last); first, last);
} else if (!radix_tree_is_indirect_ptr(entry)) { } else if (!radix_tree_is_internal_node(entry)) {
pr_debug("radix entry %p offset %ld indices %ld-%ld\n", pr_debug("radix entry %p offset %ld indices %ld-%ld\n",
entry, i, first, last); entry, i, first, last);
} else { } else {
...@@ -247,7 +247,7 @@ static void radix_tree_dump(struct radix_tree_root *root) ...@@ -247,7 +247,7 @@ static void radix_tree_dump(struct radix_tree_root *root)
pr_debug("radix root: %p rnode %p tags %x\n", pr_debug("radix root: %p rnode %p tags %x\n",
root, root->rnode, root, root->rnode,
root->gfp_mask >> __GFP_BITS_SHIFT); root->gfp_mask >> __GFP_BITS_SHIFT);
if (!radix_tree_is_indirect_ptr(root->rnode)) if (!radix_tree_is_internal_node(root->rnode))
return; return;
dump_node(entry_to_node(root->rnode), 0); dump_node(entry_to_node(root->rnode), 0);
} }
...@@ -302,7 +302,7 @@ radix_tree_node_alloc(struct radix_tree_root *root) ...@@ -302,7 +302,7 @@ radix_tree_node_alloc(struct radix_tree_root *root)
ret = kmem_cache_alloc(radix_tree_node_cachep, ret = kmem_cache_alloc(radix_tree_node_cachep,
gfp_mask | __GFP_ACCOUNT); gfp_mask | __GFP_ACCOUNT);
out: out:
BUG_ON(radix_tree_is_indirect_ptr(ret)); BUG_ON(radix_tree_is_internal_node(ret));
return ret; return ret;
} }
...@@ -421,7 +421,7 @@ static unsigned radix_tree_load_root(struct radix_tree_root *root, ...@@ -421,7 +421,7 @@ static unsigned radix_tree_load_root(struct radix_tree_root *root,
*nodep = node; *nodep = node;
if (likely(radix_tree_is_indirect_ptr(node))) { if (likely(radix_tree_is_internal_node(node))) {
node = entry_to_node(node); node = entry_to_node(node);
*maxindex = node_maxindex(node); *maxindex = node_maxindex(node);
return node->shift + RADIX_TREE_MAP_SHIFT; return node->shift + RADIX_TREE_MAP_SHIFT;
...@@ -467,7 +467,7 @@ static int radix_tree_extend(struct radix_tree_root *root, ...@@ -467,7 +467,7 @@ static int radix_tree_extend(struct radix_tree_root *root,
node->offset = 0; node->offset = 0;
node->count = 1; node->count = 1;
node->parent = NULL; node->parent = NULL;
if (radix_tree_is_indirect_ptr(slot)) if (radix_tree_is_internal_node(slot))
entry_to_node(slot)->parent = node; entry_to_node(slot)->parent = node;
node->slots[0] = slot; node->slots[0] = slot;
slot = node_to_entry(node); slot = node_to_entry(node);
...@@ -535,7 +535,7 @@ int __radix_tree_create(struct radix_tree_root *root, unsigned long index, ...@@ -535,7 +535,7 @@ int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
} else } else
rcu_assign_pointer(root->rnode, rcu_assign_pointer(root->rnode,
node_to_entry(slot)); node_to_entry(slot));
} else if (!radix_tree_is_indirect_ptr(slot)) } else if (!radix_tree_is_internal_node(slot))
break; break;
/* Go a level down */ /* Go a level down */
...@@ -585,7 +585,7 @@ int __radix_tree_insert(struct radix_tree_root *root, unsigned long index, ...@@ -585,7 +585,7 @@ int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
void **slot; void **slot;
int error; int error;
BUG_ON(radix_tree_is_indirect_ptr(item)); BUG_ON(radix_tree_is_internal_node(item));
error = __radix_tree_create(root, index, order, &node, &slot); error = __radix_tree_create(root, index, order, &node, &slot);
if (error) if (error)
...@@ -637,7 +637,7 @@ void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index, ...@@ -637,7 +637,7 @@ void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
if (index > maxindex) if (index > maxindex)
return NULL; return NULL;
while (radix_tree_is_indirect_ptr(node)) { while (radix_tree_is_internal_node(node)) {
unsigned offset; unsigned offset;
if (node == RADIX_TREE_RETRY) if (node == RADIX_TREE_RETRY)
...@@ -720,7 +720,7 @@ void *radix_tree_tag_set(struct radix_tree_root *root, ...@@ -720,7 +720,7 @@ void *radix_tree_tag_set(struct radix_tree_root *root,
shift = radix_tree_load_root(root, &node, &maxindex); shift = radix_tree_load_root(root, &node, &maxindex);
BUG_ON(index > maxindex); BUG_ON(index > maxindex);
while (radix_tree_is_indirect_ptr(node)) { while (radix_tree_is_internal_node(node)) {
unsigned offset; unsigned offset;
shift -= RADIX_TREE_MAP_SHIFT; shift -= RADIX_TREE_MAP_SHIFT;
...@@ -770,7 +770,7 @@ void *radix_tree_tag_clear(struct radix_tree_root *root, ...@@ -770,7 +770,7 @@ void *radix_tree_tag_clear(struct radix_tree_root *root,
parent = NULL; parent = NULL;
while (radix_tree_is_indirect_ptr(node)) { while (radix_tree_is_internal_node(node)) {
shift -= RADIX_TREE_MAP_SHIFT; shift -= RADIX_TREE_MAP_SHIFT;
offset = (index >> shift) & RADIX_TREE_MAP_MASK; offset = (index >> shift) & RADIX_TREE_MAP_MASK;
...@@ -835,7 +835,7 @@ int radix_tree_tag_get(struct radix_tree_root *root, ...@@ -835,7 +835,7 @@ int radix_tree_tag_get(struct radix_tree_root *root,
if (node == NULL) if (node == NULL)
return 0; return 0;
while (radix_tree_is_indirect_ptr(node)) { while (radix_tree_is_internal_node(node)) {
int offset; int offset;
shift -= RADIX_TREE_MAP_SHIFT; shift -= RADIX_TREE_MAP_SHIFT;
...@@ -900,7 +900,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root, ...@@ -900,7 +900,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root,
if (index > maxindex) if (index > maxindex)
return NULL; return NULL;
if (radix_tree_is_indirect_ptr(rnode)) { if (radix_tree_is_internal_node(rnode)) {
rnode = entry_to_node(rnode); rnode = entry_to_node(rnode);
} else if (rnode) { } else if (rnode) {
/* Single-slot tree */ /* Single-slot tree */
...@@ -957,7 +957,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root, ...@@ -957,7 +957,7 @@ void **radix_tree_next_chunk(struct radix_tree_root *root,
if ((slot == NULL) || (slot == RADIX_TREE_RETRY)) if ((slot == NULL) || (slot == RADIX_TREE_RETRY))
goto restart; goto restart;
if (!radix_tree_is_indirect_ptr(slot)) if (!radix_tree_is_internal_node(slot))
break; break;
node = entry_to_node(slot); node = entry_to_node(slot);
...@@ -1039,7 +1039,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root, ...@@ -1039,7 +1039,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
*first_indexp = last_index + 1; *first_indexp = last_index + 1;
return 0; return 0;
} }
if (!radix_tree_is_indirect_ptr(slot)) { if (!radix_tree_is_internal_node(slot)) {
*first_indexp = last_index + 1; *first_indexp = last_index + 1;
root_tag_set(root, settag); root_tag_set(root, settag);
return 1; return 1;
...@@ -1059,7 +1059,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root, ...@@ -1059,7 +1059,7 @@ unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
if (!tag_get(node, iftag, offset)) if (!tag_get(node, iftag, offset))
goto next; goto next;
/* Sibling slots never have tags set on them */ /* Sibling slots never have tags set on them */
if (radix_tree_is_indirect_ptr(slot)) { if (radix_tree_is_internal_node(slot)) {
node = entry_to_node(slot); node = entry_to_node(slot);
shift -= RADIX_TREE_MAP_SHIFT; shift -= RADIX_TREE_MAP_SHIFT;
continue; continue;
...@@ -1152,7 +1152,7 @@ radix_tree_gang_lookup(struct radix_tree_root *root, void **results, ...@@ -1152,7 +1152,7 @@ radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
results[ret] = rcu_dereference_raw(*slot); results[ret] = rcu_dereference_raw(*slot);
if (!results[ret]) if (!results[ret])
continue; continue;
if (radix_tree_is_indirect_ptr(results[ret])) { if (radix_tree_is_internal_node(results[ret])) {
slot = radix_tree_iter_retry(&iter); slot = radix_tree_iter_retry(&iter);
continue; continue;
} }
...@@ -1235,7 +1235,7 @@ radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results, ...@@ -1235,7 +1235,7 @@ radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
results[ret] = rcu_dereference_raw(*slot); results[ret] = rcu_dereference_raw(*slot);
if (!results[ret]) if (!results[ret])
continue; continue;
if (radix_tree_is_indirect_ptr(results[ret])) { if (radix_tree_is_internal_node(results[ret])) {
slot = radix_tree_iter_retry(&iter); slot = radix_tree_iter_retry(&iter);
continue; continue;
} }
...@@ -1311,7 +1311,7 @@ static unsigned long __locate(struct radix_tree_node *slot, void *item, ...@@ -1311,7 +1311,7 @@ static unsigned long __locate(struct radix_tree_node *slot, void *item,
rcu_dereference_raw(slot->slots[i]); rcu_dereference_raw(slot->slots[i]);
if (node == RADIX_TREE_RETRY) if (node == RADIX_TREE_RETRY)
goto out; goto out;
if (!radix_tree_is_indirect_ptr(node)) { if (!radix_tree_is_internal_node(node)) {
if (node == item) { if (node == item) {
info->found_index = index; info->found_index = index;
info->stop = true; info->stop = true;
...@@ -1357,7 +1357,7 @@ unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item) ...@@ -1357,7 +1357,7 @@ unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
do { do {
rcu_read_lock(); rcu_read_lock();
node = rcu_dereference_raw(root->rnode); node = rcu_dereference_raw(root->rnode);
if (!radix_tree_is_indirect_ptr(node)) { if (!radix_tree_is_internal_node(node)) {
rcu_read_unlock(); rcu_read_unlock();
if (node == item) if (node == item)
info.found_index = 0; info.found_index = 0;
...@@ -1398,7 +1398,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root) ...@@ -1398,7 +1398,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
struct radix_tree_node *to_free = root->rnode; struct radix_tree_node *to_free = root->rnode;
struct radix_tree_node *slot; struct radix_tree_node *slot;
if (!radix_tree_is_indirect_ptr(to_free)) if (!radix_tree_is_internal_node(to_free))
break; break;
to_free = entry_to_node(to_free); to_free = entry_to_node(to_free);
...@@ -1412,10 +1412,10 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root) ...@@ -1412,10 +1412,10 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
slot = to_free->slots[0]; slot = to_free->slots[0];
if (!slot) if (!slot)
break; break;
if (!radix_tree_is_indirect_ptr(slot) && to_free->shift) if (!radix_tree_is_internal_node(slot) && to_free->shift)
break; break;
if (radix_tree_is_indirect_ptr(slot)) if (radix_tree_is_internal_node(slot))
entry_to_node(slot)->parent = NULL; entry_to_node(slot)->parent = NULL;
/* /*
...@@ -1445,7 +1445,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root) ...@@ -1445,7 +1445,7 @@ static inline bool radix_tree_shrink(struct radix_tree_root *root)
* also results in a stale slot). So tag the slot as indirect * also results in a stale slot). So tag the slot as indirect
* to force callers to retry. * to force callers to retry.
*/ */
if (!radix_tree_is_indirect_ptr(slot)) if (!radix_tree_is_internal_node(slot))
to_free->slots[0] = RADIX_TREE_RETRY; to_free->slots[0] = RADIX_TREE_RETRY;
radix_tree_node_free(to_free); radix_tree_node_free(to_free);
......
...@@ -193,7 +193,7 @@ static int verify_node(struct radix_tree_node *slot, unsigned int tag, ...@@ -193,7 +193,7 @@ static int verify_node(struct radix_tree_node *slot, unsigned int tag,
void verify_tag_consistency(struct radix_tree_root *root, unsigned int tag) void verify_tag_consistency(struct radix_tree_root *root, unsigned int tag)
{ {
struct radix_tree_node *node = root->rnode; struct radix_tree_node *node = root->rnode;
if (!radix_tree_is_indirect_ptr(node)) if (!radix_tree_is_internal_node(node))
return; return;
verify_node(node, tag, !!root_tag_get(root, tag)); verify_node(node, tag, !!root_tag_get(root, tag));
} }
...@@ -222,7 +222,7 @@ void tree_verify_min_height(struct radix_tree_root *root, int maxindex) ...@@ -222,7 +222,7 @@ void tree_verify_min_height(struct radix_tree_root *root, int maxindex)
{ {
unsigned shift; unsigned shift;
struct radix_tree_node *node = root->rnode; struct radix_tree_node *node = root->rnode;
if (!radix_tree_is_indirect_ptr(node)) { if (!radix_tree_is_internal_node(node)) {
assert(maxindex == 0); assert(maxindex == 0);
return; return;
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment