Commit 01959dfe authored by Matthew Wilcox

xarray: Define struct xa_node

This is a direct replacement for struct radix_tree_node.  A couple of
struct members have changed name, so convert those.  Use a #define so
that radix tree users continue to work without change.
Signed-off-by: Matthew Wilcox <willy@infradead.org>
Reviewed-by: Josef Bacik <jbacik@fb.com>
parent f8d5d0cc
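
For readers unfamiliar with the compatibility approach described above, here is a stand-alone C11 sketch (all names are invented for illustration; this is not kernel code) of the two tricks the diff below relies on: a #define keeps the old struct name resolving to the new one, and an anonymous union lets an old member name and its new name share the same storage.

/* Illustration only: xa_node_demo and radix_tree_node_demo are made-up names. */
#include <stdio.h>

struct xa_node_demo {
        union {
                unsigned long tags[3];          /* legacy member name */
                unsigned long marks[3];         /* new member name, same storage */
        };
};
#define radix_tree_node_demo xa_node_demo       /* old type name keeps working */

int main(void)
{
        struct radix_tree_node_demo node;

        node.marks[0] = 1;                      /* new code writes through the new name */
        printf("%lu\n", node.tags[0]);          /* unconverted code still reads the old one */
        return 0;
}

Members that merely changed name without an alias (exceptional becomes nr_values, root becomes array) have their users converted in the same commit, as the hunks below show.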
@@ -32,6 +32,7 @@
 /* Keep unconverted code working */
 #define radix_tree_root xarray
+#define radix_tree_node xa_node

 /*
  * The bottom two bits of the slot determine how the remaining bits in the
@@ -60,41 +61,17 @@ static inline bool radix_tree_is_internal_node(void *ptr)

 /*** radix-tree API starts here ***/

-#define RADIX_TREE_MAX_TAGS 3
 #define RADIX_TREE_MAP_SHIFT XA_CHUNK_SHIFT
 #define RADIX_TREE_MAP_SIZE (1UL << RADIX_TREE_MAP_SHIFT)
 #define RADIX_TREE_MAP_MASK (RADIX_TREE_MAP_SIZE-1)
-#define RADIX_TREE_TAG_LONGS \
-        ((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)
+#define RADIX_TREE_MAX_TAGS XA_MAX_MARKS
+#define RADIX_TREE_TAG_LONGS XA_MARK_LONGS

 #define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
 #define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
                                           RADIX_TREE_MAP_SHIFT))

-/*
- * @count is the count of every non-NULL element in the ->slots array
- * whether that is a value entry, a retry entry, a user pointer,
- * a sibling entry or a pointer to the next level of the tree.
- * @exceptional is the count of every element in ->slots which is
- * either a value entry or a sibling of a value entry.
- */
-struct radix_tree_node {
-        unsigned char shift;            /* Bits remaining in each slot */
-        unsigned char offset;           /* Slot offset in parent */
-        unsigned char count;            /* Total entry count */
-        unsigned char exceptional;      /* Exceptional entry count */
-        struct radix_tree_node *parent; /* Used when ascending tree */
-        struct radix_tree_root *root;   /* The tree we belong to */
-        union {
-                struct list_head private_list; /* For tree user */
-                struct rcu_head rcu_head;      /* Used when freeing node */
-        };
-        void __rcu *slots[RADIX_TREE_MAP_SIZE];
-        unsigned long tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
-};
 /* The IDR tag is stored in the low bits of xa_flags */
 #define ROOT_IS_IDR ((__force gfp_t)4)
 /* The top bits of xa_flags are used to store the root tags */
...
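
The hunk above replaces the open-coded RADIX_TREE_TAG_LONGS arithmetic with the new XA_MARK_LONGS macro. A quick compile-time check, shown here as a stand-alone sketch that hard-codes XA_CHUNK_SHIFT to 6 (its usual value; small-footprint configurations use a smaller shift, and none of these spelled-out macros are the kernel's own), confirms the old and new formulas compute the same number of longs per tag bitmap, since RADIX_TREE_MAP_SIZE is now defined to equal XA_CHUNK_SIZE:

#include <limits.h>

#define BITS_PER_LONG           (sizeof(long) * CHAR_BIT)
#define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))
#define XA_CHUNK_SHIFT          6       /* assumed typical value */
#define XA_CHUNK_SIZE           (1UL << XA_CHUNK_SHIFT)
#define RADIX_TREE_MAP_SIZE     XA_CHUNK_SIZE

/* Old open-coded formula on the left, new macro on the right. */
_Static_assert((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG ==
               DIV_ROUND_UP(XA_CHUNK_SIZE, BITS_PER_LONG),
               "old and new tag-longs formulas agree");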
@@ -252,6 +252,33 @@ static inline void xa_init(struct xarray *xa)
 #endif
 #define XA_CHUNK_SIZE (1UL << XA_CHUNK_SHIFT)
 #define XA_CHUNK_MASK (XA_CHUNK_SIZE - 1)
+#define XA_MAX_MARKS 3
+#define XA_MARK_LONGS DIV_ROUND_UP(XA_CHUNK_SIZE, BITS_PER_LONG)
+
+/*
+ * @count is the count of every non-NULL element in the ->slots array
+ * whether that is a value entry, a retry entry, a user pointer,
+ * a sibling entry or a pointer to the next level of the tree.
+ * @nr_values is the count of every element in ->slots which is
+ * either a value entry or a sibling of a value entry.
+ */
+struct xa_node {
+        unsigned char shift;            /* Bits remaining in each slot */
+        unsigned char offset;           /* Slot offset in parent */
+        unsigned char count;            /* Total entry count */
+        unsigned char nr_values;        /* Value entry count */
+        struct xa_node __rcu *parent;   /* NULL at top of tree */
+        struct xarray *array;           /* The array we belong to */
+        union {
+                struct list_head private_list;  /* For tree user */
+                struct rcu_head rcu_head;       /* Used when freeing node */
+        };
+        void __rcu *slots[XA_CHUNK_SIZE];
+        union {
+                unsigned long tags[XA_MAX_MARKS][XA_MARK_LONGS];
+                unsigned long marks[XA_MAX_MARKS][XA_MARK_LONGS];
+        };
+};

 /* Private */
 static inline bool xa_is_node(const void *entry)
...
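
To make the new macros concrete, the following stand-alone program (a sketch under the same hard-coded XA_CHUNK_SHIFT = 6 assumption as above, not kernel code) prints the node geometry they imply: 64 slots per node and three mark bitmaps, each taking one unsigned long of bits on 64-bit and two on 32-bit.

#include <limits.h>
#include <stddef.h>
#include <stdio.h>

#define BITS_PER_LONG           (sizeof(long) * CHAR_BIT)
#define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))
#define XA_CHUNK_SHIFT          6       /* assumed typical value */
#define XA_CHUNK_SIZE           (1UL << XA_CHUNK_SHIFT)
#define XA_MAX_MARKS            3
#define XA_MARK_LONGS           DIV_ROUND_UP(XA_CHUNK_SIZE, BITS_PER_LONG)

int main(void)
{
        printf("slots per node:        %lu\n", XA_CHUNK_SIZE);
        printf("marks per node:        %d\n", XA_MAX_MARKS);
        printf("longs per mark bitmap: %zu\n", (size_t)XA_MARK_LONGS);
        return 0;
}

Note that struct xa_node keeps the old tags[][] name in a union with the new marks[][] name, so both spellings address the same bitmap storage.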
@@ -260,11 +260,11 @@ static void dump_node(struct radix_tree_node *node, unsigned long index)
 {
         unsigned long i;

-        pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d exceptional %d\n",
+        pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d nr_values %d\n",
                 node, node->offset, index, index | node_maxindex(node),
                 node->parent,
                 node->tags[0][0], node->tags[1][0], node->tags[2][0],
-                node->shift, node->count, node->exceptional);
+                node->shift, node->count, node->nr_values);

         for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
                 unsigned long first = index | (i << node->shift);
@@ -354,7 +354,7 @@ static struct radix_tree_node *
 radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
                         struct radix_tree_root *root,
                         unsigned int shift, unsigned int offset,
-                        unsigned int count, unsigned int exceptional)
+                        unsigned int count, unsigned int nr_values)
 {
         struct radix_tree_node *ret = NULL;
@@ -401,9 +401,9 @@ radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
                 ret->shift = shift;
                 ret->offset = offset;
                 ret->count = count;
-                ret->exceptional = exceptional;
+                ret->nr_values = nr_values;
                 ret->parent = parent;
-                ret->root = root;
+                ret->array = root;
         }
         return ret;
 }
@@ -633,8 +633,8 @@ static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp,
                 if (radix_tree_is_internal_node(entry)) {
                         entry_to_node(entry)->parent = node;
                 } else if (xa_is_value(entry)) {
-                        /* Moving an exceptional root->xa_head to a node */
-                        node->exceptional = 1;
+                        /* Moving a value entry root->xa_head to a node */
+                        node->nr_values = 1;
                 }
                 /*
                  * entry was already in the radix tree, so we do not need
@@ -928,12 +928,12 @@ static inline int insert_entries(struct radix_tree_node *node,
                 if (xa_is_node(old))
                         radix_tree_free_nodes(old);
                 if (xa_is_value(old))
-                        node->exceptional--;
+                        node->nr_values--;
         }
         if (node) {
                 node->count += n;
                 if (xa_is_value(item))
-                        node->exceptional += n;
+                        node->nr_values += n;
         }
         return n;
 }
@@ -947,7 +947,7 @@ static inline int insert_entries(struct radix_tree_node *node,
         if (node) {
                 node->count++;
                 if (xa_is_value(item))
-                        node->exceptional++;
+                        node->nr_values++;
         }
         return 1;
 }
@@ -1083,7 +1083,7 @@ void *radix_tree_lookup(const struct radix_tree_root *root, unsigned long index)
 EXPORT_SYMBOL(radix_tree_lookup);

 static inline void replace_sibling_entries(struct radix_tree_node *node,
-                                void __rcu **slot, int count, int exceptional)
+                                void __rcu **slot, int count, int values)
 {
 #ifdef CONFIG_RADIX_TREE_MULTIORDER
         unsigned offset = get_slot_offset(node, slot);
@@ -1096,18 +1096,18 @@ static inline void replace_sibling_entries(struct radix_tree_node *node,
                         node->slots[offset] = NULL;
                         node->count--;
                 }
-                node->exceptional += exceptional;
+                node->nr_values += values;
         }
 #endif
 }

 static void replace_slot(void __rcu **slot, void *item,
-                struct radix_tree_node *node, int count, int exceptional)
+                struct radix_tree_node *node, int count, int values)
 {
-        if (node && (count || exceptional)) {
+        if (node && (count || values)) {
                 node->count += count;
-                node->exceptional += exceptional;
-                replace_sibling_entries(node, slot, count, exceptional);
+                node->nr_values += values;
+                replace_sibling_entries(node, slot, count, values);
         }

         rcu_assign_pointer(*slot, item);
@@ -1161,17 +1161,17 @@ void __radix_tree_replace(struct radix_tree_root *root,
                           radix_tree_update_node_t update_node)
 {
         void *old = rcu_dereference_raw(*slot);
-        int exceptional = !!xa_is_value(item) - !!xa_is_value(old);
+        int values = !!xa_is_value(item) - !!xa_is_value(old);
         int count = calculate_count(root, node, slot, item, old);

         /*
-         * This function supports replacing exceptional entries and
+         * This function supports replacing value entries and
          * deleting entries, but that needs accounting against the
          * node unless the slot is root->xa_head.
          */
         WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->xa_head) &&
-                        (count || exceptional));
-        replace_slot(slot, item, node, count, exceptional);
+                        (count || values));
+        replace_slot(slot, item, node, count, values);

         if (!node)
                 return;
@@ -1193,7 +1193,7 @@ void __radix_tree_replace(struct radix_tree_root *root,
  * across slot lookup and replacement.
  *
  * NOTE: This cannot be used to switch between non-entries (empty slots),
- * regular entries, and exceptional entries, as that requires accounting
+ * regular entries, and value entries, as that requires accounting
  * inside the radix tree node. When switching from one type of entry or
  * deleting, use __radix_tree_lookup() and __radix_tree_replace() or
  * radix_tree_iter_replace().
@@ -1301,7 +1301,7 @@ int radix_tree_split(struct radix_tree_root *root, unsigned long index,
                 rcu_assign_pointer(parent->slots[end], RADIX_TREE_RETRY);
         }
         rcu_assign_pointer(parent->slots[offset], RADIX_TREE_RETRY);
-        parent->exceptional -= (end - offset);
+        parent->nr_values -= (end - offset);

         if (order == parent->shift)
                 return 0;
@@ -1961,7 +1961,7 @@ static bool __radix_tree_delete(struct radix_tree_root *root,
                         struct radix_tree_node *node, void __rcu **slot)
 {
         void *old = rcu_dereference_raw(*slot);
-        int exceptional = xa_is_value(old) ? -1 : 0;
+        int values = xa_is_value(old) ? -1 : 0;
         unsigned offset = get_slot_offset(node, slot);
         int tag;
@@ -1971,7 +1971,7 @@ static bool __radix_tree_delete(struct radix_tree_root *root,
         for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
                 node_tag_clear(root, node, tag, offset);

-        replace_slot(slot, NULL, node, -1, exceptional);
+        replace_slot(slot, NULL, node, -1, values);
         return node && delete_node(root, node, NULL);
 }
...
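
Most of the churn in the radix tree implementation above is the rename of the per-node accounting from exceptional to nr_values, together with the local variables that feed it. The following stand-alone sketch (not kernel code; the bit-0 encoding of value entries used by the stand-in helpers is an assumption of the sketch, mirroring what xa_mk_value()/xa_is_value() are expected to do) reproduces the delta that __radix_tree_replace() computes and folds into node->nr_values:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Stand-ins for the kernel helpers; the encoding is an assumption here. */
static void *xa_mk_value(unsigned long v)
{
        return (void *)(uintptr_t)((v << 1) | 1);
}

static bool xa_is_value(const void *entry)
{
        return (uintptr_t)entry & 1;
}

/* The nr_values delta: +1 when a value entry replaces a non-value entry,
 * -1 for the reverse, 0 otherwise. */
static int value_delta(void *item, void *old)
{
        return !!xa_is_value(item) - !!xa_is_value(old);
}

int main(void)
{
        int regular;    /* stands in for an ordinary pointer entry */

        assert(value_delta(xa_mk_value(5), NULL) == 1);         /* value replaces empty */
        assert(value_delta(&regular, xa_mk_value(5)) == -1);    /* pointer replaces value */
        assert(value_delta(xa_mk_value(1), xa_mk_value(2)) == 0);
        assert(value_delta(NULL, &regular) == 0);   /* plain delete touches count, not nr_values */
        return 0;
}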
@@ -349,7 +349,7 @@ void workingset_update_node(struct radix_tree_node *node)
          * already where they should be. The list_empty() test is safe
          * as node->private_list is protected by the i_pages lock.
          */
-        if (node->count && node->count == node->exceptional) {
+        if (node->count && node->count == node->nr_values) {
                 if (list_empty(&node->private_list))
                         list_lru_add(&shadow_nodes, &node->private_list);
         } else {
@@ -428,8 +428,8 @@ static enum lru_status shadow_lru_isolate(struct list_head *item,
          * to reclaim, take the node off-LRU, and drop the lru_lock.
          */

-        node = container_of(item, struct radix_tree_node, private_list);
-        mapping = container_of(node->root, struct address_space, i_pages);
+        node = container_of(item, struct xa_node, private_list);
+        mapping = container_of(node->array, struct address_space, i_pages);

         /* Coming from the list, invert the lock order */
         if (!xa_trylock(&mapping->i_pages)) {
@@ -446,25 +446,25 @@ static enum lru_status shadow_lru_isolate(struct list_head *item,
          * no pages, so we expect to be able to remove them all and
          * delete and free the empty node afterwards.
          */
-        if (WARN_ON_ONCE(!node->exceptional))
+        if (WARN_ON_ONCE(!node->nr_values))
                 goto out_invalid;
-        if (WARN_ON_ONCE(node->count != node->exceptional))
+        if (WARN_ON_ONCE(node->count != node->nr_values))
                 goto out_invalid;
         for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
                 if (node->slots[i]) {
                         if (WARN_ON_ONCE(!xa_is_value(node->slots[i])))
                                 goto out_invalid;
-                        if (WARN_ON_ONCE(!node->exceptional))
+                        if (WARN_ON_ONCE(!node->nr_values))
                                 goto out_invalid;
                         if (WARN_ON_ONCE(!mapping->nrexceptional))
                                 goto out_invalid;
                         node->slots[i] = NULL;
-                        node->exceptional--;
+                        node->nr_values--;
                         node->count--;
                         mapping->nrexceptional--;
                 }
         }
-        if (WARN_ON_ONCE(node->exceptional))
+        if (WARN_ON_ONCE(node->nr_values))
                 goto out_invalid;
         inc_lruvec_page_state(virt_to_page(node), WORKINGSET_NODERECLAIM);
         __radix_tree_delete_node(&mapping->i_pages, node,
...
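
The workingset hunks above depend on the same two counters: a node whose populated slots are all value (shadow) entries satisfies count == nr_values and becomes a candidate for the shadow-node LRU, which is exactly what workingset_update_node() tests. A stand-alone sketch of just that predicate (invented struct, not the kernel's):

#include <stdbool.h>
#include <stdio.h>

struct fake_node {                      /* only the two counters matter here */
        unsigned char count;            /* total populated slots */
        unsigned char nr_values;        /* populated slots that are value entries */
};

static bool shadow_only(const struct fake_node *node)
{
        return node->count && node->count == node->nr_values;
}

int main(void)
{
        struct fake_node shadows = { .count = 4, .nr_values = 4 };
        struct fake_node mixed   = { .count = 4, .nr_values = 1 };
        struct fake_node empty   = { .count = 0, .nr_values = 0 };

        /* prints: 1 0 0 */
        printf("%d %d %d\n", shadow_only(&shadows), shadow_only(&mixed),
               shadow_only(&empty));
        return 0;
}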
@@ -393,7 +393,7 @@ static void multiorder_join2(unsigned order1, unsigned order2)
         radix_tree_insert(&tree, 1 << order2, xa_mk_value(5));
         item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
         assert(item2 == xa_mk_value(5));
-        assert(node->exceptional == 1);
+        assert(node->nr_values == 1);

         item2 = radix_tree_lookup(&tree, 0);
         free(item2);
@@ -401,7 +401,7 @@ static void multiorder_join2(unsigned order1, unsigned order2)
         radix_tree_join(&tree, 0, order1, item1);
         item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
         assert(item2 == item1);
-        assert(node->exceptional == 0);
+        assert(node->nr_values == 0);
         item_kill_tree(&tree);
 }
@@ -409,7 +409,7 @@ static void multiorder_join2(unsigned order1, unsigned order2)
  * This test revealed an accounting bug for value entries at one point.
  * Nodes were being freed back into the pool with an elevated exception count
  * by radix_tree_join() and then radix_tree_split() was failing to zero the
- * count of exceptional entries.
+ * count of value entries.
  */
 static void multiorder_join3(unsigned int order)
 {
@@ -433,7 +433,7 @@ static void multiorder_join3(unsigned int order)
         }

         __radix_tree_lookup(&tree, 0, &node, NULL);
-        assert(node->exceptional == node->count);
+        assert(node->nr_values == node->count);

         item_kill_tree(&tree);
 }
@@ -520,7 +520,7 @@ static void __multiorder_split2(int old_order, int new_order)
         item = __radix_tree_lookup(&tree, 0, &node, NULL);
         assert(item == xa_mk_value(5));
-        assert(node->exceptional > 0);
+        assert(node->nr_values > 0);

         radix_tree_split(&tree, 0, new_order);
         radix_tree_for_each_slot(slot, &tree, &iter, 0) {
@@ -530,7 +530,7 @@ static void __multiorder_split2(int old_order, int new_order)
         item = __radix_tree_lookup(&tree, 0, &node, NULL);
         assert(item != xa_mk_value(5));
-        assert(node->exceptional == 0);
+        assert(node->nr_values == 0);

         item_kill_tree(&tree);
 }
@@ -547,7 +547,7 @@ static void __multiorder_split3(int old_order, int new_order)
         item = __radix_tree_lookup(&tree, 0, &node, NULL);
         assert(item == xa_mk_value(5));
-        assert(node->exceptional > 0);
+        assert(node->nr_values > 0);

         radix_tree_split(&tree, 0, new_order);
         radix_tree_for_each_slot(slot, &tree, &iter, 0) {
@@ -556,7 +556,7 @@ static void __multiorder_split3(int old_order, int new_order)
         item = __radix_tree_lookup(&tree, 0, &node, NULL);
         assert(item == xa_mk_value(7));
-        assert(node->exceptional > 0);
+        assert(node->nr_values > 0);

         item_kill_tree(&tree);
@@ -564,7 +564,7 @@ static void __multiorder_split3(int old_order, int new_order)
         item = __radix_tree_lookup(&tree, 0, &node, NULL);
         assert(item == xa_mk_value(5));
-        assert(node->exceptional > 0);
+        assert(node->nr_values > 0);

         radix_tree_split(&tree, 0, new_order);
         radix_tree_for_each_slot(slot, &tree, &iter, 0) {
@@ -577,13 +577,13 @@ static void __multiorder_split3(int old_order, int new_order)

         item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
         assert(item == xa_mk_value(7));
-        assert(node->count == node->exceptional);
+        assert(node->count == node->nr_values);
         do {
                 node = node->parent;
                 if (!node)
                         break;
                 assert(node->count == 1);
-                assert(node->exceptional == 0);
+                assert(node->nr_values == 0);
         } while (1);

         item_kill_tree(&tree);
@@ -611,15 +611,15 @@ static void multiorder_account(void)
         __radix_tree_insert(&tree, 1 << 5, 5, xa_mk_value(5));

         __radix_tree_lookup(&tree, 0, &node, NULL);
-        assert(node->count == node->exceptional * 2);
+        assert(node->count == node->nr_values * 2);
         radix_tree_delete(&tree, 1 << 5);
-        assert(node->exceptional == 0);
+        assert(node->nr_values == 0);

         __radix_tree_insert(&tree, 1 << 5, 5, xa_mk_value(5));
         __radix_tree_lookup(&tree, 1 << 5, &node, &slot);
-        assert(node->count == node->exceptional * 2);
+        assert(node->count == node->nr_values * 2);
         __radix_tree_replace(&tree, node, slot, NULL, NULL);
-        assert(node->exceptional == 0);
+        assert(node->nr_values == 0);

         item_kill_tree(&tree);
 }
...