Commit 8233767a authored by Xiao Guangrong, committed by Chris Mason

Btrfs: allocate extent state and check the result properly

extent_state is not always allocated, and the result of the allocation is not checked properly:
- in set_extent_bit, no extent_state is allocated when the path is not allowed
  to wait

- in clear_extent_bit, the result of the atomic allocation is not checked; we
  now trigger BUG_ON() if it fails

- if the allocation fails, we trigger BUG_ON() instead of returning -ENOMEM,
  since the return value of clear_extent_bit() is ignored by many callers
Signed-off-by: Xiao Guangrong <xiaoguangrong@cn.fujitsu.com>
Signed-off-by: Chris Mason <chris.mason@oracle.com>
parent b0839166
@@ -439,6 +439,15 @@ static int clear_state_bit(struct extent_io_tree *tree,
         return ret;
 }
 
+static struct extent_state *
+alloc_extent_state_atomic(struct extent_state *prealloc)
+{
+        if (!prealloc)
+                prealloc = alloc_extent_state(GFP_ATOMIC);
+
+        return prealloc;
+}
+
 /*
  * clear some bits on a range in the tree. This may require splitting
  * or inserting elements in the tree, so the gfp mask is used to
@@ -476,8 +485,7 @@ int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
         if (!prealloc && (mask & __GFP_WAIT)) {
                 prealloc = alloc_extent_state(mask);
-                if (!prealloc)
-                        return -ENOMEM;
+                BUG_ON(!prealloc);
         }
 
         spin_lock(&tree->lock);
@@ -529,8 +537,8 @@ int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
          */
         if (state->start < start) {
-                if (!prealloc)
-                        prealloc = alloc_extent_state(GFP_ATOMIC);
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = split_state(tree, state, prealloc, start);
                 BUG_ON(err == -EEXIST);
                 prealloc = NULL;
@@ -551,8 +559,8 @@ int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
          * on the first half
          */
         if (state->start <= end && state->end > end) {
-                if (!prealloc)
-                        prealloc = alloc_extent_state(GFP_ATOMIC);
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = split_state(tree, state, prealloc, end + 1);
                 BUG_ON(err == -EEXIST);
                 if (wake)
@@ -725,8 +733,7 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
         if (!prealloc && (mask & __GFP_WAIT)) {
                 prealloc = alloc_extent_state(mask);
-                if (!prealloc)
-                        return -ENOMEM;
+                BUG_ON(!prealloc);
         }
 
         spin_lock(&tree->lock);
@@ -743,6 +750,8 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
          */
         node = tree_search(tree, start);
         if (!node) {
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = insert_state(tree, prealloc, start, end, &bits);
                 prealloc = NULL;
                 BUG_ON(err == -EEXIST);
@@ -811,6 +820,9 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
                         err = -EEXIST;
                         goto out;
                 }
+
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = split_state(tree, state, prealloc, start);
                 BUG_ON(err == -EEXIST);
                 prealloc = NULL;
@@ -841,6 +853,9 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
                         this_end = end;
                 else
                         this_end = last_start - 1;
+
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = insert_state(tree, prealloc, start, this_end,
                                    &bits);
                 BUG_ON(err == -EEXIST);
@@ -865,6 +880,9 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
                         err = -EEXIST;
                         goto out;
                 }
+
+                prealloc = alloc_extent_state_atomic(prealloc);
+                BUG_ON(!prealloc);
                 err = split_state(tree, state, prealloc, end + 1);
                 BUG_ON(err == -EEXIST);
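The change converges every call site on one pattern: reuse the preallocated extent_state if one is still on hand, otherwise fall back to an atomic allocation, and check the result before touching it, crashing loudly via BUG_ON() rather than dereferencing NULL or returning an error that callers ignore. A minimal user-space sketch of that pattern follows; struct extent_state, alloc_extent_state() and the allocator here are simplified stand-ins for illustration, not the kernel definitions.

/* Minimal sketch of the prealloc-or-atomic-alloc pattern from this patch.
 * Types and the allocator are user-space stand-ins, not the Btrfs code. */
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

struct extent_state {
        unsigned long state;    /* stand-in; the real struct tracks a range, bits, refs */
};

static struct extent_state *alloc_extent_state(void)
{
        return calloc(1, sizeof(struct extent_state));
}

/* Mirrors alloc_extent_state_atomic(): allocate only when no
 * preallocated object is available, and return whatever we hold. */
static struct extent_state *
alloc_extent_state_atomic(struct extent_state *prealloc)
{
        if (!prealloc)
                prealloc = alloc_extent_state();
        return prealloc;
}

int main(void)
{
        /* Blocking path: preallocate up front; the patch uses
         * BUG_ON(!prealloc) here instead of returning -ENOMEM because
         * many callers ignore the return value. */
        struct extent_state *prealloc = alloc_extent_state();
        assert(prealloc);               /* stands in for BUG_ON(!prealloc) */

        /* Inside the locked region: reuse prealloc if we still have it,
         * otherwise allocate atomically, and always check the result
         * before handing it to split_state()/insert_state(). */
        prealloc = alloc_extent_state_atomic(prealloc);
        assert(prealloc);

        prealloc->state = 0x1;          /* safe only because we checked */
        printf("state = %lx\n", prealloc->state);
        free(prealloc);
        return 0;
}

The helper exists so that the "allocate if we do not already have one" step and the NULL check sit next to each other at every call site, replacing the unchecked GFP_ATOMIC allocations the old code had in the split paths.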