author    Qu Wenruo <wqu@suse.com>          2020-03-26 14:11:09 +0800
committer David Sterba <dsterba@suse.com>   2020-05-25 11:25:19 +0200
commit    e9a28dc52af31d8af1883afe08e724a303b3c4eb
tree      0967cfb74a693d6805ee3db2552bc329dd37732a /fs/btrfs/relocation.c
parent    7053544146ac7eb71de6cee1ffda678714f905d8
btrfs: rename tree_entry to rb_simple_node and export it
Structure tree_entry provides a very simple rb_tree which only uses bytenr as the search index. That tree_entry is used in 3 structures: backref_node, mapping_node and tree_block.

Since we're going to make backref_node independent from relocation, it's a good time to extract tree_entry into rb_simple_node and export it into misc.h.

Signed-off-by: Qu Wenruo <wqu@suse.com>
Reviewed-by: David Sterba <dsterba@suse.com>
Signed-off-by: David Sterba <dsterba@suse.com>
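For reference, below is a minimal sketch of what the extracted helper could look like in misc.h. The misc.h hunk is not part of this page (the diffstat is limited to relocation.c), so the exact layout and helper bodies are inferred from the tree_entry helpers removed further down and from the rb_simple_search()/rb_simple_insert() call sites in the hunks below; treat it as an illustration, not the committed misc.h contents.

/*
 * Simple rb_tree node keyed only by bytenr; structures that want to be
 * indexed this way embed these two members at their start.
 */
struct rb_simple_node {
	struct rb_node rb_node;
	u64 bytenr;
};

static inline struct rb_node *rb_simple_search(struct rb_root *root, u64 bytenr)
{
	struct rb_node *node = root->rb_node;
	struct rb_simple_node *entry;

	while (node) {
		entry = rb_entry(node, struct rb_simple_node, rb_node);

		if (bytenr < entry->bytenr)
			node = node->rb_left;
		else if (bytenr > entry->bytenr)
			node = node->rb_right;
		else
			return node;
	}
	return NULL;
}

static inline struct rb_node *rb_simple_insert(struct rb_root *root, u64 bytenr,
					       struct rb_node *node)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent = NULL;
	struct rb_simple_node *entry;

	while (*p) {
		parent = *p;
		entry = rb_entry(parent, struct rb_simple_node, rb_node);

		if (bytenr < entry->bytenr)
			p = &(*p)->rb_left;
		else if (bytenr > entry->bytenr)
			p = &(*p)->rb_right;
		else
			/* An entry with this bytenr already exists */
			return parent;
	}

	rb_link_node(node, parent, p);
	rb_insert_color(node, root);
	return NULL;
}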
Diffstat (limited to 'fs/btrfs/relocation.c')
-rw-r--r--  fs/btrfs/relocation.c  109
1 file changed, 32 insertions(+), 77 deletions(-)
diff --git a/fs/btrfs/relocation.c b/fs/btrfs/relocation.c
index 8fa10d8306c2..09076ac21590 100644
--- a/fs/btrfs/relocation.c
+++ b/fs/btrfs/relocation.c
@@ -24,6 +24,7 @@
#include "delalloc-space.h"
#include "block-group.h"
#include "backref.h"
+#include "misc.h"
/*
* Relocation overview
@@ -72,21 +73,15 @@
* The entry point of relocation is relocate_block_group() function.
*/
-/*
- * btrfs_backref_node, mapping_node and tree_block start with this
- */
-struct tree_entry {
- struct rb_node rb_node;
- u64 bytenr;
-};
-
#define RELOCATION_RESERVED_NODES 256
/*
* map address of tree root to tree
*/
struct mapping_node {
- struct rb_node rb_node;
- u64 bytenr;
+ struct {
+ struct rb_node rb_node;
+ u64 bytenr;
+ }; /* Use rb_simple_node for search/insert */
void *data;
};
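Because the anonymous struct keeps rb_node and bytenr at the start of mapping_node (and of tree_block), the layout matches rb_simple_node, so callers pass &node->rb_node to the generic helpers and rb_entry() back to the containing structure. A minimal sketch of that lookup pattern, mirroring the find_reloc_root() hunk further down (this lookup_reloc_root() helper is hypothetical and not part of the patch; locking and error handling are omitted):

static struct btrfs_root *lookup_reloc_root(struct reloc_control *rc, u64 bytenr)
{
	struct rb_node *rb_node;
	struct mapping_node *node;

	/* Search by bytenr via the embedded rb_simple_node-compatible members */
	rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root, bytenr);
	if (!rb_node)
		return NULL;

	/* Recover the containing mapping_node from its embedded rb_node */
	node = rb_entry(rb_node, struct mapping_node, rb_node);
	return (struct btrfs_root *)node->data;
}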
@@ -99,8 +94,10 @@ struct mapping_tree {
* present a tree block to process
*/
struct tree_block {
- struct rb_node rb_node;
- u64 bytenr;
+ struct {
+ struct rb_node rb_node;
+ u64 bytenr;
+ }; /* Use rb_simple_node for search/insert */
struct btrfs_key key;
unsigned int level:8;
unsigned int key_ready:1;
@@ -294,48 +291,6 @@ static void free_backref_edge(struct btrfs_backref_cache *cache,
}
}
-static struct rb_node *tree_insert(struct rb_root *root, u64 bytenr,
- struct rb_node *node)
-{
- struct rb_node **p = &root->rb_node;
- struct rb_node *parent = NULL;
- struct tree_entry *entry;
-
- while (*p) {
- parent = *p;
- entry = rb_entry(parent, struct tree_entry, rb_node);
-
- if (bytenr < entry->bytenr)
- p = &(*p)->rb_left;
- else if (bytenr > entry->bytenr)
- p = &(*p)->rb_right;
- else
- return parent;
- }
-
- rb_link_node(node, parent, p);
- rb_insert_color(node, root);
- return NULL;
-}
-
-static struct rb_node *tree_search(struct rb_root *root, u64 bytenr)
-{
- struct rb_node *n = root->rb_node;
- struct tree_entry *entry;
-
- while (n) {
- entry = rb_entry(n, struct tree_entry, rb_node);
-
- if (bytenr < entry->bytenr)
- n = n->rb_left;
- else if (bytenr > entry->bytenr)
- n = n->rb_right;
- else
- return n;
- }
- return NULL;
-}
-
static void backref_tree_panic(struct rb_node *rb_node, int errno, u64 bytenr)
{
@@ -474,7 +429,7 @@ static void update_backref_node(struct btrfs_backref_cache *cache,
struct rb_node *rb_node;
rb_erase(&node->rb_node, &cache->rb_root);
node->bytenr = bytenr;
- rb_node = tree_insert(&cache->rb_root, node->bytenr, &node->rb_node);
+ rb_node = rb_simple_insert(&cache->rb_root, node->bytenr, &node->rb_node);
if (rb_node)
backref_tree_panic(rb_node, -EEXIST, bytenr);
}
@@ -599,7 +554,7 @@ struct btrfs_root *find_reloc_root(struct btrfs_fs_info *fs_info, u64 bytenr)
ASSERT(rc);
spin_lock(&rc->reloc_root_tree.lock);
- rb_node = tree_search(&rc->reloc_root_tree.rb_root, bytenr);
+ rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root, bytenr);
if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node);
root = (struct btrfs_root *)node->data;
@@ -667,7 +622,7 @@ static int handle_direct_tree_backref(struct btrfs_backref_cache *cache,
if (!edge)
return -ENOMEM;
- rb_node = tree_search(&cache->rb_root, ref_key->offset);
+ rb_node = rb_simple_search(&cache->rb_root, ref_key->offset);
if (!rb_node) {
/* Parent node not yet cached */
upper = alloc_backref_node(cache, ref_key->offset,
@@ -788,7 +743,7 @@ static int handle_indirect_tree_backref(struct btrfs_backref_cache *cache,
}
eb = path->nodes[level];
- rb_node = tree_search(&cache->rb_root, eb->start);
+ rb_node = rb_simple_search(&cache->rb_root, eb->start);
if (!rb_node) {
upper = alloc_backref_node(cache, eb->start,
lower->level + 1);
@@ -994,8 +949,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,
/* Insert this node to cache if it's not COW-only */
if (!start->cowonly) {
- rb_node = tree_insert(&cache->rb_root, start->bytenr,
- &start->rb_node);
+ rb_node = rb_simple_insert(&cache->rb_root, start->bytenr,
+ &start->rb_node);
if (rb_node)
backref_tree_panic(rb_node, -EEXIST, start->bytenr);
list_add_tail(&start->lower, &cache->leaves);
@@ -1062,8 +1017,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,
/* Only cache non-COW-only (subvolume trees) tree blocks */
if (!upper->cowonly) {
- rb_node = tree_insert(&cache->rb_root, upper->bytenr,
- &upper->rb_node);
+ rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr,
+ &upper->rb_node);
if (rb_node) {
backref_tree_panic(rb_node, -EEXIST,
upper->bytenr);
@@ -1311,7 +1266,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
if (cache->last_trans > 0)
update_backref_cache(trans, cache);
- rb_node = tree_search(&cache->rb_root, src->commit_root->start);
+ rb_node = rb_simple_search(&cache->rb_root, src->commit_root->start);
if (rb_node) {
node = rb_entry(rb_node, struct btrfs_backref_node, rb_node);
if (node->detached)
@@ -1321,8 +1276,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
}
if (!node) {
- rb_node = tree_search(&cache->rb_root,
- reloc_root->commit_root->start);
+ rb_node = rb_simple_search(&cache->rb_root,
+ reloc_root->commit_root->start);
if (rb_node) {
node = rb_entry(rb_node, struct btrfs_backref_node,
rb_node);
@@ -1355,8 +1310,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
list_add_tail(&new_node->lower, &cache->leaves);
}
- rb_node = tree_insert(&cache->rb_root, new_node->bytenr,
- &new_node->rb_node);
+ rb_node = rb_simple_insert(&cache->rb_root, new_node->bytenr,
+ &new_node->rb_node);
if (rb_node)
backref_tree_panic(rb_node, -EEXIST, new_node->bytenr);
@@ -1396,8 +1351,8 @@ static int __must_check __add_reloc_root(struct btrfs_root *root)
node->data = root;
spin_lock(&rc->reloc_root_tree.lock);
- rb_node = tree_insert(&rc->reloc_root_tree.rb_root,
- node->bytenr, &node->rb_node);
+ rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
+ node->bytenr, &node->rb_node);
spin_unlock(&rc->reloc_root_tree.lock);
if (rb_node) {
btrfs_panic(fs_info, -EEXIST,
@@ -1423,8 +1378,8 @@ static void __del_reloc_root(struct btrfs_root *root)
if (rc && root->node) {
spin_lock(&rc->reloc_root_tree.lock);
- rb_node = tree_search(&rc->reloc_root_tree.rb_root,
- root->commit_root->start);
+ rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
+ root->commit_root->start);
if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node);
rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
@@ -1467,8 +1422,8 @@ static int __update_reloc_root(struct btrfs_root *root)
struct reloc_control *rc = fs_info->reloc_ctl;
spin_lock(&rc->reloc_root_tree.lock);
- rb_node = tree_search(&rc->reloc_root_tree.rb_root,
- root->commit_root->start);
+ rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
+ root->commit_root->start);
if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node);
rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
@@ -1481,8 +1436,8 @@ static int __update_reloc_root(struct btrfs_root *root)
spin_lock(&rc->reloc_root_tree.lock);
node->bytenr = root->node->start;
- rb_node = tree_insert(&rc->reloc_root_tree.rb_root,
- node->bytenr, &node->rb_node);
+ rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
+ node->bytenr, &node->rb_node);
spin_unlock(&rc->reloc_root_tree.lock);
if (rb_node)
backref_tree_panic(rb_node, -EEXIST, node->bytenr);
@@ -3640,7 +3595,7 @@ static int add_tree_block(struct reloc_control *rc,
block->level = level;
block->key_ready = 0;
- rb_node = tree_insert(blocks, block->bytenr, &block->rb_node);
+ rb_node = rb_simple_insert(blocks, block->bytenr, &block->rb_node);
if (rb_node)
backref_tree_panic(rb_node, -EEXIST, block->bytenr);
@@ -3663,7 +3618,7 @@ static int __add_tree_block(struct reloc_control *rc,
if (tree_block_processed(bytenr, rc))
return 0;
- if (tree_search(blocks, bytenr))
+ if (rb_simple_search(blocks, bytenr))
return 0;
path = btrfs_alloc_path();