author	David Oberhollenzer <david.oberhollenzer@sigma-star.at>	2021-03-07 18:15:50 +0100
committer	David Oberhollenzer <david.oberhollenzer@sigma-star.at>	2021-03-07 18:15:50 +0100
commit	723019f727ce53b392389bdadcc1701ae47e6a14 (patch)
tree	148912aacb539269a89b0d473abb9a85466628e0 /lib/util
parent	8f79a36a592c796c19037a2513c6fc7098698dee (diff)
Optionally use a pool allocator for rb-tree nodes
This commit restructures the rbtree code to optionally use a pool allocator for the nodes. The option is made dependent on the presence of a pre-processor flag, and a matching option is added to the configure script to enable or disable the use of custom allocators. It makes sense to still allow the plain malloc/free based route for better ASAN based instrumentation.

Signed-off-by: David Oberhollenzer <david.oberhollenzer@sigma-star.at>
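For orientation, here is a minimal sketch of the data layout the patch below implies. Only the field names are taken from the diff (root, key_size_padded, value_size, pool, left, right, is_red); everything around them is an assumption, and the real definitions in include/rbtree.h and include/mempool.h may differ:

typedef struct mem_pool_t mem_pool_t;	/* opaque pool handle, name assumed */

typedef struct rbtree_node_t {
	struct rbtree_node_t *left;
	struct rbtree_node_t *right;
	int is_red;
	/* key bytes (padded to key_size_padded) and value bytes are stored
	 * inline behind this header, hence the calloc() size in mknode() */
} rbtree_node_t;

typedef struct {
	rbtree_node_t *root;
	size_t key_size_padded;
	size_t value_size;
#ifndef NO_CUSTOM_ALLOC
	mem_pool_t *pool;	/* fixed-size allocator backing every node */
#endif
} rbtree_t;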
Diffstat (limited to 'lib/util')
-rw-r--r--	lib/util/Makemodule.am	5
-rw-r--r--	lib/util/rbtree.c	46
2 files changed, 45 insertions(+), 6 deletions(-)
diff --git a/lib/util/Makemodule.am b/lib/util/Makemodule.am
index 80bec4f..39352f4 100644
--- a/lib/util/Makemodule.am
+++ b/lib/util/Makemodule.am
@@ -2,10 +2,13 @@ libutil_a_SOURCES = include/util.h include/str_table.h include/hash_table.h
libutil_a_SOURCES += lib/util/str_table.c lib/util/alloc.c
libutil_a_SOURCES += lib/util/rbtree.c include/rbtree.h
libutil_a_SOURCES += lib/util/array.c include/array.h
-libutil_a_SOURCES += lib/util/mempool.c include/mempool.h
libutil_a_SOURCES += lib/util/xxhash.c lib/util/hash_table.c
libutil_a_SOURCES += lib/util/fast_urem_by_const.h
libutil_a_CFLAGS = $(AM_CFLAGS)
libutil_a_CPPFLAGS = $(AM_CPPFLAGS)
+if CUSTOM_ALLOC
+libutil_a_SOURCES += lib/util/mempool.c include/mempool.h
+endif
+
noinst_LIBRARIES += libutil.a
diff --git a/lib/util/rbtree.c b/lib/util/rbtree.c
index d8f1305..8839f82 100644
--- a/lib/util/rbtree.c
+++ b/lib/util/rbtree.c
@@ -14,6 +14,7 @@
#define IS_RED(n) ((n) && (n)->is_red)
+#ifdef NO_CUSTOM_ALLOC
static void destroy_nodes_dfs(rbtree_node_t *n)
{
	rbtree_node_t *l, *r;
@@ -26,6 +27,12 @@ static void destroy_nodes_dfs(rbtree_node_t *n)
		destroy_nodes_dfs(r);
	}
}
+#else
+static void destroy_nodes_dfs(rbtree_node_t *n)
+{
+	(void)n;
+}
+#endif
static void flip_colors(rbtree_node_t *n)
{
@@ -89,11 +96,16 @@ static rbtree_node_t *subtree_insert(rbtree_t *tree, rbtree_node_t *root,
	return subtree_balance(root);
}
-static rbtree_node_t *mknode(const rbtree_t *t, const void *key, const void *value)
+static rbtree_node_t *mknode(rbtree_t *t, const void *key, const void *value)
{
	rbtree_node_t *node;
+#ifdef NO_CUSTOM_ALLOC
	node = calloc(1, sizeof(*node) + t->key_size_padded + t->value_size);
+#else
+	node = mem_pool_allocate(t->pool);
+#endif
+
	if (node == NULL)
		return NULL;
@@ -105,11 +117,17 @@ static rbtree_node_t *mknode(const rbtree_t *t, const void *key, const void *val
	return node;
}
-static rbtree_node_t *copy_node(const rbtree_t *t, const rbtree_node_t *n)
+static rbtree_node_t *copy_node(rbtree_t *nt, const rbtree_t *t,
+				const rbtree_node_t *n)
{
	rbtree_node_t *out;
+#ifdef NO_CUSTOM_ALLOC
	out = calloc(1, sizeof(*out) + t->key_size_padded + t->value_size);
+#else
+	out = mem_pool_allocate(nt->pool);
+#endif
+
	if (out == NULL)
		return NULL;
@@ -118,7 +136,7 @@ static rbtree_node_t *copy_node(const rbtree_t *t, const rbtree_node_t *n)
	out->right = NULL;
	if (n->left != NULL) {
-		out->left = copy_node(t, n->left);
+		out->left = copy_node(nt, t, n->left);
		if (out->left == NULL) {
			destroy_nodes_dfs(out);
@@ -127,7 +145,7 @@ static rbtree_node_t *copy_node(const rbtree_t *t, const rbtree_node_t *n)
	}
	if (n->right != NULL) {
-		out->right = copy_node(t, n->right);
+		out->right = copy_node(nt, t, n->right);
		if (out->right == NULL) {
			destroy_nodes_dfs(out);
@@ -176,6 +194,12 @@ int rbtree_init(rbtree_t *tree, size_t keysize, size_t valuesize,
	if (SZ_ADD_OV(size, tree->value_size, &size))
		return SQFS_ERROR_OVERFLOW;
+#ifndef NO_CUSTOM_ALLOC
+	/* initialize the underlying pool allocator */
+	tree->pool = mem_pool_create(size);
+	if (tree->pool == NULL)
+		return SQFS_ERROR_ALLOC;
+#endif
	return 0;
}
@@ -184,8 +208,16 @@ int rbtree_copy(const rbtree_t *tree, rbtree_t *out)
	memcpy(out, tree, sizeof(*out));
	out->root = NULL;
+#ifndef NO_CUSTOM_ALLOC
+	out->pool = mem_pool_create(sizeof(rbtree_node_t) +
+				    tree->key_size_padded +
+				    tree->value_size);
+	if (out->pool == NULL)
+		return SQFS_ERROR_ALLOC;
+#endif
+
	if (tree->root != NULL) {
-		out->root = copy_node(tree, tree->root);
+		out->root = copy_node(out, tree, tree->root);
		if (out->root == NULL) {
			memset(out, 0, sizeof(*out));
@@ -198,7 +230,11 @@ int rbtree_copy(const rbtree_t *tree, rbtree_t *out)
void rbtree_cleanup(rbtree_t *tree)
{
+#ifdef NO_CUSTOM_ALLOC
	destroy_nodes_dfs(tree->root);
+#else
+	mem_pool_destroy(tree->pool);
+#endif
	memset(tree, 0, sizeof(*tree));
}
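
The pool API surface this patch relies on is three calls: mem_pool_create() takes the fixed object size, mem_pool_allocate() hands out one object of that size, and mem_pool_destroy() releases the whole pool at once. A hedged life-cycle sketch follows; the comparator argument and both example types are assumptions, since the rbtree_init() hunk header truncates the parameter list:

static int example(void)
{
	rbtree_t tree;
	int ret;

	/* key/value sizes are illustrative; compare_keys is the assumed
	 * key comparison callback */
	ret = rbtree_init(&tree, sizeof(uint32_t), sizeof(uint64_t),
			  compare_keys);
	if (ret != 0)
		return ret;	/* SQFS_ERROR_ALLOC if mem_pool_create() fails */

	/* insertions now draw fixed-size nodes from tree.pool through
	 * mem_pool_allocate() instead of issuing one calloc() per node */

	rbtree_cleanup(&tree);	/* with the pool enabled, one mem_pool_destroy()
				   replaces the recursive destroy_nodes_dfs() walk */
	return 0;
}

Keeping the calloc()/free() route behind NO_CUSTOM_ALLOC pays off under ASAN: with individual heap allocations the sanitizer can fence each node, whereas an overrun between neighbouring pool slots would land in valid pool memory and go unnoticed.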