assert(end <= btrp->area_end);
if (end == btrp->area_end) {
- PARA_DEBUG_LOG("end of pool area reached: %p\n", end);
+ PARA_DEBUG_LOG("%s: end of pool area reached\n", btrp->name);
end = btrp->area_start;
}
if (end == btrp->rhead) {
/*
* Allocate a new btr buffer.
*
- * The freshly allocated buffer will have a zero refcount.
+ * The freshly allocated buffer will have a zero refcount and will
+ * not be associated with a btr pool.
*/
static struct btr_buffer *new_btrb(char *buf, size_t size)
{
if (sz1 + sz2 >= dest_size)
break;
}
+ /*
+ * If the second buffer is large, we only take the first part of it to
+ * avoid having to memcpy() huge buffers.
+ */
+ sz2 = PARA_MIN(sz2, (size_t)(64 * 1024));
if (!wbr) {
assert(buf1);
if (!buf2) /* nothing to do */
return;
- /* make a new wrap buffer combining buf1 and buf 2. */
+ /* Make a new wrap buffer combining buf1 and buf2. */
sz = sz1 + sz2;
buf = para_malloc(sz);
PARA_DEBUG_LOG("merging input buffers: (%p:%zu, %p:%zu) -> %p:%zu\n",
/* make a new btrb that combines the two buffers and a br to it. */
sz = szs[0] + szs[1];
buf = para_malloc(sz);
- PARA_DEBUG_LOG("memory merging input buffers: (%zu, %zu) -> %zu\n",
- szs[0], szs[1], sz);
+ PARA_DEBUG_LOG("%s: memory merging input buffers: (%zu, %zu) -> %zu\n",
+ btrn->name, szs[0], szs[1], sz);
memcpy(buf, bufs[0], szs[0]);
memcpy(buf + szs[0], bufs[1], szs[1]);
return log_tree_recursively(btrn, loglevel, 0);
}
+/**
+ * Find a node with the given name in the buffer tree.
+ *
+ * Performs a recursive depth-first search of the tree rooted at \a root,
+ * comparing each node's name against \a name with strcmp().
+ *
+ * \param name The name of the node to look for.
+ * \param root Where to start the search.
+ *
+ * \return A pointer to the first node found whose name matches \a name,
+ * or \p NULL if no such node exists in the subtree of \a root.
+ *
+ * \return \a root if \a name is \p NULL.
+ */
+struct btr_node *btr_search_node(const char *name, struct btr_node *root)
+{
+ struct btr_node *ch;
+
+ if (!name)
+ return root;
+ if (!strcmp(root->name, name))
+ return root;
+ FOR_EACH_CHILD(ch, root) {
+ struct btr_node *result = btr_search_node(name, ch);
+ if (result)
+ return result;
+ }
+ return NULL;
+}
+
/** 640K ought to be enough for everybody ;) */
#define BTRN_MAX_PENDING (640 * 1024)
{
size_t iqs;
+ if (!btrn)
+ return 0;
if (type != BTR_NT_LEAF) {
if (btr_no_children(btrn))
return -E_BTR_NO_CHILD;