* \return The same value which was passed during creation time to
* btr_pool_new().
*/
-size_t btr_pool_size(struct btr_pool *btrp)
+size_t btr_pool_size(const struct btr_pool *btrp)
{
return btrp->area_end - btrp->area_start;
}
-static size_t btr_pool_filled(struct btr_pool *btrp)
+static size_t btr_pool_filled(const struct btr_pool *btrp)
{
if (!btrp->whead)
return btr_pool_size(btrp);
* the largest contiguous buffer that can currently be allocated from the
* buffer pool.
*/
-size_t btr_pool_unused(struct btr_pool *btrp)
+size_t btr_pool_unused(const struct btr_pool *btrp)
{
return btr_pool_size(btrp) - btr_pool_filled(btrp);
}
* Return maximal size available for one read. This is
* smaller than the value returned by btr_pool_unused().
*/
-static size_t btr_pool_available(struct btr_pool *btrp)
+static size_t btr_pool_available(const struct btr_pool *btrp)
{
if (!btrp->whead)
return 0;
* \return The maximal amount of bytes that may be written to the returned
* buffer.
*/
-size_t btr_pool_get_buffer(struct btr_pool *btrp, char **result)
+size_t btr_pool_get_buffer(const struct btr_pool *btrp, char **result)
{
if (result)
*result = btrp->whead;
* consists of two buffers. If this function returns the value n, then n
* elements of \a iov are initialized.
*/
-int btr_pool_get_buffers(struct btr_pool *btrp, struct iovec iov[2])
+int btr_pool_get_buffers(const struct btr_pool *btrp, struct iovec iov[2])
{
size_t sz, unused;
char *buf;
free(btrb->buf);
}
-static struct btr_buffer_reference *get_first_input_br(struct btr_node *btrn)
+static struct btr_buffer_reference *get_first_input_br(const struct btr_node *btrn)
{
if (list_empty(&btrn->input_queue))
return NULL;
struct btr_buffer_reference, node);
}
+/*
+ * Return the last (most recently added) buffer reference of the input
+ * queue of the given node, or NULL if the input queue is empty.
+ * Counterpart of get_first_input_br().
+ */
+static struct btr_buffer_reference *get_last_input_br(const struct btr_node *btrn)
+{
+	if (list_empty(&btrn->input_queue))
+		return NULL;
+	return list_last_entry(&btrn->input_queue,
+		struct btr_buffer_reference, node);
+}
+
/*
* Deallocate the reference, release the resources if refcount drops to zero.
*/
}
}
+/*
+ * Decide whether the given buffer may be appended in place to the buffer
+ * of the given reference (the tail of some child's input queue).
+ *
+ * Merging is possible only if nothing of the tail buffer has been consumed
+ * yet and the two buffers are contiguous in memory.
+ */
+static bool may_merge_btrb(const struct btr_buffer *btrb,
+		const struct btr_buffer_reference *br)
+{
+	if (!br)
+		return false;
+	if (br->consumed > 0)
+		return false;
+	if (br->btrb->buf + br->btrb->size != btrb->buf)
+		return false;
+	/*
+	 * Non-pool buffers are allocated individually with malloc().  Even
+	 * if two such allocations happen to be adjacent in memory, merging
+	 * them loses the second allocation's pointer: the release path
+	 * calls free(btrb->buf), which frees only the first allocation, so
+	 * the second one would leak.  Hence merging is restricted to pool
+	 * buffers (presumably reclaimed through pool bookkeeping rather
+	 * than by individual free() -- confirm in the dealloc path).
+	 */
+	if (!br->btrb->pool)
+		return false;
+	/* Cap merged size so large references do not pin the pool area. */
+	return br->btrb->size + btrb->size < btr_pool_size(br->btrb->pool) / 3;
+}
+
static void add_btrb_to_children(struct btr_buffer *btrb,
		struct btr_node *btrn, size_t consumed)
{
	if (btrn->start.tv_sec == 0)
		btrn->start = *now;
+	/*
+	 * Merging grows the tail buffer of a child's input queue in place
+	 * and frees btrb.  This is only safe when exactly one child
+	 * receives the data: with several children the tail buffer may be
+	 * shared, so growing it once per child would account btrb->size
+	 * multiple times, and freeing btrb in the first iteration would
+	 * leave the remaining children dereferencing freed memory.  A
+	 * non-empty list whose head has next == prev contains exactly one
+	 * node.
+	 */
+	bool singular = !list_empty(&btrn->children) &&
+		btrn->children.next == btrn->children.prev;
	FOR_EACH_CHILD(ch, btrn) {
-		struct btr_buffer_reference *br = zalloc(sizeof(*br));
-		br->btrb = btrb;
-		br->consumed = consumed;
-		list_add_tail(&br->node, &ch->input_queue);
-		btrb->refcount++;
+		struct btr_buffer_reference *br = get_last_input_br(ch);
+
+		if (singular && may_merge_btrb(btrb, br)) {
+			br->btrb->size += btrb->size;
+			free(btrb);
+		} else {
+			br = zalloc(sizeof(*br));
+			br->btrb = btrb;
+			br->consumed = consumed;
+			list_add_tail(&br->node, &ch->input_queue);
+			btrb->refcount++;
+		}
	if (ch->start.tv_sec == 0)
		ch->start = *now;
}
*
* \return True if this node has no children. False otherwise.
*/
-static bool btr_no_children(struct btr_node *btrn)
+static bool btr_no_children(const struct btr_node *btrn)
{
return list_empty(&btrn->children);
}
* After a (non-leaf) node was removed from the tree, the function
* returns true for all child nodes.
*/
-bool btr_no_parent(struct btr_node *btrn)
+bool btr_no_parent(const struct btr_node *btrn)
{
return !btrn->parent;
}
* buffer.
*
* Since the buffer tree may change at any time, this function should be called
- * during each post_select call.
+ * during each post_monitor call.
*
* \return True if \a btrn has no siblings.
*/
-bool btr_inplace_ok(struct btr_node *btrn)
+bool btr_inplace_ok(const struct btr_node *btrn)
{
struct btr_buffer_reference *br;
FOR_EACH_BUFFER_REF(br, btrn) {
return true;
}
-static inline size_t br_available_bytes(struct btr_buffer_reference *br)
+static inline size_t br_available_bytes(const struct btr_buffer_reference *br)
{
return br->btrb->size - br->consumed;
}
-static size_t btr_get_buffer_by_reference(struct btr_buffer_reference *br, char **buf)
+static size_t btr_get_buffer_by_reference(const struct btr_buffer_reference *br,
+ char **buf)
{
if (buf)
*buf = br->btrb->buf + br->consumed;
* to by \a btrn, the function returns zero and the value of \a bufp is
* undefined.
*/
-size_t btr_next_buffer_omit(struct btr_node *btrn, size_t omit, char **bufp)
+size_t btr_next_buffer_omit(const struct btr_node *btrn, size_t omit,
+ char **bufp)
{
struct btr_buffer_reference *br;
size_t wrap_count, sz, rv = 0;
* The call of this function is equivalent to calling \ref
* btr_next_buffer_omit() with an \a omit value of zero.
*/
-size_t btr_next_buffer(struct btr_node *btrn, char **bufp)
+size_t btr_next_buffer(const struct btr_node *btrn, char **bufp)
{
return btr_next_buffer_omit(btrn, 0, bufp);
}
* This simply iterates over all buffer references in the input queue and
* returns the sum of the sizes of all references.
*/
-size_t btr_get_input_queue_size(struct btr_node *btrn)
+size_t btr_get_input_queue_size(const struct btr_node *btrn)
{
struct btr_buffer_reference *br;
size_t size = 0, wrap_consumed = 0;
return size;
}
+/*
+ * Check whether the input queue of the given node holds more than
+ * min_iqs bytes of unconsumed data.
+ *
+ * NOTE(review): a reference with non-zero wrap_count appears to denote
+ * the wrap buffer, whose first ->consumed bytes duplicate data that is
+ * also counted through the ordinary references, hence the subtraction --
+ * confirm against btr_get_input_queue_size(), which keeps the same
+ * wrap_consumed correction.
+ *
+ * NOTE(review): the strict '>' demands min_iqs + 1 available bytes,
+ * while the code this replaces in btr_node_status() treated
+ * iqs >= min_iqs as sufficient.  Confirm the off-by-one is intended.
+ */
+static bool min_iqs_available(size_t min_iqs, const struct btr_node *btrn)
+{
+	struct btr_buffer_reference *br;
+	size_t have = 0, wrap_consumed = 0;
+
+	FOR_EACH_BUFFER_REF(br, btrn) {
+		if (br->wrap_count != 0) {
+			wrap_consumed = br->consumed;
+			continue;
+		}
+		have += br_available_bytes(br);
+		if (have > wrap_consumed + min_iqs)
+			return true;
+	}
+	return false;
+}
/**
* Remove a node from the buffer tree, reconnecting parent and children.
*
* \return This function iterates over all children of the given node and
* returns the size of the largest input queue.
*/
-size_t btr_get_output_queue_size(struct btr_node *btrn)
+size_t btr_get_output_queue_size(const struct btr_node *btrn)
{
size_t max_size = 0;
struct btr_node *ch;
*
* \sa \ref receiver::execute, \ref filter::execute, \ref writer::execute.
*/
-int btr_exec_up(struct btr_node *btrn, const char *command, char **value_result)
+int btr_exec_up(const struct btr_node *btrn, const char *command, char **value_result)
{
int ret;
*
* \sa \ref btr_new_node(), struct \ref btr_node_description.
*/
-void *btr_context(struct btr_node *btrn)
+void *btr_context(const struct btr_node *btrn)
{
return btrn->context;
}
-static bool need_buffer_pool_merge(struct btr_node *btrn)
+static bool need_buffer_pool_merge(const struct btr_node *btrn)
{
struct btr_buffer_reference *br = get_first_input_br(btrn);
}
}
-static bool btr_eof(struct btr_node *btrn)
+static bool btr_eof(const struct btr_node *btrn)
{
char *buf;
size_t len = btr_next_buffer(btrn, &buf);
return (len == 0 && btr_no_parent(btrn));
}
-static void log_tree_recursively(struct btr_node *btrn, int loglevel, int depth)
+static void log_tree_recursively(const struct btr_node *btrn, int loglevel, int depth)
{
struct btr_node *ch;
const char spaces[] = " ", *space = spaces + 16 - depth;
* \param btrn Start logging at this node.
* \param loglevel Set severity with which the tree should be logged.
*/
-void btr_log_tree(struct btr_node *btrn, int loglevel)
+void btr_log_tree(const struct btr_node *btrn, int loglevel)
{
return log_tree_recursively(btrn, loglevel, 0);
}
* \param type The supposed type of \a btrn.
*
* Most users of the buffer tree subsystem call this function from both
- * their pre_select and the post_select methods.
+ * their ->pre_monitor() and ->post_monitor() methods.
*
* \return Negative if an error condition was detected, zero if there
* is nothing to do and positive otherwise.
* btrn, the function also returns zero in order to bound the memory usage of
* the buffer tree.
*/
-int btr_node_status(struct btr_node *btrn, size_t min_iqs,
+int btr_node_status(const struct btr_node *btrn, size_t min_iqs,
	enum btr_node_type type)
{
-	size_t iqs;
-
-	assert(btrn);
+	/*
+	 * NOTE(review): the assert(btrn) was dropped, so a NULL btrn now
+	 * faults on the first dereference below instead of aborting with a
+	 * diagnostic.  E_BTR_EOF is replaced by E_EOF -- presumably the two
+	 * error codes were merged; verify E_EOF exists in the error table.
+	 */
	if (type != BTR_NT_LEAF && btr_no_children(btrn))
		return -E_BTR_NO_CHILD;
	if (type != BTR_NT_ROOT && btr_eof(btrn))
-		return -E_BTR_EOF;
+		return -E_EOF;
	if (btr_get_output_queue_size(btrn) > BTRN_MAX_PENDING)
		return 0;
	if (type == BTR_NT_ROOT)
		return 1;
-	iqs = btr_get_input_queue_size(btrn);
-	if (iqs == 0) /* we have a parent, because not eof */
-		return 0;
-	if (iqs < min_iqs && !btr_no_parent(btrn))
-		return 0;
-	return 1;
+	if (min_iqs_available(min_iqs, btrn))
+		return 1;
+	/*
+	 * Not enough input yet: tell the caller to wait, unless this node
+	 * has no parent, in which case no more data will ever arrive and
+	 * the remaining bytes should be processed now.
+	 */
+	return btr_no_parent(btrn);
}
/**
*
* Mainly useful for the time display of para_audiod.
*/
-void btr_get_node_start(struct btr_node *btrn, struct timeval *tv)
+void btr_get_node_start(const struct btr_node *btrn, struct timeval *tv)
{
*tv = btrn->start;
}
* \return The parent of \a btrn, or \p NULL if \a btrn is the
* root node of the buffer tree.
*/
-struct btr_node *btr_parent(struct btr_node *btrn)
+struct btr_node *btr_parent(const struct btr_node *btrn)
{
return btrn->parent;
}