/* Simple Plugin API
 *
 * Copyright © 2018 Wim Taymans
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#ifndef SPA_BUFFER_ALLOC_H
#define SPA_BUFFER_ALLOC_H

#ifdef __cplusplus
extern "C" {
#endif

#include <spa/buffer/buffer.h>

/** Information needed to lay out and allocate a set of buffers */
struct spa_buffer_alloc_info {
#define SPA_BUFFER_ALLOC_FLAG_INLINE_META	(1<<0)	/**< add metadata data in the skeleton */
#define SPA_BUFFER_ALLOC_FLAG_INLINE_CHUNK	(1<<1)	/**< add chunk data in the skeleton */
#define SPA_BUFFER_ALLOC_FLAG_INLINE_DATA	(1<<2)	/**< add buffer data to the skeleton */
/* all INLINE flags combined; spelled with standard C operators because binary
 * literals (0b111) are a GCC/Clang extension until C23 and this is a public header */
#define SPA_BUFFER_ALLOC_FLAG_INLINE_ALL	(SPA_BUFFER_ALLOC_FLAG_INLINE_META | \
						 SPA_BUFFER_ALLOC_FLAG_INLINE_CHUNK | \
						 SPA_BUFFER_ALLOC_FLAG_INLINE_DATA)
#define SPA_BUFFER_ALLOC_FLAG_NO_DATA		(1<<3)	/**< don't set data pointers */
	uint32_t flags;			/**< SPA_BUFFER_ALLOC_FLAG_* */
	uint32_t n_metas;		/**< number of metadata entries per buffer */
	struct spa_meta *metas;		/**< template metadata, copied into each buffer */
	uint32_t n_datas;		/**< number of data entries per buffer */
	struct spa_data *datas;		/**< template data, copied into each buffer */
	uint32_t *data_aligns;		/**< required alignment per data entry */
	uint32_t max_align;		/**< max of all alignments */
	size_t skel_size;		/**< size of the struct spa_buffer and inlined meta/chunk/data */
	size_t meta_size;		/**< size of the meta if not inlined */
	size_t chunk_size;		/**< size of the chunk if not inlined */
	size_t data_size;		/**< size of the data if not inlined */
	size_t mem_size;		/**< size of the total memory if not inlined */
};
/**
 * Fill \a info with the sizes needed to allocate \a n_buffers buffers that
 * each hold \a n_metas metadata entries and \a n_datas data entries.
 *
 * On return, info->skel_size is the per-buffer size of the "skeleton"
 * (struct spa_buffer, the meta/data arrays, and whatever the INLINE flags
 * pull into it) and info->mem_size is the per-buffer size of the external
 * data memory for everything not inlined.  info->flags must be set by the
 * caller before this is called.
 *
 * \return 0 (always succeeds)
 */
static inline int spa_buffer_alloc_fill_info(struct spa_buffer_alloc_info *info,
					     uint32_t n_metas, struct spa_meta metas[],
					     uint32_t n_datas, struct spa_data datas[],
					     uint32_t data_aligns[])
{
	size_t size;
	uint32_t i;

	/* keep references to the templates; layout copies from them later */
	info->n_metas = n_metas;
	info->metas = metas;
	info->n_datas = n_datas;
	info->datas = datas;
	info->data_aligns = data_aligns;
	info->max_align = 16;	/* minimum alignment, raised per data entry below */
	info->mem_size = 0;

	/* the skeleton always holds the buffer header plus both arrays */
	info->skel_size = sizeof(struct spa_buffer);
	info->skel_size += n_metas * sizeof(struct spa_meta);
	info->skel_size += n_datas * sizeof(struct spa_data);

	/* each metadata block is padded to 8 bytes so consecutive metas stay aligned */
	for (i = 0, size = 0; i < n_metas; i++)
		size += SPA_ROUND_UP_N(metas[i].size, 8);
	info->meta_size = size;

	/* metadata goes into the skeleton or the external memory depending on flags */
	if (SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_META))
		info->skel_size += info->meta_size;
	else
		info->mem_size += info->meta_size;

	/* one chunk per data entry, likewise placed by flag */
	info->chunk_size = n_datas * sizeof(struct spa_chunk);
	if (SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_CHUNK))
		info->skel_size += info->chunk_size;
	else
		info->mem_size += info->chunk_size;

	/* sum the data region: each entry starts at its requested alignment */
	for (i = 0, size = 0; i < n_datas; i++) {
		info->max_align = SPA_MAX(info->max_align, data_aligns[i]);
		size = SPA_ROUND_UP_N(size, data_aligns[i]);
		size += datas[i].maxsize;
	}
	info->data_size = size;

	/* place the data region: inline into the skeleton only when data pointers
	 * are wanted AND INLINE_DATA is set; otherwise account it in mem_size.
	 * Rounding to data_aligns[0] aligns the first entry; rounding the total to
	 * max_align keeps the next buffer in an array correctly aligned. */
	if (!SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_NO_DATA) &&
	    SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_DATA)) {
		info->skel_size = SPA_ROUND_UP_N(info->skel_size, n_datas ? data_aligns[0] : 1);
		info->skel_size += info->data_size;
		info->skel_size = SPA_ROUND_UP_N(info->skel_size, info->max_align);
	}
	else {
		info->mem_size = SPA_ROUND_UP_N(info->mem_size, n_datas ? data_aligns[0] : 1);
		info->mem_size += info->data_size;
		info->mem_size = SPA_ROUND_UP_N(info->mem_size, info->max_align);
	}

	return 0;
}
/**
 * Lay out one struct spa_buffer in the memory described by \a info.
 *
 * \a skel_mem must be at least info->skel_size bytes; \a data_mem must be at
 * least info->mem_size bytes (it may be NULL when everything is inlined —
 * TODO(review): not checked here, the caller is responsible).
 *
 * \return the initialized buffer, which is \a skel_mem itself
 */
static inline struct spa_buffer *
spa_buffer_alloc_layout(struct spa_buffer_alloc_info *info,
			void *skel_mem, void *data_mem)
{
	struct spa_buffer *b = (struct spa_buffer*)skel_mem;
	size_t size;
	uint32_t i;
	void **dp, *skel, *data;
	struct spa_chunk *cp;

	/* the meta and data arrays follow the buffer header in skel_mem */
	b->n_metas = info->n_metas;
	b->metas = SPA_MEMBER(b, sizeof(struct spa_buffer), struct spa_meta);
	b->n_datas = info->n_datas;
	b->datas = SPA_MEMBER(b->metas, info->n_metas * sizeof(struct spa_meta), struct spa_data);

	/* two allocation cursors: 'skel' walks the inline region after the
	 * arrays, 'data' walks the external memory */
	skel = SPA_MEMBER(b->datas, info->n_datas * sizeof(struct spa_data), void);
	data = data_mem;

	/* dp aliases whichever cursor metadata is carved from */
	if (SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_META))
		dp = &skel;
	else
		dp = &data;

	for (i = 0; i < info->n_metas; i++) {
		struct spa_meta *m = &b->metas[i];
		*m = info->metas[i];
		m->data = *dp;
		/* advance by the padded size, matching fill_info's accounting */
		*dp = SPA_MEMBER(*dp, SPA_ROUND_UP_N(m->size, 8), void);
	}

	/* the chunk array is taken from the inline or external region as a whole */
	size = info->n_datas * sizeof(struct spa_chunk);
	if (SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_CHUNK)) {
		cp = (struct spa_chunk*)skel;
		skel = SPA_MEMBER(skel, size, void);
	}
	else {
		cp = (struct spa_chunk*)data;
		data = SPA_MEMBER(data, size, void);
	}

	/* retarget dp for the data region */
	if (SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_INLINE_DATA))
		dp = &skel;
	else
		dp = &data;

	for (i = 0; i < info->n_datas; i++) {
		struct spa_data *d = &b->datas[i];

		*d = info->datas[i];
		d->chunk = &cp[i];
		/* with NO_DATA the caller fills in d->data later (e.g. imported memory) */
		if (!SPA_FLAG_CHECK(info->flags, SPA_BUFFER_ALLOC_FLAG_NO_DATA)) {
			*dp = SPA_PTR_ALIGN(*dp, info->data_aligns[i], void);
			d->data = *dp;
			*dp = SPA_MEMBER(*dp, d->maxsize, void);
		}
	}
	return b;
}
static inline int
|
|
|
|
|
spa_buffer_alloc_layout_array(struct spa_buffer_alloc_info *info,
|
2019-01-08 11:53:36 +01:00
|
|
|
uint32_t n_buffers, struct spa_buffer *buffers[],
|
2018-04-09 09:30:56 +02:00
|
|
|
void *skel_mem, void *data_mem)
|
2018-04-06 17:44:33 +02:00
|
|
|
{
|
2019-01-07 15:52:42 +01:00
|
|
|
uint32_t i;
|
2018-04-06 17:44:33 +02:00
|
|
|
for (i = 0; i < n_buffers; i++) {
|
2019-01-07 17:57:03 +01:00
|
|
|
buffers[i] = spa_buffer_alloc_layout(info, skel_mem, data_mem);
|
|
|
|
|
skel_mem = SPA_MEMBER(skel_mem, info->skel_size, void);
|
2019-01-25 13:28:56 +01:00
|
|
|
data_mem = SPA_MEMBER(data_mem, info->mem_size, void);
|
2018-04-06 17:44:33 +02:00
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
static inline struct spa_buffer **
|
2018-04-09 09:30:56 +02:00
|
|
|
spa_buffer_alloc_array(uint32_t n_buffers, uint32_t flags,
|
2019-01-08 11:53:36 +01:00
|
|
|
uint32_t n_metas, struct spa_meta metas[],
|
|
|
|
|
uint32_t n_datas, struct spa_data datas[],
|
|
|
|
|
uint32_t data_aligns[])
|
2018-04-06 17:44:33 +02:00
|
|
|
{
|
|
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
struct spa_buffer **buffers;
|
|
|
|
|
struct spa_buffer_alloc_info info = { flags | SPA_BUFFER_ALLOC_FLAG_INLINE_ALL, };
|
|
|
|
|
void *skel;
|
2018-04-06 17:44:33 +02:00
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
spa_buffer_alloc_fill_info(&info, n_metas, metas, n_datas, datas, data_aligns);
|
2018-04-06 17:44:33 +02:00
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
buffers = (struct spa_buffer **)calloc(1, info.max_align +
|
|
|
|
|
n_buffers * (sizeof(struct spa_buffer *) + info.skel_size));
|
2018-04-06 17:44:33 +02:00
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
skel = SPA_MEMBER(buffers, sizeof(struct spa_buffer *) * n_buffers, void);
|
|
|
|
|
skel = SPA_PTR_ALIGN(skel, info.max_align, void);
|
2018-04-06 17:44:33 +02:00
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
spa_buffer_alloc_layout_array(&info, n_buffers, buffers, skel, NULL);
|
2018-04-06 17:44:33 +02:00
|
|
|
|
2019-01-25 13:28:56 +01:00
|
|
|
return buffers;
|
2018-04-06 17:44:33 +02:00
|
|
|
}

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif /* SPA_BUFFER_ALLOC_H */