@@ -192,3 +192,146 @@ int av_buffer_realloc(AVBufferRef **pbuf, int size)
    buf->buffer->size = buf->size = size;
    return 0;
}

AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size))
{
    AVBufferPool *pool = av_mallocz(sizeof(*pool));
    if (!pool)
        return NULL;

    pool->size  = size;
    pool->alloc = alloc ? alloc : av_buffer_alloc;

    avpriv_atomic_int_set(&pool->refcount, 1);

    return pool;
}
/*
 * This function gets called when the pool has been uninited and
 * all the buffers returned to it.
 */
static void buffer_pool_free(AVBufferPool *pool)
{
    while (pool->pool) {
        BufferPoolEntry *buf = pool->pool;
        pool->pool = buf->next;

        buf->free(buf->opaque, buf->data);
        av_freep(&buf);
    }
    av_freep(&pool);
}
void av_buffer_pool_uninit(AVBufferPool **ppool)
{
    AVBufferPool *pool;

    if (!ppool || !*ppool)
        return;
    pool   = *ppool;
    *ppool = NULL;

    if (!avpriv_atomic_int_add_and_fetch(&pool->refcount, -1))
        buffer_pool_free(pool);
}
/* remove the whole buffer list from the pool and return it */
static BufferPoolEntry *get_pool(AVBufferPool *pool)
{
    BufferPoolEntry *cur = NULL, *last = NULL;

    /* detach the whole list atomically: keep trying to CAS the most recently
     * observed head to NULL until the swap succeeds, i.e. until no other
     * thread has modified the head in the meantime */
    do {
        FFSWAP(BufferPoolEntry*, cur, last);
        cur = avpriv_atomic_ptr_cas((void * volatile *)&pool->pool, last, NULL);
        if (!cur)
            return NULL;
    } while (cur != last);

    return cur;
}
static void add_to_pool(BufferPoolEntry *buf)
{
    AVBufferPool *pool;
    BufferPoolEntry *cur, *end = buf;

    if (!buf)
        return;
    pool = buf->pool;

    while (end->next)
        end = end->next;

    while ((cur = avpriv_atomic_ptr_cas((void * volatile *)&pool->pool, NULL, buf))) {
        /* pool is not empty, retrieve it and append it to our list */
        cur = get_pool(pool);
        end->next = cur;
        while (end->next)
            end = end->next;
    }
}
/* free() callback installed on pooled buffers: return the data to the pool
 * and drop the reference the buffer held on the pool */
static void pool_release_buffer(void *opaque, uint8_t *data)
{
    BufferPoolEntry *buf = opaque;
    AVBufferPool *pool = buf->pool;

    add_to_pool(buf);

    if (!avpriv_atomic_int_add_and_fetch(&pool->refcount, -1))
        buffer_pool_free(pool);
}
/* allocate a new buffer and override its free() callback so that
 * it is returned to the pool on free */
static AVBufferRef *pool_alloc_buffer(AVBufferPool *pool)
{
    BufferPoolEntry *buf;
    AVBufferRef *ret;

    ret = pool->alloc(pool->size);
    if (!ret)
        return NULL;

    buf = av_mallocz(sizeof(*buf));
    if (!buf) {
        av_buffer_unref(&ret);
        return NULL;
    }

    buf->data   = ret->buffer->data;
    buf->opaque = ret->buffer->opaque;
    buf->free   = ret->buffer->free;
    buf->pool   = pool;

    ret->buffer->opaque = buf;
    ret->buffer->free   = pool_release_buffer;

    avpriv_atomic_int_add_and_fetch(&pool->refcount, 1);

    return ret;
}
AVBufferRef *av_buffer_pool_get(AVBufferPool *pool)
{
    AVBufferRef *ret;
    BufferPoolEntry *buf;

    /* check whether the pool is empty */
    buf = get_pool(pool);
    if (!buf)
        return pool_alloc_buffer(pool);

    /* keep the first entry, return the rest of the list to the pool */
    add_to_pool(buf->next);
    buf->next = NULL;

    ret = av_buffer_create(buf->data, pool->size, pool_release_buffer,
                           buf, 0);
    if (!ret) {
        add_to_pool(buf);
        return NULL;
    }

    avpriv_atomic_int_add_and_fetch(&pool->refcount, 1);

    return ret;
}

@@ -190,6 +190,76 @@ int av_buffer_make_writable(AVBufferRef **buf);
 */
int av_buffer_realloc(AVBufferRef **buf, int size);

/**
 * @}
 */

/**
 * @defgroup lavu_bufferpool AVBufferPool
 * @ingroup lavu_data
 *
 * @{
 * AVBufferPool is an API for a lock-free thread-safe pool of AVBuffers.
 *
 * Frequently allocating and freeing large buffers may be slow. AVBufferPool is
 * meant to solve this in cases when the caller needs a set of buffers of the
 * same size (the most obvious use case being buffers for raw video or audio
 * frames).
 *
 * At the beginning, the user must call av_buffer_pool_init() to create the
 * buffer pool. Then whenever a buffer is needed, call av_buffer_pool_get() to
 * get a reference to a new buffer, similar to av_buffer_alloc(). This new
 * reference works in all aspects the same way as the one created by
 * av_buffer_alloc(). However, when the last reference to this buffer is
 * unreferenced, it is returned to the pool instead of being freed and will be
 * reused for subsequent av_buffer_pool_get() calls.
 *
 * When the caller is done with the pool and no longer needs to allocate any new
 * buffers, av_buffer_pool_uninit() must be called to mark the pool as freeable.
 * Once all the buffers are released, it will automatically be freed.
 *
 * Allocating and releasing buffers with this API is thread-safe as long as
 * either the default alloc callback is used, or the user-supplied one is
 * thread-safe.
 */
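
Illustrative usage of the workflow described above (a minimal sketch, not part
of the patch; the function name, buffer size and loop count are made up, and
error handling is abbreviated):

    #include "libavutil/buffer.h"
    #include "libavutil/error.h"

    static int process_frames(void)
    {
        int i;
        AVBufferPool *pool = av_buffer_pool_init(1024 * 1024, NULL);
        if (!pool)
            return AVERROR(ENOMEM);

        for (i = 0; i < 100; i++) {
            AVBufferRef *buf = av_buffer_pool_get(pool);
            if (!buf)
                break;
            /* fill buf->data (up to buf->size bytes) and pass the reference
             * around like any other AVBufferRef ... */
            av_buffer_unref(&buf); /* last unref returns the buffer to the pool */
        }

        /* mark the pool freeable; it is actually freed once every buffer
         * obtained from it has been released */
        av_buffer_pool_uninit(&pool);
        return 0;
    }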
/**
 * The buffer pool. This structure is opaque and not meant to be accessed
 * directly. It is allocated with av_buffer_pool_init() and freed with
 * av_buffer_pool_uninit().
 */
typedef struct AVBufferPool AVBufferPool;

/**
 * Allocate and initialize a buffer pool.
 *
 * @param size size of each buffer in this pool
 * @param alloc a function that will be used to allocate new buffers when the
 * pool is empty. May be NULL, then the default allocator will be used
 * (av_buffer_alloc()).
 * @return newly created buffer pool on success, NULL on error.
 */
AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
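
As an illustration of the alloc parameter, a caller could plug in its own
allocator, for instance to control how the underlying memory is obtained. A
minimal sketch under that assumption (the callback names and frame_size are
hypothetical; as noted above, a user-supplied callback must be thread-safe):

    #include "libavutil/buffer.h"
    #include "libavutil/mem.h"

    static void my_pool_free(void *opaque, uint8_t *data)
    {
        av_free(data); /* must match the allocation below */
    }

    static AVBufferRef *my_pool_alloc(int size)
    {
        uint8_t *data = av_malloc(size); /* or any custom allocation scheme */
        if (!data)
            return NULL;
        /* wrap the raw memory in an AVBuffer with our own free callback */
        return av_buffer_create(data, size, my_pool_free, NULL, 0);
    }

    /* ... */
    AVBufferPool *pool = av_buffer_pool_init(frame_size, my_pool_alloc);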
/**
 * Mark the pool as being available for freeing. It will actually be freed only
 * once all the allocated buffers associated with the pool are released. Thus it
 * is safe to call this function while some of the allocated buffers are still
 * in use.
 *
 * @param pool pointer to the pool to be freed. It will be set to NULL.
 */
void av_buffer_pool_uninit(AVBufferPool **pool);
/**
 * Allocate a new AVBuffer, reusing an old buffer from the pool when available.
 * This function may be called simultaneously from multiple threads.
 *
 * @return a reference to the new buffer on success, NULL on error.
 */
AVBufferRef *av_buffer_pool_get(AVBufferPool *pool);

/**
 * @}
 */

@@ -57,4 +57,36 @@ struct AVBuffer {
    int flags;
};

typedef struct BufferPoolEntry {
    uint8_t *data;

    /*
     * Backups of the original opaque/free of the AVBuffer corresponding to
     * data. They will be used to free the buffer when the pool is freed.
     */
    void *opaque;
    void (*free)(void *opaque, uint8_t *data);

    AVBufferPool *pool;
    struct BufferPoolEntry * volatile next;
} BufferPoolEntry;
struct AVBufferPool {
    BufferPoolEntry * volatile pool;

    /*
     * This is used to track when the pool is to be freed.
     * The pointer to the pool itself held by the caller is considered to
     * be one reference. Each buffer requested by the caller increases refcount
     * by one, returning the buffer to the pool decreases it by one.
     * refcount reaches zero when the pool has been uninited AND all the
     * buffers have been released; only then is it safe to free the pool and
     * all the buffers in it.
     */
    volatile int refcount;

    int size;
    AVBufferRef* (*alloc)(int size);
};
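
An illustrative refcount trace for the scheme described above, assuming the
caller obtains two buffers from the pool (not part of the patch):

    av_buffer_pool_init()                  refcount = 1
    av_buffer_pool_get() called twice      refcount = 3
    av_buffer_pool_uninit()                refcount = 2
    av_buffer_unref() on both buffers      refcount = 1, then 0 -> buffer_pool_free()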

#endif /* AVUTIL_BUFFER_INTERNAL_H */