Mirror of https://github.com/FFmpeg/FFmpeg.git, synced 2024-11-26 19:01:44 +02:00
45a2902976
No buffer will be fetched from the pool after it is uninitialized, so there is no benefit in waiting until every single buffer has been returned to it before freeing them all. This should free some memory sooner in certain scenarios, which can be beneficial on low-memory systems.

Based on a patch by Jonas Karlman.

Reviewed-by: Anton Khirnov <anton@khirnov.net>
Signed-off-by: James Almer <jamrial@gmail.com>
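For context, the effect is easiest to see in a small pool lifecycle. The sketch below is a hypothetical usage example (the scenario and names are illustrative, not part of this file or commit): one reference obtained from the pool is still held when av_buffer_pool_uninit() is called, and with this change the entries already cached in the pool are freed at uninit time rather than only when that last reference comes back.

    #include <libavutil/buffer.h>

    int main(void)
    {
        AVBufferPool *pool = av_buffer_pool_init(1024, NULL); /* default allocator */
        AVBufferRef *held, *brief;

        if (!pool)
            return 1;

        held  = av_buffer_pool_get(pool);   /* kept alive across uninit */
        brief = av_buffer_pool_get(pool);   /* will go back into the pool's cache */
        if (!held || !brief) {
            av_buffer_unref(&held);
            av_buffer_unref(&brief);
            av_buffer_pool_uninit(&pool);
            return 1;
        }

        av_buffer_unref(&brief);            /* entry goes back on the free list */

        /* With this patch the cached entry is freed right here; previously it
         * was kept until 'held' was also returned to the pool. */
        av_buffer_pool_uninit(&pool);

        av_buffer_unref(&held);             /* last pool reference; pool itself is freed now */
        return 0;
    }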
404 lines
9.1 KiB
C
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdatomic.h>
#include <stdint.h>
#include <string.h>

#include "avassert.h"
#include "buffer_internal.h"
#include "common.h"
#include "mem.h"
#include "thread.h"

AVBufferRef *av_buffer_create(uint8_t *data, int size,
                              void (*free)(void *opaque, uint8_t *data),
                              void *opaque, int flags)
{
    AVBufferRef *ref = NULL;
    AVBuffer    *buf = NULL;

    buf = av_mallocz(sizeof(*buf));
    if (!buf)
        return NULL;

    buf->data   = data;
    buf->size   = size;
    buf->free   = free ? free : av_buffer_default_free;
    buf->opaque = opaque;

    atomic_init(&buf->refcount, 1);

    buf->flags = flags;

    ref = av_mallocz(sizeof(*ref));
    if (!ref) {
        av_freep(&buf);
        return NULL;
    }

    ref->buffer = buf;
    ref->data   = data;
    ref->size   = size;

    return ref;
}

void av_buffer_default_free(void *opaque, uint8_t *data)
{
    av_free(data);
}

AVBufferRef *av_buffer_alloc(int size)
{
    AVBufferRef *ret = NULL;
    uint8_t    *data = NULL;

    data = av_malloc(size);
    if (!data)
        return NULL;

    ret = av_buffer_create(data, size, av_buffer_default_free, NULL, 0);
    if (!ret)
        av_freep(&data);

    return ret;
}

AVBufferRef *av_buffer_allocz(int size)
{
    AVBufferRef *ret = av_buffer_alloc(size);
    if (!ret)
        return NULL;

    memset(ret->data, 0, size);
    return ret;
}

AVBufferRef *av_buffer_ref(AVBufferRef *buf)
{
    AVBufferRef *ret = av_mallocz(sizeof(*ret));

    if (!ret)
        return NULL;

    *ret = *buf;

    atomic_fetch_add_explicit(&buf->buffer->refcount, 1, memory_order_relaxed);

    return ret;
}

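/* Move the ref in *src into *dst, freeing the AVBufferRef that *src points
 * to, or drop *dst entirely when src is NULL. The underlying AVBuffer is
 * freed once its refcount reaches zero. */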
static void buffer_replace(AVBufferRef **dst, AVBufferRef **src)
{
    AVBuffer *b;

    b = (*dst)->buffer;

    if (src) {
        **dst = **src;
        av_freep(src);
    } else
        av_freep(dst);

    if (atomic_fetch_sub_explicit(&b->refcount, 1, memory_order_acq_rel) == 1) {
        b->free(b->opaque, b->data);
        av_freep(&b);
    }
}

void av_buffer_unref(AVBufferRef **buf)
{
    if (!buf || !*buf)
        return;

    buffer_replace(buf, NULL);
}

int av_buffer_is_writable(const AVBufferRef *buf)
{
    if (buf->buffer->flags & AV_BUFFER_FLAG_READONLY)
        return 0;

    return atomic_load(&buf->buffer->refcount) == 1;
}

void *av_buffer_get_opaque(const AVBufferRef *buf)
{
    return buf->buffer->opaque;
}

int av_buffer_get_ref_count(const AVBufferRef *buf)
{
    return atomic_load(&buf->buffer->refcount);
}

int av_buffer_make_writable(AVBufferRef **pbuf)
{
    AVBufferRef *newbuf, *buf = *pbuf;

    if (av_buffer_is_writable(buf))
        return 0;

    newbuf = av_buffer_alloc(buf->size);
    if (!newbuf)
        return AVERROR(ENOMEM);

    memcpy(newbuf->data, buf->data, buf->size);

    buffer_replace(pbuf, &newbuf);

    return 0;
}

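/* Three paths below: if *pbuf is NULL, a fresh reallocatable buffer is
 * created; if the existing buffer cannot be resized in place (not marked
 * reallocatable, not writable, or its data pointer is offset into the
 * allocation), a new buffer is allocated and the data copied over;
 * otherwise the underlying allocation is av_realloc()ed in place. */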
int av_buffer_realloc(AVBufferRef **pbuf, int size)
{
    AVBufferRef *buf = *pbuf;
    uint8_t *tmp;
    int ret;

    if (!buf) {
        /* allocate a new buffer with av_realloc(), so it will be reallocatable
         * later */
        uint8_t *data = av_realloc(NULL, size);
        if (!data)
            return AVERROR(ENOMEM);

        buf = av_buffer_create(data, size, av_buffer_default_free, NULL, 0);
        if (!buf) {
            av_freep(&data);
            return AVERROR(ENOMEM);
        }

        buf->buffer->flags_internal |= BUFFER_FLAG_REALLOCATABLE;
        *pbuf = buf;

        return 0;
    } else if (buf->size == size)
        return 0;

    if (!(buf->buffer->flags_internal & BUFFER_FLAG_REALLOCATABLE) ||
        !av_buffer_is_writable(buf) || buf->data != buf->buffer->data) {
        /* cannot realloc, allocate a new reallocatable buffer and copy data */
        AVBufferRef *new = NULL;

        ret = av_buffer_realloc(&new, size);
        if (ret < 0)
            return ret;

        memcpy(new->data, buf->data, FFMIN(size, buf->size));

        buffer_replace(pbuf, &new);
        return 0;
    }

    tmp = av_realloc(buf->buffer->data, size);
    if (!tmp)
        return AVERROR(ENOMEM);

    buf->buffer->data = buf->data = tmp;
    buf->buffer->size = buf->size = size;
    return 0;
}

int av_buffer_replace(AVBufferRef **pdst, AVBufferRef *src)
{
    AVBufferRef *dst = *pdst;
    AVBufferRef *tmp;

    if (!src) {
        av_buffer_unref(pdst);
        return 0;
    }

    if (dst && dst->buffer == src->buffer) {
        /* make sure the data pointers match */
        dst->data = src->data;
        dst->size = src->size;
        return 0;
    }

    tmp = av_buffer_ref(src);
    if (!tmp)
        return AVERROR(ENOMEM);

    av_buffer_unref(pdst);
    *pdst = tmp;
    return 0;
}

AVBufferPool *av_buffer_pool_init2(int size, void *opaque,
                                   AVBufferRef* (*alloc)(void *opaque, int size),
                                   void (*pool_free)(void *opaque))
{
    AVBufferPool *pool = av_mallocz(sizeof(*pool));
    if (!pool)
        return NULL;

    ff_mutex_init(&pool->mutex, NULL);

    pool->size      = size;
    pool->opaque    = opaque;
    pool->alloc2    = alloc;
    pool->alloc     = av_buffer_alloc; // fallback
    pool->pool_free = pool_free;

    atomic_init(&pool->refcount, 1);

    return pool;
}

AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size))
{
    AVBufferPool *pool = av_mallocz(sizeof(*pool));
    if (!pool)
        return NULL;

    ff_mutex_init(&pool->mutex, NULL);

    pool->size  = size;
    pool->alloc = alloc ? alloc : av_buffer_alloc;

    atomic_init(&pool->refcount, 1);

    return pool;
}

static void buffer_pool_flush(AVBufferPool *pool)
{
    while (pool->pool) {
        BufferPoolEntry *buf = pool->pool;
        pool->pool = buf->next;

        buf->free(buf->opaque, buf->data);
        av_freep(&buf);
    }
}

/*
 * This function gets called when the pool has been uninited and
 * all the buffers returned to it.
 */
static void buffer_pool_free(AVBufferPool *pool)
{
    buffer_pool_flush(pool);
    ff_mutex_destroy(&pool->mutex);

    if (pool->pool_free)
        pool->pool_free(pool->opaque);

    av_freep(&pool);
}

void av_buffer_pool_uninit(AVBufferPool **ppool)
{
    AVBufferPool *pool;

    if (!ppool || !*ppool)
        return;
    pool   = *ppool;
    *ppool = NULL;

    ff_mutex_lock(&pool->mutex);
    buffer_pool_flush(pool);
    ff_mutex_unlock(&pool->mutex);

    if (atomic_fetch_sub_explicit(&pool->refcount, 1, memory_order_acq_rel) == 1)
        buffer_pool_free(pool);
}

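/* Free callback installed on pooled buffers: instead of freeing the data,
 * put the entry back on the pool's free list and drop the reference this
 * buffer held on the pool. */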
static void pool_release_buffer(void *opaque, uint8_t *data)
{
    BufferPoolEntry *buf = opaque;
    AVBufferPool *pool = buf->pool;

    if(CONFIG_MEMORY_POISONING)
        memset(buf->data, FF_MEMORY_POISON, pool->size);

    ff_mutex_lock(&pool->mutex);
    buf->next = pool->pool;
    pool->pool = buf;
    ff_mutex_unlock(&pool->mutex);

    if (atomic_fetch_sub_explicit(&pool->refcount, 1, memory_order_acq_rel) == 1)
        buffer_pool_free(pool);
}

/* allocate a new buffer and override its free() callback so that
 * it is returned to the pool on free */
static AVBufferRef *pool_alloc_buffer(AVBufferPool *pool)
{
    BufferPoolEntry *buf;
    AVBufferRef     *ret;

    av_assert0(pool->alloc || pool->alloc2);

    ret = pool->alloc2 ? pool->alloc2(pool->opaque, pool->size) :
                         pool->alloc(pool->size);
    if (!ret)
        return NULL;

    buf = av_mallocz(sizeof(*buf));
    if (!buf) {
        av_buffer_unref(&ret);
        return NULL;
    }

    buf->data   = ret->buffer->data;
    buf->opaque = ret->buffer->opaque;
    buf->free   = ret->buffer->free;
    buf->pool   = pool;

    ret->buffer->opaque = buf;
    ret->buffer->free   = pool_release_buffer;

    return ret;
}

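/* Hand out a buffer from the pool: reuse a cached entry from the free list
 * if one is available, otherwise allocate a fresh one. Either way the
 * returned buffer holds a reference on the pool. */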
AVBufferRef *av_buffer_pool_get(AVBufferPool *pool)
{
    AVBufferRef *ret;
    BufferPoolEntry *buf;

    ff_mutex_lock(&pool->mutex);
    buf = pool->pool;
    if (buf) {
        ret = av_buffer_create(buf->data, pool->size, pool_release_buffer,
                               buf, 0);
        if (ret) {
            pool->pool = buf->next;
            buf->next = NULL;
        }
    } else {
        ret = pool_alloc_buffer(pool);
    }
    ff_mutex_unlock(&pool->mutex);

    if (ret)
        atomic_fetch_add_explicit(&pool->refcount, 1, memory_order_relaxed);

    return ret;
}

void *av_buffer_pool_buffer_get_opaque(AVBufferRef *ref)
{
    BufferPoolEntry *buf = ref->buffer->opaque;
    av_assert0(buf);
    return buf->opaque;
}