2002-06-11 16:41:01 +03:00
|
|
|
/*
|
2006-07-19 10:28:58 +03:00
|
|
|
* default memory allocator for libavutil
|
2009-01-19 17:46:40 +02:00
|
|
|
* Copyright (c) 2002 Fabrice Bellard
|
2002-06-11 16:41:01 +03:00
|
|
|
*
|
2006-10-07 18:30:46 +03:00
|
|
|
* This file is part of FFmpeg.
|
|
|
|
*
|
|
|
|
* FFmpeg is free software; you can redistribute it and/or
|
2002-06-11 16:41:01 +03:00
|
|
|
* modify it under the terms of the GNU Lesser General Public
|
|
|
|
* License as published by the Free Software Foundation; either
|
2006-10-07 18:30:46 +03:00
|
|
|
* version 2.1 of the License, or (at your option) any later version.
|
2002-06-11 16:41:01 +03:00
|
|
|
*
|
2006-10-07 18:30:46 +03:00
|
|
|
* FFmpeg is distributed in the hope that it will be useful,
|
2002-06-11 16:41:01 +03:00
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
* Lesser General Public License for more details.
|
|
|
|
*
|
|
|
|
* You should have received a copy of the GNU Lesser General Public
|
2006-10-07 18:30:46 +03:00
|
|
|
* License along with FFmpeg; if not, write to the Free Software
|
2006-01-13 00:43:26 +02:00
|
|
|
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
2002-06-11 16:41:01 +03:00
|
|
|
*/
|
2005-12-17 20:14:38 +02:00
|
|
|
|
2003-03-06 13:32:04 +02:00
|
|
|
/**
|
2010-04-20 17:45:34 +03:00
|
|
|
* @file
|
2009-01-28 02:16:05 +02:00
|
|
|
* default memory allocator for libavutil
|
2003-03-06 13:32:04 +02:00
|
|
|
*/
|
2005-12-17 20:14:38 +02:00
|
|
|
|
2011-05-12 16:11:27 +03:00
|
|
|
#define _XOPEN_SOURCE 600
|
|
|
|
|
2009-01-24 16:55:30 +02:00
|
|
|
#include "config.h"
|
2003-01-24 01:03:09 +02:00
|
|
|
|
2009-01-24 16:55:30 +02:00
|
|
|
#include <limits.h>
|
2012-10-18 20:27:51 +03:00
|
|
|
#include <stdint.h>
|
2009-01-08 01:36:34 +02:00
|
|
|
#include <stdlib.h>
|
2021-05-22 23:03:10 +02:00
|
|
|
#include <stdatomic.h>
|
2009-01-24 16:55:30 +02:00
|
|
|
#include <string.h>
|
2009-01-14 01:44:16 +02:00
|
|
|
#if HAVE_MALLOC_H
|
2002-06-11 16:41:01 +03:00
|
|
|
#include <malloc.h>
|
|
|
|
#endif
|
|
|
|
|
2021-08-01 08:36:09 +02:00
|
|
|
#include "attributes.h"
|
|
|
|
#include "avassert.h"
|
2014-03-08 23:27:00 +03:00
|
|
|
#include "dynarray.h"
|
2021-08-01 08:36:09 +02:00
|
|
|
#include "error.h"
|
|
|
|
#include "internal.h"
|
2012-10-18 20:27:51 +03:00
|
|
|
#include "intreadwrite.h"
|
2021-08-01 08:36:09 +02:00
|
|
|
#include "macros.h"
|
2009-01-26 00:40:43 +02:00
|
|
|
#include "mem.h"
|
|
|
|
|
2010-01-28 15:06:31 +02:00
|
|
|
#ifdef MALLOC_PREFIX
|
|
|
|
|
|
|
|
#define malloc AV_JOIN(MALLOC_PREFIX, malloc)
|
|
|
|
#define memalign AV_JOIN(MALLOC_PREFIX, memalign)
|
|
|
|
#define posix_memalign AV_JOIN(MALLOC_PREFIX, posix_memalign)
|
|
|
|
#define realloc AV_JOIN(MALLOC_PREFIX, realloc)
|
|
|
|
#define free AV_JOIN(MALLOC_PREFIX, free)
|
|
|
|
|
|
|
|
void *malloc(size_t size);
|
|
|
|
void *memalign(size_t align, size_t size);
|
|
|
|
int posix_memalign(void **ptr, size_t align, size_t size);
|
|
|
|
void *realloc(void *ptr, size_t size);
|
|
|
|
void free(void *ptr);
|
|
|
|
|
|
|
|
#endif /* MALLOC_PREFIX */
|
|
|
|
|
avutil/mem: limit alignment to maximum simd align
FFmpeg has instances of DECLARE_ALIGNED(32, ...) in a lot of structs,
which then end up heap-allocated.
By declaring any variable in a struct, or tree of structs, to be 32 byte
aligned, it allows the compiler to safely assume the entire struct
itself is also 32 byte aligned.
This might make the compiler emit code which straight up crashes or
misbehaves in other ways, and in at least one instance is now
documented to actually do so (see ticket 10549 on trac).
The issue there is that an unrelated variable in SingleChannelElement is
declared to have an alignment of 32 bytes. So if the compiler does a copy
in decode_cpe() with avx instructions, but ffmpeg is built with
--disable-avx, this results in a crash, since the memory is only 16 byte
aligned.
Mind you, even if the compiler does not emit avx instructions, the code
is still invalid and could misbehave. It just happens not to. Declaring
any variable in a struct with a 32 byte alignment promises 32 byte
alignment of the whole struct to the compiler.
This patch limits the maximum alignment to the maximum possible simd
alignment according to configure.
While not perfect, it at the very least gets rid of a lot of UB, by
matching up the maximum DECLARE_ALIGNED value with the alignment of heap
allocations done by lavu.
2023-12-03 22:01:50 +02:00
|
|
|
#define ALIGN (HAVE_SIMD_ALIGN_64 ? 64 : (HAVE_SIMD_ALIGN_32 ? 32 : 16))
|
2011-05-10 04:15:42 +03:00
|
|
|
|
2024-03-28 21:19:28 +02:00
|
|
|
#define FF_MEMORY_POISON 0x2a
|
|
|
|
|
2012-04-15 23:35:19 +03:00
|
|
|
/* NOTE: if you want to override these functions with your own
|
|
|
|
* implementations (not recommended) you have to link libav* as
|
|
|
|
* dynamic libraries and remove -Wl,-Bsymbolic from the linker flags.
|
|
|
|
* Note that this will cost performance. */
|
2002-06-11 16:41:01 +03:00
|
|
|
|
2024-03-23 14:38:06 +02:00
|
|
|
static atomic_size_t max_alloc_size = INT_MAX;
|
2011-12-25 20:43:58 +03:00
|
|
|
|
|
|
|
/**
 * Set the maximum size that may be allocated in one block.
 * Relaxed ordering is sufficient: readers only need to see some
 * recent value, not a synchronized one.
 */
void av_max_alloc(size_t max){
    atomic_store_explicit(&max_alloc_size, max, memory_order_relaxed);
}
|
2011-05-19 00:59:38 +03:00
|
|
|
|
2021-08-10 19:40:02 +02:00
|
|
|
/**
 * Multiply two size_t values with overflow checking.
 *
 * @param a first factor
 * @param b second factor
 * @param[out] r receives a * b on success; untouched on failure
 * @return 0 on success, AVERROR(EINVAL) if the product would overflow
 */
static int size_mult(size_t a, size_t b, size_t *r)
{
    size_t t;

#if (!defined(__INTEL_COMPILER) && AV_GCC_VERSION_AT_LEAST(5,1)) || AV_HAS_BUILTIN(__builtin_mul_overflow)
    /* Prefer the compiler's checked multiply when available. */
    if (__builtin_mul_overflow(a, b, &t))
        return AVERROR(EINVAL);
#else
    t = a * b;
    /* Hack inspired from glibc: don't try the division if nelem and elsize
     * are both less than sqrt(SIZE_MAX). */
    if ((a | b) >= ((size_t)1 << (sizeof(size_t) * 4)) && a && t / a != b)
        return AVERROR(EINVAL);
#endif
    *r = t;
    return 0;
}
|
|
|
|
|
2011-04-12 22:17:26 +03:00
|
|
|
/**
 * Allocate a block of memory with alignment suitable for all SIMD code
 * (ALIGN bytes, see the HAVE_SIMD_ALIGN_* configure checks).
 *
 * @param size number of bytes to allocate
 * @return pointer to the allocated block, or NULL on failure or if size
 *         exceeds the av_max_alloc() limit. A zero size still yields a
 *         non-NULL, freeable 1-byte allocation.
 */
void *av_malloc(size_t size)
{
    void *ptr = NULL;

    /* Enforce the user-configurable allocation ceiling. */
    if (size > atomic_load_explicit(&max_alloc_size, memory_order_relaxed))
        return NULL;

#if HAVE_POSIX_MEMALIGN
    if (size) //OS X on SDK 10.6 has a broken posix_memalign implementation
        if (posix_memalign(&ptr, ALIGN, size))
            ptr = NULL;
#elif HAVE_ALIGNED_MALLOC
    ptr = _aligned_malloc(size, ALIGN);
#elif HAVE_MEMALIGN
#ifndef __DJGPP__
    ptr = memalign(ALIGN, size);
#else
    /* DJGPP's memalign takes its arguments in the reverse order. */
    ptr = memalign(size, ALIGN);
#endif
    /* Why 64?
     * Indeed, we should align it:
     *   on  4 for 386
     *   on 16 for 486
     *   on 32 for 586, PPro - K6-III
     *   on 64 for K7 (maybe for P3 too).
     * Because L1 and L2 caches are aligned on those values.
     * But I don't want to code such logic here!
     */
    /* Why 32?
     * For AVX ASM. SSE / NEON needs only 16.
     * Why not larger? Because I did not see a difference in benchmarks ...
     */
    /* benchmarks with P3
     * memalign(64) + 1          3071, 3051, 3032
     * memalign(64) + 2          3051, 3032, 3041
     * memalign(64) + 4          2911, 2896, 2915
     * memalign(64) + 8          2545, 2554, 2550
     * memalign(64) + 16         2543, 2572, 2563
     * memalign(64) + 32         2546, 2545, 2571
     * memalign(64) + 64         2570, 2533, 2558
     *
     * BTW, malloc seems to do 8-byte alignment by default here.
     */
#else
    ptr = malloc(size);
#endif
    /* Guarantee a non-NULL result for size 0 so callers can
     * distinguish "empty allocation" from "allocation failure". */
    if(!ptr && !size) {
        size = 1;
        ptr= av_malloc(1);
    }
#if CONFIG_MEMORY_POISONING
    /* Fill fresh memory with a poison byte to expose use-before-init. */
    if (ptr)
        memset(ptr, FF_MEMORY_POISON, size);
#endif
    return ptr;
}
|
|
|
|
|
2011-04-12 22:17:26 +03:00
|
|
|
/**
 * Reallocate a block previously obtained from av_malloc()/av_realloc().
 *
 * NOTE: the returned block is NOT guaranteed to carry the full SIMD
 * alignment unless HAVE_ALIGNED_MALLOC is set — plain realloc() only
 * promises the system's default alignment.
 *
 * @param ptr  block to resize, or NULL to allocate a new one
 * @param size new size in bytes
 * @return new pointer, or NULL on failure (ptr is left valid)
 */
void *av_realloc(void *ptr, size_t size)
{
    void *ret;
    /* Enforce the user-configurable allocation ceiling. */
    if (size > atomic_load_explicit(&max_alloc_size, memory_order_relaxed))
        return NULL;

    /* "size + !size" bumps a zero request to 1 byte so a non-NULL
     * pointer is returned even for size 0. */
#if HAVE_ALIGNED_MALLOC
    ret = _aligned_realloc(ptr, size + !size, ALIGN);
#else
    ret = realloc(ptr, size + !size);
#endif
#if CONFIG_MEMORY_POISONING
    /* Only poison brand-new blocks; grown blocks contain live data. */
    if (ret && !ptr)
        memset(ret, FF_MEMORY_POISON, size);
#endif
    return ret;
}
|
|
|
|
|
2011-03-20 20:39:20 +02:00
|
|
|
/**
 * Resize ptr to hold nelem elements of elsize bytes each.
 * Unlike av_realloc(), the original buffer is freed on failure
 * (including multiplication overflow), so no leak is possible.
 *
 * @return the resized buffer, or NULL (with ptr freed) on error
 */
void *av_realloc_f(void *ptr, size_t nelem, size_t elsize)
{
    size_t bytes;
    void *newbuf;

    if (size_mult(elsize, nelem, &bytes) < 0) {
        av_free(ptr);
        return NULL;
    }

    newbuf = av_realloc(ptr, bytes);
    if (!newbuf)
        av_free(ptr);

    return newbuf;
}
|
|
|
|
|
2013-09-15 22:42:07 +03:00
|
|
|
int av_reallocp(void *ptr, size_t size)
|
|
|
|
{
|
2015-01-26 21:17:31 +02:00
|
|
|
void *val;
|
2013-09-15 22:42:07 +03:00
|
|
|
|
2013-09-20 14:02:41 +03:00
|
|
|
if (!size) {
|
|
|
|
av_freep(ptr);
|
|
|
|
return 0;
|
|
|
|
}
|
2013-09-15 22:42:07 +03:00
|
|
|
|
2015-01-26 21:17:31 +02:00
|
|
|
memcpy(&val, ptr, sizeof(val));
|
|
|
|
val = av_realloc(val, size);
|
|
|
|
|
|
|
|
if (!val) {
|
2013-09-15 22:42:07 +03:00
|
|
|
av_freep(ptr);
|
|
|
|
return AVERROR(ENOMEM);
|
|
|
|
}
|
|
|
|
|
2015-01-26 21:17:31 +02:00
|
|
|
memcpy(ptr, &val, sizeof(val));
|
2013-09-15 22:42:07 +03:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2017-03-30 17:02:39 +02:00
|
|
|
/**
 * Allocate an array of nmemb elements of size bytes each,
 * checking the size computation for overflow.
 *
 * @return the allocation, or NULL on overflow or allocation failure
 */
void *av_malloc_array(size_t nmemb, size_t size)
{
    size_t total_bytes;

    return size_mult(nmemb, size, &total_bytes) < 0
               ? NULL : av_malloc(total_bytes);
}
|
|
|
|
|
2013-06-03 12:31:46 +03:00
|
|
|
/**
 * Resize ptr to an array of nmemb elements of size bytes each,
 * checking the size computation for overflow.
 *
 * @return the resized buffer, or NULL on overflow or allocation
 *         failure (ptr stays valid in that case)
 */
void *av_realloc_array(void *ptr, size_t nmemb, size_t size)
{
    size_t total_bytes;

    return size_mult(nmemb, size, &total_bytes) < 0
               ? NULL : av_realloc(ptr, total_bytes);
}
|
|
|
|
|
|
|
|
int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
|
|
|
|
{
|
2015-01-26 21:17:31 +02:00
|
|
|
void *val;
|
|
|
|
|
|
|
|
memcpy(&val, ptr, sizeof(val));
|
2015-02-01 14:52:22 +02:00
|
|
|
val = av_realloc_f(val, nmemb, size);
|
|
|
|
memcpy(ptr, &val, sizeof(val));
|
|
|
|
if (!val && nmemb && size)
|
2013-06-03 12:31:46 +03:00
|
|
|
return AVERROR(ENOMEM);
|
2015-01-26 21:17:31 +02:00
|
|
|
|
2013-06-03 12:31:46 +03:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2002-06-11 16:41:01 +03:00
|
|
|
/**
 * Free a block allocated by av_malloc()/av_realloc() and friends.
 * Must match the allocator used there: _aligned_malloc memory can
 * only be released with _aligned_free. NULL is a no-op.
 */
void av_free(void *ptr)
{
#if HAVE_ALIGNED_MALLOC
    _aligned_free(ptr);
#else
    free(ptr);
#endif
}
|
|
|
|
|
2006-09-25 18:23:40 +03:00
|
|
|
/**
 * Free the buffer *arg points to and set *arg to NULL.
 * The pointer is read/written through memcpy so that arg may point to
 * any pointer type without violating strict aliasing. NULLing before
 * the free defends against double-free via the caller's pointer.
 */
void av_freep(void *arg)
{
    void *val;

    memcpy(&val, arg, sizeof(val));
    memcpy(arg, &(void *){ NULL }, sizeof(val));
    av_free(val);
}
|
|
|
|
|
2011-04-12 22:17:26 +03:00
|
|
|
/**
 * Allocate a SIMD-aligned block of memory and zero it.
 *
 * @param size number of bytes to allocate
 * @return zeroed allocation, or NULL on failure
 */
void *av_mallocz(size_t size)
{
    void *buf = av_malloc(size);

    if (buf)
        memset(buf, 0, size);

    return buf;
}
|
|
|
|
|
2011-09-24 19:39:13 +03:00
|
|
|
/**
 * Allocate a zeroed array of nmemb elements of size bytes each,
 * checking the size computation for overflow.
 *
 * @return zeroed allocation, or NULL on overflow or failure
 */
void *av_calloc(size_t nmemb, size_t size)
{
    size_t total_bytes;

    return size_mult(nmemb, size, &total_bytes) < 0
               ? NULL : av_mallocz(total_bytes);
}
|
|
|
|
|
2006-09-25 18:23:40 +03:00
|
|
|
/**
 * Duplicate a NUL-terminated string into freshly allocated memory.
 *
 * @param s string to copy, may be NULL
 * @return newly allocated copy (free with av_free()), or NULL if
 *         s is NULL or allocation fails
 */
char *av_strdup(const char *s)
{
    size_t n;
    char *copy;

    if (!s)
        return NULL;

    n = strlen(s) + 1;               /* include the terminator */
    copy = av_realloc(NULL, n);
    if (copy)
        memcpy(copy, s, n);
    return copy;
}
|
|
|
|
|
2014-08-12 19:24:19 +03:00
|
|
|
/**
 * Duplicate at most len bytes of s into a NUL-terminated string.
 * If a NUL occurs within the first len bytes, the copy stops there.
 *
 * @param s   source string, may be NULL
 * @param len maximum number of bytes to copy
 * @return newly allocated string (free with av_free()), or NULL if
 *         s is NULL or allocation fails
 */
char *av_strndup(const char *s, size_t len)
{
    char *copy;
    const char *nul;

    if (!s)
        return NULL;

    /* Trim len to the embedded terminator, if any. */
    nul = memchr(s, 0, len);
    if (nul)
        len = nul - s;

    copy = av_realloc(NULL, len + 1);
    if (!copy)
        return NULL;

    memcpy(copy, s, len);
    copy[len] = 0;
    return copy;
}
|
|
|
|
|
2013-04-28 01:49:14 +03:00
|
|
|
/**
 * Duplicate a buffer of size bytes into freshly allocated memory.
 *
 * @param p    buffer to copy, may be NULL
 * @param size number of bytes to duplicate
 * @return newly allocated copy (free with av_free()), or NULL if
 *         p is NULL or allocation fails
 */
void *av_memdup(const void *p, size_t size)
{
    void *dup;

    if (!p)
        return NULL;

    dup = av_malloc(size);
    if (dup)
        memcpy(dup, p, size);
    return dup;
}
|
|
|
|
|
2014-02-25 03:06:06 +03:00
|
|
|
/**
 * Append elem to the dynamic pointer array *tab_ptr, growing it as
 * needed (FF_DYNARRAY_ADD handles capacity doubling and increments
 * *nb_ptr). Unlike av_dynarray_add(), the existing array is left
 * untouched on failure.
 *
 * @return 0 on success, AVERROR(ENOMEM) on allocation failure
 */
int av_dynarray_add_nofree(void *tab_ptr, int *nb_ptr, void *elem)
{
    void **tab;
    /* memcpy in/out keeps the pointer access strict-aliasing safe. */
    memcpy(&tab, tab_ptr, sizeof(tab));

    FF_DYNARRAY_ADD(INT_MAX, sizeof(*tab), tab, *nb_ptr, {
        tab[*nb_ptr] = elem;
        memcpy(tab_ptr, &tab, sizeof(tab));
    }, {
        return AVERROR(ENOMEM);
    });
    return 0;
}
|
|
|
|
|
2011-04-29 18:33:38 +03:00
|
|
|
/**
 * Append elem to the dynamic pointer array *tab_ptr, growing it as
 * needed. On allocation failure the whole array is freed, *tab_ptr
 * set to NULL and *nb_ptr to 0 (contrast av_dynarray_add_nofree()).
 */
void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem)
{
    void **tab;
    /* memcpy in/out keeps the pointer access strict-aliasing safe. */
    memcpy(&tab, tab_ptr, sizeof(tab));

    FF_DYNARRAY_ADD(INT_MAX, sizeof(*tab), tab, *nb_ptr, {
        tab[*nb_ptr] = elem;
        memcpy(tab_ptr, &tab, sizeof(tab));
    }, {
        *nb_ptr = 0;
        av_freep(tab_ptr);
    });
}
|
2012-01-29 02:34:59 +03:00
|
|
|
|
2013-04-14 04:07:54 +03:00
|
|
|
/**
 * Append one element of elem_size bytes to the dynamic array *tab_ptr.
 * If elem_data is NULL the new slot is left uninitialized (poisoned
 * when CONFIG_MEMORY_POISONING is set) so the caller can fill it in.
 * On allocation failure the array is freed and *nb_ptr reset to 0.
 *
 * @return pointer to the newly added slot, or NULL on failure
 */
void *av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size,
                       const uint8_t *elem_data)
{
    uint8_t *tab_elem_data = NULL;

    FF_DYNARRAY_ADD(INT_MAX, elem_size, *tab_ptr, *nb_ptr, {
        tab_elem_data = (uint8_t *)*tab_ptr + (*nb_ptr) * elem_size;
        if (elem_data)
            memcpy(tab_elem_data, elem_data, elem_size);
        else if (CONFIG_MEMORY_POISONING)
            memset(tab_elem_data, FF_MEMORY_POISON, elem_size);
    }, {
        av_freep(tab_ptr);
        *nb_ptr = 0;
    });
    return tab_elem_data;
}
|
|
|
|
|
2012-10-26 16:42:23 +03:00
|
|
|
/* Propagate the 2-byte pattern preceding dst over the next len bytes.
 * Bulk phase writes the pattern doubled into 32-bit words; the tail
 * copies byte-by-byte from two bytes back. */
static void fill16(uint8_t *dst, int len)
{
    uint32_t pattern = AV_RN16(dst - 2);

    pattern |= pattern << 16;

    for (; len >= 4; len -= 4, dst += 4)
        AV_WN32(dst, pattern);

    for (; len > 0; len--, dst++)
        *dst = dst[-2];
}
|
|
|
|
|
|
|
|
/**
 * Propagate the 3-byte pattern preceding dst over the next len bytes.
 * Precomputes three byte-rotated 32-bit words (a, b, c) so that writing
 * them in sequence keeps the 3-byte pattern in phase across 4-byte
 * stores; 12 = lcm(3, 4) bytes per bulk iteration.
 */
static void fill24(uint8_t *dst, int len)
{
#if HAVE_BIGENDIAN
    uint32_t v = AV_RB24(dst - 3);
    uint32_t a = v << 8 | v >> 16;
    uint32_t b = v << 16 | v >> 8;
    uint32_t c = v << 24 | v;
#else
    uint32_t v = AV_RL24(dst - 3);
    uint32_t a = v | v << 24;
    uint32_t b = v >> 8 | v << 16;
    uint32_t c = v >> 16 | v << 8;
#endif

    while (len >= 12) {
        AV_WN32(dst, a);
        AV_WN32(dst + 4, b);
        AV_WN32(dst + 8, c);
        dst += 12;
        len -= 12;
    }

    /* Up to two more word stores before falling back to bytes. */
    if (len >= 4) {
        AV_WN32(dst, a);
        dst += 4;
        len -= 4;
    }

    if (len >= 4) {
        AV_WN32(dst, b);
        dst += 4;
        len -= 4;
    }

    /* Byte tail: copying from 3 back continues the pattern exactly. */
    while (len--) {
        *dst = dst[-3];
        dst++;
    }
}
|
|
|
|
|
|
|
|
/**
 * Propagate the 4-byte pattern preceding dst over the next len bytes.
 * Uses 64-bit stores (pattern doubled) on fast-64-bit targets, then
 * 32-bit stores, then a byte-wise tail.
 */
static void fill32(uint8_t *dst, int len)
{
    uint32_t v = AV_RN32(dst - 4);

#if HAVE_FAST_64BIT
    uint64_t v2= v + ((uint64_t)v<<32);
    while (len >= 32) {
        AV_WN64(dst   , v2);
        AV_WN64(dst+ 8, v2);
        AV_WN64(dst+16, v2);
        AV_WN64(dst+24, v2);
        dst += 32;
        len -= 32;
    }
#endif

    while (len >= 4) {
        AV_WN32(dst, v);
        dst += 4;
        len -= 4;
    }

    while (len--) {
        *dst = dst[-4];
        dst++;
    }
}
|
|
|
|
|
2012-10-18 20:27:51 +03:00
|
|
|
/**
 * Copy cnt bytes to dst from back bytes earlier in the same buffer,
 * deliberately handling the overlap so the pattern repeats (LZ77-style
 * match expansion) — something plain memcpy/memmove cannot do.
 *
 * @param dst  destination; bytes [dst-back, dst) must be valid
 * @param back distance of the source behind dst; 0 is a no-op
 * @param cnt  number of bytes to produce
 */
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
{
    const uint8_t *src = &dst[-back];
    if (!back)
        return;

    if (back == 1) {
        /* Distance 1 is just a byte fill. */
        memset(dst, *src, cnt);
    } else if (back == 2) {
        fill16(dst, cnt);
    } else if (back == 3) {
        fill24(dst, cnt);
    } else if (back == 4) {
        fill32(dst, cnt);
    } else {
        if (cnt >= 16) {
            /* Doubling copy: each memcpy is non-overlapping because
             * blocklen never exceeds the distance already produced. */
            int blocklen = back;
            while (cnt > blocklen) {
                memcpy(dst, src, blocklen);
                dst += blocklen;
                cnt -= blocklen;
                blocklen <<= 1;
            }
            memcpy(dst, src, cnt);
            return;
        }
        /* Short copies: unaligned word/halfword moves (back > 4, so
         * each chunk read does not overlap its own write). */
        if (cnt >= 8) {
            AV_COPY32U(dst, src);
            AV_COPY32U(dst + 4, src + 4);
            src += 8;
            dst += 8;
            cnt -= 8;
        }
        if (cnt >= 4) {
            AV_COPY32U(dst, src);
            src += 4;
            dst += 4;
            cnt -= 4;
        }
        if (cnt >= 2) {
            AV_COPY16U(dst, src);
            src += 2;
            dst += 2;
            cnt -= 2;
        }
        if (cnt)
            *dst = *src;
    }
}
|
2012-10-25 14:15:58 +03:00
|
|
|
|
2013-10-28 00:21:59 +03:00
|
|
|
/**
 * Reallocate ptr to at least min_size bytes, growing geometrically
 * (~+6% + 32) to amortize repeated enlargements. *size tracks the
 * currently allocated capacity and is updated on every change.
 *
 * @return the (possibly moved) buffer, or NULL on failure — the old
 *         buffer is lost in that case and *size is set to 0
 */
void *av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
{
    size_t max_size;

    /* Fast path: current capacity already suffices. */
    if (min_size <= *size)
        return ptr;

    max_size = atomic_load_explicit(&max_alloc_size, memory_order_relaxed);
    /* *size is an unsigned, so the real maximum is <= UINT_MAX. */
    max_size = FFMIN(max_size, UINT_MAX);

    if (min_size > max_size) {
        *size = 0;
        return NULL;
    }

    /* Overallocate, clamped to max_size (FFMAX guards vs. overflow). */
    min_size = FFMIN(max_size, FFMAX(min_size + min_size / 16 + 32, min_size));

    ptr = av_realloc(ptr, min_size);
    /* we could set this to the unmodified min_size but this is safer
     * if the user lost the ptr and uses NULL now
     */
    if (!ptr)
        min_size = 0;

    *size = min_size;

    return ptr;
}
|
|
|
|
|
2021-05-23 16:24:17 +02:00
|
|
|
/**
 * Shared implementation of av_fast_malloc()/av_fast_mallocz().
 * Unlike av_fast_realloc(), the old contents are NOT preserved:
 * the buffer is freed and allocated anew (zeroed if zero_realloc).
 * *ptr is accessed via memcpy to remain strict-aliasing safe;
 * on failure *ptr is NULL and *size is 0.
 */
static inline void fast_malloc(void *ptr, unsigned int *size, size_t min_size, int zero_realloc)
{
    size_t max_size;
    void *val;

    memcpy(&val, ptr, sizeof(val));
    if (min_size <= *size) {
        /* Capacity claimed but pointer NULL would be caller state
         * corruption — catch it loudly. */
        av_assert0(val || !min_size);
        return;
    }

    max_size = atomic_load_explicit(&max_alloc_size, memory_order_relaxed);
    /* *size is an unsigned, so the real maximum is <= UINT_MAX. */
    max_size = FFMIN(max_size, UINT_MAX);

    if (min_size > max_size) {
        av_freep(ptr);
        *size = 0;
        return;
    }
    /* Geometric overallocation, clamped to max_size. */
    min_size = FFMIN(max_size, FFMAX(min_size + min_size / 16 + 32, min_size));
    av_freep(ptr);
    val = zero_realloc ? av_mallocz(min_size) : av_malloc(min_size);
    memcpy(ptr, &val, sizeof(val));
    if (!val)
        min_size = 0;
    *size = min_size;
    return;
}
|
|
|
|
|
2013-11-14 17:04:04 +03:00
|
|
|
/**
 * Ensure *ptr holds at least min_size bytes; old contents are NOT
 * preserved (see fast_malloc()). New memory is left uninitialized.
 */
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
{
    fast_malloc(ptr, size, min_size, 0);
}
|
|
|
|
|
2015-11-18 15:19:47 +02:00
|
|
|
/**
 * Ensure *ptr holds at least min_size bytes; old contents are NOT
 * preserved (see fast_malloc()). Newly allocated memory is zeroed.
 */
void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
{
    fast_malloc(ptr, size, min_size, 1);
}
|
2021-05-31 11:25:26 +02:00
|
|
|
|
|
|
|
/**
 * Public overflow-checked size_t multiply; thin wrapper around the
 * internal size_mult(). Returns 0 on success, AVERROR(EINVAL) on
 * overflow.
 */
int av_size_mult(size_t a, size_t b, size_t *r)
{
    return size_mult(a, b, r);
}
|