
ffplay: port to lavu/tx

Lynne 2023-02-18 13:14:31 +01:00
parent a810126501
commit 4acd08be6c
No known key found for this signature in database
GPG Key ID: A2FEA5F03F034464
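
As background for the diff below: the patch swaps libavcodec's avfft RDFT API for libavutil's generic transform API. A minimal sketch of the mapping follows; the helper name, the standalone buffers and the error handling are illustrative and not part of the patch, while the transform length and the 1.0 scale match the hunks below.

/*
 * Minimal sketch of the avfft -> lavu/tx mapping this commit applies.
 */
#include "libavutil/error.h"
#include "libavutil/mem.h"
#include "libavutil/tx.h"

static int rdft_demo(int rdft_bits)
{
    const int len       = 1 << rdft_bits;   /* number of real input samples */
    const float scale   = 1.f;              /* unity scale, as in the patch */
    AVTXContext *rdft   = NULL;
    av_tx_fn rdft_fn    = NULL;
    float *in           = av_malloc_array(len, sizeof(*in));
    AVComplexFloat *out = av_malloc_array(len / 2 + 1, sizeof(*out));
    int ret = 0;

    if (!in || !out) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* replaces s->rdft = av_rdft_init(rdft_bits, DFT_R2C); */
    ret = av_tx_init(&rdft, &rdft_fn, AV_TX_FLOAT_RDFT, 0, len, &scale, 0);
    if (ret < 0)
        goto end;

    /* ... fill in[] with len windowed samples ... */

    /* replaces av_rdft_calc(s->rdft, data); note the separate input and
     * output buffers and the stride argument (sizeof(float), as in the patch) */
    rdft_fn(rdft, out, in, sizeof(float));

end:
    /* replaces av_rdft_end(s->rdft); */
    av_tx_uninit(&rdft);
    av_free(in);
    av_free(out);
    return ret;
}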


@@ -47,7 +47,7 @@
 #include "libavdevice/avdevice.h"
 #include "libswscale/swscale.h"
 #include "libavutil/opt.h"
-#include "libavcodec/avfft.h"
+#include "libavutil/tx.h"
 #include "libswresample/swresample.h"
 #include "libavfilter/avfilter.h"
@@ -262,9 +262,11 @@ typedef struct VideoState {
     int16_t sample_array[SAMPLE_ARRAY_SIZE];
     int sample_array_index;
     int last_i_start;
-    RDFTContext *rdft;
+    AVTXContext *rdft;
+    av_tx_fn rdft_fn;
     int rdft_bits;
-    FFTSample *rdft_data;
+    float *real_data;
+    AVComplexFloat *rdft_data;
     int xpos;
     double last_vis_time;
     SDL_Texture *vis_texture;
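
For orientation, the reworked VideoState fields and how the rest of the patch uses them; the annotations are editorial, not from the commit:

AVTXContext *rdft;         /* lavu/tx context created by av_tx_init()           */
av_tx_fn rdft_fn;          /* transform function returned by av_tx_init()       */
int rdft_bits;             /* log2 of the transform length, as before           */
float *real_data;          /* windowed real input, 2 * nb_freq samples per ch.  */
AVComplexFloat *rdft_data; /* complex output bins, one run of bins per channel  */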
@@ -1120,6 +1122,7 @@ static void video_audio_display(VideoState *s)
             fill_rectangle(s->xleft, y, s->width, 1);
         }
     } else {
+        int err = 0;
         if (realloc_texture(&s->vis_texture, SDL_PIXELFORMAT_ARGB8888, s->width, s->height, SDL_BLENDMODE_NONE, 1) < 0)
             return;
@@ -1127,31 +1130,39 @@ static void video_audio_display(VideoState *s)
             s->xpos = 0;
         nb_display_channels= FFMIN(nb_display_channels, 2);
         if (rdft_bits != s->rdft_bits) {
-            av_rdft_end(s->rdft);
-            av_free(s->rdft_data);
-            s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
+            const float rdft_scale = 1.0;
+            av_tx_uninit(&s->rdft);
+            av_freep(&s->real_data);
+            av_freep(&s->rdft_data);
             s->rdft_bits = rdft_bits;
-            s->rdft_data = av_malloc_array(nb_freq, 4 *sizeof(*s->rdft_data));
+            s->real_data = av_malloc_array(nb_freq, 4 *sizeof(*s->real_data));
+            s->rdft_data = av_malloc_array(nb_freq + 1, 2 *sizeof(*s->rdft_data));
+            err = av_tx_init(&s->rdft, &s->rdft_fn, AV_TX_FLOAT_RDFT,
+                             0, 1 << rdft_bits, &rdft_scale, 0);
         }
-        if (!s->rdft || !s->rdft_data){
+        if (err < 0 || !s->rdft_data) {
             av_log(NULL, AV_LOG_ERROR, "Failed to allocate buffers for RDFT, switching to waves display\n");
             s->show_mode = SHOW_MODE_WAVES;
         } else {
-            FFTSample *data[2];
+            float *data_in[2];
+            AVComplexFloat *data[2];
             SDL_Rect rect = {.x = s->xpos, .y = 0, .w = 1, .h = s->height};
             uint32_t *pixels;
             int pitch;
             for (ch = 0; ch < nb_display_channels; ch++) {
-                data[ch] = s->rdft_data + 2 * nb_freq * ch;
+                data_in[ch] = s->real_data + 2 * nb_freq * ch;
+                data[ch] = s->rdft_data + nb_freq * ch;
                 i = i_start + ch;
                 for (x = 0; x < 2 * nb_freq; x++) {
                     double w = (x-nb_freq) * (1.0 / nb_freq);
-                    data[ch][x] = s->sample_array[i] * (1.0 - w * w);
+                    data_in[ch][x] = s->sample_array[i] * (1.0 - w * w);
                     i += channels;
                     if (i >= SAMPLE_ARRAY_SIZE)
                         i -= SAMPLE_ARRAY_SIZE;
                 }
-                av_rdft_calc(s->rdft, data[ch]);
+                s->rdft_fn(s->rdft, data[ch], data_in[ch], sizeof(float));
+                data[ch][0].im = data[ch][nb_freq].re;
+                data[ch][nb_freq].re = 0;
             }
             /* Least efficient way to do this, we should of course
              * directly access it but it is more than fast enough. */
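
A note on the two assignments after the transform call: a forward AV_TX_FLOAT_RDFT over 2 * nb_freq real samples yields nb_freq + 1 complex bins, with DC in bin 0 and the Nyquist component in bin nb_freq, whereas the old av_rdft_calc() packed the Nyquist value into the imaginary slot of bin 0. Folding it back reproduces that packed layout, so the drawing loop in the next hunk keeps indexing nb_freq bins per channel. Schematically (editorial sketch, not part of the commit):

/* forward RDFT of N = 2 * nb_freq real samples:
 *   out[0]            = { DC, 0 }
 *   out[1 .. N/2 - 1] = { re, im }   regular bins
 *   out[N/2]          = { Nyquist, 0 }
 * repacked into the avfft-style layout the display code expects: */
out[0].im       = out[nb_freq].re;   /* bin 0 now holds { DC, Nyquist } */
out[nb_freq].re = 0;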
@@ -1160,8 +1171,8 @@ static void video_audio_display(VideoState *s)
                 pixels += pitch * s->height;
                 for (y = 0; y < s->height; y++) {
                     double w = 1 / sqrt(nb_freq);
-                    int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1]));
-                    int b = (nb_display_channels == 2 ) ? sqrt(w * hypot(data[1][2 * y + 0], data[1][2 * y + 1]))
+                    int a = sqrt(w * sqrt(data[0][y].re * data[0][y].re + data[0][y].im * data[0][y].im));
+                    int b = (nb_display_channels == 2 ) ? sqrt(w * hypot(data[1][y].re, data[1][y].im))
                                        : a;
                     a = FFMIN(a, 255);
                     b = FFMIN(b, 255);
@@ -1197,7 +1208,8 @@ static void stream_component_close(VideoState *is, int stream_index)
         is->audio_buf = NULL;
         if (is->rdft) {
-            av_rdft_end(is->rdft);
+            av_tx_uninit(&is->rdft);
+            av_freep(&is->real_data);
             av_freep(&is->rdft_data);
             is->rdft = NULL;
             is->rdft_bits = 0;