1
0
mirror of https://github.com/FFmpeg/FFmpeg.git synced 2024-12-23 12:43:46 +02:00

Merge remote-tracking branch 'qatar/master'

* qatar/master:
  libvpxenc: use the default bitrate if not set
  utvideo: Rename utvideo.c to utvideodec.c
  doc: Fix syntax errors in sample Emacs config
  mjpegdec: more meaningful return values
  configure: clean up Altivec detection
  getopt: Remove an unnecessary define
  rtmp: Use int instead of ssize_t
  getopt: Add missing includes
  rtmp: Add support for receiving incoming streams
  Add missing includes for code relying on external libraries

Conflicts:
	libavcodec/libopenjpegenc.c
	libavcodec/libvpxenc.c
	libavcodec/mjpegdec.c
	libavformat/version.h

Merged-by: Michael Niedermayer <michaelni@gmx.at>
This commit is contained in:
Michael Niedermayer 2012-08-17 17:06:07 +02:00
commit fa3fde168b
16 changed files with 614 additions and 85 deletions

View File

@ -30,7 +30,8 @@
* in the public domain. * in the public domain.
*/ */
#define EOF (-1) #include <stdio.h>
#include <string.h>
static int opterr = 1; static int opterr = 1;
static int optind = 1; static int optind = 1;

11
configure vendored
View File

@ -3194,17 +3194,14 @@ elif enabled ppc; then
check_cc <<EOF || disable altivec check_cc <<EOF || disable altivec
$inc_altivec_h $inc_altivec_h
int main(void) { int main(void) {
vector signed int v1, v2, v3; vector signed int v1 = (vector signed int) { 0 };
v1 = vec_add(v2,v3); vector signed int v2 = (vector signed int) { 1 };
v1 = vec_add(v1, v2);
return 0; return 0;
} }
EOF EOF
# check if our compiler supports braces for vector declarations enabled altivec || warn "Altivec disabled, possibly missing --cpu flag"
check_cc <<EOF || die "You need a compiler that supports {} in AltiVec vector declarations."
$inc_altivec_h
int main (void) { (vector int) {1}; return 0; }
EOF
fi fi
elif enabled sparc; then elif enabled sparc; then

View File

@ -204,8 +204,8 @@ For Emacs, add these roughly equivalent lines to your @file{.emacs.d/init.el}:
(c-add-style "ffmpeg" (c-add-style "ffmpeg"
'("k&r" '("k&r"
(c-basic-offset . 4) (c-basic-offset . 4)
(indent-tabs-mode nil) (indent-tabs-mode . nil)
(show-trailing-whitespace t) (show-trailing-whitespace . t)
(c-offsets-alist (c-offsets-alist
(statement-cont . (c-lineup-assignments +))) (statement-cont . (c-lineup-assignments +)))
) )

View File

@ -218,6 +218,11 @@ application specified in @var{app}, may be prefixed by "mp4:". You
can override the value parsed from the URI through the @code{rtmp_playpath} can override the value parsed from the URI through the @code{rtmp_playpath}
option, too. option, too.
@item listen
Act as a server, listening for an incoming connection.
@item timeout
Maximum time to wait for the incoming connection. Implies listen.
@end table @end table
Additionally, the following parameters can be set via command line options Additionally, the following parameters can be set via command line options

View File

@ -431,7 +431,7 @@ OBJS-$(CONFIG_TTA_DECODER) += tta.o
OBJS-$(CONFIG_TWINVQ_DECODER) += twinvq.o celp_math.o OBJS-$(CONFIG_TWINVQ_DECODER) += twinvq.o celp_math.o
OBJS-$(CONFIG_TXD_DECODER) += txd.o s3tc.o OBJS-$(CONFIG_TXD_DECODER) += txd.o s3tc.o
OBJS-$(CONFIG_ULTI_DECODER) += ulti.o OBJS-$(CONFIG_ULTI_DECODER) += ulti.o
OBJS-$(CONFIG_UTVIDEO_DECODER) += utvideo.o OBJS-$(CONFIG_UTVIDEO_DECODER) += utvideodec.o
OBJS-$(CONFIG_V210_DECODER) += v210dec.o OBJS-$(CONFIG_V210_DECODER) += v210dec.o
OBJS-$(CONFIG_V210_ENCODER) += v210enc.o OBJS-$(CONFIG_V210_ENCODER) += v210enc.o
OBJS-$(CONFIG_V308_DECODER) += v308dec.o OBJS-$(CONFIG_V308_DECODER) += v308dec.o

View File

@ -27,6 +27,7 @@
#define OPJ_STATIC #define OPJ_STATIC
#include <openjpeg.h> #include <openjpeg.h>
#include "libavutil/common.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/imgutils.h" #include "libavutil/imgutils.h"
#include "libavutil/pixfmt.h" #include "libavutil/pixfmt.h"

View File

@ -28,6 +28,7 @@
#include <openjpeg.h> #include <openjpeg.h>
#include "libavutil/avassert.h" #include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h" #include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/opt.h" #include "libavutil/opt.h"

View File

@ -24,6 +24,7 @@
*/ */
#include "libschroedinger.h" #include "libschroedinger.h"
#include "libavutil/mem.h"
static const SchroVideoFormatInfo ff_schro_video_format_info[] = { static const SchroVideoFormatInfo ff_schro_video_format_info[] = {
{ 640, 480, 24000, 1001}, { 640, 480, 24000, 1001},

View File

@ -27,8 +27,12 @@
* (http://dirac.sourceforge.net/specification.html). * (http://dirac.sourceforge.net/specification.html).
*/ */
#include <string.h>
#include "libavutil/imgutils.h" #include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"
#include "avcodec.h" #include "avcodec.h"
#include "libschroedinger.h" #include "libschroedinger.h"

View File

@ -28,6 +28,8 @@
#include <xavs.h> #include <xavs.h>
#include "avcodec.h" #include "avcodec.h"
#include "internal.h" #include "internal.h"
#include "libavutil/internal.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h" #include "libavutil/opt.h"
#define END_OF_STREAM 0x001 #define END_OF_STREAM 0x001

View File

@ -158,18 +158,19 @@ int ff_mjpeg_decode_dht(MJpegDecodeContext *s)
int len, index, i, class, n, v, code_max; int len, index, i, class, n, v, code_max;
uint8_t bits_table[17]; uint8_t bits_table[17];
uint8_t val_table[256]; uint8_t val_table[256];
int ret = 0;
len = get_bits(&s->gb, 16) - 2; len = get_bits(&s->gb, 16) - 2;
while (len > 0) { while (len > 0) {
if (len < 17) if (len < 17)
return -1; return AVERROR_INVALIDDATA;
class = get_bits(&s->gb, 4); class = get_bits(&s->gb, 4);
if (class >= 2) if (class >= 2)
return -1; return AVERROR_INVALIDDATA;
index = get_bits(&s->gb, 4); index = get_bits(&s->gb, 4);
if (index >= 4) if (index >= 4)
return -1; return AVERROR_INVALIDDATA;
n = 0; n = 0;
for (i = 1; i <= 16; i++) { for (i = 1; i <= 16; i++) {
bits_table[i] = get_bits(&s->gb, 8); bits_table[i] = get_bits(&s->gb, 8);
@ -177,7 +178,7 @@ int ff_mjpeg_decode_dht(MJpegDecodeContext *s)
} }
len -= 17; len -= 17;
if (len < n || n > 256) if (len < n || n > 256)
return -1; return AVERROR_INVALIDDATA;
code_max = 0; code_max = 0;
for (i = 0; i < n; i++) { for (i = 0; i < n; i++) {
@ -192,15 +193,15 @@ int ff_mjpeg_decode_dht(MJpegDecodeContext *s)
ff_free_vlc(&s->vlcs[class][index]); ff_free_vlc(&s->vlcs[class][index]);
av_log(s->avctx, AV_LOG_DEBUG, "class=%d index=%d nb_codes=%d\n", av_log(s->avctx, AV_LOG_DEBUG, "class=%d index=%d nb_codes=%d\n",
class, index, code_max + 1); class, index, code_max + 1);
if (build_vlc(&s->vlcs[class][index], bits_table, val_table, if ((ret = build_vlc(&s->vlcs[class][index], bits_table, val_table,
code_max + 1, 0, class > 0) < 0) code_max + 1, 0, class > 0)) < 0)
return -1; return ret;
if (class > 0) { if (class > 0) {
ff_free_vlc(&s->vlcs[2][index]); ff_free_vlc(&s->vlcs[2][index]);
if (build_vlc(&s->vlcs[2][index], bits_table, val_table, if ((ret = build_vlc(&s->vlcs[2][index], bits_table, val_table,
code_max + 1, 0, 0) < 0) code_max + 1, 0, 0)) < 0)
return -1; return ret;
} }
} }
return 0; return 0;
@ -241,7 +242,7 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
av_log(s->avctx, AV_LOG_DEBUG, "sof0: picture: %dx%d\n", width, height); av_log(s->avctx, AV_LOG_DEBUG, "sof0: picture: %dx%d\n", width, height);
if (av_image_check_size(width, height, 0, s->avctx)) if (av_image_check_size(width, height, 0, s->avctx))
return -1; return AVERROR_INVALIDDATA;
nb_components = get_bits(&s->gb, 8); nb_components = get_bits(&s->gb, 8);
if (nb_components <= 0 || if (nb_components <= 0 ||
@ -254,9 +255,10 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
} }
} }
if (s->ls && !(s->bits <= 8 || nb_components == 1)) { if (s->ls && !(s->bits <= 8 || nb_components == 1)) {
av_log(s->avctx, AV_LOG_ERROR, av_log_missing_feature(s->avctx,
"only <= 8 bits/component or 16-bit gray accepted for JPEG-LS\n"); "only <= 8 bits/component or "
return -1; "16-bit gray accepted for JPEG-LS\n", 0);
return AVERROR_PATCHWELCOME;
} }
s->nb_components = nb_components; s->nb_components = nb_components;
s->h_max = 1; s->h_max = 1;
@ -277,16 +279,16 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
} }
s->quant_index[i] = get_bits(&s->gb, 8); s->quant_index[i] = get_bits(&s->gb, 8);
if (s->quant_index[i] >= 4) if (s->quant_index[i] >= 4)
return -1; return AVERROR_INVALIDDATA;
av_log(s->avctx, AV_LOG_DEBUG, "component %d %d:%d id: %d quant:%d\n", av_log(s->avctx, AV_LOG_DEBUG, "component %d %d:%d id: %d quant:%d\n",
i, s->h_count[i], s->v_count[i], i, s->h_count[i], s->v_count[i],
s->component_id[i], s->quant_index[i]); s->component_id[i], s->quant_index[i]);
} }
if (s->ls && (s->h_max > 1 || s->v_max > 1)) { if (s->ls && (s->h_max > 1 || s->v_max > 1)) {
av_log(s->avctx, AV_LOG_ERROR, av_log_missing_feature(s->avctx,
"Subsampling in JPEG-LS is not supported.\n"); "Subsampling in JPEG-LS is not supported.\n", 0);
return -1; return AVERROR_PATCHWELCOME;
} }
if (s->v_max == 1 && s->h_max == 1 && s->lossless==1 && nb_components==3) if (s->v_max == 1 && s->h_max == 1 && s->lossless==1 && nb_components==3)
@ -404,7 +406,7 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
break; break;
default: default:
av_log(s->avctx, AV_LOG_ERROR, "Unhandled pixel format 0x%x\n", pix_fmt_id); av_log(s->avctx, AV_LOG_ERROR, "Unhandled pixel format 0x%x\n", pix_fmt_id);
return -1; return AVERROR_PATCHWELCOME;
} }
if ((s->upscale_h || s->upscale_v) && s->avctx->lowres) { if ((s->upscale_h || s->upscale_v) && s->avctx->lowres) {
av_log(s->avctx, AV_LOG_ERROR, "lowres not supported for weird subsampling\n"); av_log(s->avctx, AV_LOG_ERROR, "lowres not supported for weird subsampling\n");
@ -486,7 +488,7 @@ static int decode_block(MJpegDecodeContext *s, DCTELEM *block, int component,
val = mjpeg_decode_dc(s, dc_index); val = mjpeg_decode_dc(s, dc_index);
if (val == 0xffff) { if (val == 0xffff) {
av_log(s->avctx, AV_LOG_ERROR, "error dc\n"); av_log(s->avctx, AV_LOG_ERROR, "error dc\n");
return -1; return AVERROR_INVALIDDATA;
} }
val = val * quant_matrix[0] + s->last_dc[component]; val = val * quant_matrix[0] + s->last_dc[component];
s->last_dc[component] = val; s->last_dc[component] = val;
@ -514,7 +516,7 @@ static int decode_block(MJpegDecodeContext *s, DCTELEM *block, int component,
if (i > 63) { if (i > 63) {
av_log(s->avctx, AV_LOG_ERROR, "error count: %d\n", i); av_log(s->avctx, AV_LOG_ERROR, "error count: %d\n", i);
return -1; return AVERROR_INVALIDDATA;
} }
j = s->scantable.permutated[i]; j = s->scantable.permutated[i];
block[j] = level * quant_matrix[j]; block[j] = level * quant_matrix[j];
@ -534,7 +536,7 @@ static int decode_dc_progressive(MJpegDecodeContext *s, DCTELEM *block,
val = mjpeg_decode_dc(s, dc_index); val = mjpeg_decode_dc(s, dc_index);
if (val == 0xffff) { if (val == 0xffff) {
av_log(s->avctx, AV_LOG_ERROR, "error dc\n"); av_log(s->avctx, AV_LOG_ERROR, "error dc\n");
return -1; return AVERROR_INVALIDDATA;
} }
val = (val * quant_matrix[0] << Al) + s->last_dc[component]; val = (val * quant_matrix[0] << Al) + s->last_dc[component];
s->last_dc[component] = val; s->last_dc[component] = val;
@ -583,7 +585,7 @@ static int decode_block_progressive(MJpegDecodeContext *s, DCTELEM *block,
break; break;
} }
av_log(s->avctx, AV_LOG_ERROR, "error count: %d\n", i); av_log(s->avctx, AV_LOG_ERROR, "error count: %d\n", i);
return -1; return AVERROR_INVALIDDATA;
} }
j = s->scantable.permutated[i]; j = s->scantable.permutated[i];
block[j] = level * quant_matrix[j] << Al; block[j] = level * quant_matrix[j] << Al;
@ -592,7 +594,7 @@ static int decode_block_progressive(MJpegDecodeContext *s, DCTELEM *block,
i += 15; i += 15;
if (i >= se) { if (i >= se) {
av_log(s->avctx, AV_LOG_ERROR, "ZRL overflow: %d\n", i); av_log(s->avctx, AV_LOG_ERROR, "ZRL overflow: %d\n", i);
return -1; return AVERROR_INVALIDDATA;
} }
} else { } else {
val = (1 << run); val = (1 << run);
@ -1001,7 +1003,7 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah,
if (get_bits_left(&s->gb) < 0) { if (get_bits_left(&s->gb) < 0) {
av_log(s->avctx, AV_LOG_ERROR, "overread %d\n", av_log(s->avctx, AV_LOG_ERROR, "overread %d\n",
-get_bits_left(&s->gb)); -get_bits_left(&s->gb));
return -1; return AVERROR_INVALIDDATA;
} }
for (i = 0; i < nb_components; i++) { for (i = 0; i < nb_components; i++) {
uint8_t *ptr; uint8_t *ptr;
@ -1031,7 +1033,7 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah,
s->quant_matrixes[s->quant_index[c]]) < 0) { s->quant_matrixes[s->quant_index[c]]) < 0) {
av_log(s->avctx, AV_LOG_ERROR, av_log(s->avctx, AV_LOG_ERROR,
"error y=%d x=%d\n", mb_y, mb_x); "error y=%d x=%d\n", mb_y, mb_x);
return -1; return AVERROR_INVALIDDATA;
} }
s->dsp.idct_put(ptr, linesize[c], s->block); s->dsp.idct_put(ptr, linesize[c], s->block);
} }
@ -1047,7 +1049,7 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah,
Al) < 0) { Al) < 0) {
av_log(s->avctx, AV_LOG_ERROR, av_log(s->avctx, AV_LOG_ERROR,
"error y=%d x=%d\n", mb_y, mb_x); "error y=%d x=%d\n", mb_y, mb_x);
return -1; return AVERROR_INVALIDDATA;
} }
} }
// av_log(s->avctx, AV_LOG_DEBUG, "mb: %d %d processed\n", // av_log(s->avctx, AV_LOG_DEBUG, "mb: %d %d processed\n",
@ -1124,7 +1126,7 @@ static int mjpeg_decode_scan_progressive_ac(MJpegDecodeContext *s, int ss,
if (ret < 0) { if (ret < 0) {
av_log(s->avctx, AV_LOG_ERROR, av_log(s->avctx, AV_LOG_ERROR,
"error y=%d x=%d\n", mb_y, mb_x); "error y=%d x=%d\n", mb_y, mb_x);
return -1; return AVERROR_INVALIDDATA;
} }
if (last_scan) { if (last_scan) {
@ -1140,7 +1142,7 @@ int ff_mjpeg_decode_sos(MJpegDecodeContext *s, const uint8_t *mb_bitmask,
const AVFrame *reference) const AVFrame *reference)
{ {
int len, nb_components, i, h, v, predictor, point_transform; int len, nb_components, i, h, v, predictor, point_transform;
int index, id; int index, id, ret;
const int block_size = s->lossless ? 1 : 8; const int block_size = s->lossless ? 1 : 8;
int ilv, prev_shift; int ilv, prev_shift;
@ -1157,11 +1159,11 @@ int ff_mjpeg_decode_sos(MJpegDecodeContext *s, const uint8_t *mb_bitmask,
if (nb_components == 0 || nb_components > MAX_COMPONENTS) { if (nb_components == 0 || nb_components > MAX_COMPONENTS) {
av_log(s->avctx, AV_LOG_ERROR, av_log(s->avctx, AV_LOG_ERROR,
"decode_sos: nb_components (%d) unsupported\n", nb_components); "decode_sos: nb_components (%d) unsupported\n", nb_components);
return -1; return AVERROR_PATCHWELCOME;
} }
if (len != 6 + 2 * nb_components) { if (len != 6 + 2 * nb_components) {
av_log(s->avctx, AV_LOG_ERROR, "decode_sos: invalid len (%d)\n", len); av_log(s->avctx, AV_LOG_ERROR, "decode_sos: invalid len (%d)\n", len);
return -1; return AVERROR_INVALIDDATA;
} }
for (i = 0; i < nb_components; i++) { for (i = 0; i < nb_components; i++) {
id = get_bits(&s->gb, 8) - 1; id = get_bits(&s->gb, 8) - 1;
@ -1173,7 +1175,7 @@ int ff_mjpeg_decode_sos(MJpegDecodeContext *s, const uint8_t *mb_bitmask,
if (index == s->nb_components) { if (index == s->nb_components) {
av_log(s->avctx, AV_LOG_ERROR, av_log(s->avctx, AV_LOG_ERROR,
"decode_sos: index(%d) out of components\n", index); "decode_sos: index(%d) out of components\n", index);
return -1; return AVERROR_INVALIDDATA;
} }
/* Metasoft MJPEG codec has Cb and Cr swapped */ /* Metasoft MJPEG codec has Cb and Cr swapped */
if (s->avctx->codec_tag == MKTAG('M', 'T', 'S', 'J') if (s->avctx->codec_tag == MKTAG('M', 'T', 'S', 'J')
@ -1243,27 +1245,30 @@ next_field:
// for () { // for () {
// reset_ls_coding_parameters(s, 0); // reset_ls_coding_parameters(s, 0);
if (ff_jpegls_decode_picture(s, predictor, point_transform, ilv) < 0) if ((ret = ff_jpegls_decode_picture(s, predictor,
return -1; point_transform, ilv)) < 0)
return ret;
} else { } else {
if (s->rgb) { if (s->rgb) {
if (ljpeg_decode_rgb_scan(s, nb_components, predictor, point_transform) < 0) if ((ret = ljpeg_decode_rgb_scan(s, nb_components, predictor, point_transform)) < 0)
return -1; return ret;
} else { } else {
if (ljpeg_decode_yuv_scan(s, predictor, point_transform) < 0) if ((ret = ljpeg_decode_yuv_scan(s, predictor, point_transform)) < 0)
return -1; return ret;
} }
} }
} else { } else {
if (s->progressive && predictor) { if (s->progressive && predictor) {
av_assert0(s->picture_ptr == &s->picture); av_assert0(s->picture_ptr == &s->picture);
if (mjpeg_decode_scan_progressive_ac(s, predictor, ilv, prev_shift, if ((ret = mjpeg_decode_scan_progressive_ac(s, predictor,
point_transform) < 0) ilv, prev_shift,
return -1; point_transform)) < 0)
return ret;
} else { } else {
if (mjpeg_decode_scan(s, nb_components, prev_shift, point_transform, if ((ret = mjpeg_decode_scan(s, nb_components,
mb_bitmask, reference) < 0) prev_shift, point_transform,
return -1; mb_bitmask, reference)) < 0)
return ret;
} }
} }
if(s->interlaced && get_bits_left(&s->gb) > 32 && show_bits(&s->gb, 8) == 0xFF) { if(s->interlaced && get_bits_left(&s->gb) > 32 && show_bits(&s->gb, 8) == 0xFF) {
@ -1283,13 +1288,13 @@ next_field:
return 0; return 0;
out_of_range: out_of_range:
av_log(s->avctx, AV_LOG_ERROR, "decode_sos: ac/dc index out of range\n"); av_log(s->avctx, AV_LOG_ERROR, "decode_sos: ac/dc index out of range\n");
return -1; return AVERROR_INVALIDDATA;
} }
static int mjpeg_decode_dri(MJpegDecodeContext *s) static int mjpeg_decode_dri(MJpegDecodeContext *s)
{ {
if (get_bits(&s->gb, 16) != 4) if (get_bits(&s->gb, 16) != 4)
return -1; return AVERROR_INVALIDDATA;
s->restart_interval = get_bits(&s->gb, 16); s->restart_interval = get_bits(&s->gb, 16);
s->restart_count = 0; s->restart_count = 0;
av_log(s->avctx, AV_LOG_DEBUG, "restart interval: %d\n", av_log(s->avctx, AV_LOG_DEBUG, "restart interval: %d\n",
@ -1304,9 +1309,9 @@ static int mjpeg_decode_app(MJpegDecodeContext *s)
len = get_bits(&s->gb, 16); len = get_bits(&s->gb, 16);
if (len < 5) if (len < 5)
return -1; return AVERROR_INVALIDDATA;
if (8 * len > get_bits_left(&s->gb)) if (8 * len > get_bits_left(&s->gb))
return -1; return AVERROR_INVALIDDATA;
id = get_bits_long(&s->gb, 32); id = get_bits_long(&s->gb, 32);
id = av_be2ne32(id); id = av_be2ne32(id);
@ -1596,6 +1601,7 @@ int ff_mjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
int unescaped_buf_size; int unescaped_buf_size;
int start_code; int start_code;
int i, index; int i, index;
int ret = 0;
AVFrame *picture = data; AVFrame *picture = data;
s->got_picture = 0; // picture from previous image can not be reused s->got_picture = 0; // picture from previous image can not be reused
@ -1644,9 +1650,9 @@ int ff_mjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
ff_mjpeg_decode_dqt(s); ff_mjpeg_decode_dqt(s);
break; break;
case DHT: case DHT:
if (ff_mjpeg_decode_dht(s) < 0) { if ((ret = ff_mjpeg_decode_dht(s)) < 0) {
av_log(avctx, AV_LOG_ERROR, "huffman table decode error\n"); av_log(avctx, AV_LOG_ERROR, "huffman table decode error\n");
return -1; return ret;
} }
break; break;
case SOF0: case SOF0:
@ -1654,33 +1660,34 @@ int ff_mjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
s->lossless = 0; s->lossless = 0;
s->ls = 0; s->ls = 0;
s->progressive = 0; s->progressive = 0;
if (ff_mjpeg_decode_sof(s) < 0) if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return -1; return ret;
break; break;
case SOF2: case SOF2:
s->lossless = 0; s->lossless = 0;
s->ls = 0; s->ls = 0;
s->progressive = 1; s->progressive = 1;
if (ff_mjpeg_decode_sof(s) < 0) if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return -1; return ret;
break; break;
case SOF3: case SOF3:
s->lossless = 1; s->lossless = 1;
s->ls = 0; s->ls = 0;
s->progressive = 0; s->progressive = 0;
if (ff_mjpeg_decode_sof(s) < 0) if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return -1; return ret;
break; break;
case SOF48: case SOF48:
s->lossless = 1; s->lossless = 1;
s->ls = 1; s->ls = 1;
s->progressive = 0; s->progressive = 0;
if (ff_mjpeg_decode_sof(s) < 0) if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return -1; return ret;
break; break;
case LSE: case LSE:
if (!CONFIG_JPEGLS_DECODER || ff_jpegls_decode_lse(s) < 0) if (!CONFIG_JPEGLS_DECODER ||
return -1; (ret = ff_jpegls_decode_lse(s)) < 0)
return ret;
break; break;
case EOI: case EOI:
eoi_parser: eoi_parser:
@ -1715,9 +1722,9 @@ eoi_parser:
goto the_end; goto the_end;
case SOS: case SOS:
if (ff_mjpeg_decode_sos(s, NULL, NULL) < 0 && if ((ret = ff_mjpeg_decode_sos(s, NULL, NULL)) < 0 &&
(avctx->err_recognition & AV_EF_EXPLODE)) (avctx->err_recognition & AV_EF_EXPLODE))
return AVERROR_INVALIDDATA; return ret;
break; break;
case DRI: case DRI:
mjpeg_decode_dri(s); mjpeg_decode_dri(s);
@ -1752,7 +1759,7 @@ eoi_parser:
goto eoi_parser; goto eoi_parser;
} }
av_log(avctx, AV_LOG_FATAL, "No JPEG data found in image\n"); av_log(avctx, AV_LOG_FATAL, "No JPEG data found in image\n");
return -1; return AVERROR_INVALIDDATA;
the_end: the_end:
if (s->upscale_h) { if (s->upscale_h) {
uint8_t *line = s->picture_ptr->data[s->upscale_h]; uint8_t *line = s->picture_ptr->data[s->upscale_h];

View File

@ -2,20 +2,20 @@
* Ut Video decoder * Ut Video decoder
* Copyright (c) 2011 Konstantin Shishkov * Copyright (c) 2011 Konstantin Shishkov
* *
* This file is part of Libav. * This file is part of FFmpeg.
* *
* Libav is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either * License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version. * version 2.1 of the License, or (at your option) any later version.
* *
* Libav is distributed in the hope that it will be useful, * FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. * Lesser General Public License for more details.
* *
* You should have received a copy of the GNU Lesser General Public * You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software * License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/ */

View File

@ -26,9 +26,15 @@
#include <dlfcn.h> #include <dlfcn.h>
#include <frei0r.h> #include <frei0r.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "config.h"
#include "libavutil/avstring.h" #include "libavutil/avstring.h"
#include "libavutil/imgutils.h" #include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/mathematics.h" #include "libavutil/mathematics.h"
#include "libavutil/mem.h"
#include "libavutil/parseutils.h" #include "libavutil/parseutils.h"
#include "avfilter.h" #include "avfilter.h"
#include "formats.h" #include "formats.h"

View File

@ -28,6 +28,7 @@
#include <opencv/cv.h> #include <opencv/cv.h>
#include <opencv/cxcore.h> #include <opencv/cxcore.h>
#include "libavutil/avstring.h" #include "libavutil/avstring.h"
#include "libavutil/common.h"
#include "libavutil/file.h" #include "libavutil/file.h"
#include "avfilter.h" #include "avfilter.h"
#include "formats.h" #include "formats.h"

View File

@ -29,6 +29,7 @@
#include "libavutil/intfloat.h" #include "libavutil/intfloat.h"
#include "libavutil/lfg.h" #include "libavutil/lfg.h"
#include "libavutil/opt.h" #include "libavutil/opt.h"
#include "libavutil/random_seed.h"
#include "libavutil/sha.h" #include "libavutil/sha.h"
#include "avformat.h" #include "avformat.h"
#include "internal.h" #include "internal.h"
@ -51,6 +52,7 @@
#define PLAYPATH_MAX_LENGTH 256 #define PLAYPATH_MAX_LENGTH 256
#define TCURL_MAX_LENGTH 512 #define TCURL_MAX_LENGTH 512
#define FLASHVER_MAX_LENGTH 64 #define FLASHVER_MAX_LENGTH 64
#define RTMP_PKTDATA_DEFAULT_SIZE 4096
/** RTMP protocol handler state */ /** RTMP protocol handler state */
typedef enum { typedef enum {
@ -59,6 +61,7 @@ typedef enum {
STATE_FCPUBLISH, ///< client FCPublishing stream (for output) STATE_FCPUBLISH, ///< client FCPublishing stream (for output)
STATE_PLAYING, ///< client has started receiving multimedia data from server STATE_PLAYING, ///< client has started receiving multimedia data from server
STATE_PUBLISHING, ///< client has started sending multimedia data to server (for output) STATE_PUBLISHING, ///< client has started sending multimedia data to server (for output)
STATE_RECEIVING, ///< received a publish command (for input)
STATE_STOPPED, ///< the broadcast has been stopped STATE_STOPPED, ///< the broadcast has been stopped
} ClientState; } ClientState;
@ -110,6 +113,9 @@ typedef struct RTMPContext {
TrackedMethod*tracked_methods; ///< tracked methods buffer TrackedMethod*tracked_methods; ///< tracked methods buffer
int nb_tracked_methods; ///< number of tracked methods int nb_tracked_methods; ///< number of tracked methods
int tracked_methods_size; ///< size of the tracked methods buffer int tracked_methods_size; ///< size of the tracked methods buffer
int listen; ///< listen mode flag
int listen_timeout; ///< listen timeout to wait for new connections
int nb_streamid; ///< The next stream id to return on createStream calls
} RTMPContext; } RTMPContext;
#define PLAYER_KEY_OPEN_PART_LEN 30 ///< length of partial key used for first client digest signing #define PLAYER_KEY_OPEN_PART_LEN 30 ///< length of partial key used for first client digest signing
@ -377,6 +383,151 @@ static int gen_connect(URLContext *s, RTMPContext *rt)
return rtmp_send_packet(rt, &pkt, 1); return rtmp_send_packet(rt, &pkt, 1);
} }
/**
 * Server side of the RTMP "connect" exchange: read the client's connect
 * invoke and reply with the standard server bootstrap sequence
 * (Window Ack Size, Peer Bandwidth, Stream Begin ping, chunk size,
 * a "_result" NetConnection.Connect.Success invoke, and onBWDone).
 *
 * @param s  URL context used for logging
 * @param rt RTMP protocol context (stream, chunk sizes, prev_pkt cache)
 * @return 0 on success, a negative AVERROR code on failure
 */
static int read_connect(URLContext *s, RTMPContext *rt)
{
    RTMPPacket pkt = { 0 };
    uint8_t *p;
    const uint8_t *cp;
    int ret;
    char command[64];
    int stringlen;
    double seqnum;
    uint8_t tmpstr[256];
    GetByteContext gbc;

    /* Read the client's invoke packet; prev_pkt[1] caches incoming headers. */
    if ((ret = ff_rtmp_packet_read(rt->stream, &pkt, rt->in_chunk_size,
                                   rt->prev_pkt[1])) < 0)
        return ret;
    cp = pkt.data;
    bytestream2_init(&gbc, cp, pkt.data_size);
    /* First AMF value must be the command name string. */
    if (ff_amf_read_string(&gbc, command, sizeof(command), &stringlen)) {
        av_log(s, AV_LOG_ERROR, "Unable to read command string\n");
        ff_rtmp_packet_destroy(&pkt);
        return AVERROR_INVALIDDATA;
    }
    if (strcmp(command, "connect")) {
        av_log(s, AV_LOG_ERROR, "Expecting connect, got %s\n", command);
        ff_rtmp_packet_destroy(&pkt);
        return AVERROR_INVALIDDATA;
    }
    /* Transaction id of the connect call; echoed back in the _result below. */
    ret = ff_amf_read_number(&gbc, &seqnum);
    if (ret)
        av_log(s, AV_LOG_WARNING, "SeqNum not found\n");
    /* Here one could parse an AMF Object with data as flashVers and others. */
    ret = ff_amf_get_field_value(gbc.buffer,
                                 gbc.buffer + bytestream2_get_bytes_left(&gbc),
                                 "app", tmpstr, sizeof(tmpstr));
    if (ret)
        av_log(s, AV_LOG_WARNING, "App field not found in connect\n");
    /* Mismatch is only warned about, not rejected. */
    if (!ret && strcmp(tmpstr, rt->app))
        av_log(s, AV_LOG_WARNING, "App field don't match up: %s <-> %s\n",
               tmpstr, rt->app);
    ff_rtmp_packet_destroy(&pkt);

    // Send Window Acknowledgement Size (as defined in the specification)
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_NETWORK_CHANNEL,
                                     RTMP_PT_SERVER_BW, 0, 4)) < 0)
        return ret;
    p = pkt.data;
    bytestream_put_be32(&p, rt->server_bw);
    pkt.data_size = p - pkt.data;
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);
    if (ret < 0)
        return ret;
    // Send Peer Bandwidth
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_NETWORK_CHANNEL,
                                     RTMP_PT_CLIENT_BW, 0, 5)) < 0)
        return ret;
    p = pkt.data;
    bytestream_put_be32(&p, rt->server_bw);
    bytestream_put_byte(&p, 2); // limit type 2 = dynamic
    pkt.data_size = p - pkt.data;
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);
    if (ret < 0)
        return ret;

    // Ping request
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_NETWORK_CHANNEL,
                                     RTMP_PT_PING, 0, 6)) < 0)
        return ret;

    p = pkt.data;
    bytestream_put_be16(&p, 0); // 0 -> Stream Begin
    bytestream_put_be32(&p, 0); // stream id 0 (the NetConnection stream)
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);
    if (ret < 0)
        return ret;

    // Chunk size
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_SYSTEM_CHANNEL,
                                     RTMP_PT_CHUNK_SIZE, 0, 4)) < 0)
        return ret;

    p = pkt.data;
    bytestream_put_be32(&p, rt->out_chunk_size);
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);
    if (ret < 0)
        return ret;

    // Send _result NetConnection.Connect.Success to answer the connect
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_SYSTEM_CHANNEL,
                                     RTMP_PT_INVOKE, 0,
                                     RTMP_PKTDATA_DEFAULT_SIZE)) < 0)
        return ret;

    p = pkt.data;
    ff_amf_write_string(&p, "_result");
    ff_amf_write_number(&p, seqnum); // echo the client's transaction id
    /* First object: server properties. */
    ff_amf_write_object_start(&p);
    ff_amf_write_field_name(&p, "fmsVer");
    ff_amf_write_string(&p, "FMS/3,0,1,123");
    ff_amf_write_field_name(&p, "capabilities");
    ff_amf_write_number(&p, 31);
    ff_amf_write_object_end(&p);
    /* Second object: connect status information. */
    ff_amf_write_object_start(&p);
    ff_amf_write_field_name(&p, "level");
    ff_amf_write_string(&p, "status");
    ff_amf_write_field_name(&p, "code");
    ff_amf_write_string(&p, "NetConnection.Connect.Success");
    ff_amf_write_field_name(&p, "description");
    ff_amf_write_string(&p, "Connection succeeded.");
    ff_amf_write_field_name(&p, "objectEncoding");
    ff_amf_write_number(&p, 0); // AMF0
    ff_amf_write_object_end(&p);

    pkt.data_size = p - pkt.data;
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);
    if (ret < 0)
        return ret;

    /* onBWDone(0, null, 8192): tell the client bandwidth detection is done. */
    if ((ret = ff_rtmp_packet_create(&pkt, RTMP_SYSTEM_CHANNEL,
                                     RTMP_PT_INVOKE, 0, 30)) < 0)
        return ret;
    p = pkt.data;
    ff_amf_write_string(&p, "onBWDone");
    ff_amf_write_number(&p, 0);
    ff_amf_write_null(&p);
    ff_amf_write_number(&p, 8192);
    pkt.data_size = p - pkt.data;
    ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&pkt);

    return ret;
}
/** /**
* Generate 'releaseStream' call and send it to the server. It should make * Generate 'releaseStream' call and send it to the server. It should make
* the server release some channel for media streams. * the server release some channel for media streams.
@ -1138,6 +1289,123 @@ static int rtmp_handshake(URLContext *s, RTMPContext *rt)
return 0; return 0;
} }
/**
 * Read one 1536-byte handshake packet (C1 or C2) from the peer and
 * extract its two leading big-endian 32-bit header fields.
 *
 * @param rt         RTMP protocol context providing the stream
 * @param first_int  receives bytes 0-3 (timestamp/epoch field)
 * @param second_int receives bytes 4-7 (time2/zero field)
 * @param arraydata  caller buffer that receives the raw packet
 * @param size       buffer size supplied by the caller (currently the
 *                   fixed RTMP_HANDSHAKE_PACKET_SIZE is read regardless)
 * @return 0 on success, AVERROR(EIO)/AVERROR(EINVAL) on failure
 */
static int rtmp_receive_hs_packet(RTMPContext* rt, uint32_t *first_int,
                                  uint32_t *second_int, char *arraydata,
                                  int size)
{
    int nread = ffurl_read_complete(rt->stream, arraydata,
                                    RTMP_HANDSHAKE_PACKET_SIZE);

    if (nread <= 0)
        return AVERROR(EIO);

    if (nread != RTMP_HANDSHAKE_PACKET_SIZE) {
        av_log(rt, AV_LOG_ERROR, "Erroneous Message size %d"
               " not following standard\n", (int)nread);
        return AVERROR(EINVAL);
    }

    *first_int  = AV_RB32(arraydata);
    *second_int = AV_RB32(arraydata + 4);

    return 0;
}
/**
 * Stamp the two leading big-endian 32-bit header fields into a 1536-byte
 * handshake packet (S1 or S2) and send it to the peer.
 *
 * @param rt         RTMP protocol context providing the stream
 * @param first_int  value for bytes 0-3 (timestamp/epoch field)
 * @param second_int value for bytes 4-7 (time2/zero field)
 * @param arraydata  buffer holding the packet payload to send
 * @param size       buffer size supplied by the caller (currently the
 *                   fixed RTMP_HANDSHAKE_PACKET_SIZE is written regardless)
 * @return 0 on success, AVERROR(EIO) on a short or failed write
 */
static int rtmp_send_hs_packet(RTMPContext* rt, uint32_t first_int,
                               uint32_t second_int, char *arraydata, int size)
{
    int inoutsize;

    AV_WB32(arraydata, first_int);
    /* Bug fix: the second field must carry second_int; the old code wrote
     * first_int twice, so S1's time2 field and S2's echoed zero field were
     * both stamped with the epoch instead of the requested value. */
    AV_WB32(arraydata + 4, second_int);
    inoutsize = ffurl_write(rt->stream, arraydata,
                            RTMP_HANDSHAKE_PACKET_SIZE);
    if (inoutsize != RTMP_HANDSHAKE_PACKET_SIZE) {
        av_log(rt, AV_LOG_ERROR, "Unable to write answer\n");
        return AVERROR(EIO);
    }
    return 0;
}
/**
 * Perform the server side of the RTMP handshake:
 * receive C0/C1/C2 from the client and answer with S0/S1/S2.
 *
 * @param s URL context used for logging
 * @param rt RTMP protocol context providing the stream
 * @return 0 on success, a negative AVERROR code on failure
 */
static int rtmp_server_handshake(URLContext *s, RTMPContext *rt)
{
    uint8_t buffer[RTMP_HANDSHAKE_PACKET_SIZE];
    uint32_t hs_epoch;
    uint32_t hs_my_epoch;
    uint8_t hs_c1[RTMP_HANDSHAKE_PACKET_SIZE];
    uint8_t hs_s1[RTMP_HANDSHAKE_PACKET_SIZE];
    uint32_t zeroes;
    uint32_t temp = 0;
    int randomidx = 0;
    int inoutsize = 0;
    int ret;

    inoutsize = ffurl_read_complete(rt->stream, buffer, 1); // Receive C0
    if (inoutsize <= 0) {
        av_log(s, AV_LOG_ERROR, "Unable to read handshake\n");
        return AVERROR(EIO);
    }
    // Check Version
    if (buffer[0] != 3) {
        av_log(s, AV_LOG_ERROR, "RTMP protocol version mismatch\n");
        return AVERROR(EIO);
    }
    if (ffurl_write(rt->stream, buffer, 1) <= 0) { // Send S0
        av_log(s, AV_LOG_ERROR,
               "Unable to write answer - RTMP S0\n");
        return AVERROR(EIO);
    }
    /* Receive C1 */
    ret = rtmp_receive_hs_packet(rt, &hs_epoch, &zeroes, hs_c1,
                                 RTMP_HANDSHAKE_PACKET_SIZE);
    if (ret) {
        av_log(s, AV_LOG_ERROR, "RTMP Handshake C1 Error\n");
        return ret;
    }
    if (zeroes)
        av_log(s, AV_LOG_WARNING, "Erroneous C1 Message zero != 0\n");
    /* Send S1 */
    /* By now same epoch will be sent */
    hs_my_epoch = hs_epoch;
    /* Fill the 1528 random bytes that follow the 8-byte header.
     * Bug fix: the loop bound must be RTMP_HANDSHAKE_PACKET_SIZE - 8;
     * the old bound of RTMP_HANDSHAKE_PACKET_SIZE wrote 8 bytes past
     * the end of hs_s1 (stack buffer overflow). */
    for (randomidx = 0; randomidx < (RTMP_HANDSHAKE_PACKET_SIZE - 8);
         randomidx += 4)
        AV_WB32(hs_s1 + 8 + randomidx, av_get_random_seed());
    ret = rtmp_send_hs_packet(rt, hs_my_epoch, 0, hs_s1,
                              RTMP_HANDSHAKE_PACKET_SIZE);
    if (ret) {
        av_log(s, AV_LOG_ERROR, "RTMP Handshake S1 Error\n");
        return ret;
    }
    /* Send S2: echo the client's epoch and C1 payload back */
    ret = rtmp_send_hs_packet(rt, hs_epoch, 0, hs_c1,
                              RTMP_HANDSHAKE_PACKET_SIZE);
    if (ret) {
        av_log(s, AV_LOG_ERROR, "RTMP Handshake S2 Error\n");
        return ret;
    }
    /* Receive C2 */
    ret = rtmp_receive_hs_packet(rt, &temp, &zeroes, buffer,
                                 RTMP_HANDSHAKE_PACKET_SIZE);
    if (ret) {
        av_log(s, AV_LOG_ERROR, "RTMP Handshake C2 Error\n");
        return ret;
    }
    /* C2 should echo back S1's epoch and random payload; mismatches are
     * tolerated with a warning. */
    if (temp != hs_my_epoch)
        av_log(s, AV_LOG_WARNING,
               "Erroneous C2 Message epoch does not match up with C1 epoch\n");
    if (memcmp(buffer + 8, hs_s1 + 8,
               RTMP_HANDSHAKE_PACKET_SIZE - 8))
        av_log(s, AV_LOG_WARNING,
               "Erroneous C2 Message random does not match up\n");

    return 0;
}
static int handle_chunk_size(URLContext *s, RTMPPacket *pkt) static int handle_chunk_size(URLContext *s, RTMPPacket *pkt)
{ {
RTMPContext *rt = s->priv_data; RTMPContext *rt = s->priv_data;
@ -1270,6 +1538,139 @@ static int handle_invoke_error(URLContext *s, RTMPPacket *pkt)
return ret; return ret;
} }
/**
 * Answer an invoke command received while acting as an RTMP server
 * (releaseStream / FCPublish / publish / _checkbw / createStream).
 *
 * For "publish" this additionally sends a Stream Begin user-control
 * message and a NetStream.Publish.Start onStatus notification; both
 * "FCPublish" and "publish" switch the context into STATE_RECEIVING.
 * Any other command gets a generic "_result" reply ("createStream" also
 * allocates and returns a new stream id).
 *
 * @return 0 on success, a negative AVERROR value on failure
 */
static int send_invoke_response(URLContext *s, RTMPPacket *pkt)
{
    RTMPContext *rt = s->priv_data;
    double seqnum;
    char filename[64];
    char command[64];
    char statusmsg[128];
    int stringlen;
    char *pchar;
    const uint8_t *p = pkt->data;
    uint8_t *pp      = NULL;
    RTMPPacket spkt  = { 0 };
    GetByteContext gbc;
    int ret;

    bytestream2_init(&gbc, p, pkt->data_size);
    if (ff_amf_read_string(&gbc, command, sizeof(command),
                           &stringlen)) {
        av_log(s, AV_LOG_ERROR, "Error in PT_INVOKE\n");
        return AVERROR_INVALIDDATA;
    }

    ret = ff_amf_read_number(&gbc, &seqnum);
    if (ret)
        return ret;
    ret = ff_amf_read_null(&gbc);
    if (ret)
        return ret;
    if (!strcmp(command, "FCPublish") ||
        !strcmp(command, "publish")) {
        ret = ff_amf_read_string(&gbc, filename,
                                 sizeof(filename), &stringlen);
        /* fixed: the return value used to be ignored, so a malformed
         * packet left filename uninitialized for the strcmp/snprintf
         * uses below */
        if (ret) {
            av_log(s, AV_LOG_ERROR, "Error in PT_INVOKE\n");
            return AVERROR_INVALIDDATA;
        }
        // check with url
        if (s->filename) {
            pchar = strrchr(s->filename, '/');
            if (!pchar) {
                av_log(s, AV_LOG_WARNING,
                       "Unable to find / in url %s, bad format\n",
                       s->filename);
                pchar = s->filename;
            }
            pchar++;
            if (strcmp(pchar, filename))
                av_log(s, AV_LOG_WARNING, "Unexpected stream %s, expecting"
                       " %s\n", filename, pchar);
        }
        rt->state = STATE_RECEIVING;
    }

    if (!strcmp(command, "FCPublish")) {
        if ((ret = ff_rtmp_packet_create(&spkt, RTMP_SYSTEM_CHANNEL,
                                         RTMP_PT_INVOKE, 0,
                                         RTMP_PKTDATA_DEFAULT_SIZE)) < 0) {
            av_log(s, AV_LOG_ERROR, "Unable to create response packet\n");
            return ret;
        }
        pp = spkt.data;
        ff_amf_write_string(&pp, "onFCPublish");
    } else if (!strcmp(command, "publish")) {
        PutByteContext pbc;
        // Send Stream Begin 1
        if ((ret = ff_rtmp_packet_create(&spkt, RTMP_NETWORK_CHANNEL,
                                         RTMP_PT_PING, 0, 6)) < 0) {
            av_log(s, AV_LOG_ERROR, "Unable to create response packet\n");
            return ret;
        }
        pp = spkt.data;
        bytestream2_init_writer(&pbc, pp, spkt.data_size);
        bytestream2_put_be16(&pbc, 0);          // 0 -> Stream Begin
        bytestream2_put_be32(&pbc, rt->nb_streamid);
        ret = ff_rtmp_packet_write(rt->stream, &spkt, rt->out_chunk_size,
                                   rt->prev_pkt[1]);
        ff_rtmp_packet_destroy(&spkt);
        if (ret < 0)
            return ret;

        // Send onStatus(NetStream.Publish.Start)
        if ((ret = ff_rtmp_packet_create(&spkt, RTMP_SYSTEM_CHANNEL,
                                         RTMP_PT_INVOKE, 0,
                                         RTMP_PKTDATA_DEFAULT_SIZE)) < 0) {
            av_log(s, AV_LOG_ERROR, "Unable to create response packet\n");
            return ret;
        }
        spkt.extra = pkt->extra;
        pp = spkt.data;
        ff_amf_write_string(&pp, "onStatus");
        ff_amf_write_number(&pp, 0);
        ff_amf_write_null(&pp);

        ff_amf_write_object_start(&pp);
        ff_amf_write_field_name(&pp, "level");
        ff_amf_write_string(&pp, "status");
        ff_amf_write_field_name(&pp, "code");
        ff_amf_write_string(&pp, "NetStream.Publish.Start");
        ff_amf_write_field_name(&pp, "description");
        snprintf(statusmsg, sizeof(statusmsg),
                 "%s is now published", filename);
        ff_amf_write_string(&pp, statusmsg);
        ff_amf_write_field_name(&pp, "details");
        ff_amf_write_string(&pp, filename);
        ff_amf_write_field_name(&pp, "clientid");
        snprintf(statusmsg, sizeof(statusmsg), "%s", LIBAVFORMAT_IDENT);
        ff_amf_write_string(&pp, statusmsg);
        ff_amf_write_object_end(&pp);
    } else {
        // generic "_result" answer for _checkbw / createStream / releaseStream
        if ((ret = ff_rtmp_packet_create(&spkt, RTMP_SYSTEM_CHANNEL,
                                         RTMP_PT_INVOKE, 0,
                                         RTMP_PKTDATA_DEFAULT_SIZE)) < 0) {
            av_log(s, AV_LOG_ERROR, "Unable to create response packet\n");
            return ret;
        }
        pp = spkt.data;
        ff_amf_write_string(&pp, "_result");
        ff_amf_write_number(&pp, seqnum);
        ff_amf_write_null(&pp);
        if (!strcmp(command, "createStream")) {
            rt->nb_streamid++;
            if (rt->nb_streamid == 0 || rt->nb_streamid == 2)
                rt->nb_streamid++; /* Values 0 and 2 are reserved */
            ff_amf_write_number(&pp, rt->nb_streamid);
            /* By now we don't control which streams are removed in
             * deleteStream. There is no stream creation control
             * if a client creates more than 2^32 - 2 streams. */
        }
    }
    spkt.data_size = pp - spkt.data;
    ret = ff_rtmp_packet_write(rt->stream, &spkt, rt->out_chunk_size,
                               rt->prev_pkt[1]);
    ff_rtmp_packet_destroy(&spkt);
    return ret;
}
static int handle_invoke_result(URLContext *s, RTMPPacket *pkt) static int handle_invoke_result(URLContext *s, RTMPPacket *pkt)
{ {
RTMPContext *rt = s->priv_data; RTMPContext *rt = s->priv_data;
@ -1384,11 +1785,79 @@ static int handle_invoke(URLContext *s, RTMPPacket *pkt)
} else if (!memcmp(pkt->data, "\002\000\010onBWDone", 11)) { } else if (!memcmp(pkt->data, "\002\000\010onBWDone", 11)) {
if ((ret = gen_check_bw(s, rt)) < 0) if ((ret = gen_check_bw(s, rt)) < 0)
return ret; return ret;
} else if (!memcmp(pkt->data, "\002\000\015releaseStream", 16) ||
!memcmp(pkt->data, "\002\000\011FCPublish", 12) ||
!memcmp(pkt->data, "\002\000\007publish", 10) ||
!memcmp(pkt->data, "\002\000\010_checkbw", 11) ||
!memcmp(pkt->data, "\002\000\014createStream", 15)) {
if (ret = send_invoke_response(s, pkt) < 0)
return ret;
} }
return ret; return ret;
} }
/**
 * Handle an incoming RTMP_PT_NOTIFY packet. When the notify is
 * "@setDataFrame" carrying "onMetaData", its AMF payload is repackaged
 * as an FLV tag and appended to rt->flv_data, the buffer later consumed
 * by the FLV demuxer (rt->flv_off / rt->flv_size track the read and
 * total positions). Any other notify is silently ignored.
 *
 * @return 0 on success or if ignored, a negative AVERROR value on error
 */
static int handle_notify(URLContext *s, RTMPPacket *pkt) {
    RTMPContext *rt = s->priv_data;
    const uint8_t *p = NULL;
    uint8_t *cp = NULL;
    uint8_t commandbuffer[64];
    char statusmsg[128];
    int stringlen;
    GetByteContext gbc;
    PutByteContext pbc;
    uint32_t ts;
    int old_flv_size;
    const uint8_t *datatowrite;
    unsigned datatowritelength;

    p = pkt->data;
    bytestream2_init(&gbc, p, pkt->data_size);
    // first AMF string is the notify name
    if (ff_amf_read_string(&gbc, commandbuffer, sizeof(commandbuffer),
                           &stringlen))
        return AVERROR_INVALIDDATA;
    if (!strcmp(commandbuffer, "@setDataFrame")) {
        // keep the remaining payload (including the "onMetaData" string)
        // as the FLV tag body
        datatowrite = gbc.buffer;
        datatowritelength = bytestream2_get_bytes_left(&gbc);
        if (ff_amf_read_string(&gbc, statusmsg,
                               sizeof(statusmsg), &stringlen))
            return AVERROR_INVALIDDATA;
        if (strcmp(statusmsg, "onMetaData")) {
            av_log(s, AV_LOG_INFO, "Expecting onMetadata but got %s\n",
                   statusmsg);
            return 0;
        }

        /* Provide ECMAArray to flv */
        ts = pkt->timestamp;

        // generate packet header and put data into buffer for FLV demuxer
        // append if unread data remains, otherwise restart the buffer;
        // 15 = 11-byte FLV tag header + 4-byte previous-tag-size trailer
        if (rt->flv_off < rt->flv_size) {
            old_flv_size = rt->flv_size;
            rt->flv_size += datatowritelength + 15;
        } else {
            old_flv_size = 0;
            rt->flv_size = datatowritelength + 15;
            rt->flv_off = 0;
        }

        cp = av_realloc(rt->flv_data, rt->flv_size);
        if (!cp)
            return AVERROR(ENOMEM);
        rt->flv_data = cp;
        bytestream2_init_writer(&pbc, cp, rt->flv_size);
        bytestream2_skip_p(&pbc, old_flv_size);
        // FLV tag header: type, 24-bit size, 24-bit timestamp plus
        // extended-timestamp byte, 24-bit stream id (always 0)
        bytestream2_put_byte(&pbc, pkt->type);
        bytestream2_put_be24(&pbc, datatowritelength);
        bytestream2_put_be24(&pbc, ts);
        bytestream2_put_byte(&pbc, ts >> 24);
        bytestream2_put_be24(&pbc, 0);
        bytestream2_put_buffer(&pbc, datatowrite, datatowritelength);
        // PreviousTagSize trailer; left 0 here
        bytestream2_put_be32(&pbc, 0);
    }
    return 0;
}
/** /**
* Parse received packet and possibly perform some action depending on * Parse received packet and possibly perform some action depending on
* the packet contents. * the packet contents.
@ -1430,6 +1899,7 @@ static int rtmp_parse_result(URLContext *s, RTMPContext *rt, RTMPPacket *pkt)
case RTMP_PT_VIDEO: case RTMP_PT_VIDEO:
case RTMP_PT_AUDIO: case RTMP_PT_AUDIO:
case RTMP_PT_METADATA: case RTMP_PT_METADATA:
case RTMP_PT_NOTIFY:
/* Audio, Video and Metadata packets are parsed in get_packet() */ /* Audio, Video and Metadata packets are parsed in get_packet() */
break; break;
default: default:
@ -1489,7 +1959,9 @@ static int get_packet(URLContext *s, int for_header)
ff_rtmp_packet_destroy(&rpkt); ff_rtmp_packet_destroy(&rpkt);
return AVERROR_EOF; return AVERROR_EOF;
} }
if (for_header && (rt->state == STATE_PLAYING || rt->state == STATE_PUBLISHING)) { if (for_header && (rt->state == STATE_PLAYING ||
rt->state == STATE_PUBLISHING ||
rt->state == STATE_RECEIVING)) {
ff_rtmp_packet_destroy(&rpkt); ff_rtmp_packet_destroy(&rpkt);
return 0; return 0;
} }
@ -1514,6 +1986,14 @@ static int get_packet(URLContext *s, int for_header)
bytestream_put_be32(&p, 0); bytestream_put_be32(&p, 0);
ff_rtmp_packet_destroy(&rpkt); ff_rtmp_packet_destroy(&rpkt);
return 0; return 0;
} else if (rpkt.type == RTMP_PT_NOTIFY) {
ret = handle_notify(s, &rpkt);
ff_rtmp_packet_destroy(&rpkt);
if (ret) {
av_log(s, AV_LOG_ERROR, "Handle notify error\n");
return ret;
}
return 0;
} else if (rpkt.type == RTMP_PT_METADATA) { } else if (rpkt.type == RTMP_PT_METADATA) {
// we got raw FLV data, make it available for FLV demuxer // we got raw FLV data, make it available for FLV demuxer
rt->flv_off = 0; rt->flv_off = 0;
@ -1584,11 +2064,19 @@ static int rtmp_open(URLContext *s, const char *uri, int flags)
AVDictionary *opts = NULL; AVDictionary *opts = NULL;
int ret; int ret;
if (rt->listen_timeout > 0)
rt->listen = 1;
rt->is_input = !(flags & AVIO_FLAG_WRITE); rt->is_input = !(flags & AVIO_FLAG_WRITE);
av_url_split(proto, sizeof(proto), NULL, 0, hostname, sizeof(hostname), &port, av_url_split(proto, sizeof(proto), NULL, 0, hostname, sizeof(hostname), &port,
path, sizeof(path), s->filename); path, sizeof(path), s->filename);
if (rt->listen && strcmp(proto, "rtmp")) {
av_log(s, AV_LOG_ERROR, "rtmp_listen not available for %s\n",
proto);
return AVERROR(EINVAL);
}
if (!strcmp(proto, "rtmpt") || !strcmp(proto, "rtmpts")) { if (!strcmp(proto, "rtmpt") || !strcmp(proto, "rtmpts")) {
if (!strcmp(proto, "rtmpts")) if (!strcmp(proto, "rtmpts"))
av_dict_set(&opts, "ffrtmphttp_tls", "1", 1); av_dict_set(&opts, "ffrtmphttp_tls", "1", 1);
@ -1611,7 +2099,12 @@ static int rtmp_open(URLContext *s, const char *uri, int flags)
/* open the tcp connection */ /* open the tcp connection */
if (port < 0) if (port < 0)
port = RTMP_DEFAULT_PORT; port = RTMP_DEFAULT_PORT;
ff_url_join(buf, sizeof(buf), "tcp", NULL, hostname, port, NULL); if (rt->listen)
ff_url_join(buf, sizeof(buf), "tcp", NULL, hostname, port,
"?listen&listen_timeout=%d",
rt->listen_timeout * 1000);
else
ff_url_join(buf, sizeof(buf), "tcp", NULL, hostname, port, NULL);
} }
if ((ret = ffurl_open(&rt->stream, buf, AVIO_FLAG_READ_WRITE, if ((ret = ffurl_open(&rt->stream, buf, AVIO_FLAG_READ_WRITE,
@ -1626,7 +2119,9 @@ static int rtmp_open(URLContext *s, const char *uri, int flags)
} }
rt->state = STATE_START; rt->state = STATE_START;
if ((ret = rtmp_handshake(s, rt)) < 0) if (!rt->listen && (ret = rtmp_handshake(s, rt)) < 0)
goto fail;
if (rt->listen && (ret = rtmp_server_handshake(s, rt)) < 0)
goto fail; goto fail;
rt->out_chunk_size = 128; rt->out_chunk_size = 128;
@ -1726,8 +2221,14 @@ static int rtmp_open(URLContext *s, const char *uri, int flags)
av_log(s, AV_LOG_DEBUG, "Proto = %s, path = %s, app = %s, fname = %s\n", av_log(s, AV_LOG_DEBUG, "Proto = %s, path = %s, app = %s, fname = %s\n",
proto, path, rt->app, rt->playpath); proto, path, rt->app, rt->playpath);
if ((ret = gen_connect(s, rt)) < 0) if (!rt->listen) {
goto fail; if ((ret = gen_connect(s, rt)) < 0)
goto fail;
} else {
if (read_connect(s, s->priv_data) < 0)
goto fail;
rt->is_input = 1;
}
do { do {
ret = get_packet(s, 1); ret = get_packet(s, 1);
@ -1919,6 +2420,8 @@ static const AVOption rtmp_options[] = {
{"rtmp_swfurl", "URL of the SWF player. By default no value will be sent", OFFSET(swfurl), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC|ENC}, {"rtmp_swfurl", "URL of the SWF player. By default no value will be sent", OFFSET(swfurl), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC|ENC},
{"rtmp_swfverify", "URL to player swf file, compute hash/size automatically.", OFFSET(swfverify), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC}, {"rtmp_swfverify", "URL to player swf file, compute hash/size automatically.", OFFSET(swfverify), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC},
{"rtmp_tcurl", "URL of the target stream. Defaults to proto://host[:port]/app.", OFFSET(tcurl), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC|ENC}, {"rtmp_tcurl", "URL of the target stream. Defaults to proto://host[:port]/app.", OFFSET(tcurl), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC|ENC},
{"rtmp_listen", "Listen for incoming rtmp connections", OFFSET(listen), AV_OPT_TYPE_INT, {0}, INT_MIN, INT_MAX, DEC, "rtmp_listen" },
{"timeout", "Maximum timeout (in seconds) to wait for incoming connections. -1 is infinite. Implies -rtmp_listen 1", OFFSET(listen_timeout), AV_OPT_TYPE_INT, {-1}, INT_MIN, INT_MAX, DEC, "rtmp_listen" },
{ NULL }, { NULL },
}; };

View File

@ -30,8 +30,8 @@
#include "libavutil/avutil.h" #include "libavutil/avutil.h"
#define LIBAVFORMAT_VERSION_MAJOR 54 #define LIBAVFORMAT_VERSION_MAJOR 54
#define LIBAVFORMAT_VERSION_MINOR 24 #define LIBAVFORMAT_VERSION_MINOR 25
#define LIBAVFORMAT_VERSION_MICRO 101 #define LIBAVFORMAT_VERSION_MICRO 100
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \ #define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \ LIBAVFORMAT_VERSION_MINOR, \