
avcodec: Remove libstagefright

It serves absolutely no purpose other than to confuse potential
Android developers about how to use hardware acceleration properly
on the platform. The stagefright "API" is not public, and the
MediaCodec API is the proper way to do this.

Furthermore, stagefright support in avcodec needs a series of
magic incantations and version-specific stuff, such that
using it actually provides downsides compared to just using the actual
Android frameworks properly, in that it is a lot more work and confusion
to get it even running. It also leads to a lot of misinformation, like
these sorts of comments (in [1]) that are absolutely incorrect.

[1] http://stackoverflow.com/a/29362353/3115956

Signed-off-by: Derek Buitenhuis <derek.buitenhuis@gmail.com>
Derek Buitenhuis 2016-01-03 13:22:03 -05:00
parent 149b1f7cca
commit 72673ad7ea
8 changed files with 2 additions and 659 deletions
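
For reference, the MediaCodec path the commit message recommends is reachable directly from native code through the NDK's AMediaCodec wrapper (API level 21+, link against -lmediandk). The sketch below is illustrative only and not part of this commit: the AccessUnitSource callback is a stand-in for whatever demuxer supplies Annex-B H.264 access units, while the AMediaCodec/AMediaFormat calls are the public NDK API.

// Minimal sketch: H.264 decoding via the public NDK MediaCodec API.
// Not FFmpeg code; AccessUnitSource is a hypothetical input callback.
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <functional>
#include <cstdint>

// Returns bytes written into dst (plus a pts in microseconds), or -1 at end of stream.
using AccessUnitSource = std::function<ssize_t(uint8_t *dst, size_t cap, int64_t *pts_us)>;

static bool decode_avc(int width, int height, const AccessUnitSource &next_au)
{
    AMediaCodec  *codec = AMediaCodec_createDecoderByType("video/avc");
    AMediaFormat *fmt   = AMediaFormat_new();
    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH,  width);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, height);
    if (AMediaCodec_configure(codec, fmt, nullptr /*surface*/, nullptr /*crypto*/, 0) != AMEDIA_OK ||
        AMediaCodec_start(codec) != AMEDIA_OK)
        return false;

    bool input_done = false, output_done = false;
    while (!output_done) {
        if (!input_done) {
            ssize_t in = AMediaCodec_dequeueInputBuffer(codec, 10000 /*us*/);
            if (in >= 0) {
                size_t cap;
                uint8_t *buf = AMediaCodec_getInputBuffer(codec, in, &cap);
                int64_t pts = 0;
                ssize_t size = next_au(buf, cap, &pts);
                if (size < 0) {
                    // Signal end of stream to the codec.
                    AMediaCodec_queueInputBuffer(codec, in, 0, 0, 0,
                                                 AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
                    input_done = true;
                } else {
                    AMediaCodec_queueInputBuffer(codec, in, 0, size, pts, 0);
                }
            }
        }
        AMediaCodecBufferInfo info;
        ssize_t out = AMediaCodec_dequeueOutputBuffer(codec, &info, 10000 /*us*/);
        if (out >= 0) {
            size_t out_size;
            uint8_t *frame = AMediaCodec_getOutputBuffer(codec, out, &out_size);
            // frame + info.offset holds info.size bytes of raw output at info.presentationTimeUs;
            // a real caller would consume it here.
            (void)frame;
            AMediaCodec_releaseOutputBuffer(codec, out, false /*render*/);
            if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
                output_done = true;
        }
        // AMEDIACODEC_INFO_TRY_AGAIN_LATER / OUTPUT_FORMAT_CHANGED are non-fatal and skipped here.
    }
    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
    AMediaFormat_delete(fmt);
    return true;
}

Unlike the removed wrapper, this needs no private stagefright headers, no OMX client plumbing, and no version-specific linker flags; the platform selects and drives the underlying codec component itself.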


@@ -51,6 +51,7 @@ version <next>:
- audio high-order multiband parametric equalizer
- automatic bitstream filtering
- showspectrumpic filter
- libstagefright support removed
version 2.8:


@@ -304,7 +304,6 @@ Codecs:
Hardware acceleration:
crystalhd.c Philip Langdale
dxva2* Hendrik Leppkes, Laurent Aimar
libstagefright.cpp Mohamed Naufal
vaapi* Gwenole Beauchesne
vda* Sebastien Zwickert
vdpau* Philip Langdale, Carl Eugen Hoyos

configure

@@ -250,7 +250,6 @@ External library support:
--enable-libsoxr enable Include libsoxr resampling [no]
--enable-libspeex enable Speex de/encoding via libspeex [no]
--enable-libssh enable SFTP protocol via libssh [no]
--enable-libstagefright-h264 enable H.264 decoding via libstagefright [no]
--enable-libtesseract enable Tesseract, needed for ocr filter [no]
--enable-libtheora enable Theora encoding via libtheora [no]
--enable-libtwolame enable MP2 encoding via libtwolame [no]
@@ -1478,7 +1477,6 @@ EXTERNAL_LIBRARY_LIST="
libsoxr
libspeex
libssh
libstagefright_h264
libtesseract
libtheora
libtwolame
@@ -2639,7 +2637,6 @@ libshine_encoder_select="audio_frame_queue"
libspeex_decoder_deps="libspeex"
libspeex_encoder_deps="libspeex"
libspeex_encoder_select="audio_frame_queue"
libstagefright_h264_decoder_deps="libstagefright_h264"
libtheora_encoder_deps="libtheora"
libtwolame_encoder_deps="libtwolame"
libvo_aacenc_encoder_deps="libvo_aacenc"
@@ -5476,9 +5473,6 @@ enabled libsnappy && require snappy snappy-c.h snappy_compress -lsnappy
enabled libsoxr && require libsoxr soxr.h soxr_create -lsoxr && LIBSOXR="-lsoxr"
enabled libssh && require_pkg_config libssh libssh/sftp.h sftp_init
enabled libspeex && require_pkg_config speex speex/speex.h speex_decoder_init -lspeex
enabled libstagefright_h264 && require_cpp libstagefright_h264 "binder/ProcessState.h media/stagefright/MetaData.h
media/stagefright/MediaBufferGroup.h media/stagefright/MediaDebug.h media/stagefright/MediaDefs.h
media/stagefright/OMXClient.h media/stagefright/OMXCodec.h" android::OMXClient -lstagefright -lmedia -lutils -lbinder -lgnustl_static
enabled libtesseract && require_pkg_config tesseract tesseract/capi.h TessBaseAPICreate
enabled libtheora && require libtheora theora/theoraenc.h th_info_init -ltheoraenc -ltheoradec -logg
enabled libtwolame && require libtwolame twolame.h twolame_init -ltwolame &&


@@ -829,7 +829,6 @@ OBJS-$(CONFIG_LIBSCHROEDINGER_ENCODER) += libschroedingerenc.o \
OBJS-$(CONFIG_LIBSHINE_ENCODER) += libshine.o
OBJS-$(CONFIG_LIBSPEEX_DECODER) += libspeexdec.o
OBJS-$(CONFIG_LIBSPEEX_ENCODER) += libspeexenc.o
OBJS-$(CONFIG_LIBSTAGEFRIGHT_H264_DECODER)+= libstagefright.o
OBJS-$(CONFIG_LIBTHEORA_ENCODER) += libtheoraenc.o
OBJS-$(CONFIG_LIBTWOLAME_ENCODER) += libtwolame.o
OBJS-$(CONFIG_LIBUTVIDEO_DECODER) += libutvideodec.o


@@ -572,7 +572,6 @@ void avcodec_register_all(void)
REGISTER_ENCDEC (LIBSCHROEDINGER, libschroedinger);
REGISTER_ENCODER(LIBSHINE, libshine);
REGISTER_ENCDEC (LIBSPEEX, libspeex);
REGISTER_DECODER(LIBSTAGEFRIGHT_H264, libstagefright_h264);
REGISTER_ENCODER(LIBTHEORA, libtheora);
REGISTER_ENCODER(LIBTWOLAME, libtwolame);
REGISTER_ENCDEC (LIBUTVIDEO, libutvideo);


@@ -1,591 +0,0 @@
/*
* Interface to the Android Stagefright library for
* H/W accelerated H.264 decoding
*
* Copyright (C) 2011 Mohamed Naufal
* Copyright (C) 2011 Martin Storsjö
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <binder/ProcessState.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <utils/List.h>
#include <new>
#include <map>
extern "C" {
#include "avcodec.h"
#include "libavutil/imgutils.h"
#include "internal.h"
}
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
using namespace android;
struct Frame {
status_t status;
size_t size;
int64_t time;
int key;
uint8_t *buffer;
AVFrame *vframe;
};
struct TimeStamp {
int64_t pts;
int64_t reordered_opaque;
};
class CustomSource;
struct StagefrightContext {
AVCodecContext *avctx;
AVBitStreamFilterContext *bsfc;
uint8_t* orig_extradata;
int orig_extradata_size;
sp<MediaSource> *source;
List<Frame*> *in_queue, *out_queue;
pthread_mutex_t in_mutex, out_mutex;
pthread_cond_t condition;
pthread_t decode_thread_id;
Frame *end_frame;
bool source_done;
volatile sig_atomic_t thread_started, thread_exited, stop_decode;
AVFrame *prev_frame;
std::map<int64_t, TimeStamp> *ts_map;
int64_t frame_index;
uint8_t *dummy_buf;
int dummy_bufsize;
OMXClient *client;
sp<MediaSource> *decoder;
const char *decoder_component;
};
class CustomSource : public MediaSource {
public:
CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
s = (StagefrightContext*)avctx->priv_data;
source_meta = meta;
frame_size = (avctx->width * avctx->height * 3) / 2;
buf_group.add_buffer(new MediaBuffer(frame_size));
}
virtual sp<MetaData> getFormat() {
return source_meta;
}
virtual status_t start(MetaData *params) {
return OK;
}
virtual status_t stop() {
return OK;
}
virtual status_t read(MediaBuffer **buffer,
const MediaSource::ReadOptions *options) {
Frame *frame;
status_t ret;
if (s->thread_exited)
return ERROR_END_OF_STREAM;
pthread_mutex_lock(&s->in_mutex);
while (s->in_queue->empty())
pthread_cond_wait(&s->condition, &s->in_mutex);
frame = *s->in_queue->begin();
ret = frame->status;
if (ret == OK) {
ret = buf_group.acquire_buffer(buffer);
if (ret == OK) {
memcpy((*buffer)->data(), frame->buffer, frame->size);
(*buffer)->set_range(0, frame->size);
(*buffer)->meta_data()->clear();
(*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
(*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
} else {
av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
}
av_freep(&frame->buffer);
}
s->in_queue->erase(s->in_queue->begin());
pthread_mutex_unlock(&s->in_mutex);
av_freep(&frame);
return ret;
}
private:
MediaBufferGroup buf_group;
sp<MetaData> source_meta;
StagefrightContext *s;
int frame_size;
};
void* decode_thread(void *arg)
{
AVCodecContext *avctx = (AVCodecContext*)arg;
StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(avctx->pix_fmt);
Frame* frame;
MediaBuffer *buffer;
int32_t w, h;
int decode_done = 0;
int ret;
int src_linesize[3];
const uint8_t *src_data[3];
int64_t out_frame_index = 0;
do {
buffer = NULL;
frame = (Frame*)av_mallocz(sizeof(Frame));
if (!frame) {
frame = s->end_frame;
frame->status = AVERROR(ENOMEM);
decode_done = 1;
s->end_frame = NULL;
goto push_frame;
}
frame->status = (*s->decoder)->read(&buffer);
if (frame->status == OK) {
sp<MetaData> outFormat = (*s->decoder)->getFormat();
outFormat->findInt32(kKeyWidth , &w);
outFormat->findInt32(kKeyHeight, &h);
frame->vframe = av_frame_alloc();
if (!frame->vframe) {
frame->status = AVERROR(ENOMEM);
decode_done = 1;
buffer->release();
goto push_frame;
}
ret = ff_get_buffer(avctx, frame->vframe, AV_GET_BUFFER_FLAG_REF);
if (ret < 0) {
frame->status = ret;
decode_done = 1;
buffer->release();
goto push_frame;
}
// The OMX.SEC decoder doesn't signal the modified width/height
if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
(w & 15 || h & 15)) {
if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
w = (w + 15)&~15;
h = (h + 15)&~15;
}
}
if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
avctx->width = w;
avctx->height = h;
}
src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);
src_data[0] = (uint8_t*)buffer->data();
src_data[1] = src_data[0] + src_linesize[0] * h;
src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
av_image_copy(frame->vframe->data, frame->vframe->linesize,
src_data, src_linesize,
avctx->pix_fmt, avctx->width, avctx->height);
buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
s->ts_map->erase(out_frame_index);
}
buffer->release();
} else if (frame->status == INFO_FORMAT_CHANGED) {
if (buffer)
buffer->release();
av_free(frame);
continue;
} else {
decode_done = 1;
}
push_frame:
while (true) {
pthread_mutex_lock(&s->out_mutex);
if (s->out_queue->size() >= 10) {
pthread_mutex_unlock(&s->out_mutex);
usleep(10000);
continue;
}
break;
}
s->out_queue->push_back(frame);
pthread_mutex_unlock(&s->out_mutex);
} while (!decode_done && !s->stop_decode);
s->thread_exited = true;
return 0;
}
static av_cold int Stagefright_init(AVCodecContext *avctx)
{
StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
sp<MetaData> meta, outFormat;
int32_t colorFormat = 0;
int ret;
if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
return -1;
s->avctx = avctx;
s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
if (!s->bsfc) {
av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
return -1;
}
s->orig_extradata_size = avctx->extradata_size;
s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
AV_INPUT_BUFFER_PADDING_SIZE);
if (!s->orig_extradata) {
ret = AVERROR(ENOMEM);
goto fail;
}
memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
meta = new MetaData;
if (!meta) {
ret = AVERROR(ENOMEM);
goto fail;
}
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
meta->setInt32(kKeyWidth, avctx->width);
meta->setInt32(kKeyHeight, avctx->height);
meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
android::ProcessState::self()->startThreadPool();
s->source = new sp<MediaSource>();
*s->source = new CustomSource(avctx, meta);
s->in_queue = new List<Frame*>;
s->out_queue = new List<Frame*>;
s->ts_map = new std::map<int64_t, TimeStamp>;
s->client = new OMXClient;
s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
!s->ts_map || !s->end_frame) {
ret = AVERROR(ENOMEM);
goto fail;
}
if (s->client->connect() != OK) {
av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
ret = -1;
goto fail;
}
s->decoder = new sp<MediaSource>();
*s->decoder = OMXCodec::Create(s->client->interface(), meta,
false, *s->source, NULL,
OMXCodec::kClientNeedsFramebuffer);
if ((*s->decoder)->start() != OK) {
av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
ret = -1;
s->client->disconnect();
goto fail;
}
outFormat = (*s->decoder)->getFormat();
outFormat->findInt32(kKeyColorFormat, &colorFormat);
if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
avctx->pix_fmt = AV_PIX_FMT_NV21;
else if (colorFormat == OMX_COLOR_FormatYCbYCr)
avctx->pix_fmt = AV_PIX_FMT_YUYV422;
else if (colorFormat == OMX_COLOR_FormatCbYCrY)
avctx->pix_fmt = AV_PIX_FMT_UYVY422;
else
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
if (s->decoder_component)
s->decoder_component = av_strdup(s->decoder_component);
pthread_mutex_init(&s->in_mutex, NULL);
pthread_mutex_init(&s->out_mutex, NULL);
pthread_cond_init(&s->condition, NULL);
return 0;
fail:
av_bitstream_filter_close(s->bsfc);
av_freep(&s->orig_extradata);
av_freep(&s->end_frame);
delete s->in_queue;
delete s->out_queue;
delete s->ts_map;
delete s->client;
return ret;
}
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
int *got_frame, AVPacket *avpkt)
{
StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
Frame *frame;
status_t status;
int orig_size = avpkt->size;
AVPacket pkt = *avpkt;
AVFrame *ret_frame;
if (!s->thread_started) {
if(pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx))
return AVERROR(ENOMEM);
s->thread_started = true;
}
if (avpkt && avpkt->data) {
av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
avpkt = &pkt;
}
if (!s->source_done) {
if(!s->dummy_buf) {
s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
if (!s->dummy_buf)
return AVERROR(ENOMEM);
s->dummy_bufsize = avpkt->size;
memcpy(s->dummy_buf, avpkt->data, avpkt->size);
}
frame = (Frame*)av_mallocz(sizeof(Frame));
if (avpkt->data) {
frame->status = OK;
frame->size = avpkt->size;
frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
frame->buffer = (uint8_t*)av_malloc(avpkt->size);
if (!frame->buffer) {
av_freep(&frame);
return AVERROR(ENOMEM);
}
uint8_t *ptr = avpkt->data;
// The OMX.SEC decoder fails without this.
if (avpkt->size == orig_size + avctx->extradata_size) {
ptr += avctx->extradata_size;
frame->size = orig_size;
}
memcpy(frame->buffer, ptr, orig_size);
if (avpkt == &pkt)
av_free(avpkt->data);
frame->time = ++s->frame_index;
(*s->ts_map)[s->frame_index].pts = avpkt->pts;
(*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
} else {
frame->status = ERROR_END_OF_STREAM;
s->source_done = true;
}
while (true) {
if (s->thread_exited) {
s->source_done = true;
break;
}
pthread_mutex_lock(&s->in_mutex);
if (s->in_queue->size() >= 10) {
pthread_mutex_unlock(&s->in_mutex);
usleep(10000);
continue;
}
s->in_queue->push_back(frame);
pthread_cond_signal(&s->condition);
pthread_mutex_unlock(&s->in_mutex);
break;
}
}
while (true) {
pthread_mutex_lock(&s->out_mutex);
if (!s->out_queue->empty()) break;
pthread_mutex_unlock(&s->out_mutex);
if (!s->source_done) {
usleep(10000);
continue;
} else {
return orig_size;
}
}
frame = *s->out_queue->begin();
s->out_queue->erase(s->out_queue->begin());
pthread_mutex_unlock(&s->out_mutex);
ret_frame = frame->vframe;
status = frame->status;
av_freep(&frame);
if (status == ERROR_END_OF_STREAM)
return 0;
if (status != OK) {
if (status == AVERROR(ENOMEM))
return status;
av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
return -1;
}
if (s->prev_frame)
av_frame_free(&s->prev_frame);
s->prev_frame = ret_frame;
*got_frame = 1;
*(AVFrame*)data = *ret_frame;
return orig_size;
}
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
Frame *frame;
if (s->thread_started) {
if (!s->thread_exited) {
s->stop_decode = 1;
// Make sure decode_thread() doesn't get stuck
pthread_mutex_lock(&s->out_mutex);
while (!s->out_queue->empty()) {
frame = *s->out_queue->begin();
s->out_queue->erase(s->out_queue->begin());
if (frame->vframe)
av_frame_free(&frame->vframe);
av_freep(&frame);
}
pthread_mutex_unlock(&s->out_mutex);
// Feed a dummy frame prior to signalling EOF.
// This is required to terminate the decoder(OMX.SEC)
// when only one frame is read during stream info detection.
if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
frame->status = OK;
frame->size = s->dummy_bufsize;
frame->key = 1;
frame->buffer = s->dummy_buf;
pthread_mutex_lock(&s->in_mutex);
s->in_queue->push_back(frame);
pthread_cond_signal(&s->condition);
pthread_mutex_unlock(&s->in_mutex);
s->dummy_buf = NULL;
}
pthread_mutex_lock(&s->in_mutex);
s->end_frame->status = ERROR_END_OF_STREAM;
s->in_queue->push_back(s->end_frame);
pthread_cond_signal(&s->condition);
pthread_mutex_unlock(&s->in_mutex);
s->end_frame = NULL;
}
pthread_join(s->decode_thread_id, NULL);
if (s->prev_frame)
av_frame_free(&s->prev_frame);
s->thread_started = false;
}
while (!s->in_queue->empty()) {
frame = *s->in_queue->begin();
s->in_queue->erase(s->in_queue->begin());
if (frame->size)
av_freep(&frame->buffer);
av_freep(&frame);
}
while (!s->out_queue->empty()) {
frame = *s->out_queue->begin();
s->out_queue->erase(s->out_queue->begin());
if (frame->vframe)
av_frame_free(&frame->vframe);
av_freep(&frame);
}
(*s->decoder)->stop();
s->client->disconnect();
if (s->decoder_component)
av_freep(&s->decoder_component);
av_freep(&s->dummy_buf);
av_freep(&s->end_frame);
// Reset the extradata back to the original mp4 format, so that
// the next invocation (both when decoding and when called from
// av_find_stream_info) get the original mp4 format extradata.
av_freep(&avctx->extradata);
avctx->extradata = s->orig_extradata;
avctx->extradata_size = s->orig_extradata_size;
delete s->in_queue;
delete s->out_queue;
delete s->ts_map;
delete s->client;
delete s->decoder;
delete s->source;
pthread_mutex_destroy(&s->in_mutex);
pthread_mutex_destroy(&s->out_mutex);
pthread_cond_destroy(&s->condition);
av_bitstream_filter_close(s->bsfc);
return 0;
}
AVCodec ff_libstagefright_h264_decoder = {
"libstagefright_h264",
NULL_IF_CONFIG_SMALL("libstagefright H.264"),
AVMEDIA_TYPE_VIDEO,
AV_CODEC_ID_H264,
AV_CODEC_CAP_DELAY,
NULL, //supported_framerates
NULL, //pix_fmts
NULL, //supported_samplerates
NULL, //sample_fmts
NULL, //channel_layouts
0, //max_lowres
NULL, //priv_class
NULL, //profiles
sizeof(StagefrightContext),
NULL, //next
NULL, //init_thread_copy
NULL, //update_thread_context
NULL, //defaults
NULL, //init_static_data
Stagefright_init,
NULL, //encode
NULL, //encode2
Stagefright_decode_frame,
Stagefright_close,
};


@@ -29,7 +29,7 @@
#include "libavutil/version.h"
#define LIBAVCODEC_VERSION_MAJOR 57
-#define LIBAVCODEC_VERSION_MINOR 21
+#define LIBAVCODEC_VERSION_MINOR 22
#define LIBAVCODEC_VERSION_MICRO 100
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \


@@ -1,58 +0,0 @@
#!/bin/bash
if [ "$NDK" = "" ]; then
echo NDK variable not set, assuming ${HOME}/android-ndk
export NDK=${HOME}/android-ndk
fi
echo "Fetching Android system headers"
git clone --depth=1 --branch gingerbread-release https://github.com/CyanogenMod/android_frameworks_base.git ../android-source/frameworks/base
git clone --depth=1 --branch gingerbread-release https://github.com/CyanogenMod/android_system_core.git ../android-source/system/core
echo "Fetching Android libraries for linking"
# Libraries from any froyo/gingerbread device/emulator should work
# fine, since the symbols used should be available on most of them.
if [ ! -d "../android-libs" ]; then
if [ ! -f "../update-cm-7.0.3-N1-signed.zip" ]; then
wget http://download.cyanogenmod.com/get/update-cm-7.0.3-N1-signed.zip -P../
fi
unzip ../update-cm-7.0.3-N1-signed.zip system/lib/* -d../
mv ../system/lib ../android-libs
rmdir ../system
fi
SYSROOT=$NDK/platforms/android-9/arch-arm
# Expand the prebuilt/* path into the correct one
TOOLCHAIN=`echo $NDK/toolchains/arm-linux-androideabi-4.4.3/prebuilt/*-x86`
export PATH=$TOOLCHAIN/bin:$PATH
ANDROID_SOURCE=../android-source
ANDROID_LIBS=../android-libs
ABI="armeabi-v7a"
rm -rf ../build/stagefright
mkdir -p ../build/stagefright
DEST=../build/stagefright
FLAGS="--target-os=linux --cross-prefix=arm-linux-androideabi- --arch=arm --cpu=armv7-a"
FLAGS="$FLAGS --sysroot=$SYSROOT"
FLAGS="$FLAGS --disable-avdevice --disable-decoder=h264 --disable-decoder=h264_vdpau --enable-libstagefright-h264"
EXTRA_CFLAGS="-I$ANDROID_SOURCE/frameworks/base/include -I$ANDROID_SOURCE/system/core/include"
EXTRA_CFLAGS="$EXTRA_CFLAGS -I$ANDROID_SOURCE/frameworks/base/media/libstagefright"
EXTRA_CFLAGS="$EXTRA_CFLAGS -I$ANDROID_SOURCE/frameworks/base/include/media/stagefright/openmax"
EXTRA_CFLAGS="$EXTRA_CFLAGS -I$NDK/sources/cxx-stl/gnu-libstdc++/include -I$NDK/sources/cxx-stl/gnu-libstdc++/libs/$ABI/include"
EXTRA_CFLAGS="$EXTRA_CFLAGS -march=armv7-a -mfloat-abi=softfp -mfpu=neon"
EXTRA_LDFLAGS="-Wl,--fix-cortex-a8 -L$ANDROID_LIBS -Wl,-rpath-link,$ANDROID_LIBS -L$NDK/sources/cxx-stl/gnu-libstdc++/libs/$ABI"
EXTRA_CXXFLAGS="-Wno-multichar -fno-exceptions -fno-rtti"
DEST="$DEST/$ABI"
FLAGS="$FLAGS --prefix=$DEST"
mkdir -p $DEST
echo $FLAGS --extra-cflags="$EXTRA_CFLAGS" --extra-ldflags="$EXTRA_LDFLAGS" --extra-cxxflags="$EXTRA_CXXFLAGS" > $DEST/info.txt
./configure $FLAGS --extra-cflags="$EXTRA_CFLAGS" --extra-ldflags="$EXTRA_LDFLAGS" --extra-cxxflags="$EXTRA_CXXFLAGS" | tee $DEST/configuration.txt
[ $PIPESTATUS == 0 ] || exit 1
make clean
make -j4 || exit 1