/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */

#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/avstring.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/parseutils.h"
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"

static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
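
/* Mapping between CoreVideo (AVFoundation) pixel format types and FFmpeg pixel formats. */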
struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType avf_id;
};

static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};

typedef struct
{
    AVClass*        class;

    int             frames_captured;
    int             audio_frames_captured;
    int64_t         first_pts;
    int64_t         first_audio_pts;
    pthread_mutex_t frame_lock;
    id              avf_delegate;
    id              avf_audio_delegate;

    AVRational      framerate;
    int             width, height;

    int             capture_cursor;
    int             capture_mouse_clicks;
    int             capture_raw_data;
    int             drop_late_frames;
    int             video_is_muxed;
    int             video_is_screen;

    int             list_devices;
    int             video_device_index;
    int             video_stream_index;
    int             audio_device_index;
    int             audio_stream_index;

    char            *video_filename;
    char            *audio_filename;

    int             num_video_devices;

    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;

    AVCaptureDevice          *observed_device;
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
#endif
    int                      observed_quit;
} AVFContext;
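
/* The frame lock serializes access to current_frame and current_audio_frame,
 * which are written by the capture delegates on their dispatch queues and
 * consumed by the demuxing thread. */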
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}

/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;

        // start observing if a device is set for it
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (_context->observed_device) {
            NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
            NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;

            [_context->observed_device addObserver: self
                                        forKeyPath: keyPath
                                           options: options
                                           context: _context];
        }
#endif
    }
    return self;
}

- (void)dealloc {
    // stop observing if a device is set for it
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    if (_context->observed_device) {
        NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
        [_context->observed_device removeObserver: self forKeyPath: keyPath];
    }
#endif
    [super dealloc];
}

- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == _context) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        AVCaptureDeviceTransportControlsPlaybackMode mode =
            [change[NSKeyValueChangeNewKey] integerValue];

        if (mode != _context->observed_mode) {
            if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
                _context->observed_quit = 1;
            }
            _context->observed_mode = mode;
        }
#endif
    } else {
        [super observeValueForKeyPath: keyPath
                             ofObject: object
                               change: change
                              context: context];
    }
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end

static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->audio_output    release];
    [ctx->avf_delegate    release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }
}
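
/* Split the input URL of the form "[video device]:[audio device]" into
 * ctx->video_filename and ctx->audio_filename. */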
static void parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->url);
    char *save;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp,  ":", &save);
    }
}

/**
 * Configure the video device.
 *
 * Configure the video device using a run-time approach to access properties
 * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
 *
 * The NSUndefinedKeyException must be handled by the caller of this function.
 *
 */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs(framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                framerate);
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch (NSException *e) {
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
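
/* Add the selected video device (camera, muxed device or screen input) to the
 * capture session: create the capture input, configure frame rate and size,
 * negotiate a pixel format for the video data output and install the frame
 * receiver delegate. */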
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log(s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format               = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        pixel_format      = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict      = [NSDictionary dictionaryWithObject:pixel_format
                                                         forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // check for transport control support and set observer device if supported
    if (!ctx->video_is_screen) {
        int trans_ctrl = [video_device transportControlsSupported];
        AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

        if (trans_ctrl) {
            ctx->observed_mode   = trans_mode;
            ctx->observed_device = video_device;
        }
    }
#endif

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
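
/* Add the selected audio device to the capture session and install the
 * audio receiver delegate on its data output. */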
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}
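
/* Create the video stream and fill its parameters from the first captured
 * frame: raw video for CVImageBuffer frames, DV video for muxed block buffers. */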
static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CMBlockBufferRef block_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

    if (image_buffer) {
        image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

        stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->width      = (int)image_buffer_size.width;
        stream->codecpar->height     = (int)image_buffer_size.height;
        stream->codecpar->format     = ctx->pixel_format;
    } else {
        stream->codecpar->codec_id   = AV_CODEC_ID_DVVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->format     = ctx->pixel_format;
    }

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
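
/* Create the audio stream and derive its codec parameters from the
 * AudioStreamBasicDescription of the first captured audio buffer. */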
static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codecpar->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate    = basic_desc->mSampleRate;
    stream->codecpar->channels       = basic_desc->mChannelsPerFrame;
    stream->codecpar->channel_layout = av_get_default_channel_layout(stream->codecpar->channels);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}
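
/* Demuxer entry point: enumerate devices (and list them if requested), pick
 * the requested video/audio inputs, start the capture session and create the
 * corresponding streams. */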
static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
    uint32_t num_screens    = 0;
    AVFContext *ctx         = (AVFContext*)s->priv_data;
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;
    // Find capture device
    NSArray *devices       = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];

    ctx->num_video_devices = [devices count] + [devices_muxed count];
    ctx->first_pts         = av_gettime();
    ctx->first_audio_pts   = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        int index = 0;
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        for (AVCaptureDevice *device in devices_muxed) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices count] + [devices_muxed indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            if (ctx->video_device_index < [devices count]) {
                video_device = [devices objectAtIndex:ctx->video_device_index];
            } else {
                video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
                ctx->video_is_muxed = 1;
            }
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];

            if (ctx->framerate.num > 0) {
                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
            if (ctx->capture_cursor) {
                capture_screen_input.capturesCursor = YES;
            } else {
                capture_screen_input.capturesCursor = NO;
            }
#endif

            if (ctx->capture_mouse_clicks) {
                capture_screen_input.capturesMouseClicks = YES;
            } else {
                capture_screen_input.capturesMouseClicks = NO;
            }

            video_device = (AVCaptureDevice*) capture_screen_input;
            ctx->video_is_screen = 1;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }
            // looking for muxed inputs
            for (AVCaptureDevice *device in devices_muxed) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    ctx->video_is_muxed = 1;
                    break;
                }
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                    ctx->video_is_screen = 1;

                    if (ctx->framerate.num > 0) {
                        capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
                    }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
                    if (ctx->capture_cursor) {
                        capture_screen_input.capturesCursor = YES;
                    } else {
                        capture_screen_input.capturesCursor = NO;
                    }
#endif

                    if (ctx->capture_mouse_clicks) {
                        capture_screen_input.capturesMouseClicks = YES;
                    } else {
                        capture_screen_input.capturesMouseClicks = NO;
                    }
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }

    // Neither video nor audio capture device found, looking for AVMediaTypeVideo/Audio
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    if (audio_device && add_audio_device(s, audio_device)) {
    }

    [ctx->capture_session startRunning];

    /* Unlock device configuration only after the session is started so it
     * does not reset the capture formats */
    if (!ctx->video_is_screen) {
        [video_device unlockForConfiguration];
    }

    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}
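
/* Copy the (possibly planar) contents of a CVPixelBuffer into pkt->data,
 * honoring the per-plane row strides. */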
static int copy_cvpixelbuffer(AVFormatContext *s,
                              CVPixelBufferRef image_buffer,
                              AVPacket *pkt)
{
    AVFContext *ctx = s->priv_data;
    int src_linesize[4];
    const uint8_t *src_data[4];
    int width  = CVPixelBufferGetWidth(image_buffer);
    int height = CVPixelBufferGetHeight(image_buffer);
    int status;

    memset(src_linesize, 0, sizeof(src_linesize));
    memset(src_data, 0, sizeof(src_data));

    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
    if (status != kCVReturnSuccess) {
        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
        return AVERROR_EXTERNAL;
    }

    if (CVPixelBufferIsPlanar(image_buffer)) {
        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
        int i;
        for (i = 0; i < plane_count; i++) {
            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
            src_data[i]     = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
        }
    } else {
        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
        src_data[0]     = CVPixelBufferGetBaseAddress(image_buffer);
    }

    status = av_image_copy_to_buffer(pkt->data, pkt->size,
                                     src_data, src_linesize,
                                     ctx->pixel_format, width, height, 1);

    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    return status;
}
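
/* Return the most recently captured video or audio sample buffer as a packet,
 * timestamped with its presentation time; returns AVERROR(EAGAIN) while no new
 * buffer is available and AVERROR_EOF once an observed device stops playing. */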
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else {
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
            }
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0)
                return status;
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
                #define INTERLEAVE_OUTPUT(bps)                                         \
                {                                                                      \
                    int##bps##_t **src;                                                \
                    int##bps##_t *dest;                                                \
                    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*));      \
                    if (!src) return AVERROR(EIO);                                     \
                    for (c = 0; c < ctx->audio_channels; c++) {                        \
                        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                    }                                                                  \
                    dest  = (int##bps##_t*)pkt->data;                                  \
                    shift = bps - ctx->audio_bits_per_sample;                          \
                    for (sample = 0; sample < num_samples; sample++)                   \
                        for (c = 0; c < ctx->audio_channels; c++)                      \
                            *dest++ = src[c][sample] << shift;                         \
                    av_freep(&src);                                                    \
                }

                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            unlock_frames(ctx);
            if (ctx->observed_quit) {
                return AVERROR_EOF;
            } else {
                return AVERROR(EAGAIN);
            }
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}

static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};

static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

const AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};