/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */
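
/*
 * Illustrative usage (editorial note, not part of the original source): this
 * device is driven through the ffmpeg CLI with the "avfoundation" input
 * format. The option names below match the AVOption table at the end of this
 * file; the device names and indices depend on the local machine.
 *
 *   # list the available AVFoundation video and audio devices
 *   ffmpeg -f avfoundation -list_devices true -i ""
 *
 *   # capture video device 0 together with audio device 0
 *   ffmpeg -f avfoundation -i "0:0" out.mkv
 *
 *   # capture the default video device only, requesting a pixel format
 *   ffmpeg -f avfoundation -pixel_format uyvy422 -i "default:none" out.mkv
 */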
#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/avstring.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/time.h"
#include "avdevice.h"

static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType avf_id;
};

static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
typedef struct
{
    AVClass*        class;

    int             frames_captured;
    int             audio_frames_captured;
    int64_t         first_pts;
    int64_t         first_audio_pts;
    pthread_mutex_t frame_lock;
    pthread_cond_t  frame_wait_cond;
    id              avf_delegate;
    id              avf_audio_delegate;

    int             list_devices;
    int             video_device_index;
    int             video_stream_index;
    int             audio_device_index;
    int             audio_stream_index;

    char            *video_filename;
    char            *audio_filename;

    int             num_video_devices;

    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;
} AVFContext;
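
/*
 * Editorial note on the threading model (comment added for clarity, not part
 * of the original source): the AVFFrameReceiver and AVFAudioReceiver
 * delegates below run on private dispatch queues. Each stores the most
 * recent CMSampleBufferRef in current_frame/current_audio_frame while
 * holding frame_lock and then signals frame_wait_cond; avf_read_packet()
 * consumes the buffer under the same lock, so only the latest pending sample
 * of each kind is kept.
 */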
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end
/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end
static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->audio_output    release];
    [ctx->avf_delegate    release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }
}
static void parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->filename);
    char *save;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp,  ":", &save);
    }
}
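
/*
 * Illustrative examples of the filename syntax handled above and later in
 * avf_read_header() (comment added for clarity, not part of the original
 * source):
 *
 *   "FaceTime HD Camera:Built-in Microphone"  -> video and audio by name
 *   "0:0"                                     -> video and audio by index
 *   "default:none"                            -> default video, no audio
 *   ":1"                                      -> audio only (leading ':')
 */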
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // select pixel format
    struct AVFPixelFormatSpec pxl_fmt_spec;
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    ctx->pixel_format          = pxl_fmt_spec.ff_id;
    NSNumber *pixel_format     = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
    NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                              forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setVideoSettings:capture_dict];
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:YES];

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    dispatch_queue_t queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}
static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    CGSize image_buffer_size      = CVImageBufferGetEncodedSize(image_buffer);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)image_buffer_size.width;
    stream->codec->height     = (int)image_buffer_size.height;
    stream->codec->pix_fmt    = ctx->pixel_format;

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    CMFormatDescriptionRef format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codec->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codec->sample_rate    = basic_desc->mSampleRate;
    stream->codec->channels       = basic_desc->mChannelsPerFrame;
    stream->codec->channel_layout = av_get_default_channel_layout(stream->codec->channels);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_packed) {
        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else {
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}
static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
    AVFContext *ctx          = (AVFContext*)s->priv_data;
    ctx->first_pts           = av_gettime();
    ctx->first_audio_pts     = av_gettime();
    uint32_t num_screens     = 0;

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        int index = 0;
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
            index++;
        }
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", index + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;

    NSArray *video_devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    ctx->num_video_devices = [video_devices count];

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            video_device = [video_devices objectAtIndex:ctx->video_device_index];
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
            video_device = (AVCaptureDevice*) capture_screen_input;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in video_devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }

    // Fail if neither a video nor an audio capture device was found
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }

    // add audio device
    if (audio_device && add_audio_device(s, audio_device)) {
    }

    [ctx->capture_session startRunning];

    // set video stream
    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        lock_frames(ctx);

        CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) {
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
                                               AV_TIME_BASE_Q,
                                               avf_time_base_q);
            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(image_buffer, 0);

            void* data = CVPixelBufferGetBaseAddress(image_buffer);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(image_buffer, 0);
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_audio_pts,
                                               AV_TIME_BASE_Q,
                                               avf_time_base_q);

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }

                int num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
                #define INTERLEAVE_OUTPUT(bps)                                         \
                {                                                                      \
                    int##bps##_t **src;                                                \
                    int##bps##_t *dest;                                                \
                    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*));      \
                    if (!src) return AVERROR(EIO);                                     \
                    for (c = 0; c < ctx->audio_channels; c++) {                        \
                        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                    }                                                                  \
                    dest  = (int##bps##_t*)pkt->data;                                  \
                    shift = bps - ctx->audio_bits_per_sample;                          \
                    for (sample = 0; sample < num_samples; sample++)                   \
                        for (c = 0; c < ctx->audio_channels; c++)                      \
                            *dest++ = src[c][sample] << shift;                         \
                    av_freep(&src);                                                    \
                }

                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
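
/*
 * Worked example for INTERLEAVE_OUTPUT in avf_read_packet() (comment added
 * for clarity, not part of the original source): with 2 channels and 3
 * samples per channel, the planar data L0 L1 L2 R0 R1 R2 copied into
 * ctx->audio_buffer is rewritten into pkt->data as L0 R0 L1 R1 L2 R2, each
 * sample shifted left by (bps - audio_bits_per_sample) bits so it is
 * left-justified in the 16- or 32-bit output word.
 */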
static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}
static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true",  "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
static const AVClass avf_class = {
    .class_name = "AVFoundation input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};