/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */
#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/avstring.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/parseutils.h"
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"
static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};

struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType             avf_id;
};
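
/* Mapping of FFmpeg pixel formats to the CoreVideo FourCC codes used by
 * AVFoundation. add_video_device() scans this table linearly, both to look
 * up the user-selected format and to enumerate what the device offers. */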
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
typedef struct
{
    AVClass*        class;

    int             frames_captured;
    int             audio_frames_captured;
    int64_t         first_pts;
    int64_t         first_audio_pts;
    pthread_mutex_t frame_lock;
    pthread_cond_t  frame_wait_cond;
    id              avf_delegate;
    id              avf_audio_delegate;

    AVRational      framerate;
    int             width, height;

    int             capture_cursor;
    int             capture_mouse_clicks;
    int             capture_raw_data;
    int             drop_late_frames;
    int             video_is_muxed;

    int             list_devices;
    int             video_device_index;
    int             video_stream_index;
    int             audio_device_index;
    int             audio_stream_index;

    char            *video_filename;
    char            *audio_filename;

    int             num_video_devices;

    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;
} AVFContext;
static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
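
/* Note on threading: capture is a small producer/consumer pipeline. The
 * delegate callbacks below run on private dispatch queues; each one retains
 * the newest sample buffer in the context and signals frame_wait_cond, while
 * avf_read_packet() waits on that condition under frame_lock and consumes the
 * buffer. Only the most recent frame is kept, so older frames are dropped if
 * the reader lags behind the device. */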
/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end
/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end
static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->audio_output    release];
    [ctx->avf_delegate    release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }

    /* Also release the last retained audio sample buffer, otherwise it leaks
     * when the context is torn down between audio frames. */
    if (ctx->current_audio_frame) {
        CFRelease(ctx->current_audio_frame);
    }
}
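
/* The input "filename" has the form "<video>:<audio>", where each part may be
 * a device index, a device-name prefix, "default" or "none" (see the FFmpeg
 * documentation for this device), e.g.:
 *
 *   ffmpeg -f avfoundation -list_devices true -i ""
 *   ffmpeg -f avfoundation -i "0:0" out.mkv
 *   ffmpeg -f avfoundation -i "default:none" out.mkv
 *   ffmpeg -f avfoundation -i "Capture screen 0:none" out.mkv
 */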
static void parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->url);
    char *save;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp, ":", &save);
    }
}
/**
 * Configure the video device.
 *
 * Configure the video device using a run-time approach to access properties,
 * since formats and activeFormat are only available since iOS >= 7.0 or
 * OSX >= 10.7, and activeVideoMaxFrameDuration is only available since
 * iOS >= 7.0 and OSX >= 10.9.
 *
 * The NSUndefinedKeyException must be handled by the caller of this function.
 *
 */
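/* (Presumably equivalent, on new enough SDKs, to assigning the properties
 * directly: device.activeFormat = fmt and device.activeVideoMinFrameDuration
 * = dur; the KVC indirection merely avoids referencing symbols that older
 * SDK headers do not declare.) */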
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs(framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                   ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                   framerate);
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                   dimensions.width, dimensions.height,
                   min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log(s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        // log the format that was actually requested, not the NONE sentinel
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(ctx->pixel_format));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}
static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CMBlockBufferRef block_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

    if (image_buffer) {
        image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

        stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->width      = (int)image_buffer_size.width;
        stream->codecpar->height     = (int)image_buffer_size.height;
        stream->codecpar->format     = ctx->pixel_format;
    } else {
        stream->codecpar->codec_id   = AV_CODEC_ID_DVVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->format     = ctx->pixel_format;
    }

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
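
/* Note: if the device delivers no CVImageBuffer (e.g. a muxed DV camera),
 * the stream is announced as AV_CODEC_ID_DVVIDEO and avf_read_packet() below
 * passes the raw block-buffer bytes through unchanged. */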
static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        unlock_frames(ctx); // never return with the frame mutex held
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codecpar->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate    = basic_desc->mSampleRate;
    stream->codecpar->channels       = basic_desc->mChannelsPerFrame;
    stream->codecpar->channel_layout = av_get_default_channel_layout(stream->codecpar->channels);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        unlock_frames(ctx); // never return with the frame mutex held
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            unlock_frames(ctx); // never return with the frame mutex held
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}
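
/* Note: for non-interleaved (planar) PCM the device delivers each channel's
 * samples contiguously; audio_buffer is a same-sized scratch area that
 * avf_read_packet() fills and then interleaves into the output packet. */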
static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int capture_screen      = 0;
    uint32_t num_screens    = 0;
    AVFContext *ctx         = (AVFContext*)s->priv_data;
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;
    // Find capture device
    NSArray *devices       = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];

    ctx->num_video_devices = [devices count] + [devices_muxed count];
    ctx->first_pts         = av_gettime();
    ctx->first_audio_pts   = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        int index = 0;
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        for (AVCaptureDevice *device in devices_muxed) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices count] + [devices_muxed indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            if (ctx->video_device_index < [devices count]) {
                video_device = [devices objectAtIndex:ctx->video_device_index];
            } else {
                video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
                ctx->video_is_muxed = 1;
            }
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];

            if (ctx->framerate.num > 0) {
                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
            if (ctx->capture_cursor) {
                capture_screen_input.capturesCursor = YES;
            } else {
                capture_screen_input.capturesCursor = NO;
            }
#endif

            if (ctx->capture_mouse_clicks) {
                capture_screen_input.capturesMouseClicks = YES;
            } else {
                capture_screen_input.capturesMouseClicks = NO;
            }

            video_device = (AVCaptureDevice*) capture_screen_input;
            capture_screen = 1;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }
            // looking for muxed inputs
            for (AVCaptureDevice *device in devices_muxed) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    ctx->video_is_muxed = 1;
                    break;
                }
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                    capture_screen = 1;

                    if (ctx->framerate.num > 0) {
                        capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
                    }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
                    if (ctx->capture_cursor) {
                        capture_screen_input.capturesCursor = YES;
                    } else {
                        capture_screen_input.capturesCursor = NO;
                    }
#endif

                    if (ctx->capture_mouse_clicks) {
                        capture_screen_input.capturesMouseClicks = YES;
                    } else {
                        capture_screen_input.capturesMouseClicks = NO;
                    }
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }
    // fail if neither a video nor an audio device could be found
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    if (audio_device && add_audio_device(s, audio_device)) {
    }

    [ctx->capture_session startRunning];

    /* Unlock device configuration only after the session is started so it
     * does not reset the capture formats */
    if (!capture_screen) {
        [video_device unlockForConfiguration];
    }

    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}
static int copy_cvpixelbuffer(AVFormatContext *s,
                              CVPixelBufferRef image_buffer,
                              AVPacket *pkt)
{
    AVFContext *ctx = s->priv_data;
    int src_linesize[4];
    const uint8_t *src_data[4];
    int width  = CVPixelBufferGetWidth(image_buffer);
    int height = CVPixelBufferGetHeight(image_buffer);
    int status;

    memset(src_linesize, 0, sizeof(src_linesize));
    memset(src_data, 0, sizeof(src_data));

    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
    if (status != kCVReturnSuccess) {
        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
        return AVERROR_EXTERNAL;
    }

    if (CVPixelBufferIsPlanar(image_buffer)) {
        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
        int i;
        for (i = 0; i < plane_count; i++) {
            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
            src_data[i]     = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
        }
    } else {
        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
        src_data[0]     = CVPixelBufferGetBaseAddress(image_buffer);
    }

    status = av_image_copy_to_buffer(pkt->data, pkt->size,
                                     src_data, src_linesize,
                                     ctx->pixel_format, width, height, 1);

    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    return status;
}
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else {
                unlock_frames(ctx); // never return with the frame mutex held
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index = ctx->video_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
            }
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0) {
                unlock_frames(ctx);
                return status;
            }
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                unlock_frames(ctx);
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index = ctx->audio_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
#define INTERLEAVE_OUTPUT(bps)                                         \
{                                                                      \
    int##bps##_t **src;                                                \
    int##bps##_t *dest;                                                \
    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*));      \
    if (!src) {                                                        \
        unlock_frames(ctx);                                            \
        return AVERROR(EIO);                                           \
    }                                                                  \
                                                                       \
    for (c = 0; c < ctx->audio_channels; c++) {                        \
        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
    }                                                                  \
                                                                       \
    dest  = (int##bps##_t*)pkt->data;                                  \
    shift = bps - ctx->audio_bits_per_sample;                          \
                                                                       \
    for (sample = 0; sample < num_samples; sample++)                   \
        for (c = 0; c < ctx->audio_channels; c++)                      \
            *dest++ = src[c][sample] << shift;                         \
                                                                       \
    av_freep(&src);                                                    \
}
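
                /* Example: for 2 channels of 24-bit samples carried in 32-bit
                 * containers, src[0] and src[1] point at the two channel
                 * planes inside audio_buffer and the loops emit L0 R0 L1 R1
                 * ..., shifting each sample left by 8 bits so it occupies the
                 * most significant bits of its container. */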
                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx); // never return with the frame mutex held
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}
static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
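
/* Note: "framerate" defaults to "ntsc" (30000/1001 fps) and "pixel_format"
 * to yuv420p. "drop_late_frames" defaults to on, which maps to
 * alwaysDiscardsLateVideoFrames above: a slow reader skips frames instead of
 * stalling the capture session. */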
static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};