/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */
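
/*
 * Illustrative invocations (see the FFmpeg documentation for the
 * authoritative list of options):
 *
 *   # list available AVFoundation devices
 *   ffmpeg -f avfoundation -list_devices true -i ""
 *
 *   # capture video device 0 and audio device 0 by index
 *   ffmpeg -f avfoundation -framerate 30 -i "0:0" out.mkv
 *
 *   # capture the default video device only, no audio
 *   ffmpeg -f avfoundation -i "default:none" out.mkv
 */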

#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/avstring.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/parseutils.h"
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"

static const int avf_time_base = 1000000;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};

struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType avf_id;
};
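
/* Mapping between FFmpeg pixel formats and the corresponding CoreVideo
 * pixel format types, terminated by an AV_PIX_FMT_NONE sentinel. */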
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};

typedef struct
{
    AVClass*        class;

    int             frames_captured;
    int             audio_frames_captured;
    int64_t         first_pts;
    int64_t         first_audio_pts;
    pthread_mutex_t frame_lock;
    id              avf_delegate;
    id              avf_audio_delegate;

    AVRational      framerate;
    int             width, height;

    int             capture_cursor;
    int             capture_mouse_clicks;
    int             capture_raw_data;
    int             drop_late_frames;
    int             video_is_muxed;

    int             list_devices;
    int             video_device_index;
    int             video_stream_index;
    int             audio_device_index;
    int             audio_stream_index;

    char            *video_filename;
    char            *audio_filename;

    int             num_video_devices;

    int             audio_channels;
    int             audio_bits_per_sample;
    int             audio_float;
    int             audio_be;
    int             audio_signed_integer;
    int             audio_packed;
    int             audio_non_interleaved;

    int32_t         *audio_buffer;
    int             audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;

    AVCaptureDevice          *observed_device;
    AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
    int                       observed_quit;
} AVFContext;

static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
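
/* frame_lock guards current_frame and current_audio_frame, which are
 * written by the capture delegates below on AVFoundation's dispatch
 * queues and consumed by the demuxer thread in avf_read_packet(). */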

/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;

        // start observing if a device is set for it
        if (_context->observed_device) {
            NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
            NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;

            [_context->observed_device addObserver: self
                                        forKeyPath: keyPath
                                           options: options
                                           context: _context];
        }
    }
    return self;
}

- (void)dealloc {
    // stop observing if a device is set for it
    NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
    [_context->observed_device removeObserver: self forKeyPath: keyPath];
    [super dealloc];
}

- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == _context) {
        AVCaptureDeviceTransportControlsPlaybackMode mode =
            [change[NSKeyValueChangeNewKey] integerValue];

        if (mode != _context->observed_mode) {
            if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
                _context->observed_quit = 1;
            }
            _context->observed_mode = mode;
        }
    } else {
        [super observeValueForKeyPath: keyPath
                             ofObject: object
                               change: change
                              context: context];
    }
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)  captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end

static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->audio_output    release];
    [ctx->avf_delegate    release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }

    // also release a pending audio frame so it does not leak
    if (ctx->current_audio_frame) {
        CFRelease(ctx->current_audio_frame);
    }
}
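
/* The input "filename" selects the devices to open. Its general form is
 * "<video device>:<audio device>", where each part may be a device index,
 * a device name prefix, "default", or "none" to disable that stream,
 * e.g. "0:1", "default:none", or ":0" for audio only. */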
static void parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->url);
    char *save;

    // guard against allocation failure; both filenames stay NULL then
    if (!tmp)
        return;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp,  ":", &save);
    }
}

/**
 * Configure the video device.
 *
 * Configure the video device using a run-time approach to access properties
 * since formats and activeFormat are only available since iOS >= 7.0 or
 * OS X >= 10.7, and activeVideoMaxFrameDuration is available since
 * iOS >= 7.0 and OS X >= 10.9.
 *
 * The NSUndefinedKeyException must be handled by the caller of this function.
 *
 */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs(framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                   ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                   framerate);
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                   dimensions.width, dimensions.height,
                   min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
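
/* The selection logic above is driven by the user-visible demuxer options
 * declared at the bottom of this file; an illustrative invocation would be:
 *
 *   ffmpeg -f avfoundation -video_size 1280x720 -framerate 30 -i "0:none" out.mkv
 *
 * where -video_size must match one of the device formats and -framerate one
 * of that format's supported frame rate ranges. */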

static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error  = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log(s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(ctx->pixel_format));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

    // check for transport control support and set observer device if supported
    int trans_ctrl = [video_device transportControlsSupported];
    AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

    if (trans_ctrl) {
        ctx->observed_mode   = trans_mode;
        ctx->observed_device = video_device;
    }

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}

static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error  = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }

    return 0;
}

static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CMBlockBufferRef block_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

    if (image_buffer) {
        image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

        stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->width      = (int)image_buffer_size.width;
        stream->codecpar->height     = (int)image_buffer_size.height;
        stream->codecpar->format     = ctx->pixel_format;
    } else {
        stream->codecpar->codec_id   = AV_CODEC_ID_DVVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->format     = ctx->pixel_format;
    }

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        unlock_frames(ctx);   // do not leave the mutex held on the error path
        return 1;
    }

    stream->codecpar->codec_type     = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate    = basic_desc->mSampleRate;
    stream->codecpar->channels       = basic_desc->mChannelsPerFrame;
    stream->codecpar->channel_layout = av_get_default_channel_layout(stream->codecpar->channels);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        unlock_frames(ctx);
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            unlock_frames(ctx);
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static int avf_read_header(AVFormatContext *s)
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int capture_screen   = 0;
    uint32_t num_screens = 0;
    AVFContext *ctx      = (AVFContext*)s->priv_data;
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;
    // Find capture device
    NSArray *devices       = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];

    ctx->num_video_devices = [devices count] + [devices_muxed count];
    ctx->first_pts         = av_gettime();
    ctx->first_audio_pts   = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        int index = 0;
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        for (AVCaptureDevice *device in devices_muxed) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices count] + [devices_muxed indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            if (ctx->video_device_index < [devices count]) {
                video_device = [devices objectAtIndex:ctx->video_device_index];
            } else {
                video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
                ctx->video_is_muxed = 1;
            }
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];

            if (ctx->framerate.num > 0) {
                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
            if (ctx->capture_cursor) {
                capture_screen_input.capturesCursor = YES;
            } else {
                capture_screen_input.capturesCursor = NO;
            }
#endif

            if (ctx->capture_mouse_clicks) {
                capture_screen_input.capturesMouseClicks = YES;
            } else {
                capture_screen_input.capturesMouseClicks = NO;
            }

            video_device = (AVCaptureDevice*) capture_screen_input;
            capture_screen = 1;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }
            // looking for muxed inputs
            for (AVCaptureDevice *device in devices_muxed) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    ctx->video_is_muxed = 1;
                    break;
                }
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                    capture_screen = 1;

                    if (ctx->framerate.num > 0) {
                        capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
                    }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
                    if (ctx->capture_cursor) {
                        capture_screen_input.capturesCursor = YES;
                    } else {
                        capture_screen_input.capturesCursor = NO;
                    }
#endif

                    if (ctx->capture_mouse_clicks) {
                        capture_screen_input.capturesMouseClicks = YES;
                    } else {
                        capture_screen_input.capturesMouseClicks = NO;
                    }
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }

    // fail if neither a video nor an audio capture device was found
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    if (audio_device && add_audio_device(s, audio_device)) {
        goto fail;   // do not continue with an audio device that could not be attached
    }

    [ctx->capture_session startRunning];

    /* Unlock device configuration only after the session is started so it
     * does not reset the capture formats */
    if (!capture_screen) {
        [video_device unlockForConfiguration];
    }

    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}
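
/* Copy a CVPixelBuffer into the flat packet buffer. Collects the base
 * address and bytes-per-row of each plane (or of the single packed plane)
 * and hands them to av_image_copy_to_buffer(). */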
static int copy_cvpixelbuffer(AVFormatContext *s,
                              CVPixelBufferRef image_buffer,
                              AVPacket *pkt)
{
    AVFContext *ctx = s->priv_data;
    int src_linesize[4];
    const uint8_t *src_data[4];
    int width  = CVPixelBufferGetWidth(image_buffer);
    int height = CVPixelBufferGetHeight(image_buffer);
    int status;

    memset(src_linesize, 0, sizeof(src_linesize));
    memset(src_data, 0, sizeof(src_data));

    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
    if (status != kCVReturnSuccess) {
        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
        return AVERROR_EXTERNAL;
    }

    if (CVPixelBufferIsPlanar(image_buffer)) {
        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
        int i;
        for (i = 0; i < plane_count; i++) {
            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
            src_data[i]     = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
        }
    } else {
        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
        src_data[0]     = CVPixelBufferGetBaseAddress(image_buffer);
    }

    status = av_image_copy_to_buffer(pkt->data, pkt->size,
                                     src_data, src_linesize,
                                     ctx->pixel_format, width, height, 1);

    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    return status;
}

static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else {
                unlock_frames(ctx);   // release the lock on every error path
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
            }
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0) {
                unlock_frames(ctx);
                return status;
            }
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                unlock_frames(ctx);
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
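                /* The non-interleaved buffer stores each channel's samples
                 * contiguously (all of channel 0, then all of channel 1, ...).
                 * The macro below re-packs them sample-by-sample into the
                 * interleaved packet layout; the left shift widens samples
                 * whose container is larger than their significant bits,
                 * e.g. 24-bit samples carried in 32-bit words. */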
#define INTERLEAVE_OUTPUT(bps)                                         \
            {                                                          \
                int##bps##_t **src;                                    \
                int##bps##_t *dest;                                    \
                src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
                if (!src) {                                            \
                    unlock_frames(ctx);                                \
                    return AVERROR(EIO);                               \
                }                                                      \
                                                                       \
                for (c = 0; c < ctx->audio_channels; c++) {            \
                    src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                }                                                      \
                                                                       \
                dest  = (int##bps##_t*)pkt->data;                      \
                shift = bps - ctx->audio_bits_per_sample;              \
                                                                       \
                for (sample = 0; sample < num_samples; sample++)       \
                    for (c = 0; c < ctx->audio_channels; c++)          \
                        *dest++ = src[c][sample] << shift;             \
                                                                       \
                av_freep(&src);                                        \
            }

                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            unlock_frames(ctx);

            if (ctx->observed_quit) {
                return AVERROR_EOF;
            } else {
                return AVERROR(EAGAIN);
            }
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}

static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};

static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};