You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

307 lines
8.5KB

  1. /*
  2. * QTKit input device
  3. * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * QTKit input device
  24. * @author Vadim Kalinsky <vadim@kalinsky.ru>
  25. */
#import <QTKit/QTKit.h>

#include <pthread.h>
#include <string.h>

#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavformat/internal.h"
#include "avdevice.h"
  34. #define QTKIT_TIMEBASE 100
  35. static const AVRational kQTKitTimeBase_q = {
  36. .num = 1,
  37. .den = QTKIT_TIMEBASE
  38. };
  39. typedef struct
  40. {
  41. AVClass* class;
  42. float frame_rate;
  43. int frames_captured;
  44. int64_t first_pts;
  45. pthread_mutex_t frame_lock;
  46. pthread_cond_t frame_wait_cond;
  47. id qt_delegate;
  48. QTCaptureSession* capture_session;
  49. QTCaptureDecompressedVideoOutput* video_output;
  50. CVImageBufferRef current_frame;
  51. } CaptureContext;
  52. static void lock_frames(CaptureContext* ctx)
  53. {
  54. pthread_mutex_lock(&ctx->frame_lock);
  55. }
  56. static void unlock_frames(CaptureContext* ctx)
  57. {
  58. pthread_mutex_unlock(&ctx->frame_lock);
  59. }
  60. /** FrameReciever class - delegate for QTCaptureSession
  61. */
  62. @interface FFMPEG_FrameReceiver : NSObject
  63. {
  64. CaptureContext* _context;
  65. }
  66. - (id)initWithContext:(CaptureContext*)context;
  67. - (void)captureOutput:(QTCaptureOutput *)captureOutput
  68. didOutputVideoFrame:(CVImageBufferRef)videoFrame
  69. withSampleBuffer:(QTSampleBuffer *)sampleBuffer
  70. fromConnection:(QTCaptureConnection *)connection;
  71. @end
  72. @implementation FFMPEG_FrameReceiver
  73. - (id)initWithContext:(CaptureContext*)context
  74. {
  75. if (self = [super init]) {
  76. _context = context;
  77. }
  78. return self;
  79. }
  80. - (void)captureOutput:(QTCaptureOutput *)captureOutput
  81. didOutputVideoFrame:(CVImageBufferRef)videoFrame
  82. withSampleBuffer:(QTSampleBuffer *)sampleBuffer
  83. fromConnection:(QTCaptureConnection *)connection
  84. {
  85. lock_frames(_context);
  86. if (_context->current_frame != nil) {
  87. CVBufferRelease(_context->current_frame);
  88. }
  89. _context->current_frame = CVBufferRetain(videoFrame);
  90. pthread_cond_signal(&_context->frame_wait_cond);
  91. unlock_frames(_context);
  92. ++_context->frames_captured;
  93. }
  94. @end
  95. static void destroy_context(CaptureContext* ctx)
  96. {
  97. [ctx->capture_session stopRunning];
  98. [ctx->capture_session release];
  99. [ctx->video_output release];
  100. [ctx->qt_delegate release];
  101. ctx->capture_session = NULL;
  102. ctx->video_output = NULL;
  103. ctx->qt_delegate = NULL;
  104. pthread_mutex_destroy(&ctx->frame_lock);
  105. pthread_cond_destroy(&ctx->frame_wait_cond);
  106. if (ctx->current_frame)
  107. CVBufferRelease(ctx->current_frame);
  108. }
  109. static int qtkit_read_header(AVFormatContext *s)
  110. {
  111. NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
  112. CaptureContext* ctx = (CaptureContext*)s->priv_data;
  113. ctx->first_pts = av_gettime();
  114. pthread_mutex_init(&ctx->frame_lock, NULL);
  115. pthread_cond_init(&ctx->frame_wait_cond, NULL);
  116. // Find default capture device
  117. QTCaptureDevice *video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
  118. BOOL success = [video_device open:nil];
  119. // Video capture device not found, looking for QTMediaTypeVideo
  120. if (!success) {
  121. video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
  122. success = [video_device open:nil];
  123. if (!success) {
  124. av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
  125. goto fail;
  126. }
  127. }
  128. NSString* dev_display_name = [video_device localizedDisplayName];
  129. av_log (s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);
  130. // Initialize capture session
  131. ctx->capture_session = [[QTCaptureSession alloc] init];
  132. QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
  133. success = [ctx->capture_session addInput:capture_dev_input error:nil];
  134. if (!success) {
  135. av_log (s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
  136. goto fail;
  137. }
  138. // Attaching output
  139. // FIXME: Allow for a user defined pixel format
  140. ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];
  141. NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
  142. [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
  143. forKey:(id)kCVPixelBufferPixelFormatTypeKey];
  144. [ctx->video_output setPixelBufferAttributes:captureDictionary];
  145. ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];
  146. [ctx->video_output setDelegate:ctx->qt_delegate];
  147. [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
  148. [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];
  149. success = [ctx->capture_session addOutput:ctx->video_output error:nil];
  150. if (!success) {
  151. av_log (s, AV_LOG_ERROR, "can't add video output to capture session\n");
  152. goto fail;
  153. }
  154. [ctx->capture_session startRunning];
  155. // Take stream info from the first frame.
  156. while (ctx->frames_captured < 1) {
  157. CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
  158. }
  159. lock_frames(ctx);
  160. AVStream* stream = avformat_new_stream(s, NULL);
  161. if (!stream) {
  162. goto fail;
  163. }
  164. avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE);
  165. stream->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
  166. stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
  167. stream->codec->width = (int)CVPixelBufferGetWidth (ctx->current_frame);
  168. stream->codec->height = (int)CVPixelBufferGetHeight(ctx->current_frame);
  169. stream->codec->pix_fmt = AV_PIX_FMT_RGB24;
  170. CVBufferRelease(ctx->current_frame);
  171. ctx->current_frame = nil;
  172. unlock_frames(ctx);
  173. [pool release];
  174. return 0;
  175. fail:
  176. [pool release];
  177. destroy_context(ctx);
  178. return AVERROR(EIO);
  179. }
  180. static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
  181. {
  182. CaptureContext* ctx = (CaptureContext*)s->priv_data;
  183. do {
  184. lock_frames(ctx);
  185. if (ctx->current_frame != nil) {
  186. if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
  187. return AVERROR(EIO);
  188. }
  189. pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
  190. pkt->stream_index = 0;
  191. pkt->flags |= AV_PKT_FLAG_KEY;
  192. CVPixelBufferLockBaseAddress(ctx->current_frame, 0);
  193. void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
  194. memcpy(pkt->data, data, pkt->size);
  195. CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
  196. CVBufferRelease(ctx->current_frame);
  197. ctx->current_frame = nil;
  198. } else {
  199. pkt->data = NULL;
  200. pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
  201. }
  202. unlock_frames(ctx);
  203. } while (!pkt->data);
  204. return 0;
  205. }
  206. static int qtkit_close(AVFormatContext *s)
  207. {
  208. CaptureContext* ctx = (CaptureContext*)s->priv_data;
  209. destroy_context(ctx);
  210. return 0;
  211. }
  212. static const AVOption options[] = {
  213. { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_TYPE_VIDEO_RATE, NULL },
  214. { NULL },
  215. };
  216. static const AVClass qtkit_class = {
  217. .class_name = "QTKit input device",
  218. .item_name = av_default_item_name,
  219. .option = options,
  220. .version = LIBAVUTIL_VERSION_INT,
  221. };
  222. AVInputFormat ff_qtkit_demuxer = {
  223. .name = "qtkit",
  224. .long_name = NULL_IF_CONFIG_SMALL("QTKit input device"),
  225. .priv_data_size = sizeof(CaptureContext),
  226. .read_header = qtkit_read_header,
  227. .read_packet = qtkit_read_packet,
  228. .read_close = qtkit_close,
  229. .flags = AVFMT_NOFILE,
  230. .priv_class = &qtkit_class,
  231. };