/*
 * VFW capture interface
 * Copyright (c) 2006-2008 Ramiro Polla
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "libavformat/avformat.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include <windows.h>
#include <vfw.h>

//#define DEBUG_VFW

/* Defines for VFW missing from MinGW.
 * Remove this when MinGW incorporates them. */
#define HWND_MESSAGE ((HWND)-3)

#define BI_RGB 0

/* End of missing MinGW defines */

struct vfw_ctx {
    const AVClass *class;
    HWND hwnd;
    HANDLE mutex;
    HANDLE event;
    AVPacketList *pktl;
    unsigned int curbufsize;
    unsigned int frame_num;
    char *video_size;  /**< A string describing video size, set by a private option. */
    char *framerate;   /**< Set by a private option. */
};
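
/* Map a DIB compression tag / bit-count pair reported by the capture driver
 * to the corresponding libavcodec pixel format; returns PIX_FMT_NONE for
 * compressed or unknown formats. */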
static enum PixelFormat vfw_pixfmt(DWORD biCompression, WORD biBitCount)
{
    switch(biCompression) {
    case MKTAG('U', 'Y', 'V', 'Y'):
        return PIX_FMT_UYVY422;
    case MKTAG('Y', 'U', 'Y', '2'):
        return PIX_FMT_YUYV422;
    case MKTAG('I', '4', '2', '0'):
        return PIX_FMT_YUV420P;
    case BI_RGB:
        switch(biBitCount) { /* 1-8 are untested */
        case 1:
            return PIX_FMT_MONOWHITE;
        case 4:
            return PIX_FMT_RGB4;
        case 8:
            return PIX_FMT_RGB8;
        case 16:
            return PIX_FMT_RGB555;
        case 24:
            return PIX_FMT_BGR24;
        case 32:
            return PIX_FMT_RGB32;
        }
    }
    return PIX_FMT_NONE;
}

static enum CodecID vfw_codecid(DWORD biCompression)
{
    switch(biCompression) {
    case MKTAG('d', 'v', 's', 'd'):
        return CODEC_ID_DVVIDEO;
    case MKTAG('M', 'J', 'P', 'G'):
    case MKTAG('m', 'j', 'p', 'g'):
        return CODEC_ID_MJPEG;
    }
    return CODEC_ID_NONE;
}
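
/* Debug helpers: dump the Win32 capture structures (CAPTUREPARMS, VIDEOHDR,
 * BITMAPINFOHEADER) field by field at AV_LOG_DEBUG level. */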
#define dstruct(pctx, sname, var, type) \
    av_log(pctx, AV_LOG_DEBUG, #var":\t%"type"\n", sname->var)

static void dump_captureparms(AVFormatContext *s, CAPTUREPARMS *cparms)
{
    av_log(s, AV_LOG_DEBUG, "CAPTUREPARMS\n");
    dstruct(s, cparms, dwRequestMicroSecPerFrame, "lu");
    dstruct(s, cparms, fMakeUserHitOKToCapture, "d");
    dstruct(s, cparms, wPercentDropForError, "u");
    dstruct(s, cparms, fYield, "d");
    dstruct(s, cparms, dwIndexSize, "lu");
    dstruct(s, cparms, wChunkGranularity, "u");
    dstruct(s, cparms, fUsingDOSMemory, "d");
    dstruct(s, cparms, wNumVideoRequested, "u");
    dstruct(s, cparms, fCaptureAudio, "d");
    dstruct(s, cparms, wNumAudioRequested, "u");
    dstruct(s, cparms, vKeyAbort, "u");
    dstruct(s, cparms, fAbortLeftMouse, "d");
    dstruct(s, cparms, fAbortRightMouse, "d");
    dstruct(s, cparms, fLimitEnabled, "d");
    dstruct(s, cparms, wTimeLimit, "u");
    dstruct(s, cparms, fMCIControl, "d");
    dstruct(s, cparms, fStepMCIDevice, "d");
    dstruct(s, cparms, dwMCIStartTime, "lu");
    dstruct(s, cparms, dwMCIStopTime, "lu");
    dstruct(s, cparms, fStepCaptureAt2x, "d");
    dstruct(s, cparms, wStepCaptureAverageFrames, "u");
    dstruct(s, cparms, dwAudioBufferSize, "lu");
    dstruct(s, cparms, fDisableWriteCache, "d");
    dstruct(s, cparms, AVStreamMaster, "u");
}

static void dump_videohdr(AVFormatContext *s, VIDEOHDR *vhdr)
{
#ifdef DEBUG_VFW
    av_log(s, AV_LOG_DEBUG, "VIDEOHDR\n");
    dstruct(s, vhdr, lpData, "p");
    dstruct(s, vhdr, dwBufferLength, "lu");
    dstruct(s, vhdr, dwBytesUsed, "lu");
    dstruct(s, vhdr, dwTimeCaptured, "lu");
    dstruct(s, vhdr, dwUser, "lu");
    dstruct(s, vhdr, dwFlags, "lu");
    dstruct(s, vhdr, dwReserved[0], "lu");
    dstruct(s, vhdr, dwReserved[1], "lu");
    dstruct(s, vhdr, dwReserved[2], "lu");
    dstruct(s, vhdr, dwReserved[3], "lu");
#endif
}

static void dump_bih(AVFormatContext *s, BITMAPINFOHEADER *bih)
{
    av_log(s, AV_LOG_DEBUG, "BITMAPINFOHEADER\n");
    dstruct(s, bih, biSize, "lu");
    dstruct(s, bih, biWidth, "ld");
    dstruct(s, bih, biHeight, "ld");
    dstruct(s, bih, biPlanes, "d");
    dstruct(s, bih, biBitCount, "d");
    dstruct(s, bih, biCompression, "lu");
    av_log(s, AV_LOG_DEBUG, " biCompression:\t\"%.4s\"\n",
           (char*) &bih->biCompression);
    dstruct(s, bih, biSizeImage, "lu");
    dstruct(s, bih, biXPelsPerMeter, "lu");
    dstruct(s, bih, biYPelsPerMeter, "lu");
    dstruct(s, bih, biClrUsed, "lu");
    dstruct(s, bih, biClrImportant, "lu");
}
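
/* Decide whether the incoming frame should be dropped: the current buffer
 * fullness (as a percentage of max_picture_buffer) is compared against a
 * rotating threshold from dropscore[], so drops are spread across frames
 * rather than happening in one burst. */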
static int shall_we_drop(AVFormatContext *s)
{
    struct vfw_ctx *ctx = s->priv_data;
    const uint8_t dropscore[] = {62, 75, 87, 100};
    const int ndropscores = FF_ARRAY_ELEMS(dropscore);
    unsigned int buffer_fullness = (ctx->curbufsize*100)/s->max_picture_buffer;

    if(dropscore[++ctx->frame_num%ndropscores] <= buffer_fullness) {
        av_log(s, AV_LOG_ERROR,
               "real-time buffer %d%% full! frame dropped!\n", buffer_fullness);
        return 1;
    }
    return 0;
}
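
/* Frame callback invoked by VFW on its capture thread. It copies the captured
 * frame into a new packet, appends it to the shared packet list under
 * ctx->mutex and signals ctx->event so a blocked vfw_read_packet() wakes up. */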
static LRESULT CALLBACK videostream_cb(HWND hwnd, LPVIDEOHDR vdhdr)
{
    AVFormatContext *s;
    struct vfw_ctx *ctx;
    AVPacketList **ppktl, *pktl_next;

    s = (AVFormatContext *) GetWindowLongPtr(hwnd, GWLP_USERDATA);
    ctx = s->priv_data;

    dump_videohdr(s, vdhdr);

    if(shall_we_drop(s))
        return FALSE;

    WaitForSingleObject(ctx->mutex, INFINITE);

    pktl_next = av_mallocz(sizeof(AVPacketList));
    if(!pktl_next)
        goto fail;

    if(av_new_packet(&pktl_next->pkt, vdhdr->dwBytesUsed) < 0) {
        av_free(pktl_next);
        goto fail;
    }

    pktl_next->pkt.pts = vdhdr->dwTimeCaptured;
    memcpy(pktl_next->pkt.data, vdhdr->lpData, vdhdr->dwBytesUsed);

    for(ppktl = &ctx->pktl ; *ppktl ; ppktl = &(*ppktl)->next);
    *ppktl = pktl_next;

    ctx->curbufsize += vdhdr->dwBytesUsed;

    SetEvent(ctx->event);
    ReleaseMutex(ctx->mutex);

    return TRUE;
fail:
    ReleaseMutex(ctx->mutex);
    return FALSE;
}
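
/* Tear down the capture session: detach the video callback, disconnect the
 * driver, destroy the capture window, release the synchronization handles
 * and free any packets still queued. */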
static int vfw_read_close(AVFormatContext *s)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVPacketList *pktl;

    if(ctx->hwnd) {
        SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0, 0);
        SendMessage(ctx->hwnd, WM_CAP_DRIVER_DISCONNECT, 0, 0);
        DestroyWindow(ctx->hwnd);
    }
    if(ctx->mutex)
        CloseHandle(ctx->mutex);
    if(ctx->event)
        CloseHandle(ctx->event);

    pktl = ctx->pktl;
    while (pktl) {
        AVPacketList *next = pktl->next;
        av_destruct_packet(&pktl->pkt);
        av_free(pktl);
        pktl = next;
    }

    av_freep(&ctx->video_size);
    av_freep(&ctx->framerate);

    return 0;
}
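
/* Open the capture device named in the "filename" (a device number, or "list"
 * to print the available drivers), negotiate the video format and frame rate,
 * and start a no-file capture sequence. */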
static int vfw_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int width, height;
    int ret;
    AVRational fps;

    if (!strcmp(s->filename, "list")) {
        for (devnum = 0; devnum <= 9; devnum++) {
            char driver_name[256];
            char driver_ver[256];
            ret = capGetDriverDescription(devnum,
                                          driver_name, sizeof(driver_name),
                                          driver_ver, sizeof(driver_ver));
            if (ret) {
                av_log(s, AV_LOG_INFO, "Driver %d\n", devnum);
                av_log(s, AV_LOG_INFO, " %s\n", driver_name);
                av_log(s, AV_LOG_INFO, " %s\n", driver_ver);
            }
        }
        return AVERROR(EIO);
    }

    /* Parse the framerate private option ("ntsc" by default) so that fps is
     * initialized before it is used below. */
    ret = av_parse_video_rate(&fps, ctx->framerate);
    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", ctx->framerate);
        return ret;
    }
#if FF_API_FORMAT_PARAMETERS
    if (ap->time_base.num)
        fps = (AVRational){ap->time_base.den, ap->time_base.num};
#endif
    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR(EIO);
    }

    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);

    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }

    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail_io;
    }

    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) s);

    st = av_new_stream(s, 0);
    if(!st) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }

    /* Set video format */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail_io;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail_bi;

    dump_bih(s, &bi->bmiHeader);

    if (ctx->video_size) {
        ret = av_parse_video_size(&bi->bmiHeader.biWidth, &bi->bmiHeader.biHeight,
                                  ctx->video_size);
        if (ret < 0) {
            av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
            goto fail_bi;
        }
    }
#if FF_API_FORMAT_PARAMETERS
    if (ap->width > 0)
        bi->bmiHeader.biWidth = ap->width;
    if (ap->height > 0)
        bi->bmiHeader.biHeight = ap->height;
#endif

    if (0) {
        /* For testing yet unsupported compressions
         * Copy these values from user-supplied verbose information */
        bi->bmiHeader.biWidth       = 320;
        bi->bmiHeader.biHeight      = 240;
        bi->bmiHeader.biPlanes      = 1;
        bi->bmiHeader.biBitCount    = 12;
        bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
        bi->bmiHeader.biSizeImage   = 115200;
        dump_bih(s, &bi->bmiHeader);
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail_bi;
    }

    biCompression = bi->bmiHeader.biCompression;
    biBitCount    = bi->bmiHeader.biBitCount;
    /* Cache the negotiated dimensions now; bi is freed here and must not be
     * dereferenced afterwards. */
    width  = bi->bmiHeader.biWidth;
    height = bi->bmiHeader.biHeight;

    av_free(bi);
    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    dump_captureparms(s, &cparms);

    cparms.fYield = 1; // Spawn a background thread
    cparms.dwRequestMicroSecPerFrame =
                               (fps.den*1000000) / fps.num;
    cparms.fAbortLeftMouse = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio = 0;
    cparms.vKeyAbort = 0;

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    codec = st->codec;
    codec->time_base = (AVRational){fps.den, fps.num};
    codec->codec_type = AVMEDIA_TYPE_VIDEO;
    codec->width  = width;
    codec->height = height;
    codec->pix_fmt = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == PIX_FMT_NONE) {
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB) {
            codec->bits_per_coded_sample = biBitCount;
            codec->extradata = av_malloc(9 + FF_INPUT_BUFFER_PADDING_SIZE);
            if (codec->extradata) {
                codec->extradata_size = 9;
                memcpy(codec->extradata, "BottomUp", 9);
            }
        }
    }
    av_set_pts_info(st, 32, 1, 1000);

    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n");
        goto fail_io;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n");
        goto fail_io;
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n");
        goto fail_io;
    }

    return 0;

fail_bi:
    av_free(bi);

fail_io:
    vfw_read_close(s);
    return AVERROR(EIO);
}
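
/* Pop the next queued packet from the list filled by videostream_cb(),
 * blocking on ctx->event unless AVFMT_FLAG_NONBLOCK is set. */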
static int vfw_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVPacketList *pktl = NULL;

    while(!pktl) {
        WaitForSingleObject(ctx->mutex, INFINITE);
        pktl = ctx->pktl;
        if(ctx->pktl) {
            *pkt = ctx->pktl->pkt;
            ctx->pktl = ctx->pktl->next;
            av_free(pktl);
        }
        ResetEvent(ctx->event);
        ReleaseMutex(ctx->mutex);
        if(!pktl) {
            if(s->flags & AVFMT_FLAG_NONBLOCK) {
                return AVERROR(EAGAIN);
            } else {
                WaitForSingleObject(ctx->event, INFINITE);
            }
        }
    }

    ctx->curbufsize -= pkt->size;

    return pkt->size;
}
#define OFFSET(x) offsetof(struct vfw_ctx, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
    { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), FF_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
    { "framerate", "", OFFSET(framerate), FF_OPT_TYPE_STRING, {.str = "ntsc"}, 0, 0, DEC },
    { NULL },
};

static const AVClass vfw_class = {
    .class_name = "VFW indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
AVInputFormat ff_vfwcap_demuxer = {
    "vfwcap",
    NULL_IF_CONFIG_SMALL("VFW video capture"),
    sizeof(struct vfw_ctx),
    NULL,
    vfw_read_header,
    vfw_read_packet,
    vfw_read_close,
    .flags = AVFMT_NOFILE,
    .priv_class = &vfw_class,
};