You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

432 lines
12KB

  1. /*
  2. * VFW capture interface
  3. * Copyright (c) 2006-2008 Ramiro Polla
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #include "libavformat/avformat.h"
  22. #include <vfw.h>
  23. #include <windows.h>
  24. //#define DEBUG_VFW
  25. /* Defines for VFW missing from MinGW.
  26. * Remove this when MinGW incorporates them. */
  27. #define HWND_MESSAGE ((HWND)-3)
  28. #define BI_RGB 0
  29. /* End of missing MinGW defines */
/* Per-instance capture state, stored in AVFormatContext.priv_data.
 * pktl and curbufsize are shared with the VFW capture thread and are
 * protected by mutex; event signals packet availability. */
struct vfw_ctx {
    HWND hwnd;               /* message-only capture window handle */
    HANDLE mutex;            /* guards pktl (and curbufsize additions) */
    HANDLE event;            /* set when videostream_cb queues a packet */
    AVPacketList *pktl;      /* FIFO of captured frames, head = oldest */
    AVFormatContext *s;      /* back-pointer for logging from the callback */
    unsigned int curbufsize; /* total bytes currently queued in pktl */
    unsigned int frame_num;  /* frame counter driving the drop heuristic */
};
  39. static enum PixelFormat vfw_pixfmt(DWORD biCompression, WORD biBitCount)
  40. {
  41. switch(biCompression) {
  42. case MKTAG('Y', 'U', 'Y', '2'):
  43. return PIX_FMT_YUYV422;
  44. case MKTAG('I', '4', '2', '0'):
  45. return PIX_FMT_YUV420P;
  46. case BI_RGB:
  47. switch(biBitCount) { /* 1-8 are untested */
  48. case 1:
  49. return PIX_FMT_MONOWHITE;
  50. case 4:
  51. return PIX_FMT_RGB4;
  52. case 8:
  53. return PIX_FMT_RGB8;
  54. case 16:
  55. return PIX_FMT_RGB555;
  56. case 24:
  57. return PIX_FMT_BGR24;
  58. case 32:
  59. return PIX_FMT_RGB32;
  60. }
  61. }
  62. return PIX_FMT_NONE;
  63. }
  64. static enum CodecID vfw_codecid(DWORD biCompression)
  65. {
  66. switch(biCompression) {
  67. case MKTAG('d', 'v', 's', 'd'):
  68. return CODEC_ID_DVVIDEO;
  69. }
  70. return CODEC_ID_NONE;
  71. }
/* Debug-log one struct member as "name:<tab>value". "type" is the printf
 * length/conversion suffix for the member (e.g. "lu", "d", "p"); it is
 * pasted into the format string after '%'. */
#define dstruct(pctx, sname, var, type) \
    av_log(pctx, AV_LOG_DEBUG, #var":\t%"type"\n", sname->var)
/* Dump every CAPTUREPARMS field at AV_LOG_DEBUG level (visible only with a
 * debug-level log setting). Purely diagnostic; no side effects on capture. */
static void dump_captureparms(AVFormatContext *s, CAPTUREPARMS *cparms)
{
    av_log(s, AV_LOG_DEBUG, "CAPTUREPARMS\n");
    dstruct(s, cparms, dwRequestMicroSecPerFrame, "lu");
    dstruct(s, cparms, fMakeUserHitOKToCapture, "d");
    dstruct(s, cparms, wPercentDropForError, "u");
    dstruct(s, cparms, fYield, "d");
    dstruct(s, cparms, dwIndexSize, "lu");
    dstruct(s, cparms, wChunkGranularity, "u");
    dstruct(s, cparms, fUsingDOSMemory, "d");
    dstruct(s, cparms, wNumVideoRequested, "u");
    dstruct(s, cparms, fCaptureAudio, "d");
    dstruct(s, cparms, wNumAudioRequested, "u");
    dstruct(s, cparms, vKeyAbort, "u");
    dstruct(s, cparms, fAbortLeftMouse, "d");
    dstruct(s, cparms, fAbortRightMouse, "d");
    dstruct(s, cparms, fLimitEnabled, "d");
    dstruct(s, cparms, wTimeLimit, "u");
    dstruct(s, cparms, fMCIControl, "d");
    dstruct(s, cparms, fStepMCIDevice, "d");
    dstruct(s, cparms, dwMCIStartTime, "lu");
    dstruct(s, cparms, dwMCIStopTime, "lu");
    dstruct(s, cparms, fStepCaptureAt2x, "d");
    dstruct(s, cparms, wStepCaptureAverageFrames, "u");
    dstruct(s, cparms, dwAudioBufferSize, "lu");
    dstruct(s, cparms, fDisableWriteCache, "d");
    dstruct(s, cparms, AVStreamMaster, "u");
}
/* Dump every VIDEOHDR field at AV_LOG_DEBUG level. Compiled to a no-op
 * unless DEBUG_VFW is defined, since this runs once per captured frame. */
static void dump_videohdr(AVFormatContext *s, VIDEOHDR *vhdr)
{
#ifdef DEBUG_VFW
    av_log(s, AV_LOG_DEBUG, "VIDEOHDR\n");
    dstruct(s, vhdr, lpData, "p");
    dstruct(s, vhdr, dwBufferLength, "lu");
    dstruct(s, vhdr, dwBytesUsed, "lu");
    dstruct(s, vhdr, dwTimeCaptured, "lu");
    dstruct(s, vhdr, dwUser, "lu");
    dstruct(s, vhdr, dwFlags, "lu");
    dstruct(s, vhdr, dwReserved[0], "lu");
    dstruct(s, vhdr, dwReserved[1], "lu");
    dstruct(s, vhdr, dwReserved[2], "lu");
    dstruct(s, vhdr, dwReserved[3], "lu");
#endif
}
/* Dump every BITMAPINFOHEADER field at AV_LOG_DEBUG level.
 * biCompression is printed twice: as a number and as its FourCC text
 * (the DWORD's four bytes reinterpreted as characters). */
static void dump_bih(AVFormatContext *s, BITMAPINFOHEADER *bih)
{
    av_log(s, AV_LOG_DEBUG, "BITMAPINFOHEADER\n");
    dstruct(s, bih, biSize, "lu");
    dstruct(s, bih, biWidth, "ld");
    dstruct(s, bih, biHeight, "ld");
    dstruct(s, bih, biPlanes, "d");
    dstruct(s, bih, biBitCount, "d");
    dstruct(s, bih, biCompression, "lu");
    av_log(s, AV_LOG_DEBUG, "    biCompression:\t\"%.4s\"\n",
           (char*) &bih->biCompression);
    dstruct(s, bih, biSizeImage, "lu");
    dstruct(s, bih, biXPelsPerMeter, "lu");
    dstruct(s, bih, biYPelsPerMeter, "lu");
    dstruct(s, bih, biClrUsed, "lu");
    dstruct(s, bih, biClrImportant, "lu");
}
  135. static int shall_we_drop(struct vfw_ctx *ctx)
  136. {
  137. AVFormatContext *s = ctx->s;
  138. const uint8_t dropscore[] = {62, 75, 87, 100};
  139. const int ndropscores = FF_ARRAY_ELEMS(dropscore);
  140. unsigned int buffer_fullness = (ctx->curbufsize*100)/s->max_picture_buffer;
  141. if(dropscore[++ctx->frame_num%ndropscores] <= buffer_fullness) {
  142. av_log(ctx->s, AV_LOG_ERROR,
  143. "real-time buffer %d%% full! frame dropped!\n", buffer_fullness);
  144. return 1;
  145. }
  146. return 0;
  147. }
/* Frame callback invoked on the VFW capture thread for every captured frame.
 * Copies the frame into a freshly allocated AVPacket, appends it to the
 * shared ctx->pktl list under ctx->mutex, then sets ctx->event so that
 * vfw_read_packet() wakes up. Returns TRUE on success, FALSE when the frame
 * is dropped by the buffer heuristic or an allocation fails. */
static LRESULT CALLBACK videostream_cb(HWND hwnd, LPVIDEOHDR vdhdr)
{
    struct vfw_ctx *ctx;
    AVPacketList **ppktl, *pktl_next;
    /* ctx was stored in the window user data by vfw_read_header(). */
    ctx = (struct vfw_ctx *) GetWindowLongPtr(hwnd, GWLP_USERDATA);
    dump_videohdr(ctx->s, vdhdr);
    /* Drop early, before taking the lock or allocating anything. */
    if(shall_we_drop(ctx))
        return FALSE;
    WaitForSingleObject(ctx->mutex, INFINITE);
    pktl_next = av_mallocz(sizeof(AVPacketList));
    if(!pktl_next)
        goto fail;
    if(av_new_packet(&pktl_next->pkt, vdhdr->dwBytesUsed) < 0) {
        av_free(pktl_next);
        goto fail;
    }
    /* dwTimeCaptured is the VFW capture timestamp; the stream time base is
     * set to 1/1000 in vfw_read_header(), so it is used as pts directly. */
    pktl_next->pkt.pts = vdhdr->dwTimeCaptured;
    memcpy(pktl_next->pkt.data, vdhdr->lpData, vdhdr->dwBytesUsed);
    /* Walk to the tail so packets stay in capture order. */
    for(ppktl = &ctx->pktl ; *ppktl ; ppktl = &(*ppktl)->next);
    *ppktl = pktl_next;
    ctx->curbufsize += vdhdr->dwBytesUsed;
    SetEvent(ctx->event);
    ReleaseMutex(ctx->mutex);
    return TRUE;
fail:
    ReleaseMutex(ctx->mutex);
    return FALSE;
}
  176. static int vfw_read_close(AVFormatContext *s);
/* Open and configure the VFW capture device.
 * s->filename selects the driver by number (atoi() failure yields 0, i.e.
 * the default device). ap->time_base must be set by the caller; it drives
 * the requested frame interval. On success one video stream is created and
 * capture is started (no-file sequence mode). Returns 0 on success or a
 * negative error; all partially acquired resources are released on failure
 * via vfw_read_close(). */
static int vfw_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int width;
    int height;
    int ret;
    if(!ap->time_base.den) {
        av_log(s, AV_LOG_ERROR, "A time base must be specified.\n");
        return AVERROR_IO;
    }
    ctx->s = s;
    /* HWND_MESSAGE parent: a message-only window that is never shown. */
    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR_IO;
    }
    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);
    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        /* Window exists but nothing else was acquired; tear down directly. */
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }
    /* Disable on-screen overlay and preview rendering; capture only. */
    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);
    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail_io;
    }
    /* NOTE(review): the callback is registered before GWLP_USERDATA is set;
     * this is safe only because frames start flowing at the
     * WM_CAP_SEQUENCE_NOFILE call at the end of this function. */
    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) ctx);
    st = av_new_stream(s, 0);
    if(!st) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }
    /* Set video format */
    /* First query the required BITMAPINFO size, then fetch the format. */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail_io;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail_bi;
    dump_bih(s, &bi->bmiHeader);
    /* Caller-requested dimensions override the driver's current ones. */
    width  = ap->width  ? ap->width  : bi->bmiHeader.biWidth ;
    height = ap->height ? ap->height : bi->bmiHeader.biHeight;
    bi->bmiHeader.biWidth  = width ;
    bi->bmiHeader.biHeight = height;
#if 0
    /* For testing yet unsupported compressions
     * Copy these values from user-supplied verbose information */
    bi->bmiHeader.biWidth       = 320;
    bi->bmiHeader.biHeight      = 240;
    bi->bmiHeader.biPlanes      = 1;
    bi->bmiHeader.biBitCount    = 12;
    bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
    bi->bmiHeader.biSizeImage   = 115200;
    dump_bih(s, &bi->bmiHeader);
#endif
    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail_bi;
    }
    /* Keep only the two fields needed later; bi can be freed now. */
    biCompression = bi->bmiHeader.biCompression;
    biBitCount = bi->bmiHeader.biBitCount;
    av_free(bi);
    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;
    dump_captureparms(s, &cparms);
    cparms.fYield = 1; // Spawn a background thread
    /* Frame interval in microseconds, derived from the caller's time base. */
    cparms.dwRequestMicroSecPerFrame =
                               (ap->time_base.num*1000000) / ap->time_base.den;
    /* No user-interaction aborts, no audio: pure programmatic capture. */
    cparms.fAbortLeftMouse = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio = 0;
    cparms.vKeyAbort = 0;
    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;
    codec = st->codec;
    codec->time_base = ap->time_base;
    codec->codec_type = CODEC_TYPE_VIDEO;
    codec->width = width;
    codec->height = height;
    codec->pix_fmt = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == PIX_FMT_NONE) {
        /* Not raw video; see if the FourCC maps to a known compressed codec. */
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB)
            codec->bits_per_coded_sample = biBitCount;
    }
    /* Millisecond timestamps: matches dwTimeCaptured in videostream_cb(). */
    av_set_pts_info(st, 32, 1, 1000);
    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n" );
        goto fail_io;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n" );
        goto fail_io;
    }
    /* Start streaming to the callback (no capture file is written). */
    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n" );
        goto fail_io;
    }
    return 0;
fail_bi:
    av_free(bi);
fail_io:
    vfw_read_close(s);
    return AVERROR_IO;
}
/* Pop the oldest captured packet from the shared list filled by
 * videostream_cb(). Blocks on ctx->event until a packet arrives, unless
 * AVFMT_FLAG_NONBLOCK is set, in which case AVERROR(EAGAIN) is returned
 * immediately when the list is empty. Returns the packet size on success.
 * Ownership of the packet data passes to the caller. */
static int vfw_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVPacketList *pktl = NULL;
    while(!pktl) {
        WaitForSingleObject(ctx->mutex, INFINITE);
        pktl = ctx->pktl;
        if(ctx->pktl) {
            /* Shallow-copy the packet out, unlink and free the list node. */
            *pkt = ctx->pktl->pkt;
            ctx->pktl = ctx->pktl->next;
            av_free(pktl);
        }
        /* Reset under the lock; the list is re-checked before any wait, so
         * packets queued between reset and wait are not missed. */
        ResetEvent(ctx->event);
        ReleaseMutex(ctx->mutex);
        if(!pktl) {
            if(s->flags & AVFMT_FLAG_NONBLOCK) {
                return AVERROR(EAGAIN);
            } else {
                WaitForSingleObject(ctx->event, INFINITE);
            }
        }
    }
    /* NOTE(review): curbufsize is decremented outside the mutex while the
     * callback increments it under the lock — confirm this race is benign. */
    ctx->curbufsize -= pkt->size;
    return pkt->size;
}
  344. static int vfw_read_close(AVFormatContext *s)
  345. {
  346. struct vfw_ctx *ctx = s->priv_data;
  347. if(ctx->hwnd) {
  348. SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0, 0);
  349. SendMessage(ctx->hwnd, WM_CAP_DRIVER_DISCONNECT, 0, 0);
  350. DestroyWindow(ctx->hwnd);
  351. }
  352. if(ctx->mutex)
  353. CloseHandle(ctx->mutex);
  354. if(ctx->event)
  355. CloseHandle(ctx->event);
  356. return 0;
  357. }
  358. AVInputFormat vfwcap_demuxer = {
  359. "vfwcap",
  360. NULL_IF_CONFIG_SMALL("VFW video capture"),
  361. sizeof(struct vfw_ctx),
  362. NULL,
  363. vfw_read_header,
  364. vfw_read_packet,
  365. vfw_read_close,
  366. .flags = AVFMT_NOFILE,
  367. };