/*
 * VFW capture interface
 * Copyright (c) 2006-2008 Ramiro Polla
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "libavformat/avformat.h"
#include <windows.h>
#include <vfw.h>

//#define DEBUG_VFW

/* Defines for VFW missing from MinGW.
 * Remove this when MinGW incorporates them. */
#define HWND_MESSAGE ((HWND)-3)

#define BI_RGB 0

/* End of missing MinGW defines */
struct vfw_ctx {
    HWND hwnd;                /* capture window handle */
    HANDLE mutex;             /* protects pktl and curbufsize */
    HANDLE event;             /* signaled when a new frame has been queued */
    AVPacketList *pktl;       /* captured frames waiting for vfw_read_packet() */
    AVFormatContext *s;
    unsigned int curbufsize;  /* total size of the queued frames, in bytes */
    unsigned int frame_num;
};
static enum PixelFormat vfw_pixfmt(DWORD biCompression, WORD biBitCount)
{
    switch(biCompression) {
    case MKTAG('Y', 'U', 'Y', '2'):
        return PIX_FMT_YUYV422;
    case MKTAG('I', '4', '2', '0'):
        return PIX_FMT_YUV420P;
    case BI_RGB:
        switch(biBitCount) { /* 1-8 are untested */
        case 1:
            return PIX_FMT_MONOWHITE;
        case 4:
            return PIX_FMT_RGB4;
        case 8:
            return PIX_FMT_RGB8;
        case 16:
            return PIX_FMT_RGB555;
        case 24:
            return PIX_FMT_BGR24;
        case 32:
            return PIX_FMT_RGB32;
        }
    }
    return PIX_FMT_NONE;
}
static enum CodecID vfw_codecid(DWORD biCompression)
{
    switch(biCompression) {
    case MKTAG('d', 'v', 's', 'd'):
        return CODEC_ID_DVVIDEO;
    case MKTAG('M', 'J', 'P', 'G'):
    case MKTAG('m', 'j', 'p', 'g'):
        return CODEC_ID_MJPEG;
    }
    return CODEC_ID_NONE;
}
#define dstruct(pctx, sname, var, type) \
    av_log(pctx, AV_LOG_DEBUG, #var":\t%"type"\n", sname->var)

static void dump_captureparms(AVFormatContext *s, CAPTUREPARMS *cparms)
{
    av_log(s, AV_LOG_DEBUG, "CAPTUREPARMS\n");
    dstruct(s, cparms, dwRequestMicroSecPerFrame, "lu");
    dstruct(s, cparms, fMakeUserHitOKToCapture, "d");
    dstruct(s, cparms, wPercentDropForError, "u");
    dstruct(s, cparms, fYield, "d");
    dstruct(s, cparms, dwIndexSize, "lu");
    dstruct(s, cparms, wChunkGranularity, "u");
    dstruct(s, cparms, fUsingDOSMemory, "d");
    dstruct(s, cparms, wNumVideoRequested, "u");
    dstruct(s, cparms, fCaptureAudio, "d");
    dstruct(s, cparms, wNumAudioRequested, "u");
    dstruct(s, cparms, vKeyAbort, "u");
    dstruct(s, cparms, fAbortLeftMouse, "d");
    dstruct(s, cparms, fAbortRightMouse, "d");
    dstruct(s, cparms, fLimitEnabled, "d");
    dstruct(s, cparms, wTimeLimit, "u");
    dstruct(s, cparms, fMCIControl, "d");
    dstruct(s, cparms, fStepMCIDevice, "d");
    dstruct(s, cparms, dwMCIStartTime, "lu");
    dstruct(s, cparms, dwMCIStopTime, "lu");
    dstruct(s, cparms, fStepCaptureAt2x, "d");
    dstruct(s, cparms, wStepCaptureAverageFrames, "u");
    dstruct(s, cparms, dwAudioBufferSize, "lu");
    dstruct(s, cparms, fDisableWriteCache, "d");
    dstruct(s, cparms, AVStreamMaster, "u");
}
static void dump_videohdr(AVFormatContext *s, VIDEOHDR *vhdr)
{
#ifdef DEBUG_VFW
    av_log(s, AV_LOG_DEBUG, "VIDEOHDR\n");
    dstruct(s, vhdr, lpData, "p");
    dstruct(s, vhdr, dwBufferLength, "lu");
    dstruct(s, vhdr, dwBytesUsed, "lu");
    dstruct(s, vhdr, dwTimeCaptured, "lu");
    dstruct(s, vhdr, dwUser, "lu");
    dstruct(s, vhdr, dwFlags, "lu");
    dstruct(s, vhdr, dwReserved[0], "lu");
    dstruct(s, vhdr, dwReserved[1], "lu");
    dstruct(s, vhdr, dwReserved[2], "lu");
    dstruct(s, vhdr, dwReserved[3], "lu");
#endif
}
static void dump_bih(AVFormatContext *s, BITMAPINFOHEADER *bih)
{
    av_log(s, AV_LOG_DEBUG, "BITMAPINFOHEADER\n");
    dstruct(s, bih, biSize, "lu");
    dstruct(s, bih, biWidth, "ld");
    dstruct(s, bih, biHeight, "ld");
    dstruct(s, bih, biPlanes, "d");
    dstruct(s, bih, biBitCount, "d");
    dstruct(s, bih, biCompression, "lu");
    av_log(s, AV_LOG_DEBUG, "    biCompression:\t\"%.4s\"\n",
           (char*) &bih->biCompression);
    dstruct(s, bih, biSizeImage, "lu");
    dstruct(s, bih, biXPelsPerMeter, "lu");
    dstruct(s, bih, biYPelsPerMeter, "lu");
    dstruct(s, bih, biClrUsed, "lu");
    dstruct(s, bih, biClrImportant, "lu");
}
/* Drop frames more aggressively as the real-time buffer fills up. */
static int shall_we_drop(struct vfw_ctx *ctx)
{
    AVFormatContext *s = ctx->s;
    const uint8_t dropscore[] = {62, 75, 87, 100};
    const int ndropscores = FF_ARRAY_ELEMS(dropscore);
    unsigned int buffer_fullness = (ctx->curbufsize*100)/s->max_picture_buffer;

    if(dropscore[++ctx->frame_num%ndropscores] <= buffer_fullness) {
        av_log(ctx->s, AV_LOG_ERROR,
               "real-time buffer %d%% full! frame dropped!\n", buffer_fullness);
        return 1;
    }
    return 0;
}
/* Frame callback invoked by VFW on its capture thread: copy the captured
 * frame into a new packet and append it to the queue for vfw_read_packet(). */
static LRESULT CALLBACK videostream_cb(HWND hwnd, LPVIDEOHDR vdhdr)
{
    struct vfw_ctx *ctx;
    AVPacketList **ppktl, *pktl_next;

    ctx = (struct vfw_ctx *) GetWindowLongPtr(hwnd, GWLP_USERDATA);

    dump_videohdr(ctx->s, vdhdr);

    if(shall_we_drop(ctx))
        return FALSE;

    WaitForSingleObject(ctx->mutex, INFINITE);

    pktl_next = av_mallocz(sizeof(AVPacketList));
    if(!pktl_next)
        goto fail;

    if(av_new_packet(&pktl_next->pkt, vdhdr->dwBytesUsed) < 0) {
        av_free(pktl_next);
        goto fail;
    }

    /* dwTimeCaptured is in milliseconds, matching the 1/1000 stream time base */
    pktl_next->pkt.pts = vdhdr->dwTimeCaptured;
    memcpy(pktl_next->pkt.data, vdhdr->lpData, vdhdr->dwBytesUsed);

    /* append to the end of the packet list */
    for(ppktl = &ctx->pktl ; *ppktl ; ppktl = &(*ppktl)->next);
    *ppktl = pktl_next;

    ctx->curbufsize += vdhdr->dwBytesUsed;

    SetEvent(ctx->event);
    ReleaseMutex(ctx->mutex);

    return TRUE;
fail:
    ReleaseMutex(ctx->mutex);
    return FALSE;
}
static int vfw_read_close(AVFormatContext *s)
{
    struct vfw_ctx *ctx = s->priv_data;

    if(ctx->hwnd) {
        SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0, 0);
        SendMessage(ctx->hwnd, WM_CAP_DRIVER_DISCONNECT, 0, 0);
        DestroyWindow(ctx->hwnd);
    }
    if(ctx->mutex)
        CloseHandle(ctx->mutex);
    if(ctx->event)
        CloseHandle(ctx->event);

    return 0;
}
static int vfw_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int width;
    int height;
    int ret;

    if(!ap->time_base.den) {
        av_log(s, AV_LOG_ERROR, "A time base must be specified.\n");
        return AVERROR_IO;
    }

    ctx->s = s;

    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR_IO;
    }

    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);

    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }

    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail_io;
    }

    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) ctx);

    st = av_new_stream(s, 0);
    if(!st) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }

    /* Set video format */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail_io;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR_NOMEM;
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail_bi;

    dump_bih(s, &bi->bmiHeader);

    width  = ap->width  ? ap->width  : bi->bmiHeader.biWidth ;
    height = ap->height ? ap->height : bi->bmiHeader.biHeight;
    bi->bmiHeader.biWidth  = width ;
    bi->bmiHeader.biHeight = height;

    if (0) {
        /* For testing yet unsupported compressions
         * Copy these values from user-supplied verbose information */
        bi->bmiHeader.biWidth       = 320;
        bi->bmiHeader.biHeight      = 240;
        bi->bmiHeader.biPlanes      = 1;
        bi->bmiHeader.biBitCount    = 12;
        bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
        bi->bmiHeader.biSizeImage   = 115200;
        dump_bih(s, &bi->bmiHeader);
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail_bi;
    }

    biCompression = bi->bmiHeader.biCompression;
    biBitCount = bi->bmiHeader.biBitCount;

    av_free(bi);

    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    dump_captureparms(s, &cparms);

    cparms.fYield = 1; // Spawn a background thread
    cparms.dwRequestMicroSecPerFrame =
        (ap->time_base.num*1000000) / ap->time_base.den;
    cparms.fAbortLeftMouse = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio = 0;
    cparms.vKeyAbort = 0;

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail_io;

    codec = st->codec;
    codec->time_base = ap->time_base;
    codec->codec_type = CODEC_TYPE_VIDEO;
    codec->width = width;
    codec->height = height;
    codec->pix_fmt = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == PIX_FMT_NONE) {
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB)
            codec->bits_per_coded_sample = biBitCount;
    }

    av_set_pts_info(st, 32, 1, 1000);

    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n");
        goto fail_io;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n");
        goto fail_io;
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n");
        goto fail_io;
    }

    return 0;

fail_bi:
    av_free(bi);

fail_io:
    vfw_read_close(s);
    return AVERROR_IO;
}
/* Return the oldest queued packet; if the queue is empty, either return
 * EAGAIN (non-blocking mode) or wait for the capture callback to signal
 * that a new frame has arrived. */
static int vfw_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVPacketList *pktl = NULL;

    while(!pktl) {
        WaitForSingleObject(ctx->mutex, INFINITE);
        pktl = ctx->pktl;
        if(ctx->pktl) {
            *pkt = ctx->pktl->pkt;
            ctx->pktl = ctx->pktl->next;
            av_free(pktl);
        }
        ResetEvent(ctx->event);
        ReleaseMutex(ctx->mutex);
        if(!pktl) {
            if(s->flags & AVFMT_FLAG_NONBLOCK) {
                return AVERROR(EAGAIN);
            } else {
                WaitForSingleObject(ctx->event, INFINITE);
            }
        }
    }

    ctx->curbufsize -= pkt->size;

    return pkt->size;
}
AVInputFormat vfwcap_demuxer = {
    "vfwcap",
    NULL_IF_CONFIG_SMALL("VFW video capture"),
    sizeof(struct vfw_ctx),
    NULL,
    vfw_read_header,
    vfw_read_packet,
    vfw_read_close,
    .flags = AVFMT_NOFILE,
};
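
/*
 * Illustrative usage sketch (not part of the original file, kept disabled):
 * one way a client of the libavformat API of this era might open the vfwcap
 * demuxer.  The device string "0" and the 1/25 time base are arbitrary example
 * values; vfw_read_header() above only requires ap->time_base.den to be set.
 */
#if 0
#include <string.h>
#include "libavdevice/avdevice.h"

static int vfwcap_usage_example(void)
{
    AVFormatContext *ic = NULL;
    AVFormatParameters ap;
    AVPacket pkt;

    av_register_all();        /* register demuxers and muxers */
    avdevice_register_all();  /* register capture devices such as vfwcap */

    memset(&ap, 0, sizeof(ap));
    ap.time_base = (AVRational){1, 25};   /* requested capture frame rate */

    /* "0" selects the first VFW capture driver (see atoi() in vfw_read_header) */
    if (av_open_input_file(&ic, "0", av_find_input_format("vfwcap"), 0, &ap) < 0)
        return -1;

    if (av_read_frame(ic, &pkt) >= 0)     /* first captured frame */
        av_free_packet(&pkt);

    av_close_input_file(ic);
    return 0;
}
#endif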