You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

340 lines
9.4KB

  1. /*
  2. * Linux video grab interface
  3. * Copyright (c) 2000,2001 Gerard Lantau.
  4. *
  5. * This program is free software; you can redistribute it and/or modify
  6. * it under the terms of the GNU General Public License as published by
  7. * the Free Software Foundation; either version 2 of the License, or
  8. * (at your option) any later version.
  9. *
  10. * This program is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. * GNU General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU General Public License
  16. * along with this program; if not, write to the Free Software
  17. * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  18. */
#include "avformat.h"

#include <errno.h>      /* errno / EAGAIN / EINTR checks after ioctl()/read() */
#include <fcntl.h>
#include <stdio.h>      /* fprintf, perror */
#include <string.h>     /* memcpy */
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>

#include <linux/videodev.h>
/* Per-instance state of the v4l grab "demuxer", stored in
 * AVFormatContext.priv_data. */
typedef struct {
    int fd;               /* file descriptor of the opened /dev/video device */
    int frame_format; /* see VIDEO_PALETTE_xxx */
    int use_mmap;         /* nonzero: mmap/VIDIOCMCAPTURE capture; zero: read() capture */
    int width, height;    /* capture size in pixels, fixed at open time */
    int frame_rate;       /* requested rate, in FRAME_RATE_BASE units (see grab_read_packet) */
    INT64 time_frame;     /* presumably the timestamp (us, from gettime()) at which the
                             next frame is due — TODO confirm gettime() unit */
    int frame_size;       /* size in bytes of one raw frame in frame_format */
} VideoData;
/* Path of the capture device to open. */
const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */
/* NOTE(review): these file-scope globals mean only one grab instance can be
 * active at a time; they are shared by all functions below. */
static struct video_capability video_cap;        /* device capabilities (VIDIOCGCAP) */
static UINT8 *video_buf;                         /* mmap'ed capture buffer area */
static struct video_mbuf gb_buffers;             /* buffer layout (VIDIOCGMBUF) */
static struct video_mmap gb_buf;                 /* current capture request (VIDIOCMCAPTURE) */
static struct video_audio audio, audio_saved;    /* live audio state / state saved for restore on close */
static int gb_frame = 0;                         /* index of the buffer currently being read */
/*
 * Open and configure the v4l capture device.
 *
 * Creates one video stream on s1, opens v4l_device, unmutes its audio,
 * then negotiates a capture palette, preferring mmap-based capture
 * (VIDIOCGMBUF/VIDIOCMCAPTURE) and falling back to read()-based capture.
 * The palette preferred by the caller (via st->codec.pix_fmt) is tried
 * first, then YUV420P, YUV422 and RGB24 in that order.
 *
 * ap must supply positive width, height and frame_rate.
 * Returns 0 on success, -1 on bad parameters, -ENOMEM or -EIO on failure.
 */
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    /* the capture geometry and rate must be supplied by the caller */
    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    /* map the stream's preferred pixel format to a v4l palette;
     * -1 means "no preference", which triggers the fallback chain below */
    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio */
    /* the original settings are saved so grab_read_close() can restore them */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    /* VIDIOCGMBUF succeeding means the driver supports mmap capture */
    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        /* errors from VIDIOCSWIN/VIDIOCGPICT are deliberately ignored;
         * the VIDIOCSPICT result below decides success */
        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format */
        /* preferred palette first, then YUV420P -> YUV422 -> RGB24 */
        pict.palette = desired_palette;
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
            pict.palette=VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette=VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0) {
                    pict.palette=VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                    if (ret < 0)
                        goto fail1;    /* no supported palette at all */
                }
            }
        }

        s->frame_format = pict.palette;

        /* start the capture engine */
        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = gettime();
        s->use_mmap = 0;
    } else {
        video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
        if ((unsigned char*)-1 == video_buf) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = gettime();

        /* start to grab the first frame */
        gb_buf.frame = gb_frame % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = desired_palette;

        /* same palette fallback chain as the read() path; EAGAIN is not a
         * format error (it means no video signal), so it stops the chain */
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
            gb_buf.format = VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try YUV422 */
                gb_buf.format = VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                if (ret < 0 && errno != EAGAIN) {
                    /* try RGB24 */
                    gb_buf.format = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                }
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
            fail1:    /* also reached from the read()-path palette chain above */
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

    /* derive the raw frame size and the exported pixel format
     * from the negotiated palette */
    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }

    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    /* video_fd is only valid once open() succeeded */
    if (video_fd >= 0)
        close(video_fd);
    /* NOTE(review): av_free(st) presumably leaves a dangling pointer in
     * s1->streams[] since av_new_stream() registered it — verify against
     * av_new_stream()'s ownership rules */
    av_free(st);
    return -EIO;
}
  201. static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
  202. {
  203. UINT8 *ptr;
  204. struct timeval tv_s;
  205. //struct timeval tv_e;
  206. //int delay;
  207. /* Setup to capture the next frame */
  208. gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
  209. if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
  210. if (errno == EAGAIN)
  211. fprintf(stderr,"Cannot Sync\n");
  212. else
  213. perror("VIDIOCMCAPTURE");
  214. return -EIO;
  215. }
  216. gettimeofday(&tv_s, 0);
  217. while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
  218. (errno == EAGAIN || errno == EINTR));
  219. /*
  220. gettimeofday(&tv_e, 0);
  221. delay = (tv_e.tv_sec - tv_s.tv_sec) * 1000000 + tv_e.tv_usec - tv_s.tv_usec;
  222. if (delay > 10000)
  223. printf("VIDIOCSYNC took %d us\n", delay);
  224. */
  225. ptr = video_buf + gb_buffers.offsets[gb_frame];
  226. memcpy(buf, ptr, s->frame_size);
  227. /* This is now the grabbing frame */
  228. gb_frame = gb_buf.frame;
  229. return s->frame_size;
  230. }
  231. static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
  232. {
  233. VideoData *s = s1->priv_data;
  234. INT64 curtime, delay;
  235. struct timespec ts;
  236. int first;
  237. INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
  238. int dropped = 0;
  239. /* Calculate the time of the next frame */
  240. s->time_frame += per_frame;
  241. /* wait based on the frame rate */
  242. for(first = 1;; first = 0) {
  243. curtime = gettime();
  244. delay = s->time_frame - curtime;
  245. if (delay <= 0) {
  246. if (delay < -per_frame) {
  247. /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
  248. dropped = 1;
  249. s->time_frame += per_frame;
  250. }
  251. break;
  252. }
  253. ts.tv_sec = delay / 1000000;
  254. ts.tv_nsec = (delay % 1000000) * 1000;
  255. nanosleep(&ts, NULL);
  256. }
  257. if (av_new_packet(pkt, s->frame_size) < 0)
  258. return -EIO;
  259. if (dropped)
  260. pkt->flags |= PKT_FLAG_DROPPED_FRAME;
  261. /* read one frame */
  262. if (s->use_mmap) {
  263. return v4l_mm_read_picture(s, pkt->data);
  264. } else {
  265. if (read(s->fd, pkt->data, pkt->size) != pkt->size)
  266. return -EIO;
  267. return s->frame_size;
  268. }
  269. }
  270. static int grab_read_close(AVFormatContext *s1)
  271. {
  272. VideoData *s = s1->priv_data;
  273. /* restore audio settings */
  274. ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);
  275. close(s->fd);
  276. return 0;
  277. }
/* Input format descriptor registered with libavformat.
 * Positional initializers — field meanings inferred from usage; confirm the
 * order against the AVInputFormat declaration in avformat.h. */
AVInputFormat video_grab_device_format = {
    "video_grab_device",   /* presumably the format name */
    "video grab",          /* presumably the human-readable description */
    sizeof(VideoData),     /* priv_data size allocated per context */
    NULL,                  /* no probe function: a grab device cannot be probed */
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    flags: AVFMT_NOFILE,   /* the device is opened here, not by the generic I/O layer */
};
/* Register the v4l grab device with libavformat. Always returns 0. */
int video_grab_init(void)
{
    av_register_input_format(&video_grab_device_format);
    return 0;
}