You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

350 lines
9.4KB

  1. /*
  2. * Linux video grab interface
  3. * Copyright (c) 2000,2001 Gerard Lantau.
  4. *
  5. * This program is free software; you can redistribute it and/or modify
  6. * it under the terms of the GNU General Public License as published by
  7. * the Free Software Foundation; either version 2 of the License, or
  8. * (at your option) any later version.
  9. *
  10. * This program is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. * GNU General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU General Public License
  16. * along with this program; if not, write to the Free Software
  17. * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  18. */
#include "avformat.h"
#include <linux/videodev.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <time.h>
/* Per-stream state of the Video4Linux grab device. */
typedef struct {
    int fd;             /* open file descriptor of the v4l device */
    int frame_format;   /* see VIDEO_PALETTE_xxx */
    int use_mmap;       /* non-zero when mmap()-based capture is in use */
    int width, height;  /* capture dimensions in pixels */
    int frame_rate;     /* requested rate in FRAME_RATE_BASE units */
    INT64 time_frame;   /* deadline, in gettime() microseconds, of the next frame */
    int frame_size;     /* size in bytes of one raw frame */
} VideoData;
/* Path of the capture device to open. */
const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */
static struct video_capability video_cap;       /* device capabilities (VIDIOCGCAP) */
static UINT8 *video_buf;                        /* base of the mmap()ed capture buffers */
static struct video_mbuf gb_buffers;            /* layout/size of the mmap()ed buffers */
static struct video_mmap gb_buf;                /* parameters of the capture in flight */
static struct video_audio audio, audio_saved;   /* audio_saved restores settings on close */
static int gb_frame = 0;                        /* index of the frame currently being grabbed */
  44. static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
  45. {
  46. VideoData *s;
  47. AVStream *st;
  48. int width, height;
  49. int video_fd, frame_size;
  50. int ret, frame_rate;
  51. int desired_palette;
  52. if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
  53. return -1;
  54. width = ap->width;
  55. height = ap->height;
  56. frame_rate = ap->frame_rate;
  57. s = av_mallocz(sizeof(VideoData));
  58. if (!s)
  59. return -ENOMEM;
  60. st = av_mallocz(sizeof(AVStream));
  61. if (!st) {
  62. free(s);
  63. return -ENOMEM;
  64. }
  65. s1->priv_data = s;
  66. s1->nb_streams = 1;
  67. s1->streams[0] = st;
  68. s->width = width;
  69. s->height = height;
  70. s->frame_rate = frame_rate;
  71. video_fd = open(v4l_device, O_RDWR);
  72. if (video_fd < 0) {
  73. perror(v4l_device);
  74. goto fail;
  75. }
  76. if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
  77. perror("VIDIOCGCAP");
  78. goto fail;
  79. }
  80. if (!(video_cap.type & VID_TYPE_CAPTURE)) {
  81. fprintf(stderr, "Fatal: grab device does not handle capture\n");
  82. goto fail;
  83. }
  84. desired_palette = -1;
  85. if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
  86. desired_palette = VIDEO_PALETTE_YUV420P;
  87. } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
  88. desired_palette = VIDEO_PALETTE_YUV422;
  89. } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
  90. desired_palette = VIDEO_PALETTE_RGB24;
  91. }
  92. /* unmute audio */
  93. ioctl(video_fd, VIDIOCGAUDIO, &audio);
  94. memcpy(&audio_saved, &audio, sizeof(audio));
  95. audio.flags &= ~VIDEO_AUDIO_MUTE;
  96. ioctl(video_fd, VIDIOCSAUDIO, &audio);
  97. ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
  98. if (ret < 0) {
  99. /* try to use read based access */
  100. struct video_window win;
  101. struct video_picture pict;
  102. int val;
  103. win.x = 0;
  104. win.y = 0;
  105. win.width = width;
  106. win.height = height;
  107. win.chromakey = -1;
  108. win.flags = 0;
  109. ioctl(video_fd, VIDIOCSWIN, &win);
  110. ioctl(video_fd, VIDIOCGPICT, &pict);
  111. #if 0
  112. printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
  113. pict.colour,
  114. pict.hue,
  115. pict.brightness,
  116. pict.contrast,
  117. pict.whiteness);
  118. #endif
  119. /* try to choose a suitable video format */
  120. pict.palette = desired_palette;
  121. if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
  122. pict.palette=VIDEO_PALETTE_YUV420P;
  123. ret = ioctl(video_fd, VIDIOCSPICT, &pict);
  124. if (ret < 0) {
  125. pict.palette=VIDEO_PALETTE_YUV422;
  126. ret = ioctl(video_fd, VIDIOCSPICT, &pict);
  127. if (ret < 0) {
  128. pict.palette=VIDEO_PALETTE_RGB24;
  129. ret = ioctl(video_fd, VIDIOCSPICT, &pict);
  130. if (ret < 0)
  131. goto fail1;
  132. }
  133. }
  134. }
  135. s->frame_format = pict.palette;
  136. val = 1;
  137. ioctl(video_fd, VIDIOCCAPTURE, &val);
  138. s->time_frame = gettime();
  139. s->use_mmap = 0;
  140. } else {
  141. video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
  142. if ((unsigned char*)-1 == video_buf) {
  143. perror("mmap");
  144. goto fail;
  145. }
  146. gb_frame = 0;
  147. s->time_frame = gettime();
  148. /* start to grab the first frame */
  149. gb_buf.frame = gb_frame % gb_buffers.frames;
  150. gb_buf.height = height;
  151. gb_buf.width = width;
  152. gb_buf.format = desired_palette;
  153. if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
  154. gb_buf.format = VIDEO_PALETTE_YUV420P;
  155. ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
  156. if (ret < 0 && errno != EAGAIN) {
  157. /* try YUV422 */
  158. gb_buf.format = VIDEO_PALETTE_YUV422;
  159. ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
  160. if (ret < 0 && errno != EAGAIN) {
  161. /* try RGB24 */
  162. gb_buf.format = VIDEO_PALETTE_RGB24;
  163. ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
  164. }
  165. }
  166. }
  167. if (ret < 0) {
  168. if (errno != EAGAIN) {
  169. fail1:
  170. fprintf(stderr, "Fatal: grab device does not support suitable format\n");
  171. } else {
  172. fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
  173. }
  174. goto fail;
  175. }
  176. s->frame_format = gb_buf.format;
  177. s->use_mmap = 1;
  178. }
  179. switch(s->frame_format) {
  180. case VIDEO_PALETTE_YUV420P:
  181. frame_size = (width * height * 3) / 2;
  182. st->codec.pix_fmt = PIX_FMT_YUV420P;
  183. break;
  184. case VIDEO_PALETTE_YUV422:
  185. frame_size = width * height * 2;
  186. st->codec.pix_fmt = PIX_FMT_YUV422;
  187. break;
  188. case VIDEO_PALETTE_RGB24:
  189. frame_size = width * height * 3;
  190. st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
  191. break;
  192. default:
  193. goto fail;
  194. }
  195. s->fd = video_fd;
  196. s->frame_size = frame_size;
  197. st->codec.codec_id = CODEC_ID_RAWVIDEO;
  198. st->codec.width = width;
  199. st->codec.height = height;
  200. st->codec.frame_rate = frame_rate;
  201. return 0;
  202. fail:
  203. if (video_fd >= 0)
  204. close(video_fd);
  205. free(st);
  206. free(s);
  207. return -EIO;
  208. }
  209. static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
  210. {
  211. UINT8 *ptr;
  212. struct timeval tv_s;
  213. //struct timeval tv_e;
  214. //int delay;
  215. /* Setup to capture the next frame */
  216. gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
  217. if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
  218. if (errno == EAGAIN)
  219. fprintf(stderr,"Cannot Sync\n");
  220. else
  221. perror("VIDIOCMCAPTURE");
  222. return -EIO;
  223. }
  224. gettimeofday(&tv_s, 0);
  225. while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
  226. (errno == EAGAIN || errno == EINTR));
  227. /*
  228. gettimeofday(&tv_e, 0);
  229. delay = (tv_e.tv_sec - tv_s.tv_sec) * 1000000 + tv_e.tv_usec - tv_s.tv_usec;
  230. if (delay > 10000)
  231. printf("VIDIOCSYNC took %d us\n", delay);
  232. */
  233. ptr = video_buf + gb_buffers.offsets[gb_frame];
  234. memcpy(buf, ptr, s->frame_size);
  235. /* This is now the grabbing frame */
  236. gb_frame = gb_buf.frame;
  237. return s->frame_size;
  238. }
  239. static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
  240. {
  241. VideoData *s = s1->priv_data;
  242. INT64 curtime, delay;
  243. struct timespec ts;
  244. int first;
  245. INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
  246. int dropped = 0;
  247. /* Calculate the time of the next frame */
  248. s->time_frame += per_frame;
  249. /* wait based on the frame rate */
  250. for(first = 1;; first = 0) {
  251. curtime = gettime();
  252. delay = s->time_frame - curtime;
  253. if (delay <= 0) {
  254. if (delay < -per_frame) {
  255. /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
  256. dropped = 1;
  257. s->time_frame += per_frame;
  258. }
  259. break;
  260. }
  261. ts.tv_sec = delay / 1000000;
  262. ts.tv_nsec = (delay % 1000000) * 1000;
  263. nanosleep(&ts, NULL);
  264. }
  265. if (av_new_packet(pkt, s->frame_size) < 0)
  266. return -EIO;
  267. if (dropped)
  268. pkt->flags |= PKT_FLAG_DROPPED_FRAME;
  269. /* read one frame */
  270. if (s->use_mmap) {
  271. return v4l_mm_read_picture(s, pkt->data);
  272. } else {
  273. if (read(s->fd, pkt->data, pkt->size) != pkt->size)
  274. return -EIO;
  275. return s->frame_size;
  276. }
  277. }
  278. static int grab_read_close(AVFormatContext *s1)
  279. {
  280. VideoData *s = s1->priv_data;
  281. /* restore audio settings */
  282. ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);
  283. close(s->fd);
  284. free(s);
  285. return 0;
  286. }
/* Registration record for the video grab device (positional initializer;
   field meanings inferred from position — confirm against the AVFormat
   declaration in avformat.h). */
AVFormat video_grab_device_format = {
    "video_grab_device",
    "video grab",
    "",                   /* no mime type */
    "",                   /* no filename extensions */
    CODEC_ID_NONE,
    CODEC_ID_NONE,
    NULL,
    NULL,
    NULL,
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    NULL,
    AVFMT_NOFILE,         /* data comes from the device, not a file */
};