/*
 * Linux video grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "libavformat/avformat.h"
#include "libavcodec/dsputil.h"
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#define _LINUX_TIME_H 1
#include <linux/videodev.h>
#include <time.h>
#include <strings.h>
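
/*
 * Per-device capture state kept in AVFormatContext.priv_data: the device fd,
 * the negotiated palette and geometry, the frame-rate pacing bookkeeping, and
 * the mmap()ed ring of capture buffers when memory-mapped I/O is available.
 */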
typedef struct {
    int fd;
    int frame_format; /* see VIDEO_PALETTE_xxx */
    int use_mmap;
    int width, height;
    int frame_rate;
    int frame_rate_base;
    int64_t time_frame;
    int frame_size;
    struct video_capability video_cap;
    struct video_audio audio_saved;
    uint8_t *video_buf;
    struct video_mbuf gb_buffers;
    struct video_mmap gb_buf;
    int gb_frame;
} VideoData;
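
/*
 * Mapping between V4L palette identifiers and FFmpeg pixel formats.  The
 * depth (bits per pixel) is used to compute the size of one raw frame.
 */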
static const struct {
    int palette;
    int depth;
    enum PixelFormat pix_fmt;
} video_formats [] = {
    {.palette = VIDEO_PALETTE_YUV420P, .depth = 12, .pix_fmt = PIX_FMT_YUV420P },
    {.palette = VIDEO_PALETTE_YUV422,  .depth = 16, .pix_fmt = PIX_FMT_YUYV422 },
    {.palette = VIDEO_PALETTE_UYVY,    .depth = 16, .pix_fmt = PIX_FMT_UYVY422 },
    {.palette = VIDEO_PALETTE_YUYV,    .depth = 16, .pix_fmt = PIX_FMT_YUYV422 },
    /* NOTE: v4l uses BGR24, not RGB24 */
    {.palette = VIDEO_PALETTE_RGB24,   .depth = 24, .pix_fmt = PIX_FMT_BGR24   },
    {.palette = VIDEO_PALETTE_RGB565,  .depth = 16, .pix_fmt = PIX_FMT_BGR565  },
    {.palette = VIDEO_PALETTE_GREY,    .depth =  8, .pix_fmt = PIX_FMT_GRAY8   },
};
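
/*
 * Demuxer "read_header": opens the device node given as the filename, checks
 * that it can capture, negotiates a palette (preferring the one requested via
 * AVFormatParameters), and sets up either mmap()-based or read()-based
 * grabbing before describing the single raw-video stream.
 */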
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate, frame_rate_base;
    int desired_palette, desired_depth;
    struct video_tuner tuner;
    struct video_audio audio;
    struct video_picture pict;
    int j;
    int vformat_num = sizeof(video_formats) / sizeof(video_formats[0]);

    if (ap->width <= 0 || ap->height <= 0) {
        av_log(s1, AV_LOG_ERROR, "Wrong size (%dx%d)\n", ap->width, ap->height);
        return -1;
    }
    if (ap->time_base.den <= 0) {
        av_log(s1, AV_LOG_ERROR, "Wrong time base (%d)\n", ap->time_base.den);
        return -1;
    }

    width = ap->width;
    height = ap->height;
    frame_rate = ap->time_base.den;
    frame_rate_base = ap->time_base.num;

    if ((unsigned)width > 32767 || (unsigned)height > 32767) {
        av_log(s1, AV_LOG_ERROR, "Capture size is out of range: %dx%d\n",
               width, height);
        return -1;
    }

    st = av_new_stream(s1, 0);
    if (!st)
        return AVERROR(ENOMEM);
    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;
    s->frame_rate_base = frame_rate_base;

    video_fd = open(s1->filename, O_RDWR);
    if (video_fd < 0) {
        av_log(s1, AV_LOG_ERROR, "%s: %s\n", s1->filename, strerror(errno));
        goto fail;
    }

    if (ioctl(video_fd, VIDIOCGCAP, &s->video_cap) < 0) {
        av_log(s1, AV_LOG_ERROR, "VIDIOCGCAP: %s\n", strerror(errno));
        goto fail;
    }

    if (!(s->video_cap.type & VID_TYPE_CAPTURE)) {
        av_log(s1, AV_LOG_ERROR, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    desired_palette = -1;
    desired_depth = -1;
    for (j = 0; j < vformat_num; j++) {
        if (ap->pix_fmt == video_formats[j].pix_fmt) {
            desired_palette = video_formats[j].palette;
            desired_depth = video_formats[j].depth;
            break;
        }
    }

    /* set tv standard */
    if (ap->standard && !ioctl(video_fd, VIDIOCGTUNER, &tuner)) {
        if (!strcasecmp(ap->standard, "pal"))
            tuner.mode = VIDEO_MODE_PAL;
        else if (!strcasecmp(ap->standard, "secam"))
            tuner.mode = VIDEO_MODE_SECAM;
        else
            tuner.mode = VIDEO_MODE_NTSC;
        ioctl(video_fd, VIDIOCSTUNER, &tuner);
    }

    /* unmute audio */
    audio.audio = 0;
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&s->audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
    printf("v4l: colour=%d hue=%d brightness=%d contrast=%d whiteness=%d\n",
           pict.colour,
           pict.hue,
           pict.brightness,
           pict.contrast,
           pict.whiteness);
#endif
    /* try to choose a suitable video format */
    pict.palette = desired_palette;
    pict.depth   = desired_depth;
    if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
        for (j = 0; j < vformat_num; j++) {
            pict.palette = video_formats[j].palette;
            pict.depth = video_formats[j].depth;
            if (-1 != ioctl(video_fd, VIDIOCSPICT, &pict))
                break;
        }
        if (j >= vformat_num)
            goto fail1;
    }

    ret = ioctl(video_fd, VIDIOCGMBUF, &s->gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        s->frame_format = pict.palette;

        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = av_gettime() * s->frame_rate / s->frame_rate_base;
        s->use_mmap = 0;
    } else {
        s->video_buf = mmap(0, s->gb_buffers.size, PROT_READ | PROT_WRITE, MAP_SHARED, video_fd, 0);
        if ((unsigned char*)-1 == s->video_buf) {
            s->video_buf = mmap(0, s->gb_buffers.size, PROT_READ | PROT_WRITE, MAP_PRIVATE, video_fd, 0);
            if ((unsigned char*)-1 == s->video_buf) {
                av_log(s1, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));
                goto fail;
            }
        }
        s->gb_frame = 0;
        s->time_frame = av_gettime() * s->frame_rate / s->frame_rate_base;

        /* start to grab the first frame */
        s->gb_buf.frame = s->gb_frame % s->gb_buffers.frames;
        s->gb_buf.height = height;
        s->gb_buf.width = width;
        s->gb_buf.format = pict.palette;

        ret = ioctl(video_fd, VIDIOCMCAPTURE, &s->gb_buf);
        if (ret < 0) {
            if (errno != EAGAIN) {
            fail1:
                av_log(s1, AV_LOG_ERROR, "Fatal: grab device does not support suitable format\n");
            } else {
                av_log(s1, AV_LOG_ERROR, "Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        for (j = 1; j < s->gb_buffers.frames; j++) {
            s->gb_buf.frame = j;
            ioctl(video_fd, VIDIOCMCAPTURE, &s->gb_buf);
        }

        s->frame_format = s->gb_buf.format;
        s->use_mmap = 1;
    }

    for (j = 0; j < vformat_num; j++) {
        if (s->frame_format == video_formats[j].palette) {
            frame_size = width * height * video_formats[j].depth / 8;
            st->codec->pix_fmt = video_formats[j].pix_fmt;
            break;
        }
    }

    if (j >= vformat_num)
        goto fail;

    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec->codec_type = CODEC_TYPE_VIDEO;
    st->codec->codec_id = CODEC_ID_RAWVIDEO;
    st->codec->width = width;
    st->codec->height = height;
    st->codec->time_base.den = frame_rate;
    st->codec->time_base.num = frame_rate_base;
    st->codec->bit_rate = frame_size * 1/av_q2d(st->codec->time_base) * 8;

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    return AVERROR(EIO);
}
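
/*
 * Memory-mapped capture path: wait for the frame currently being grabbed
 * (VIDIOCSYNC), copy it out of the mmap()ed buffer, then immediately queue
 * the same buffer slot for the next capture so grabbing overlaps the copy.
 */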
static int v4l_mm_read_picture(VideoData *s, uint8_t *buf)
{
    uint8_t *ptr;

    while (ioctl(s->fd, VIDIOCSYNC, &s->gb_frame) < 0 &&
           (errno == EAGAIN || errno == EINTR));

    ptr = s->video_buf + s->gb_buffers.offsets[s->gb_frame];
    memcpy(buf, ptr, s->frame_size);

    /* Setup to capture the next frame */
    s->gb_buf.frame = s->gb_frame;
    if (ioctl(s->fd, VIDIOCMCAPTURE, &s->gb_buf) < 0) {
        if (errno == EAGAIN)
            av_log(NULL, AV_LOG_ERROR, "Cannot Sync\n");
        else
            av_log(NULL, AV_LOG_ERROR, "VIDIOCMCAPTURE: %s\n", strerror(errno));
        return AVERROR(EIO);
    }

    /* This is now the grabbing frame */
    s->gb_frame = (s->gb_frame + 1) % s->gb_buffers.frames;

    return s->frame_size;
}
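
/*
 * Demuxer "read_packet": paces capture to the requested frame rate by
 * sleeping until the next frame is due (skipping ahead when grabbing is more
 * than one frame late), then returns one raw frame, either from the mmap()ed
 * ring or via a plain read() on the device.
 */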
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    int64_t curtime, delay;
    struct timespec ts;

    /* Calculate the time of the next frame */
    s->time_frame += INT64_C(1000000);

    /* wait based on the frame rate */
    for(;;) {
        curtime = av_gettime();
        delay = s->time_frame * s->frame_rate_base / s->frame_rate - curtime;
        if (delay <= 0) {
            if (delay < INT64_C(-1000000) * s->frame_rate_base / s->frame_rate) {
                /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
                s->time_frame += INT64_C(1000000);
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return AVERROR(EIO);

    pkt->pts = curtime;

    /* read one frame */
    if (s->use_mmap) {
        return v4l_mm_read_picture(s, pkt->data);
    } else {
        if (read(s->fd, pkt->data, pkt->size) != pkt->size)
            return AVERROR(EIO);
        return s->frame_size;
    }
}
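
/*
 * Demuxer "read_close": release the mmap()ed buffers if they were used,
 * re-mute the audio with the saved settings and close the device.
 */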
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *s = s1->priv_data;

    if (s->use_mmap)
        munmap(s->video_buf, s->gb_buffers.size);

    /* mute audio. we must force it because the BTTV driver does not
       return its state correctly */
    s->audio_saved.flags |= VIDEO_AUDIO_MUTE;
    ioctl(s->fd, VIDIOCSAUDIO, &s->audio_saved);

    close(s->fd);
    return 0;
}
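
/*
 * Demuxer definition.  AVFMT_NOFILE is set because the "filename" is a
 * device node opened directly with open(2) rather than through the AVIO layer.
 */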
AVInputFormat v4l_demuxer = {
    "video4linux",
    NULL_IF_CONFIG_SMALL("video grab"),
    sizeof(VideoData),
    NULL,
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    .flags = AVFMT_NOFILE,
};