/*
 * Video4Linux2 grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 * Copyright (c) 2006 Luca Abeni.
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and PIX_FMT_*
 *
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "avformat.h"
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <string.h>  /* memset()/memcpy()/strerror(); may already be pulled in by avformat.h */
#include <errno.h>   /* errno */
#include <assert.h>  /* assert() */
#define _LINUX_TIME_H 1
#include <linux/videodev.h>
#include <time.h>

static const int desired_video_buffers = 256;

enum io_method {
    io_read,
    io_mmap,
    io_userptr
};

struct video_data {
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    enum io_method io_method;
    int width, height;
    int frame_rate;
    int frame_rate_base;
    int frame_size;
    int top_field_first;

    int buffers;
    void **buf_start;
    unsigned int *buf_len;
};

struct fmt_map {
    enum PixelFormat ff_fmt;
    int32_t v4l2_fmt;
};

static struct fmt_map fmt_conversion_table[] = {
    {
        .ff_fmt = PIX_FMT_YUV420P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV420,
    },
    {
        .ff_fmt = PIX_FMT_YUV422P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV422P,
    },
    {
        .ff_fmt = PIX_FMT_YUV422,
        .v4l2_fmt = V4L2_PIX_FMT_YUYV,
    },
    {
        .ff_fmt = PIX_FMT_UYVY422,
        .v4l2_fmt = V4L2_PIX_FMT_UYVY,
    },
    {
        .ff_fmt = PIX_FMT_YUV411P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV411P,
    },
    {
        .ff_fmt = PIX_FMT_YUV410P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV410,
    },
    {
        .ff_fmt = PIX_FMT_BGR24,
        .v4l2_fmt = V4L2_PIX_FMT_BGR24,
    },
    {
        .ff_fmt = PIX_FMT_RGB24,
        .v4l2_fmt = V4L2_PIX_FMT_RGB24,
    },
    /*
    {
        .ff_fmt = PIX_FMT_RGBA32,
        .v4l2_fmt = V4L2_PIX_FMT_BGR32,
    },
    */
    {
        .ff_fmt = PIX_FMT_GRAY8,
        .v4l2_fmt = V4L2_PIX_FMT_GREY,
    },
};

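/* Open the capture device and sanity-check it: query the driver with
 * VIDIOC_QUERYCAP, make sure it really is a video capture device, and
 * report its capability flags back to the caller.
 * Returns the open file descriptor, or -1 on failure. */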
static int device_open(const char *devname, uint32_t *capabilities)
{
    struct v4l2_capability cap;
    int fd;
    int res;

    fd = open(devname, O_RDWR /*| O_NONBLOCK*/, 0);
    if (fd < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
               devname, strerror(errno));
        return -1;
    }
    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               strerror(errno));
        close(fd);      /* do not leak the descriptor on error */
        return -1;
    }
    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        av_log(NULL, AV_LOG_ERROR, "Not a video capture device\n");
        close(fd);
        return -1;
    }
    *capabilities = cap.capabilities;

    return fd;
}

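/* Ask the driver for the requested frame size and pixel format
 * (VIDIOC_S_FMT). The ioctl return value is passed straight back,
 * so a negative result means the format was not accepted. */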
static int device_init(int fd, int width, int height, int pix_fmt)
{
    struct v4l2_format fmt;

    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = pix_fmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

    return ioctl(fd, VIDIOC_S_FMT, &fmt);
}

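/* Guess the field order from the current video standard (VIDIOC_G_STD):
 * NTSC is bottom-field-first, everything else is treated as top-field-first.
 * On error it falls back to 0 (not top-field-first). */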
static int first_field(int fd)
{
    int res;
    v4l2_std_id std;

    res = ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

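/* Translate between FFmpeg PIX_FMT_* and V4L2_PIX_FMT_* values using
 * fmt_conversion_table. fmt_ff2v4l() returns 0 and fmt_v4l2ff() returns -1
 * when no mapping exists. */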
static uint32_t fmt_ff2v4l(enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].ff_fmt == pix_fmt) {
            return fmt_conversion_table[i].v4l2_fmt;
        }
    }

    return 0;
}

static enum PixelFormat fmt_v4l2ff(uint32_t pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == pix_fmt) {
            return fmt_conversion_table[i].ff_fmt;
        }
    }

    return -1;
}

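/* Set up memory-mapped I/O: request up to desired_video_buffers capture
 * buffers from the driver (VIDIOC_REQBUFS), then query (VIDIOC_QUERYBUF)
 * and mmap() each of them, recording the mappings in buf_start/buf_len. */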
static int mmap_init(struct video_data *s)
{
    struct v4l2_requestbuffers req;
    int i, res;

    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
    req.count = desired_video_buffers;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
    if (res < 0) {
        if (errno == EINVAL) {
            av_log(NULL, AV_LOG_ERROR, "Device does not support mmap\n");
        } else {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n");
        }
        return -1;
    }

    if (req.count < 2) {
        av_log(NULL, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return -1;
    }
    s->buffers = req.count;
    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
    if (s->buf_start == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return -1;
    }
    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
    if (s->buf_len == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);
        return -1;
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
        if (res < 0) {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");
            return -1;
        }

        s->buf_len[i] = buf.length;
        if (s->buf_len[i] < s->frame_size) {
            av_log(NULL, AV_LOG_ERROR, "Buffer len [%d] = %d != %d\n",
                   i, s->buf_len[i], s->frame_size);
            return -1;
        }
        s->buf_start[i] = mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
        if (s->buf_start[i] == MAP_FAILED) {
            av_log(NULL, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));
            return -1;
        }
    }

    return 0;
}

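/* read()-based I/O is not implemented yet; this stub always fails so that
 * devices without streaming support are rejected at init time. */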
static int read_init(struct video_data *s)
{
    return -1;
}

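/* Grab one frame with memory-mapped I/O: dequeue a filled buffer
 * (VIDIOC_DQBUF, retrying on EAGAIN/EINTR), copy its contents into the
 * caller's frame, convert the driver timestamp to microseconds, and give
 * the buffer back to the driver (VIDIOC_QBUF). */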
static int mmap_read_frame(struct video_data *s, void *frame, int64_t *ts)
{
    struct v4l2_buffer buf;
    int res;

    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 &&
           ((errno == EAGAIN) || (errno == EINTR)));
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", strerror(errno));
        return -1;
    }
    assert(buf.index < s->buffers);
    assert(buf.bytesused == s->frame_size);

    /* Image is at s->buf_start[buf.index] */
    memcpy(frame, s->buf_start[buf.index], buf.bytesused);
    *ts = buf.timestamp.tv_sec * int64_t_C(1000000) + buf.timestamp.tv_usec;

    res = ioctl(s->fd, VIDIOC_QBUF, &buf);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n");
        return -1;
    }

    return s->buf_len[buf.index];
}

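/* Companion stub to read_init(): frame capture via read() is not
 * implemented, so this always fails. */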
static int read_frame(struct video_data *s, void *frame, int64_t *ts)
{
    return -1;
}

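/* Queue all mapped buffers (VIDIOC_QBUF) and start the capture stream
 * (VIDIOC_STREAMON). */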
static int mmap_start(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        res = ioctl(s->fd, VIDIOC_QBUF, &buf);
        if (res < 0) {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));
            return -1;
        }
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    res = ioctl(s->fd, VIDIOC_STREAMON, &type);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

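/* Stop streaming (VIDIOC_STREAMOFF), unmap all capture buffers and free
 * the bookkeeping arrays allocated in mmap_init(). */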
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_free(s->buf_start);
    av_free(s->buf_len);
}

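/* Demuxer read_header callback: validate the user-supplied parameters
 * (width, height and time base are mandatory), create the video stream,
 * open the device, negotiate a pixel format (the requested one first,
 * then every entry of fmt_conversion_table), choose mmap or read() I/O
 * depending on V4L2_CAP_STREAMING, and fill in the rawvideo codec
 * parameters. */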
static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    struct video_data *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int res, frame_rate, frame_rate_base;
    uint32_t desired_format, capabilities;
    const char *video_device;

    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->time_base.den <= 0) {
        av_log(s1, AV_LOG_ERROR, "Missing/Wrong parameters\n");
        return -1;
    }
    width = ap->width;
    height = ap->height;
    frame_rate = ap->time_base.den;
    frame_rate_base = ap->time_base.num;
    if ((unsigned)width > 32767 || (unsigned)height > 32767) {
        av_log(s1, AV_LOG_ERROR, "Wrong size %dx%d\n", width, height);
        return -1;
    }

    st = av_new_stream(s1, 0);
    if (!st) {
        return -ENOMEM;
    }
    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;
    s->frame_rate_base = frame_rate_base;

    video_device = ap->device;
    if (!video_device) {
        video_device = "/dev/video";
    }
    capabilities = 0;
    s->fd = device_open(video_device, &capabilities);
    if (s->fd < 0) {
        av_free(st);
        return AVERROR_IO;
    }
    av_log(s1, AV_LOG_ERROR, "[%d]Capabilities: %x\n", s->fd, capabilities);

    desired_format = fmt_ff2v4l(ap->pix_fmt);
    if (desired_format == 0 || (device_init(s->fd, width, height, desired_format) < 0)) {
        int i, done;

        done = 0; i = 0;
        while (!done) {
            desired_format = fmt_conversion_table[i].v4l2_fmt;
            if (device_init(s->fd, width, height, desired_format) < 0) {
                desired_format = 0;
                i++;
            } else {
                done = 1;
            }
            if (i == sizeof(fmt_conversion_table) / sizeof(struct fmt_map)) {
                done = 1;
            }
        }
    }
    if (desired_format == 0) {
        av_log(s1, AV_LOG_ERROR, "Cannot find a proper format.\n");
        close(s->fd);
        av_free(st);
        return AVERROR_IO;
    }
    s->frame_format = desired_format;

    st->codec->pix_fmt = fmt_v4l2ff(desired_format);
    s->frame_size = avpicture_get_size(st->codec->pix_fmt, width, height);
    if (capabilities & V4L2_CAP_STREAMING) {
        s->io_method = io_mmap;
        res = mmap_init(s);
        if (res == 0) {
            /* only start streaming if the buffers were actually set up */
            res = mmap_start(s);
        }
    } else {
        s->io_method = io_read;
        res = read_init(s);
    }
    if (res < 0) {
        close(s->fd);
        av_free(st);
        return AVERROR_IO;
    }
    s->top_field_first = first_field(s->fd);

    st->codec->codec_type = CODEC_TYPE_VIDEO;
    st->codec->codec_id = CODEC_ID_RAWVIDEO;
    st->codec->width = width;
    st->codec->height = height;
    st->codec->time_base.den = frame_rate;
    st->codec->time_base.num = frame_rate_base;
    st->codec->bit_rate = s->frame_size * 1/av_q2d(st->codec->time_base) * 8;

    return 0;
}

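/* Demuxer read_packet callback: allocate a packet of one full frame,
 * capture into it through the selected I/O method, and flag the frame
 * as interlaced with the field order detected at init time. */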
static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    struct video_data *s = s1->priv_data;
    int res;

    if (av_new_packet(pkt, s->frame_size) < 0)
        return AVERROR_IO;

    if (s->io_method == io_mmap) {
        res = mmap_read_frame(s, pkt->data, &pkt->pts);
    } else if (s->io_method == io_read) {
        res = read_frame(s, pkt->data, &pkt->pts);
    } else {
        return AVERROR_IO;
    }
    if (res < 0) {
        return AVERROR_IO;
    }

    if (s1->streams[0]->codec->coded_frame) {
        s1->streams[0]->codec->coded_frame->interlaced_frame = 1;
        s1->streams[0]->codec->coded_frame->top_field_first = s->top_field_first;
    }

    return s->frame_size;
}

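/* Demuxer read_close callback: tear down the mmap buffers (if used) and
 * close the device. */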
static int v4l2_read_close(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;

    if (s->io_method == io_mmap) {
        mmap_close(s);
    }
    close(s->fd);

    return 0;
}

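/* Demuxer declaration and registration. AVFMT_NOFILE is set because the
 * device is opened by the demuxer itself rather than through the generic
 * I/O layer. */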
static AVInputFormat v4l2_format = {
    "video4linux2",
    "video grab",
    sizeof(struct video_data),
    NULL,
    v4l2_read_header,
    v4l2_read_packet,
    v4l2_read_close,
    .flags = AVFMT_NOFILE,
};

int v4l2_init(void)
{
    av_register_input_format(&v4l2_format);

    return 0;
}
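
/* Rough usage sketch (not part of this file): how a caller of the same
 * libavformat vintage might open this grab device. This is only a sketch,
 * assuming the old AVFormatParameters-based API (av_find_input_format(),
 * av_open_input_file(), and the ap->device / ap->pix_fmt fields used above);
 * names and fields changed in later FFmpeg versions.
 *
 *     AVFormatParameters ap;
 *     AVFormatContext *ic = NULL;
 *
 *     memset(&ap, 0, sizeof(ap));
 *     ap.width = 640;
 *     ap.height = 480;
 *     ap.time_base.num = 1;
 *     ap.time_base.den = 25;            // 25 fps
 *     ap.device = "/dev/video0";
 *     ap.pix_fmt = PIX_FMT_YUV420P;
 *
 *     v4l2_init();                      // register the demuxer
 *     if (av_open_input_file(&ic, "", av_find_input_format("video4linux2"),
 *                            0, &ap) < 0) {
 *         // handle error
 *     }
 */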