You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

780 lines
22KB

  1. /*
  2. * V4L2 context helper functions.
  3. *
  4. * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
  5. * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
  6. *
  7. * This file is part of FFmpeg.
  8. *
  9. * FFmpeg is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * FFmpeg is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with FFmpeg; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <linux/videodev2.h>
  24. #include <sys/ioctl.h>
  25. #include <sys/mman.h>
  26. #include <unistd.h>
  27. #include <fcntl.h>
  28. #include <poll.h>
  29. #include "libavcodec/avcodec.h"
  30. #include "libavcodec/internal.h"
  31. #include "v4l2_buffers.h"
  32. #include "v4l2_fmt.h"
  33. #include "v4l2_m2m.h"
/* Pending format changes to be written back into a V4L2Context by
 * v4l2_save_to_context(). The update_* flags select which fields apply. */
struct v4l2_format_update {
    uint32_t v4l2_fmt;          /* V4L2 fourcc pixelformat */
    int update_v4l2;            /* nonzero: write v4l2_fmt (and a compressed sizeimage) into ctx->format */

    enum AVPixelFormat av_fmt;  /* FFmpeg pixel format */
    int update_avfmt;           /* nonzero: write av_fmt into ctx->av_pix_fmt */
};
  40. static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx)
  41. {
  42. return V4L2_TYPE_IS_OUTPUT(ctx->type) ?
  43. container_of(ctx, V4L2m2mContext, output) :
  44. container_of(ctx, V4L2m2mContext, capture);
  45. }
  46. static inline AVCodecContext *logger(V4L2Context *ctx)
  47. {
  48. return ctx_to_m2mctx(ctx)->avctx;
  49. }
  50. static inline unsigned int v4l2_get_width(struct v4l2_format *fmt)
  51. {
  52. return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.width : fmt->fmt.pix.width;
  53. }
  54. static inline unsigned int v4l2_get_height(struct v4l2_format *fmt)
  55. {
  56. return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.height : fmt->fmt.pix.height;
  57. }
  58. static AVRational v4l2_get_sar(V4L2Context *ctx)
  59. {
  60. struct AVRational sar = { 0, 1 };
  61. struct v4l2_cropcap cropcap;
  62. int ret;
  63. memset(&cropcap, 0, sizeof(cropcap));
  64. cropcap.type = ctx->type;
  65. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_CROPCAP, &cropcap);
  66. if (ret)
  67. return sar;
  68. sar.num = cropcap.pixelaspect.numerator;
  69. sar.den = cropcap.pixelaspect.denominator;
  70. return sar;
  71. }
  72. static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2)
  73. {
  74. struct v4l2_format *fmt1 = &ctx->format;
  75. int ret = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
  76. fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width ||
  77. fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height
  78. :
  79. fmt1->fmt.pix.width != fmt2->fmt.pix.width ||
  80. fmt1->fmt.pix.height != fmt2->fmt.pix.height;
  81. if (ret)
  82. av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n",
  83. ctx->name,
  84. v4l2_get_width(fmt1), v4l2_get_height(fmt1),
  85. v4l2_get_width(fmt2), v4l2_get_height(fmt2));
  86. return ret;
  87. }
  88. static inline int v4l2_type_supported(V4L2Context *ctx)
  89. {
  90. return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
  91. ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
  92. ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
  93. ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;
  94. }
  95. static inline int v4l2_get_framesize_compressed(V4L2Context* ctx, int width, int height)
  96. {
  97. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  98. const int SZ_4K = 0x1000;
  99. int size;
  100. if (s->avctx && av_codec_is_decoder(s->avctx->codec))
  101. return ((width * height * 3 / 2) / 2) + 128;
  102. /* encoder */
  103. size = FFALIGN(height, 32) * FFALIGN(width, 32) * 3 / 2 / 2;
  104. return FFALIGN(size, SZ_4K);
  105. }
/* Write negotiated parameters back into ctx->format: always refresh
 * width/height from the context, and optionally (per fmt->update_*)
 * the AVPixelFormat and the V4L2 pixelformat + compressed buffer size. */
static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt)
{
    ctx->format.type = ctx->type;

    if (fmt->update_avfmt)
        ctx->av_pix_fmt = fmt->av_fmt;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        /* update the sizes to handle the reconfiguration of the capture stream at runtime */
        ctx->format.fmt.pix_mp.height = ctx->height;
        ctx->format.fmt.pix_mp.width = ctx->width;
        if (fmt->update_v4l2) {
            ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt;

            /* s5p-mfc requires the user to specify a buffer size */
            ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage =
                v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height);
        }
    } else {
        ctx->format.fmt.pix.height = ctx->height;
        ctx->format.fmt.pix.width = ctx->width;
        if (fmt->update_v4l2) {
            ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt;

            /* s5p-mfc requires the user to specify a buffer size */
            ctx->format.fmt.pix.sizeimage =
                v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height);
        }
    }
}
  132. /**
  133. * handle resolution change event and end of stream event
  134. * returns 1 if reinit was successful, negative if it failed
  135. * returns 0 if reinit was not executed
  136. */
  137. static int v4l2_handle_event(V4L2Context *ctx)
  138. {
  139. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  140. struct v4l2_format cap_fmt = s->capture.format;
  141. struct v4l2_format out_fmt = s->output.format;
  142. struct v4l2_event evt = { 0 };
  143. int full_reinit, reinit, ret;
  144. ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt);
  145. if (ret < 0) {
  146. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name);
  147. return 0;
  148. }
  149. if (evt.type == V4L2_EVENT_EOS) {
  150. ctx->done = 1;
  151. return 0;
  152. }
  153. if (evt.type != V4L2_EVENT_SOURCE_CHANGE)
  154. return 0;
  155. ret = ioctl(s->fd, VIDIOC_G_FMT, &out_fmt);
  156. if (ret) {
  157. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->output.name);
  158. return 0;
  159. }
  160. ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt);
  161. if (ret) {
  162. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name);
  163. return 0;
  164. }
  165. full_reinit = v4l2_resolution_changed(&s->output, &out_fmt);
  166. if (full_reinit) {
  167. s->output.height = v4l2_get_height(&out_fmt);
  168. s->output.width = v4l2_get_width(&out_fmt);
  169. s->output.sample_aspect_ratio = v4l2_get_sar(&s->output);
  170. }
  171. reinit = v4l2_resolution_changed(&s->capture, &cap_fmt);
  172. if (reinit) {
  173. s->capture.height = v4l2_get_height(&cap_fmt);
  174. s->capture.width = v4l2_get_width(&cap_fmt);
  175. s->capture.sample_aspect_ratio = v4l2_get_sar(&s->capture);
  176. }
  177. if (full_reinit || reinit)
  178. s->reinit = 1;
  179. if (full_reinit) {
  180. ret = ff_v4l2_m2m_codec_full_reinit(s);
  181. if (ret) {
  182. av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_full_reinit\n");
  183. return AVERROR(EINVAL);
  184. }
  185. goto reinit_run;
  186. }
  187. if (reinit) {
  188. if (s->avctx)
  189. ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height);
  190. if (ret < 0)
  191. av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n");
  192. ret = ff_v4l2_m2m_codec_reinit(s);
  193. if (ret) {
  194. av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n");
  195. return AVERROR(EINVAL);
  196. }
  197. goto reinit_run;
  198. }
  199. /* dummy event received */
  200. return 0;
  201. /* reinit executed */
  202. reinit_run:
  203. return 1;
  204. }
  205. static int v4l2_stop_decode(V4L2Context *ctx)
  206. {
  207. struct v4l2_decoder_cmd cmd = {
  208. .cmd = V4L2_DEC_CMD_STOP,
  209. .flags = 0,
  210. };
  211. int ret;
  212. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd);
  213. if (ret) {
  214. /* DECODER_CMD is optional */
  215. if (errno == ENOTTY)
  216. return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
  217. else
  218. return AVERROR(errno);
  219. }
  220. return 0;
  221. }
  222. static int v4l2_stop_encode(V4L2Context *ctx)
  223. {
  224. struct v4l2_encoder_cmd cmd = {
  225. .cmd = V4L2_ENC_CMD_STOP,
  226. .flags = 0,
  227. };
  228. int ret;
  229. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd);
  230. if (ret) {
  231. /* ENCODER_CMD is optional */
  232. if (errno == ENOTTY)
  233. return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
  234. else
  235. return AVERROR(errno);
  236. }
  237. return 0;
  238. }
/**
 * Poll the device and dequeue one buffer from @ctx's queue.
 *
 * @param ctx     the queue (capture or output) to dequeue from
 * @param timeout poll() timeout in ms: 0 = non-blocking, -1 = block
 * @return the dequeued buffer, or NULL if nothing could be dequeued.
 *         ctx->done is set when the stream ended or a fatal error occurred.
 */
static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout)
{
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = { 0 };
    V4L2Buffer *avbuf;
    struct pollfd pfd = {
        .events = POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */
        .fd = ctx_to_m2mctx(ctx)->fd,
    };
    int i, ret;

    /* warn when userspace holds every capture buffer: the driver has no
     * buffer left to fill and may stall or drop data */
    if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx->buffers) {
        for (i = 0; i < ctx->num_buffers; i++) {
            if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER)
                break;
        }
        if (i == ctx->num_buffers)
            av_log(logger(ctx), AV_LOG_WARNING, "All capture buffers returned to "
                                                "userspace. Increase num_capture_buffers "
                                                "to prevent device deadlock or dropped "
                                                "packets/frames.\n");
    }

    /* if we are draining and there are no more capture buffers queued in the driver we are done */
    if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx_to_m2mctx(ctx)->draining) {
        for (i = 0; i < ctx->num_buffers; i++) {
            /* capture buffer initialization happens during decode hence
             * detection happens at runtime
             */
            if (!ctx->buffers)
                break;

            if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER)
                goto start;
        }
        ctx->done = 1;
        return NULL;
    }

start:
    /* narrow the poll events to the direction this queue cares about */
    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        pfd.events = POLLOUT | POLLWRNORM;
    else {
        /* no need to listen to requests for more input while draining */
        if (ctx_to_m2mctx(ctx)->draining)
            pfd.events = POLLIN | POLLRDNORM | POLLPRI;
    }

    /* retry poll() on EINTR; any other failure means nothing to dequeue */
    for (;;) {
        ret = poll(&pfd, 1, timeout);
        if (ret > 0)
            break;
        if (errno == EINTR)
            continue;
        return NULL;
    }

    /* 0. handle errors */
    if (pfd.revents & POLLERR) {
        /* if we are trying to get free buffers but none have been queued yet
           no need to raise a warning */
        if (timeout == 0) {
            for (i = 0; i < ctx->num_buffers; i++) {
                if (ctx->buffers[i].status != V4L2BUF_AVAILABLE)
                    av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name);
            }
        }
        else
            av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name);

        return NULL;
    }

    /* 1. handle resolution changes */
    if (pfd.revents & POLLPRI) {
        ret = v4l2_handle_event(ctx);
        if (ret < 0) {
            /* if re-init failed, abort */
            ctx->done = 1;
            return NULL;
        }
        if (ret) {
            /* if re-init was successful drop the buffer (if there was one)
             * since we had to reconfigure capture (unmap all buffers)
             */
            return NULL;
        }
    }

    /* 2. dequeue the buffer */
    if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) {

        if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) {
            /* there is a capture buffer ready */
            if (pfd.revents & (POLLIN | POLLRDNORM))
                goto dequeue;

            /* the driver is ready to accept more input; instead of waiting for the capture
             * buffer to complete we return NULL so input can proceed (we are single threaded)
             */
            if (pfd.revents & (POLLOUT | POLLWRNORM))
                return NULL;
        }

dequeue:
        memset(&buf, 0, sizeof(buf));
        buf.memory = V4L2_MEMORY_MMAP;
        buf.type = ctx->type;
        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            memset(planes, 0, sizeof(planes));
            buf.length = VIDEO_MAX_PLANES;
            buf.m.planes = planes;
        }

        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf);
        if (ret) {
            if (errno != EAGAIN) {
                ctx->done = 1;
                if (errno != EPIPE)
                    av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n",
                        ctx->name, av_err2str(AVERROR(errno)));
            }
            return NULL;
        }

        if (ctx_to_m2mctx(ctx)->draining && !V4L2_TYPE_IS_OUTPUT(ctx->type)) {
            /* while draining, an empty capture buffer marks end of stream */
            int bytesused = V4L2_TYPE_IS_MULTIPLANAR(buf.type) ?
                            buf.m.planes[0].bytesused : buf.bytesused;
            if (bytesused == 0) {
                ctx->done = 1;
                return NULL;
            }
#ifdef V4L2_BUF_FLAG_LAST
            if (buf.flags & V4L2_BUF_FLAG_LAST)
                ctx->done = 1;
#endif
        }

        avbuf = &ctx->buffers[buf.index];
        avbuf->status = V4L2BUF_AVAILABLE;
        avbuf->buf = buf;
        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            /* the plane array must point at this buffer's own storage,
             * not the stack-local 'planes' */
            memcpy(avbuf->planes, planes, sizeof(planes));
            avbuf->buf.m.planes = avbuf->planes;
        }
        return avbuf;
    }

    return NULL;
}
  373. static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx)
  374. {
  375. int timeout = 0; /* return when no more buffers to dequeue */
  376. int i;
  377. /* get back as many output buffers as possible */
  378. if (V4L2_TYPE_IS_OUTPUT(ctx->type)) {
  379. do {
  380. } while (v4l2_dequeue_v4l2buf(ctx, timeout));
  381. }
  382. for (i = 0; i < ctx->num_buffers; i++) {
  383. if (ctx->buffers[i].status == V4L2BUF_AVAILABLE)
  384. return &ctx->buffers[i];
  385. }
  386. return NULL;
  387. }
  388. static int v4l2_release_buffers(V4L2Context* ctx)
  389. {
  390. struct v4l2_requestbuffers req = {
  391. .memory = V4L2_MEMORY_MMAP,
  392. .type = ctx->type,
  393. .count = 0, /* 0 -> unmaps buffers from the driver */
  394. };
  395. int i, j;
  396. for (i = 0; i < ctx->num_buffers; i++) {
  397. V4L2Buffer *buffer = &ctx->buffers[i];
  398. for (j = 0; j < buffer->num_planes; j++) {
  399. struct V4L2Plane_info *p = &buffer->plane_info[j];
  400. if (p->mm_addr && p->length)
  401. if (munmap(p->mm_addr, p->length) < 0)
  402. av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno)));
  403. }
  404. }
  405. return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req);
  406. }
  407. static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt)
  408. {
  409. struct v4l2_format *fmt = &ctx->format;
  410. uint32_t v4l2_fmt;
  411. int ret;
  412. v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt);
  413. if (!v4l2_fmt)
  414. return AVERROR(EINVAL);
  415. if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type))
  416. fmt->fmt.pix_mp.pixelformat = v4l2_fmt;
  417. else
  418. fmt->fmt.pix.pixelformat = v4l2_fmt;
  419. fmt->type = ctx->type;
  420. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt);
  421. if (ret)
  422. return AVERROR(EINVAL);
  423. return 0;
  424. }
  425. static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p)
  426. {
  427. enum AVPixelFormat pixfmt = ctx->av_pix_fmt;
  428. struct v4l2_fmtdesc fdesc;
  429. int ret;
  430. memset(&fdesc, 0, sizeof(fdesc));
  431. fdesc.type = ctx->type;
  432. if (pixfmt != AV_PIX_FMT_NONE) {
  433. ret = v4l2_try_raw_format(ctx, pixfmt);
  434. if (!ret)
  435. return 0;
  436. }
  437. for (;;) {
  438. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
  439. if (ret)
  440. return AVERROR(EINVAL);
  441. pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO);
  442. ret = v4l2_try_raw_format(ctx, pixfmt);
  443. if (ret){
  444. fdesc.index++;
  445. continue;
  446. }
  447. *p = pixfmt;
  448. return 0;
  449. }
  450. return AVERROR(EINVAL);
  451. }
  452. static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p)
  453. {
  454. struct v4l2_fmtdesc fdesc;
  455. uint32_t v4l2_fmt;
  456. int ret;
  457. /* translate to a valid v4l2 format */
  458. v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id);
  459. if (!v4l2_fmt)
  460. return AVERROR(EINVAL);
  461. /* check if the driver supports this format */
  462. memset(&fdesc, 0, sizeof(fdesc));
  463. fdesc.type = ctx->type;
  464. for (;;) {
  465. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
  466. if (ret)
  467. return AVERROR(EINVAL);
  468. if (fdesc.pixelformat == v4l2_fmt)
  469. break;
  470. fdesc.index++;
  471. }
  472. *p = v4l2_fmt;
  473. return 0;
  474. }
  475. /*****************************************************************************
  476. *
  477. * V4L2 Context Interface
  478. *
  479. *****************************************************************************/
  480. int ff_v4l2_context_set_status(V4L2Context* ctx, uint32_t cmd)
  481. {
  482. int type = ctx->type;
  483. int ret;
  484. ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type);
  485. if (ret < 0)
  486. return AVERROR(errno);
  487. ctx->streamon = (cmd == VIDIOC_STREAMON);
  488. return 0;
  489. }
  490. int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame)
  491. {
  492. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  493. V4L2Buffer* avbuf;
  494. int ret;
  495. if (!frame) {
  496. ret = v4l2_stop_encode(ctx);
  497. if (ret)
  498. av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name);
  499. s->draining= 1;
  500. return 0;
  501. }
  502. avbuf = v4l2_getfree_v4l2buf(ctx);
  503. if (!avbuf)
  504. return AVERROR(EAGAIN);
  505. ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf);
  506. if (ret)
  507. return ret;
  508. return ff_v4l2_buffer_enqueue(avbuf);
  509. }
  510. int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt)
  511. {
  512. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  513. V4L2Buffer* avbuf;
  514. int ret;
  515. if (!pkt->size) {
  516. ret = v4l2_stop_decode(ctx);
  517. if (ret)
  518. av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name);
  519. s->draining = 1;
  520. return 0;
  521. }
  522. avbuf = v4l2_getfree_v4l2buf(ctx);
  523. if (!avbuf)
  524. return AVERROR(EAGAIN);
  525. ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf);
  526. if (ret)
  527. return ret;
  528. return ff_v4l2_buffer_enqueue(avbuf);
  529. }
  530. int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame, int timeout)
  531. {
  532. V4L2Buffer *avbuf;
  533. /*
  534. * timeout=-1 blocks until:
  535. * 1. decoded frame available
  536. * 2. an input buffer is ready to be dequeued
  537. */
  538. avbuf = v4l2_dequeue_v4l2buf(ctx, timeout);
  539. if (!avbuf) {
  540. if (ctx->done)
  541. return AVERROR_EOF;
  542. return AVERROR(EAGAIN);
  543. }
  544. return ff_v4l2_buffer_buf_to_avframe(frame, avbuf);
  545. }
  546. int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt)
  547. {
  548. V4L2Buffer *avbuf;
  549. /*
  550. * blocks until:
  551. * 1. encoded packet available
  552. * 2. an input buffer ready to be dequeued
  553. */
  554. avbuf = v4l2_dequeue_v4l2buf(ctx, -1);
  555. if (!avbuf) {
  556. if (ctx->done)
  557. return AVERROR_EOF;
  558. return AVERROR(EAGAIN);
  559. }
  560. return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf);
  561. }
  562. int ff_v4l2_context_get_format(V4L2Context* ctx, int probe)
  563. {
  564. struct v4l2_format_update fmt = { 0 };
  565. int ret;
  566. if (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) {
  567. ret = v4l2_get_raw_format(ctx, &fmt.av_fmt);
  568. if (ret)
  569. return ret;
  570. fmt.update_avfmt = !probe;
  571. v4l2_save_to_context(ctx, &fmt);
  572. /* format has been tried already */
  573. return ret;
  574. }
  575. ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt);
  576. if (ret)
  577. return ret;
  578. fmt.update_v4l2 = 1;
  579. v4l2_save_to_context(ctx, &fmt);
  580. return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format);
  581. }
  582. int ff_v4l2_context_set_format(V4L2Context* ctx)
  583. {
  584. return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format);
  585. }
  586. void ff_v4l2_context_release(V4L2Context* ctx)
  587. {
  588. int ret;
  589. if (!ctx->buffers)
  590. return;
  591. ret = v4l2_release_buffers(ctx);
  592. if (ret)
  593. av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name);
  594. av_freep(&ctx->buffers);
  595. }
  596. int ff_v4l2_context_init(V4L2Context* ctx)
  597. {
  598. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  599. struct v4l2_requestbuffers req;
  600. int ret, i;
  601. if (!v4l2_type_supported(ctx)) {
  602. av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type);
  603. return AVERROR_PATCHWELCOME;
  604. }
  605. ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format);
  606. if (ret)
  607. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name);
  608. memset(&req, 0, sizeof(req));
  609. req.count = ctx->num_buffers;
  610. req.memory = V4L2_MEMORY_MMAP;
  611. req.type = ctx->type;
  612. ret = ioctl(s->fd, VIDIOC_REQBUFS, &req);
  613. if (ret < 0) {
  614. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_REQBUFS failed: %s\n", ctx->name, strerror(errno));
  615. return AVERROR(errno);
  616. }
  617. ctx->num_buffers = req.count;
  618. ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer));
  619. if (!ctx->buffers) {
  620. av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name);
  621. return AVERROR(ENOMEM);
  622. }
  623. for (i = 0; i < req.count; i++) {
  624. ctx->buffers[i].context = ctx;
  625. ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i);
  626. if (ret < 0) {
  627. av_log(logger(ctx), AV_LOG_ERROR, "%s buffer[%d] initialization (%s)\n", ctx->name, i, av_err2str(ret));
  628. goto error;
  629. }
  630. }
  631. av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name,
  632. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat),
  633. req.count,
  634. v4l2_get_width(&ctx->format),
  635. v4l2_get_height(&ctx->format),
  636. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage,
  637. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline);
  638. return 0;
  639. error:
  640. v4l2_release_buffers(ctx);
  641. av_freep(&ctx->buffers);
  642. return ret;
  643. }