You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

668 lines
18KB

  1. /*
  2. * V4L2 context helper functions.
  3. *
  4. * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
  5. * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
  6. *
  7. * This file is part of FFmpeg.
  8. *
  9. * FFmpeg is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * FFmpeg is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with FFmpeg; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <linux/videodev2.h>
  24. #include <sys/ioctl.h>
  25. #include <sys/mman.h>
  26. #include <unistd.h>
  27. #include <fcntl.h>
  28. #include <poll.h>
  29. #include "libavcodec/avcodec.h"
  30. #include "libavcodec/internal.h"
  31. #include "v4l2_buffers.h"
  32. #include "v4l2_fmt.h"
  33. #include "v4l2_m2m.h"
/* Describes a pending format change; each update_* flag selects whether the
 * corresponding field is valid and should be written back to the context. */
struct v4l2_format_update {
    uint32_t v4l2_fmt;         /* V4L2 fourcc, applied when update_v4l2 is set */
    int update_v4l2;
    enum AVPixelFormat av_fmt; /* FFmpeg pixel format, applied when update_avfmt is set */
    int update_avfmt;
};
  40. static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx)
  41. {
  42. return V4L2_TYPE_IS_OUTPUT(ctx->type) ?
  43. container_of(ctx, V4L2m2mContext, output) :
  44. container_of(ctx, V4L2m2mContext, capture);
  45. }
  46. static inline AVCodecContext *logger(V4L2Context *ctx)
  47. {
  48. return ctx_to_m2mctx(ctx)->avctx;
  49. }
  50. static inline unsigned int v4l2_get_width(struct v4l2_format *fmt)
  51. {
  52. return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.width : fmt->fmt.pix.width;
  53. }
  54. static inline unsigned int v4l2_get_height(struct v4l2_format *fmt)
  55. {
  56. return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.height : fmt->fmt.pix.height;
  57. }
  58. static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2)
  59. {
  60. struct v4l2_format *fmt1 = &ctx->format;
  61. int ret = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
  62. fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width ||
  63. fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height
  64. :
  65. fmt1->fmt.pix.width != fmt2->fmt.pix.width ||
  66. fmt1->fmt.pix.height != fmt2->fmt.pix.height;
  67. if (ret)
  68. av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n",
  69. ctx->name,
  70. v4l2_get_width(fmt1), v4l2_get_height(fmt1),
  71. v4l2_get_width(fmt2), v4l2_get_height(fmt2));
  72. return ret;
  73. }
  74. static inline int v4l2_type_supported(V4L2Context *ctx)
  75. {
  76. return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
  77. ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
  78. ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
  79. ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;
  80. }
/* Writes ctx->width/height (and, per the update flags, a new pixel format)
 * into the cached struct v4l2_format so a later TRY_FMT/S_FMT sees them. */
static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt)
{
    ctx->format.type = ctx->type;

    if (fmt->update_avfmt)
        ctx->av_pix_fmt = fmt->av_fmt;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        /* update the sizes to handle the reconfiguration of the capture stream at runtime */
        ctx->format.fmt.pix_mp.height = ctx->height;
        ctx->format.fmt.pix_mp.width = ctx->width;
        if (fmt->update_v4l2)
            ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt;
    } else {
        ctx->format.fmt.pix.height = ctx->height;
        ctx->format.fmt.pix.width = ctx->width;
        if (fmt->update_v4l2)
            ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt;
    }
}
/**
 * Dequeues one pending V4L2 event and, on a source-change event, re-reads
 * both queue formats and reinitializes whichever side changed resolution.
 *
 * returns 1 if reinit was successful, negative if it failed
 * returns 0 if reinit was not executed
 */
static int v4l2_handle_event(V4L2Context *ctx)
{
    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
    /* start from the cached formats; G_FMT below overwrites them */
    struct v4l2_format cap_fmt = s->capture.format;
    struct v4l2_format out_fmt = s->output.format;
    struct v4l2_event evt = { 0 };
    int full_reinit, reinit, ret;

    ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt);
    if (ret < 0) {
        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name);
        return 0;
    }

    /* only source-change events require any action here */
    if (evt.type != V4L2_EVENT_SOURCE_CHANGE)
        return 0;

    ret = ioctl(s->fd, VIDIOC_G_FMT, &out_fmt);
    if (ret) {
        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->output.name);
        return 0;
    }

    ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt);
    if (ret) {
        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name);
        return 0;
    }

    /* an output-side change forces a full codec reinit; a capture-side-only
     * change just requires reconfiguring the capture stream */
    full_reinit = v4l2_resolution_changed(&s->output, &out_fmt);
    if (full_reinit) {
        s->output.height = v4l2_get_height(&out_fmt);
        s->output.width = v4l2_get_width(&out_fmt);
    }

    reinit = v4l2_resolution_changed(&s->capture, &cap_fmt);
    if (reinit) {
        s->capture.height = v4l2_get_height(&cap_fmt);
        s->capture.width = v4l2_get_width(&cap_fmt);
    }

    if (full_reinit || reinit)
        s->reinit = 1;

    if (full_reinit) {
        ret = ff_v4l2_m2m_codec_full_reinit(s);
        if (ret) {
            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_full_reinit\n");
            return -EINVAL;
        }
        goto reinit_run;
    }

    if (reinit) {
        ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height);
        if (ret < 0)
            av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n");

        ret = ff_v4l2_m2m_codec_reinit(s);
        if (ret) {
            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n");
            return -EINVAL;
        }
        goto reinit_run;
    }

    /* dummy event received */
    return 0;

    /* reinit executed */
reinit_run:
    return 1;
}
  164. static int v4l2_stop_decode(V4L2Context *ctx)
  165. {
  166. struct v4l2_decoder_cmd cmd = {
  167. .cmd = V4L2_DEC_CMD_STOP,
  168. };
  169. int ret;
  170. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd);
  171. if (ret) {
  172. /* DECODER_CMD is optional */
  173. if (errno == ENOTTY)
  174. return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
  175. }
  176. return 0;
  177. }
  178. static int v4l2_stop_encode(V4L2Context *ctx)
  179. {
  180. struct v4l2_encoder_cmd cmd = {
  181. .cmd = V4L2_ENC_CMD_STOP,
  182. };
  183. int ret;
  184. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd);
  185. if (ret) {
  186. /* ENCODER_CMD is optional */
  187. if (errno == ENOTTY)
  188. return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
  189. }
  190. return 0;
  191. }
/**
 * Polls the m2m device and dequeues one buffer belonging to this context.
 *
 * @param timeout poll timeout in milliseconds; -1 blocks indefinitely,
 *                0 returns immediately when nothing is pending.
 * @return the dequeued buffer, or NULL when nothing was dequeued (timeout,
 *         poll error, pending event forced a reinit, or the driver only
 *         signalled readiness for more input).
 */
static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout)
{
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = { 0 };
    V4L2Buffer* avbuf = NULL;
    struct pollfd pfd = {
        .events = POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */
        .fd = ctx_to_m2mctx(ctx)->fd,
    };
    int ret;

    /* output queues only care about writability */
    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        pfd.events = POLLOUT | POLLWRNORM;

    for (;;) {
        ret = poll(&pfd, 1, timeout);
        if (ret > 0)
            break;
        if (errno == EINTR)
            continue;

        /* timeout is being used to indicate last valid buffer when draining */
        if (ctx_to_m2mctx(ctx)->draining)
            ctx->done = 1;

        return NULL;
    }

    /* 0. handle errors */
    if (pfd.revents & POLLERR) {
        av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name);
        return NULL;
    }

    /* 1. handle resolution changes */
    if (pfd.revents & POLLPRI) {
        ret = v4l2_handle_event(ctx);
        if (ret < 0) {
            /* if re-init failed, abort */
            ctx->done = EINVAL;
            return NULL;
        }
        if (ret) {
            /* if re-init was successful drop the buffer (if there was one)
             * since we had to reconfigure capture (unmap all buffers)
             */
            return NULL;
        }
    }

    /* 2. dequeue the buffer */
    if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) {

        if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) {
            /* there is a capture buffer ready */
            if (pfd.revents & (POLLIN | POLLRDNORM))
                goto dequeue;

            /* the driver is ready to accept more input; instead of waiting for the capture
             * buffer to complete we return NULL so input can proceed (we are single threaded)
             */
            if (pfd.revents & (POLLOUT | POLLWRNORM))
                return NULL;
        }

dequeue:
        memset(&buf, 0, sizeof(buf));
        buf.memory = V4L2_MEMORY_MMAP;
        buf.type = ctx->type;
        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            memset(planes, 0, sizeof(planes));
            buf.length = VIDEO_MAX_PLANES;
            buf.m.planes = planes;
        }

        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf);
        if (ret) {
            if (errno != EAGAIN) {
                /* store the errno so the dequeue entry points can report EOF;
                 * EPIPE (drain complete) is expected, so do not log it */
                ctx->done = errno;
                if (errno != EPIPE)
                    av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n",
                        ctx->name, av_err2str(AVERROR(errno)));
            }
        } else {
            avbuf = &ctx->buffers[buf.index];
            avbuf->status = V4L2BUF_AVAILABLE;
            avbuf->buf = buf;
            if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
                /* the plane array lives on this stack frame; keep a copy */
                memcpy(avbuf->planes, planes, sizeof(planes));
                avbuf->buf.m.planes = avbuf->planes;
            }
        }
    }

    return avbuf;
}
  276. static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx)
  277. {
  278. int timeout = 0; /* return when no more buffers to dequeue */
  279. int i;
  280. /* get back as many output buffers as possible */
  281. if (V4L2_TYPE_IS_OUTPUT(ctx->type)) {
  282. do {
  283. } while (v4l2_dequeue_v4l2buf(ctx, timeout));
  284. }
  285. for (i = 0; i < ctx->num_buffers; i++) {
  286. if (ctx->buffers[i].status == V4L2BUF_AVAILABLE)
  287. return &ctx->buffers[i];
  288. }
  289. return NULL;
  290. }
/* Unmaps every mmapped plane of every buffer, then releases the buffers on
 * the driver side (REQBUFS with count 0).  Returns the REQBUFS ioctl result. */
static int v4l2_release_buffers(V4L2Context* ctx)
{
    struct v4l2_requestbuffers req = {
        .memory = V4L2_MEMORY_MMAP,
        .type = ctx->type,
        .count = 0, /* 0 -> unmaps buffers from the driver */
    };
    int i, j;

    for (i = 0; i < ctx->num_buffers; i++) {
        V4L2Buffer *buffer = &ctx->buffers[i];

        for (j = 0; j < buffer->num_planes; j++) {
            struct V4L2Plane_info *p = &buffer->plane_info[j];
            /* only planes that were actually mapped need unmapping */
            if (p->mm_addr && p->length)
                if (munmap(p->mm_addr, p->length) < 0)
                    av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno)));
        }
    }

    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req);
}
  310. static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt)
  311. {
  312. struct v4l2_format *fmt = &ctx->format;
  313. uint32_t v4l2_fmt;
  314. int ret;
  315. v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt);
  316. if (!v4l2_fmt)
  317. return AVERROR(EINVAL);
  318. if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type))
  319. fmt->fmt.pix_mp.pixelformat = v4l2_fmt;
  320. else
  321. fmt->fmt.pix.pixelformat = v4l2_fmt;
  322. fmt->type = ctx->type;
  323. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt);
  324. if (ret)
  325. return AVERROR(EINVAL);
  326. return 0;
  327. }
  328. static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p)
  329. {
  330. enum AVPixelFormat pixfmt = ctx->av_pix_fmt;
  331. struct v4l2_fmtdesc fdesc;
  332. int ret;
  333. memset(&fdesc, 0, sizeof(fdesc));
  334. fdesc.type = ctx->type;
  335. if (pixfmt != AV_PIX_FMT_NONE) {
  336. ret = v4l2_try_raw_format(ctx, pixfmt);
  337. if (ret)
  338. pixfmt = AV_PIX_FMT_NONE;
  339. else
  340. return 0;
  341. }
  342. for (;;) {
  343. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
  344. if (ret)
  345. return AVERROR(EINVAL);
  346. pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO);
  347. ret = v4l2_try_raw_format(ctx, pixfmt);
  348. if (ret){
  349. fdesc.index++;
  350. continue;
  351. }
  352. *p = pixfmt;
  353. return 0;
  354. }
  355. return AVERROR(EINVAL);
  356. }
  357. static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p)
  358. {
  359. struct v4l2_fmtdesc fdesc;
  360. uint32_t v4l2_fmt;
  361. int ret;
  362. /* translate to a valid v4l2 format */
  363. v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id);
  364. if (!v4l2_fmt)
  365. return AVERROR(EINVAL);
  366. /* check if the driver supports this format */
  367. memset(&fdesc, 0, sizeof(fdesc));
  368. fdesc.type = ctx->type;
  369. for (;;) {
  370. ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
  371. if (ret)
  372. return AVERROR(EINVAL);
  373. if (fdesc.pixelformat == v4l2_fmt)
  374. break;
  375. fdesc.index++;
  376. }
  377. *p = v4l2_fmt;
  378. return 0;
  379. }
  380. /*****************************************************************************
  381. *
  382. * V4L2 Context Interface
  383. *
  384. *****************************************************************************/
  385. int ff_v4l2_context_set_status(V4L2Context* ctx, int cmd)
  386. {
  387. int type = ctx->type;
  388. int ret;
  389. ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type);
  390. if (ret < 0)
  391. return AVERROR(errno);
  392. ctx->streamon = (cmd == VIDIOC_STREAMON);
  393. return 0;
  394. }
  395. int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame)
  396. {
  397. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  398. V4L2Buffer* avbuf;
  399. int ret;
  400. if (!frame) {
  401. ret = v4l2_stop_encode(ctx);
  402. if (ret)
  403. av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name);
  404. s->draining= 1;
  405. return 0;
  406. }
  407. avbuf = v4l2_getfree_v4l2buf(ctx);
  408. if (!avbuf)
  409. return AVERROR(ENOMEM);
  410. ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf);
  411. if (ret)
  412. return ret;
  413. return ff_v4l2_buffer_enqueue(avbuf);
  414. }
  415. int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt)
  416. {
  417. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  418. V4L2Buffer* avbuf;
  419. int ret;
  420. if (!pkt->size) {
  421. ret = v4l2_stop_decode(ctx);
  422. if (ret)
  423. av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name);
  424. s->draining = 1;
  425. return 0;
  426. }
  427. avbuf = v4l2_getfree_v4l2buf(ctx);
  428. if (!avbuf)
  429. return AVERROR(ENOMEM);
  430. ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf);
  431. if (ret)
  432. return ret;
  433. return ff_v4l2_buffer_enqueue(avbuf);
  434. }
  435. int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame)
  436. {
  437. V4L2Buffer* avbuf = NULL;
  438. /* if we are draining, we are no longer inputing data, therefore enable a
  439. * timeout so we can dequeue and flag the last valid buffer.
  440. *
  441. * blocks until:
  442. * 1. decoded frame available
  443. * 2. an input buffer is ready to be dequeued
  444. */
  445. avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
  446. if (!avbuf) {
  447. if (ctx->done)
  448. return AVERROR_EOF;
  449. return AVERROR(EAGAIN);
  450. }
  451. return ff_v4l2_buffer_buf_to_avframe(frame, avbuf);
  452. }
  453. int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt)
  454. {
  455. V4L2Buffer* avbuf = NULL;
  456. /* if we are draining, we are no longer inputing data, therefore enable a
  457. * timeout so we can dequeue and flag the last valid buffer.
  458. *
  459. * blocks until:
  460. * 1. encoded packet available
  461. * 2. an input buffer ready to be dequeued
  462. */
  463. avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
  464. if (!avbuf) {
  465. if (ctx->done)
  466. return AVERROR_EOF;
  467. return AVERROR(EAGAIN);
  468. }
  469. return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf);
  470. }
/**
 * Negotiates a format for this context and stores it in ctx->format.
 *
 * Raw-video contexts probe the driver for an acceptable pixel format (which
 * also performs TRY_FMT); coded contexts translate the codec id to a V4L2
 * fourcc, verify the driver enumerates it, then TRY_FMT it.  The format is
 * not applied here — see ff_v4l2_context_set_format().
 */
int ff_v4l2_context_get_format(V4L2Context* ctx)
{
    struct v4l2_format_update fmt = { 0 };
    int ret;

    if (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) {
        ret = v4l2_get_raw_format(ctx, &fmt.av_fmt);
        if (ret)
            return ret;

        fmt.update_avfmt = 1;
        v4l2_save_to_context(ctx, &fmt);

        /* format has been tried already */
        return ret;
    }

    ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt);
    if (ret)
        return ret;

    fmt.update_v4l2 = 1;
    v4l2_save_to_context(ctx, &fmt);

    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format);
}
  491. int ff_v4l2_context_set_format(V4L2Context* ctx)
  492. {
  493. return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format);
  494. }
  495. void ff_v4l2_context_release(V4L2Context* ctx)
  496. {
  497. int ret;
  498. if (!ctx->buffers)
  499. return;
  500. ret = v4l2_release_buffers(ctx);
  501. if (ret)
  502. av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name);
  503. av_free(ctx->buffers);
  504. ctx->buffers = NULL;
  505. }
  506. int ff_v4l2_context_init(V4L2Context* ctx)
  507. {
  508. V4L2m2mContext *s = ctx_to_m2mctx(ctx);
  509. struct v4l2_requestbuffers req;
  510. int ret, i;
  511. if (!v4l2_type_supported(ctx)) {
  512. av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type);
  513. return AVERROR_PATCHWELCOME;
  514. }
  515. ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format);
  516. if (ret)
  517. av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name);
  518. memset(&req, 0, sizeof(req));
  519. req.count = ctx->num_buffers;
  520. req.memory = V4L2_MEMORY_MMAP;
  521. req.type = ctx->type;
  522. ret = ioctl(s->fd, VIDIOC_REQBUFS, &req);
  523. if (ret < 0)
  524. return AVERROR(errno);
  525. ctx->num_buffers = req.count;
  526. ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer));
  527. if (!ctx->buffers) {
  528. av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name);
  529. return AVERROR(ENOMEM);
  530. }
  531. for (i = 0; i < req.count; i++) {
  532. ctx->buffers[i].context = ctx;
  533. ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i);
  534. if (ret < 0) {
  535. av_log(logger(ctx), AV_LOG_ERROR, "%s buffer initialization (%s)\n", ctx->name, av_err2str(ret));
  536. av_free(ctx->buffers);
  537. return ret;
  538. }
  539. }
  540. av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name,
  541. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat),
  542. req.count,
  543. v4l2_get_width(&ctx->format),
  544. v4l2_get_height(&ctx->format),
  545. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage,
  546. V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline);
  547. return 0;
  548. }