/*
 * YUV4MPEG format
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/pixdesc.h"
#include "avformat.h"
#include "internal.h"

#define Y4M_MAGIC "YUV4MPEG2"
#define Y4M_FRAME_MAGIC "FRAME"
#define Y4M_LINE_MAX 256
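
/*
 * A YUV4MPEG2 stream is a plain-text stream header line ("YUV4MPEG2"
 * followed by space-separated tagged parameters), then, for every frame,
 * a "FRAME\n" marker followed by the raw planar picture data. As an
 * illustrative example (not taken from this file), a PAL 4:2:0 header
 * line could look like:
 *
 *   YUV4MPEG2 W720 H576 F25:1 Ip A16:15 C420mpeg2 XYSCSS=420MPEG2
 */
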
#if CONFIG_YUV4MPEGPIPE_MUXER
static int yuv4_generate_header(AVFormatContext *s, char* buf)
{
    AVStream *st;
    int width, height;
    int raten, rated, aspectn, aspectd, n;
    char inter;
    const char *colorspace = "";

    st     = s->streams[0];
    width  = st->codec->width;
    height = st->codec->height;

    av_reduce(&raten, &rated, st->codec->time_base.den,
              st->codec->time_base.num, (1UL << 31) - 1);

    aspectn = st->sample_aspect_ratio.num;
    aspectd = st->sample_aspect_ratio.den;

    if (aspectn == 0 && aspectd == 1)
        aspectd = 0;  // 0:0 means unknown

    inter = 'p'; /* progressive is the default */
    if (st->codec->coded_frame && st->codec->coded_frame->interlaced_frame)
        inter = st->codec->coded_frame->top_field_first ? 't' : 'b';
    if (st->codec->field_order == AV_FIELD_PROGRESSIVE) {
        inter = 'p';
    } else if (st->codec->field_order == AV_FIELD_TB || st->codec->field_order == AV_FIELD_TT) {
        inter = 't';
    } else if (st->codec->field_order == AV_FIELD_BT || st->codec->field_order == AV_FIELD_BB) {
        inter = 'b';
    }

    switch (st->codec->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
        colorspace = " Cmono";
        break;
    case AV_PIX_FMT_GRAY16:
        colorspace = " Cmono16";
        break;
    case AV_PIX_FMT_YUV411P:
        colorspace = " C411 XYSCSS=411";
        break;
    case AV_PIX_FMT_YUV420P:
        switch (st->codec->chroma_sample_location) {
        case AVCHROMA_LOC_TOPLEFT: colorspace = " C420paldv XYSCSS=420PALDV"; break;
        case AVCHROMA_LOC_LEFT:    colorspace = " C420mpeg2 XYSCSS=420MPEG2"; break;
        default:                   colorspace = " C420jpeg XYSCSS=420JPEG";   break;
        }
        break;
    case AV_PIX_FMT_YUV422P:
        colorspace = " C422 XYSCSS=422";
        break;
    case AV_PIX_FMT_YUV444P:
        colorspace = " C444 XYSCSS=444";
        break;
    case AV_PIX_FMT_YUV420P9:
        colorspace = " C420p9 XYSCSS=420P9";
        break;
    case AV_PIX_FMT_YUV422P9:
        colorspace = " C422p9 XYSCSS=422P9";
        break;
    case AV_PIX_FMT_YUV444P9:
        colorspace = " C444p9 XYSCSS=444P9";
        break;
    case AV_PIX_FMT_YUV420P10:
        colorspace = " C420p10 XYSCSS=420P10";
        break;
    case AV_PIX_FMT_YUV422P10:
        colorspace = " C422p10 XYSCSS=422P10";
        break;
    case AV_PIX_FMT_YUV444P10:
        colorspace = " C444p10 XYSCSS=444P10";
        break;
    case AV_PIX_FMT_YUV420P12:
        colorspace = " C420p12 XYSCSS=420P12";
        break;
    case AV_PIX_FMT_YUV422P12:
        colorspace = " C422p12 XYSCSS=422P12";
        break;
    case AV_PIX_FMT_YUV444P12:
        colorspace = " C444p12 XYSCSS=444P12";
        break;
    case AV_PIX_FMT_YUV420P14:
        colorspace = " C420p14 XYSCSS=420P14";
        break;
    case AV_PIX_FMT_YUV422P14:
        colorspace = " C422p14 XYSCSS=422P14";
        break;
    case AV_PIX_FMT_YUV444P14:
        colorspace = " C444p14 XYSCSS=444P14";
        break;
    case AV_PIX_FMT_YUV420P16:
        colorspace = " C420p16 XYSCSS=420P16";
        break;
    case AV_PIX_FMT_YUV422P16:
        colorspace = " C422p16 XYSCSS=422P16";
        break;
    case AV_PIX_FMT_YUV444P16:
        colorspace = " C444p16 XYSCSS=444P16";
        break;
    }

    /* construct stream header, if this is the first frame */
    n = snprintf(buf, Y4M_LINE_MAX, "%s W%d H%d F%d:%d I%c A%d:%d%s\n",
                 Y4M_MAGIC, width, height, raten, rated, inter,
                 aspectn, aspectd, colorspace);

    return n;
}
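
/* Write one packet: emit the stream header before the first frame, then a
 * FRAME marker followed by the raw luma plane and, for non-gray formats,
 * the Cb and Cr planes, one row at a time so each plane's linesize is
 * honored. */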
static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVStream *st = s->streams[pkt->stream_index];
    AVIOContext *pb = s->pb;
    AVPicture *picture, picture_tmp;
    int* first_pkt = s->priv_data;
    int width, height, h_chroma_shift, v_chroma_shift;
    int i;
    char buf2[Y4M_LINE_MAX + 1];
    char buf1[20];
    uint8_t *ptr, *ptr1, *ptr2;

    memcpy(&picture_tmp, pkt->data, sizeof(AVPicture));
    picture = &picture_tmp;

    /* for the first packet we have to output the header as well */
    if (*first_pkt) {
        *first_pkt = 0;
        if (yuv4_generate_header(s, buf2) < 0) {
            av_log(s, AV_LOG_ERROR,
                   "Error. YUV4MPEG stream header write failed.\n");
            return AVERROR(EIO);
        } else {
            avio_write(pb, buf2, strlen(buf2));
        }
    }

    /* construct frame header */
    snprintf(buf1, sizeof(buf1), "%s\n", Y4M_FRAME_MAGIC);
    avio_write(pb, buf1, strlen(buf1));

    width  = st->codec->width;
    height = st->codec->height;

    ptr = picture->data[0];

    switch (st->codec->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV444P:
        break;
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV444P16:
        width *= 2;
        break;
    default:
        av_log(s, AV_LOG_ERROR, "The pixel format '%s' is not supported.\n",
               av_get_pix_fmt_name(st->codec->pix_fmt));
        return AVERROR(EINVAL);
    }

    for (i = 0; i < height; i++) {
        avio_write(pb, ptr, width);
        ptr += picture->linesize[0];
    }

    if (st->codec->pix_fmt != AV_PIX_FMT_GRAY8 &&
        st->codec->pix_fmt != AV_PIX_FMT_GRAY16) {
        // Adjust for smaller Cb and Cr planes
        av_pix_fmt_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift,
                                         &v_chroma_shift);
        width  = FF_CEIL_RSHIFT(width,  h_chroma_shift);
        height = FF_CEIL_RSHIFT(height, v_chroma_shift);

        ptr1 = picture->data[1];
        ptr2 = picture->data[2];
        for (i = 0; i < height; i++) {  /* Cb */
            avio_write(pb, ptr1, width);
            ptr1 += picture->linesize[1];
        }
        for (i = 0; i < height; i++) {  /* Cr */
            avio_write(pb, ptr2, width);
            ptr2 += picture->linesize[2];
        }
    }

    return 0;
}

static int yuv4_write_header(AVFormatContext *s)
{
    int *first_pkt = s->priv_data;

    if (s->nb_streams != 1)
        return AVERROR(EIO);

    if (s->streams[0]->codec->codec_id != AV_CODEC_ID_RAWVIDEO) {
        av_log(s, AV_LOG_ERROR, "ERROR: Only rawvideo supported.\n");
        return AVERROR_INVALIDDATA;
    }

    switch (s->streams[0]->codec->pix_fmt) {
    case AV_PIX_FMT_YUV411P:
        av_log(s, AV_LOG_WARNING, "Warning: generating rarely used 4:1:1 YUV "
               "stream, some mjpegtools might not work.\n");
        break;
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV444P:
        break;
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV444P16:
        if (s->streams[0]->codec->strict_std_compliance >= FF_COMPLIANCE_NORMAL) {
            av_log(s, AV_LOG_ERROR, "'%s' is not an official yuv4mpegpipe pixel format. "
                   "Use '-strict -1' to encode to this pixel format.\n",
                   av_get_pix_fmt_name(s->streams[0]->codec->pix_fmt));
            return AVERROR(EINVAL);
        }
        av_log(s, AV_LOG_WARNING, "Warning: generating non-standard YUV stream. "
               "Mjpegtools will not work.\n");
        break;
    default:
        av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg can only handle "
               "yuv444p, yuv422p, yuv420p, yuv411p and gray8 pixel formats. "
               "And using 'strict -1' also yuv444p9, yuv422p9, yuv420p9, "
               "yuv444p10, yuv422p10, yuv420p10, "
               "yuv444p12, yuv422p12, yuv420p12, "
               "yuv444p14, yuv422p14, yuv420p14, "
               "yuv444p16, yuv422p16, yuv420p16 "
               "and gray16 pixel formats. "
               "Use -pix_fmt to select one.\n");
        return AVERROR(EIO);
    }

    *first_pkt = 1;
    return 0;
}

AVOutputFormat ff_yuv4mpegpipe_muxer = {
    .name           = "yuv4mpegpipe",
    .long_name      = NULL_IF_CONFIG_SMALL("YUV4MPEG pipe"),
    .extensions     = "y4m",
    .priv_data_size = sizeof(int),
    .audio_codec    = AV_CODEC_ID_NONE,
    .video_codec    = AV_CODEC_ID_RAWVIDEO,
    .write_header   = yuv4_write_header,
    .write_packet   = yuv4_write_packet,
    .flags          = AVFMT_RAWPICTURE,
};
#endif

/* Header size increased to allow room for optional flags */
#define MAX_YUV4_HEADER 80
#define MAX_FRAME_HEADER 80
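
/* Read the stream header up to the first newline into a fixed-size buffer
 * and walk it token by token; every token starts with a single tag
 * character (W, H, C, I, F, A, X) as in the header layout described above. */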
static int yuv4_read_header(AVFormatContext *s)
{
    char header[MAX_YUV4_HEADER + 10];  // Include headroom for
                                        // the longest option
    char *tokstart, *tokend, *header_end, interlaced = '?';
    int i;
    AVIOContext *pb = s->pb;
    int width = -1, height  = -1, raten   = 0,
        rated =  0, aspectn =  0, aspectd = 0;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE, alt_pix_fmt = AV_PIX_FMT_NONE;
    enum AVChromaLocation chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
    AVStream *st;

    for (i = 0; i < MAX_YUV4_HEADER; i++) {
        header[i] = avio_r8(pb);
        if (header[i] == '\n') {
            header[i + 1] = 0x20;  // Add a space after last option.
                                   // Makes parsing "444" vs "444alpha" easier.
            header[i + 2] = 0;
            break;
        }
    }
    if (i == MAX_YUV4_HEADER)
        return -1;
    if (strncmp(header, Y4M_MAGIC, strlen(Y4M_MAGIC)))
        return -1;

    header_end = &header[i + 1];  // Include space
    for (tokstart = &header[strlen(Y4M_MAGIC) + 1];
         tokstart < header_end; tokstart++) {
        if (*tokstart == 0x20)
            continue;
        switch (*tokstart++) {
        case 'W':  // Width. Required.
            width    = strtol(tokstart, &tokend, 10);
            tokstart = tokend;
            break;
        case 'H':  // Height. Required.
            height   = strtol(tokstart, &tokend, 10);
            tokstart = tokend;
            break;
        case 'C':  // Color space
            if (strncmp("420jpeg", tokstart, 7) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_CENTER;
            } else if (strncmp("420mpeg2", tokstart, 8) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_LEFT;
            } else if (strncmp("420paldv", tokstart, 8) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
            } else if (strncmp("420p16", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P16;
            } else if (strncmp("422p16", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P16;
            } else if (strncmp("444p16", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P16;
            } else if (strncmp("420p14", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P14;
            } else if (strncmp("422p14", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P14;
            } else if (strncmp("444p14", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P14;
            } else if (strncmp("420p12", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P12;
            } else if (strncmp("422p12", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P12;
            } else if (strncmp("444p12", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P12;
            } else if (strncmp("420p10", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P10;
            } else if (strncmp("422p10", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P10;
            } else if (strncmp("444p10", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P10;
            } else if (strncmp("420p9", tokstart, 5) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P9;
            } else if (strncmp("422p9", tokstart, 5) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P9;
            } else if (strncmp("444p9", tokstart, 5) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P9;
            } else if (strncmp("420", tokstart, 3) == 0) {
                pix_fmt = AV_PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_CENTER;
            } else if (strncmp("411", tokstart, 3) == 0) {
                pix_fmt = AV_PIX_FMT_YUV411P;
            } else if (strncmp("422", tokstart, 3) == 0) {
                pix_fmt = AV_PIX_FMT_YUV422P;
            } else if (strncmp("444alpha", tokstart, 8) == 0) {
                av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 "
                       "YUV4MPEG stream.\n");
                return -1;
            } else if (strncmp("444", tokstart, 3) == 0) {
                pix_fmt = AV_PIX_FMT_YUV444P;
            } else if (strncmp("mono16", tokstart, 6) == 0) {
                pix_fmt = AV_PIX_FMT_GRAY16;
            } else if (strncmp("mono", tokstart, 4) == 0) {
                pix_fmt = AV_PIX_FMT_GRAY8;
            } else {
                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown "
                       "pixel format.\n");
                return -1;
            }
            while (tokstart < header_end && *tokstart != 0x20)
                tokstart++;
            break;
        case 'I':  // Interlace type
            interlaced = *tokstart++;
            break;
        case 'F':  // Frame rate
            sscanf(tokstart, "%d:%d", &raten, &rated);  // 0:0 if unknown
            while (tokstart < header_end && *tokstart != 0x20)
                tokstart++;
            break;
        case 'A':  // Pixel aspect
            sscanf(tokstart, "%d:%d", &aspectn, &aspectd);  // 0:0 if unknown
            while (tokstart < header_end && *tokstart != 0x20)
                tokstart++;
            break;
        case 'X':  // Vendor extensions
            if (strncmp("YSCSS=", tokstart, 6) == 0) {
                // Older nonstandard pixel format representation
                tokstart += 6;
                if (strncmp("420JPEG", tokstart, 7) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                else if (strncmp("420MPEG2", tokstart, 8) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                else if (strncmp("420PALDV", tokstart, 8) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                else if (strncmp("420P9", tokstart, 5) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P9;
                else if (strncmp("422P9", tokstart, 5) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P9;
                else if (strncmp("444P9", tokstart, 5) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P9;
                else if (strncmp("420P10", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P10;
                else if (strncmp("422P10", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P10;
                else if (strncmp("444P10", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P10;
                else if (strncmp("420P12", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P12;
                else if (strncmp("422P12", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P12;
                else if (strncmp("444P12", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P12;
                else if (strncmp("420P14", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P14;
                else if (strncmp("422P14", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P14;
                else if (strncmp("444P14", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P14;
                else if (strncmp("420P16", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV420P16;
                else if (strncmp("422P16", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P16;
                else if (strncmp("444P16", tokstart, 6) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P16;
                else if (strncmp("411", tokstart, 3) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV411P;
                else if (strncmp("422", tokstart, 3) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV422P;
                else if (strncmp("444", tokstart, 3) == 0)
                    alt_pix_fmt = AV_PIX_FMT_YUV444P;
            }
            while (tokstart < header_end && *tokstart != 0x20)
                tokstart++;
            break;
        }
    }

    if (width == -1 || height == -1) {
        av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
        return -1;
    }

    if (pix_fmt == AV_PIX_FMT_NONE) {
        if (alt_pix_fmt == AV_PIX_FMT_NONE)
            pix_fmt = AV_PIX_FMT_YUV420P;
        else
            pix_fmt = alt_pix_fmt;
    }

    if (raten <= 0 || rated <= 0) {
        // Frame rate unknown
        raten = 25;
        rated = 1;
    }

    if (aspectn == 0 && aspectd == 0) {
        // Pixel aspect unknown
        aspectd = 1;
    }

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    st->codec->width  = width;
    st->codec->height = height;
    av_reduce(&raten, &rated, raten, rated, (1UL << 31) - 1);
    avpriv_set_pts_info(st, 64, rated, raten);
    st->avg_frame_rate                = av_inv_q(st->time_base);
    st->codec->pix_fmt                = pix_fmt;
    st->codec->codec_type             = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id               = AV_CODEC_ID_RAWVIDEO;
    st->sample_aspect_ratio           = (AVRational){ aspectn, aspectd };
    st->codec->chroma_sample_location = chroma_sample_location;

    switch (interlaced) {
    case 'p':
        st->codec->field_order = AV_FIELD_PROGRESSIVE;
        break;
    case 't':
        st->codec->field_order = AV_FIELD_TB;
        break;
    case 'b':
        st->codec->field_order = AV_FIELD_BT;
        break;
    case 'm':
        av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains mixed "
               "interlaced and non-interlaced frames.\n");
        /* fall through: treat mixed interlacing as unknown field order */
    case '?':
        st->codec->field_order = AV_FIELD_UNKNOWN;
        break;
    default:
        av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
        return AVERROR(EINVAL);
    }

    return 0;
}

static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    int i;
    char header[MAX_FRAME_HEADER + 1];
    int packet_size, width, height, ret;
    AVStream *st = s->streams[0];

    for (i = 0; i < MAX_FRAME_HEADER; i++) {
        header[i] = avio_r8(s->pb);
        if (header[i] == '\n') {
            header[i + 1] = 0;
            break;
        }
    }
    if (s->pb->error)
        return s->pb->error;
    else if (s->pb->eof_reached)
        return AVERROR_EOF;
    else if (i == MAX_FRAME_HEADER)
        return AVERROR_INVALIDDATA;

    if (strncmp(header, Y4M_FRAME_MAGIC, strlen(Y4M_FRAME_MAGIC)))
        return AVERROR_INVALIDDATA;

    width  = st->codec->width;
    height = st->codec->height;

    packet_size = avpicture_get_size(st->codec->pix_fmt, width, height);
    if (packet_size < 0)
        return packet_size;

    ret = av_get_packet(s->pb, pkt, packet_size);
    if (ret < 0)
        return ret;
    else if (ret != packet_size)
        return s->pb->eof_reached ? AVERROR_EOF : AVERROR(EIO);

    pkt->stream_index = 0;
    return 0;
}

static int yuv4_probe(AVProbeData *pd)
{
    /* check file header */
    if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC) - 1) == 0)
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}

#if CONFIG_YUV4MPEGPIPE_DEMUXER
AVInputFormat ff_yuv4mpegpipe_demuxer = {
    .name        = "yuv4mpegpipe",
    .long_name   = NULL_IF_CONFIG_SMALL("YUV4MPEG pipe"),
    .read_probe  = yuv4_probe,
    .read_header = yuv4_read_header,
    .read_packet = yuv4_read_packet,
    .extensions  = "y4m",
};
#endif