/*
 * Image format
 * Copyright (c) 2000, 2001, 2002 Fabrice Bellard.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#include <unistd.h>
#ifdef __BEOS__
# include <OS.h>
#endif
#include "avformat.h"

extern AVInputFormat pgm_iformat;
extern AVOutputFormat pgm_oformat;
extern AVInputFormat pgmyuv_iformat;
extern AVOutputFormat pgmyuv_oformat;
extern AVInputFormat ppm_iformat;
extern AVOutputFormat ppm_oformat;
extern AVInputFormat imgyuv_iformat;
extern AVOutputFormat imgyuv_oformat;
extern AVInputFormat pgmpipe_iformat;
extern AVOutputFormat pgmpipe_oformat;
extern AVInputFormat pgmyuvpipe_iformat;
extern AVOutputFormat pgmyuvpipe_oformat;
extern AVInputFormat ppmpipe_iformat;
extern AVOutputFormat ppmpipe_oformat;
extern AVOutputFormat yuv4mpegpipe_oformat;

#define IMGFMT_YUV      1
#define IMGFMT_PGMYUV   2
#define IMGFMT_PGM      3
#define IMGFMT_PPM      4
#define IMGFMT_YUV4MPEG 5

#define Y4M_MAGIC       "YUV4MPEG2"
#define Y4M_FRAME_MAGIC "FRAME"
#define Y4M_LINE_MAX    256

typedef struct {
    int width;          /* frame width in pixels */
    int height;         /* frame height in pixels */
    int img_number;     /* index of the current image */
    int img_size;       /* size of one frame in bytes */
    int img_fmt;        /* one of the IMGFMT_* values */
    int is_pipe;        /* non zero if reading/writing a single stream instead of numbered files */
    int header_written; /* YUV4MPEG stream header state */
    char path[1024];    /* printf-style filename pattern */
} VideoData;

int emulate_frame_rate; /* if set, images are read at the nominal frame rate instead of as fast as possible */

static inline int pnm_space(int c)
{
    return (c == ' ' || c == '\n' || c == '\r' || c == '\t');
}

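/* Read the next whitespace-delimited token from a PNM header into str,
 * skipping '#' comment lines. At most buf_size - 1 characters are stored. */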
static void pnm_get(ByteIOContext *f, char *str, int buf_size)
{
    char *s;
    int c;

    do {
        c = get_byte(f);
        if (c == '#') {
            do {
                c = get_byte(f);
            } while (c != '\n');
            c = get_byte(f);
        }
    } while (pnm_space(c));

    s = str;
    do {
        if (url_feof(f))
            break;
        if ((s - str) < buf_size - 1)
            *s++ = c;
        c = get_byte(f);
    } while (!pnm_space(c));
    *s = '\0';
}

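/* Read one PGM ("P5") image into buf as planar YUV420P. When is_yuv is set
 * the half-resolution chroma planes are read from the file (PGMYUV layout),
 * otherwise they are filled with the neutral value 128. */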
static int pgm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size, int is_yuv)
{
    int width, height, i;
    char buf1[32];
    UINT8 *picture[3];

    width = s->width;
    height = s->height;

    pnm_get(f, buf1, sizeof(buf1));
    if (strcmp(buf1, "P5")) {
        return -EIO;
    }
    /* width, height and maxval were already parsed in img_read_header: skip them */
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));

    picture[0] = buf;
    picture[1] = buf + width * height;
    picture[2] = buf + width * height + (width * height / 4);
    get_buffer(f, picture[0], width * height);

    height >>= 1;
    width >>= 1;
    if (is_yuv) {
        for(i=0;i<height;i++) {
            get_buffer(f, picture[1] + i * width, width);
            get_buffer(f, picture[2] + i * width, width);
        }
    } else {
        for(i=0;i<height;i++) {
            memset(picture[1] + i * width, 128, width);
            memset(picture[2] + i * width, 128, width);
        }
    }
    return 0;
}

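/* Read one binary PPM ("P6") image into buf as packed RGB24. */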
static int ppm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size)
{
    int width, height;
    char buf1[32];
    UINT8 *picture[3];

    width = s->width;
    height = s->height;

    pnm_get(f, buf1, sizeof(buf1));
    if (strcmp(buf1, "P6")) {
        return -EIO;
    }
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));

    picture[0] = buf;
    get_buffer(f, picture[0], width * height * 3);
    return 0;
}

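/* Read one .Y/.U/.V image triplet: the luma plane comes from the .Y file,
 * the quarter-size chroma planes from the matching .U and .V files. */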
static int yuv_read(VideoData *s, const char *filename, UINT8 *buf, int size1)
{
    ByteIOContext pb1, *pb = &pb1;
    char fname[1024], *p;
    int size;

    size = s->width * s->height;

    strcpy(fname, filename);
    p = strrchr(fname, '.');
    if (!p || p[1] != 'Y')
        return -EIO;

    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf, size);
    url_fclose(pb);

    p[1] = 'U';
    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf + size, size / 4);
    url_fclose(pb);

    p[1] = 'V';
    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf + size + (size / 4), size / 4);
    url_fclose(pb);
    return 0;
}

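/* Read the next image into a new packet. For file based formats the image is
 * opened by its generated filename; for pipe formats it is read from the
 * demuxer's ByteIOContext. When emulate_frame_rate is set, reading is delayed
 * so that frames are delivered at the stream's nominal frame rate. */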
static int img_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    char filename[1024];
    int ret;
    ByteIOContext f1, *f;
    static INT64 first_frame;

    if (emulate_frame_rate) {
        if (!first_frame) {
            first_frame = av_gettime();
        } else {
            INT64 pts;
            INT64 nowus;

            nowus = av_gettime() - first_frame;
            pts = ((INT64)s->img_number * FRAME_RATE_BASE * 1000000) / (s1->streams[0]->codec.frame_rate);
            if (pts > nowus)
#ifdef __BEOS__
                snooze((bigtime_t)(pts - nowus));
#else
                usleep(pts - nowus);
#endif
        }
    }

    /*
      This if-statement destroys pipes - I do not see why it is necessary
    if (get_frame_filename(filename, sizeof(filename),
                           s->path, s->img_number) < 0)
        return -EIO;
    */
    get_frame_filename(filename, sizeof(filename),
                       s->path, s->img_number);

    if (!s->is_pipe) {
        f = &f1;
        if (url_fopen(f, filename, URL_RDONLY) < 0)
            return -EIO;
    } else {
        f = &s1->pb;
        if (url_feof(f))
            return -EIO;
    }

    av_new_packet(pkt, s->img_size);
    pkt->stream_index = 0;

    switch(s->img_fmt) {
    case IMGFMT_PGMYUV:
        ret = pgm_read(s, f, pkt->data, pkt->size, 1);
        break;
    case IMGFMT_PGM:
        ret = pgm_read(s, f, pkt->data, pkt->size, 0);
        break;
    case IMGFMT_YUV:
        ret = yuv_read(s, filename, pkt->data, pkt->size);
        break;
    case IMGFMT_PPM:
        ret = ppm_read(s, f, pkt->data, pkt->size);
        break;
    default:
        return -EIO;
    }

    if (!s->is_pipe) {
        url_fclose(f);
    }

    if (ret < 0) {
        av_free_packet(pkt);
        return -EIO; /* signal EOF */
    } else {
        pkt->pts = ((INT64)s->img_number * s1->pts_den * FRAME_RATE_BASE) / (s1->streams[0]->codec.frame_rate * s1->pts_num);
        s->img_number++;
        return 0;
    }
}

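/* Common frame sizes used by infer_size() to guess the dimensions of a raw
 * .Y file from its size in bytes. */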
static int sizes[][2] = {
    { 640, 480 },
    { 720, 480 },
    { 720, 576 },
    { 352, 288 },
    { 352, 240 },
    { 160, 128 },
    { 512, 384 },
    { 640, 352 },
    { 640, 240 },
};

static int infer_size(int *width_ptr, int *height_ptr, int size)
{
    int i;

    for(i=0;i<sizeof(sizes)/sizeof(sizes[0]);i++) {
        if ((sizes[i][0] * sizes[i][1]) == size) {
            *width_ptr = sizes[i][0];
            *height_ptr = sizes[i][1];
            return 0;
        }
    }
    return -1;
}

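/* Open the first image, determine the image format and size, and set up the
 * single raw video stream. The frame rate defaults to 25 fps unless given in
 * the format parameters. */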
static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    int i, h;
    char buf[1024];
    char buf1[32];
    ByteIOContext pb1, *f = &pb1;
    AVStream *st;

    st = av_new_stream(s1, 0);
    if (!st) {
        av_free(s);
        return -ENOMEM;
    }

    strcpy(s->path, s1->filename);
    s->img_number = 0;

    /* find format */
    if (s1->iformat->flags & AVFMT_NOFILE)
        s->is_pipe = 0;
    else
        s->is_pipe = 1;

    if (s1->iformat == &pgmyuvpipe_iformat ||
        s1->iformat == &pgmyuv_iformat)
        s->img_fmt = IMGFMT_PGMYUV;
    else if (s1->iformat == &pgmpipe_iformat ||
             s1->iformat == &pgm_iformat)
        s->img_fmt = IMGFMT_PGM;
    else if (s1->iformat == &imgyuv_iformat)
        s->img_fmt = IMGFMT_YUV;
    else if (s1->iformat == &ppmpipe_iformat ||
             s1->iformat == &ppm_iformat)
        s->img_fmt = IMGFMT_PPM;
    else
        goto fail;

    if (!s->is_pipe) {
        /* try to find the first image */
        for(i=0;i<5;i++) {
            if (get_frame_filename(buf, sizeof(buf), s->path, s->img_number) < 0)
                goto fail;
            if (url_fopen(f, buf, URL_RDONLY) >= 0)
                break;
            s->img_number++;
        }
        if (i == 5)
            goto fail;
    } else {
        f = &s1->pb;
    }

    /* find the image size */
    /* XXX: use generic file format guessing, as is done for mpeg */
    switch(s->img_fmt) {
    case IMGFMT_PGM:
    case IMGFMT_PGMYUV:
    case IMGFMT_PPM:
        pnm_get(f, buf1, sizeof(buf1));
        pnm_get(f, buf1, sizeof(buf1));
        s->width = atoi(buf1);
        pnm_get(f, buf1, sizeof(buf1));
        h = atoi(buf1);
        if (s->img_fmt == IMGFMT_PGMYUV)
            h = (h * 2) / 3;
        s->height = h;
        if (s->width <= 0 ||
            s->height <= 0 ||
            (s->width % 2) != 0 ||
            (s->height % 2) != 0) {
            goto fail1;
        }
        break;
    case IMGFMT_YUV:
        /* infer size by using the file size. */
        {
            int img_size;
            URLContext *h;

            /* XXX: hack hack */
            h = url_fileno(f);
            img_size = url_seek(h, 0, SEEK_END);
            if (infer_size(&s->width, &s->height, img_size) < 0) {
                goto fail1;
            }
        }
        break;
    }

    if (!s->is_pipe) {
        url_fclose(f);
    } else {
        url_fseek(f, 0, SEEK_SET);
    }

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = s->width;
    st->codec.height = s->height;
    if (s->img_fmt == IMGFMT_PPM) {
        st->codec.pix_fmt = PIX_FMT_RGB24;
        s->img_size = (s->width * s->height * 3);
    } else {
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        s->img_size = (s->width * s->height * 3) / 2;
    }
    if (!ap || !ap->frame_rate)
        st->codec.frame_rate = 25 * FRAME_RATE_BASE;
    else
        st->codec.frame_rate = ap->frame_rate;

    return 0;

 fail1:
    if (!s->is_pipe)
        url_fclose(f);
 fail:
    av_free(s);
    return -EIO;
}

static int img_read_close(AVFormatContext *s1)
{
    return 0;
}

/******************************************************/
/* image output */

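/* Write one picture as a binary PGM ("P5") image. When is_yuv is set the
 * half-resolution Cb and Cr planes are stored side by side below the luma
 * plane (PGMYUV layout). */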
static int pgm_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int is_yuv)
{
    int i, h;
    char buf[100];
    UINT8 *ptr, *ptr1, *ptr2;

    h = height;
    if (is_yuv)
        h = (height * 3) / 2;
    snprintf(buf, sizeof(buf),
             "P5\n%d %d\n%d\n",
             width, h, 255);
    put_buffer(pb, buf, strlen(buf));

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width);
        ptr += picture->linesize[0];
    }
    if (is_yuv) {
        height >>= 1;
        width >>= 1;
        ptr1 = picture->data[1];
        ptr2 = picture->data[2];
        for(i=0;i<height;i++) {
            put_buffer(pb, ptr1, width);
            put_buffer(pb, ptr2, width);
            ptr1 += picture->linesize[1];
            ptr2 += picture->linesize[2];
        }
    }
    put_flush_packet(pb);
    return 0;
}

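/* Write one RGB24 picture as a binary PPM ("P6") image. */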
static int ppm_save(AVPicture *picture, int width, int height, ByteIOContext *pb)
{
    int i;
    char buf[100];
    UINT8 *ptr;

    snprintf(buf, sizeof(buf),
             "P6\n%d %d\n%d\n",
             width, height, 255);
    put_buffer(pb, buf, strlen(buf));

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width * 3);
        ptr += picture->linesize[0];
    }
    put_flush_packet(pb);
    return 0;
}

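/* Write one picture as a .Y/.U/.V file triplet, one file per plane. */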
static int yuv_save(AVPicture *picture, int width, int height, const char *filename)
{
    ByteIOContext pb1, *pb = &pb1;
    char fname[1024], *p;
    int i, j;
    UINT8 *ptr;
    static char *ext = "YUV";

    strcpy(fname, filename);
    p = strrchr(fname, '.');
    if (!p || p[1] != 'Y')
        return -EIO;

    for(i=0;i<3;i++) {
        if (i == 1) {
            width >>= 1;
            height >>= 1;
        }
        p[1] = ext[i];
        if (url_fopen(pb, fname, URL_WRONLY) < 0)
            return -EIO;
        ptr = picture->data[i];
        for(j=0;j<height;j++) {
            put_buffer(pb, ptr, width);
            ptr += picture->linesize[i];
        }
        put_flush_packet(pb);
        url_fclose(pb);
    }
    return 0;
}

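/* Write one picture in YUV4MPEG format: an optional stream header, a frame
 * header, then the luma plane followed by the Cb and Cr planes. */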
static int yuv4mpeg_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int need_stream_header,
                         int is_yuv, int raten, int rated, int aspectn, int aspectd)
{
    int i, n, m;
    char buf[Y4M_LINE_MAX+1], buf1[20];
    UINT8 *ptr, *ptr1, *ptr2;

    /* construct stream header, if this is the first frame */
    if (need_stream_header) {
        n = snprintf(buf, sizeof(buf), "%s W%d H%d F%d:%d I%s A%d:%d\n",
                     Y4M_MAGIC,
                     width,
                     height,
                     raten, rated,
                     "p", /* ffmpeg seems to only output progressive video */
                     aspectn, aspectd);
        if (n < 0) {
            fprintf(stderr, "Error. YUV4MPEG stream header write failed.\n");
        } else {
            fprintf(stderr, "YUV4MPEG stream header written. FPS is %d\n", raten);
            put_buffer(pb, buf, strlen(buf));
        }
    }

    /* construct frame header */
    m = snprintf(buf1, sizeof(buf1), "%s \n", Y4M_FRAME_MAGIC);
    if (m < 0) {
        fprintf(stderr, "Error. YUV4MPEG frame header write failed.\n");
    } else {
        /* fprintf(stderr, "YUV4MPEG frame header written.\n"); */
        put_buffer(pb, buf1, strlen(buf1));
    }

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width);
        ptr += picture->linesize[0];
    }
    if (is_yuv) {
        height >>= 1;
        width >>= 1;
        ptr1 = picture->data[1];
        ptr2 = picture->data[2];
        for(i=0;i<height;i++) { /* Cb */
            put_buffer(pb, ptr1, width);
            ptr1 += picture->linesize[1];
        }
        for(i=0;i<height;i++) { /* Cr */
            put_buffer(pb, ptr2, width);
            ptr2 += picture->linesize[2];
        }
    }
    put_flush_packet(pb);
    return 0;
}

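/* Set up the muxer state: remember the filename pattern and select the output
 * image format from the chosen AVOutputFormat. */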
static int img_write_header(AVFormatContext *s)
{
    VideoData *img = s->priv_data;

    img->img_number = 1;
    strcpy(img->path, s->filename);

    /* find format */
    if (s->oformat->flags & AVFMT_NOFILE)
        img->is_pipe = 0;
    else
        img->is_pipe = 1;

    if (s->oformat == &pgmyuvpipe_oformat ||
        s->oformat == &pgmyuv_oformat) {
        img->img_fmt = IMGFMT_PGMYUV;
    } else if (s->oformat == &pgmpipe_oformat ||
               s->oformat == &pgm_oformat) {
        img->img_fmt = IMGFMT_PGM;
    } else if (s->oformat == &imgyuv_oformat) {
        img->img_fmt = IMGFMT_YUV;
    } else if (s->oformat == &ppmpipe_oformat ||
               s->oformat == &ppm_oformat) {
        img->img_fmt = IMGFMT_PPM;
    } else if (s->oformat == &yuv4mpegpipe_oformat) {
        img->img_fmt = IMGFMT_YUV4MPEG;
        img->header_written = 0;
    } else {
        goto fail;
    }
    return 0;

 fail:
    av_free(img);
    return -EIO;
}

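/* Write one raw video frame as an image. The frame rate is converted to a
 * ratio for the YUV4MPEG stream header, the buffer is wrapped into an
 * AVPicture, and the frame is passed to the selected format's writer. */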
static int img_write_packet(AVFormatContext *s, int stream_index,
                            UINT8 *buf, int size, int force_pts)
{
    VideoData *img = s->priv_data;
    AVStream *st = s->streams[stream_index];
    ByteIOContext pb1, *pb;
    AVPicture picture;
    int width, height, need_stream_header, ret, size1, raten, rated, aspectn, aspectd, fps, fps1;
    char filename[1024];

    width = st->codec.width;
    height = st->codec.height;

    if (img->img_number == 1) {
        need_stream_header = 1;
    } else {
        need_stream_header = 0;
    }

    fps = st->codec.frame_rate;
    fps1 = (((float)fps / FRAME_RATE_BASE) * 1000);

    /* Sorry about this messy code, but mpeg2enc is very picky about
     * the framerates it accepts. */
    switch(fps1) {
    case 23976:
        raten = 24000; /* turn the framerate into a ratio */
        rated = 1001;
        break;
    case 29970:
        raten = 30000;
        rated = 1001;
        break;
    case 25000:
        raten = 25;
        rated = 1;
        break;
    case 30000:
        raten = 30;
        rated = 1;
        break;
    case 24000:
        raten = 24;
        rated = 1;
        break;
    case 50000:
        raten = 50;
        rated = 1;
        break;
    case 59940:
        raten = 60000;
        rated = 1001;
        break;
    case 60000:
        raten = 60;
        rated = 1;
        break;
    default:
        raten = fps1; /* this setting should work, but often doesn't */
        rated = 1000;
        break;
    }

    aspectn = 1;
    aspectd = 1; /* ffmpeg always uses a 1:1 aspect ratio */

    switch(st->codec.pix_fmt) {
    case PIX_FMT_YUV420P:
        size1 = (width * height * 3) / 2;
        if (size != size1)
            return -EIO;

        picture.data[0] = buf;
        picture.data[1] = picture.data[0] + width * height;
        picture.data[2] = picture.data[1] + (width * height) / 4;
        picture.linesize[0] = width;
        picture.linesize[1] = width >> 1;
        picture.linesize[2] = width >> 1;
        break;
    case PIX_FMT_RGB24:
        size1 = (width * height * 3);
        if (size != size1)
            return -EIO;

        picture.data[0] = buf;
        picture.linesize[0] = width * 3;
        break;
    default:
        return -EIO;
    }

    /*
      This if-statement destroys pipes - I do not see why it is necessary
    if (get_frame_filename(filename, sizeof(filename),
                           img->path, img->img_number) < 0)
        return -EIO;
    */
    get_frame_filename(filename, sizeof(filename),
                       img->path, img->img_number);

    if (!img->is_pipe) {
        pb = &pb1;
        if (url_fopen(pb, filename, URL_WRONLY) < 0)
            return -EIO;
    } else {
        pb = &s->pb;
    }

    switch(img->img_fmt) {
    case IMGFMT_PGMYUV:
        ret = pgm_save(&picture, width, height, pb, 1);
        break;
    case IMGFMT_PGM:
        ret = pgm_save(&picture, width, height, pb, 0);
        break;
    case IMGFMT_YUV:
        ret = yuv_save(&picture, width, height, filename);
        break;
    case IMGFMT_PPM:
        ret = ppm_save(&picture, width, height, pb);
        break;
    case IMGFMT_YUV4MPEG:
        ret = yuv4mpeg_save(&picture, width, height, pb,
                            need_stream_header, 1, raten, rated, aspectn, aspectd);
        break;
    }
    if (!img->is_pipe) {
        url_fclose(pb);
    }

    img->img_number++;
    return 0;
}

static int img_write_trailer(AVFormatContext *s)
{
    return 0;
}

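/******************************************************/
/* input and output format descriptors */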
static AVInputFormat pgm_iformat = {
    "pgm",
    "pgm image format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
    .extensions = "pgm",
};

static AVOutputFormat pgm_oformat = {
    "pgm",
    "pgm image format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat pgmyuv_iformat = {
    "pgmyuv",
    "pgm with YUV content image format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVOutputFormat pgmyuv_oformat = {
    "pgmyuv",
    "pgm with YUV content image format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat ppm_iformat = {
    "ppm",
    "ppm image format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
    .extensions = "ppm",
};

static AVOutputFormat ppm_oformat = {
    "ppm",
    "ppm image format",
    "",
    "ppm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
};

static AVInputFormat imgyuv_iformat = {
    ".Y.U.V",
    ".Y.U.V format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
    .extensions = "Y",
};

static AVOutputFormat imgyuv_oformat = {
    ".Y.U.V",
    ".Y.U.V format",
    "",
    "Y",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat pgmpipe_iformat = {
    "pgmpipe",
    "PGM pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
};

static AVOutputFormat pgmpipe_oformat = {
    "pgmpipe",
    "PGM pipe format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

static AVInputFormat pgmyuvpipe_iformat = {
    "pgmyuvpipe",
    "PGM YUV pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
};

static AVOutputFormat pgmyuvpipe_oformat = {
    "pgmyuvpipe",
    "PGM YUV pipe format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

static AVInputFormat ppmpipe_iformat = {
    "ppmpipe",
    "PPM pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    .flags = AVFMT_RGB24,
};

static AVOutputFormat ppmpipe_oformat = {
    "ppmpipe",
    "PPM pipe format",
    "",
    "ppm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    .flags = AVFMT_RGB24,
};

static AVOutputFormat yuv4mpegpipe_oformat = {
    "yuv4mpegpipe",
    "YUV4MPEG pipe format",
    "",
    "yuv4mpeg",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

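/* Register all the image input and output formats defined above. */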
int img_init(void)
{
    av_register_input_format(&pgm_iformat);
    av_register_output_format(&pgm_oformat);

    av_register_input_format(&pgmyuv_iformat);
    av_register_output_format(&pgmyuv_oformat);

    av_register_input_format(&ppm_iformat);
    av_register_output_format(&ppm_oformat);

    av_register_input_format(&imgyuv_iformat);
    av_register_output_format(&imgyuv_oformat);

    av_register_input_format(&pgmpipe_iformat);
    av_register_output_format(&pgmpipe_oformat);

    av_register_input_format(&pgmyuvpipe_iformat);
    av_register_output_format(&pgmyuvpipe_oformat);

    av_register_input_format(&ppmpipe_iformat);
    av_register_output_format(&ppmpipe_oformat);

    av_register_output_format(&yuv4mpegpipe_oformat);
    return 0;
}