/*
 * Image format
 * Copyright (c) 2000, 2001, 2002 Fabrice Bellard.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#include <unistd.h>
#include "avformat.h"

extern AVInputFormat pgm_iformat;
extern AVOutputFormat pgm_oformat;
extern AVInputFormat pgmyuv_iformat;
extern AVOutputFormat pgmyuv_oformat;
extern AVInputFormat ppm_iformat;
extern AVOutputFormat ppm_oformat;
extern AVInputFormat imgyuv_iformat;
extern AVOutputFormat imgyuv_oformat;
extern AVInputFormat pgmpipe_iformat;
extern AVOutputFormat pgmpipe_oformat;
extern AVInputFormat pgmyuvpipe_iformat;
extern AVOutputFormat pgmyuvpipe_oformat;
extern AVInputFormat ppmpipe_iformat;
extern AVOutputFormat ppmpipe_oformat;
extern AVOutputFormat yuv4mpegpipe_oformat;

#ifdef __MINGW32__
#  include <windows.h>
#  define usleep(t) Sleep((t) / 1000)
#endif

#ifdef __BEOS__
#  ifndef usleep
#    include <OS.h>
#    define usleep(t) snooze((bigtime_t)(t))
#  endif
#endif

#define IMGFMT_YUV      1
#define IMGFMT_PGMYUV   2
#define IMGFMT_PGM      3
#define IMGFMT_PPM      4
#define IMGFMT_YUV4MPEG 5

#define Y4M_MAGIC       "YUV4MPEG2"
#define Y4M_FRAME_MAGIC "FRAME"
#define Y4M_LINE_MAX    256
typedef struct {
    int width;
    int height;
    int img_number;
    int img_size;
    int img_fmt;
    int is_pipe;
    int header_written;
    char path[1024];
} VideoData;

int emulate_frame_rate;
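
/* PNM header parsing helpers: pnm_space() tests for PNM whitespace,
   pnm_get() reads the next whitespace-delimited token, skipping '#' comments */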
static inline int pnm_space(int c)
{
    return (c == ' ' || c == '\n' || c == '\r' || c == '\t');
}

static void pnm_get(ByteIOContext *f, char *str, int buf_size)
{
    char *s;
    int c;

    do {
        c = get_byte(f);
        if (c == '#') {
            do {
                c = get_byte(f);
            } while (c != '\n');
            c = get_byte(f);
        }
    } while (pnm_space(c));

    s = str;
    do {
        if (url_feof(f))
            break;
        if ((s - str) < buf_size - 1)
            *s++ = c;
        c = get_byte(f);
    } while (!pnm_space(c));
    *s = '\0';
}
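
/* read one P5 (PGM) frame; if is_yuv is set the file also carries the
   half-resolution U and V planes below the luma, otherwise the chroma
   planes are filled with 128 (grey) */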
static int pgm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size, int is_yuv)
{
    int width, height, i;
    char buf1[32];
    UINT8 *picture[3];

    width = s->width;
    height = s->height;

    pnm_get(f, buf1, sizeof(buf1));
    if (strcmp(buf1, "P5")) {
        return -EIO;
    }
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));

    picture[0] = buf;
    picture[1] = buf + width * height;
    picture[2] = buf + width * height + (width * height / 4);
    get_buffer(f, picture[0], width * height);

    height >>= 1;
    width >>= 1;
    if (is_yuv) {
        for(i=0;i<height;i++) {
            get_buffer(f, picture[1] + i * width, width);
            get_buffer(f, picture[2] + i * width, width);
        }
    } else {
        for(i=0;i<height;i++) {
            memset(picture[1] + i * width, 128, width);
            memset(picture[2] + i * width, 128, width);
        }
    }
    return 0;
}

static int ppm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size)
{
    int width, height;
    char buf1[32];
    UINT8 *picture[3];

    width = s->width;
    height = s->height;

    pnm_get(f, buf1, sizeof(buf1));
    if (strcmp(buf1, "P6")) {
        return -EIO;
    }
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));
    pnm_get(f, buf1, sizeof(buf1));

    picture[0] = buf;
    get_buffer(f, picture[0], width * height * 3);
    return 0;
}
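
/* read a frame stored as three separate planar files: the filename's
   extension has its first letter replaced by 'Y', 'U' and 'V' in turn */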
static int yuv_read(VideoData *s, const char *filename, UINT8 *buf, int size1)
{
    ByteIOContext pb1, *pb = &pb1;
    char fname[1024], *p;
    int size;

    size = s->width * s->height;

    strcpy(fname, filename);
    p = strrchr(fname, '.');
    if (!p || p[1] != 'Y')
        return -EIO;

    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf, size);
    url_fclose(pb);

    p[1] = 'U';
    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf + size, size / 4);
    url_fclose(pb);

    p[1] = 'V';
    if (url_fopen(pb, fname, URL_RDONLY) < 0)
        return -EIO;
    get_buffer(pb, buf + size + (size / 4), size / 4);
    url_fclose(pb);
    return 0;
}
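
/* read the next image as a single raw video packet; when emulate_frame_rate
   is set, sleep so frames are delivered no faster than the stream frame rate */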
static int img_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    char filename[1024];
    int ret;
    ByteIOContext f1, *f;
    static INT64 first_frame;

    if (emulate_frame_rate) {
        if (!first_frame) {
            first_frame = av_gettime();
        } else {
            INT64 pts;
            INT64 nowus;

            nowus = av_gettime() - first_frame;
            pts = ((INT64)s->img_number * FRAME_RATE_BASE * 1000000) / (s1->streams[0]->codec.frame_rate);
            if (pts > nowus)
                usleep(pts - nowus);
        }
    }

    /*
      This if-statement destroys pipes - I do not see why it is necessary
    if (get_frame_filename(filename, sizeof(filename),
                           s->path, s->img_number) < 0)
        return -EIO;
    */
    get_frame_filename(filename, sizeof(filename),
                       s->path, s->img_number);

    if (!s->is_pipe) {
        f = &f1;
        if (url_fopen(f, filename, URL_RDONLY) < 0)
            return -EIO;
    } else {
        f = &s1->pb;
        if (url_feof(f))
            return -EIO;
    }

    av_new_packet(pkt, s->img_size);
    pkt->stream_index = 0;

    switch(s->img_fmt) {
    case IMGFMT_PGMYUV:
        ret = pgm_read(s, f, pkt->data, pkt->size, 1);
        break;
    case IMGFMT_PGM:
        ret = pgm_read(s, f, pkt->data, pkt->size, 0);
        break;
    case IMGFMT_YUV:
        ret = yuv_read(s, filename, pkt->data, pkt->size);
        break;
    case IMGFMT_PPM:
        ret = ppm_read(s, f, pkt->data, pkt->size);
        break;
    default:
        return -EIO;
    }

    if (!s->is_pipe) {
        url_fclose(f);
    }

    if (ret < 0) {
        av_free_packet(pkt);
        return -EIO; /* signal EOF */
    } else {
        pkt->pts = ((INT64)s->img_number * s1->pts_den * FRAME_RATE_BASE) / (s1->streams[0]->codec.frame_rate * s1->pts_num);
        s->img_number++;
        return 0;
    }
}
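
/* candidate frame sizes used to guess the dimensions of a raw .Y file
   from its file size alone */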
static int sizes[][2] = {
    { 640, 480 },
    { 720, 480 },
    { 720, 576 },
    { 352, 288 },
    { 352, 240 },
    { 160, 128 },
    { 512, 384 },
    { 640, 352 },
    { 640, 240 },
};

static int infer_size(int *width_ptr, int *height_ptr, int size)
{
    int i;

    for(i=0;i<sizeof(sizes)/sizeof(sizes[0]);i++) {
        if ((sizes[i][0] * sizes[i][1]) == size) {
            *width_ptr = sizes[i][0];
            *height_ptr = sizes[i][1];
            return 0;
        }
    }
    return -1;
}
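
/* open the first image, deduce the pixel format and frame size, and
   set up the single raw video stream */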
static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    int i, h;
    char buf[1024];
    char buf1[32];
    ByteIOContext pb1, *f = &pb1;
    AVStream *st;

    st = av_new_stream(s1, 0);
    if (!st) {
        av_free(s);
        return -ENOMEM;
    }

    strcpy(s->path, s1->filename);
    s->img_number = 0;

    /* find format */
    if (s1->iformat->flags & AVFMT_NOFILE)
        s->is_pipe = 0;
    else
        s->is_pipe = 1;

    if (s1->iformat == &pgmyuvpipe_iformat ||
        s1->iformat == &pgmyuv_iformat)
        s->img_fmt = IMGFMT_PGMYUV;
    else if (s1->iformat == &pgmpipe_iformat ||
             s1->iformat == &pgm_iformat)
        s->img_fmt = IMGFMT_PGM;
    else if (s1->iformat == &imgyuv_iformat)
        s->img_fmt = IMGFMT_YUV;
    else if (s1->iformat == &ppmpipe_iformat ||
             s1->iformat == &ppm_iformat)
        s->img_fmt = IMGFMT_PPM;
    else
        goto fail;

    if (!s->is_pipe) {
        /* try to find the first image */
        for(i=0;i<5;i++) {
            if (get_frame_filename(buf, sizeof(buf), s->path, s->img_number) < 0)
                goto fail;
            if (url_fopen(f, buf, URL_RDONLY) >= 0)
                break;
            s->img_number++;
        }
        if (i == 5)
            goto fail;
    } else {
        f = &s1->pb;
    }

    /* find the image size */
    /* XXX: use generic file format guessing, as mpeg */
    switch(s->img_fmt) {
    case IMGFMT_PGM:
    case IMGFMT_PGMYUV:
    case IMGFMT_PPM:
        pnm_get(f, buf1, sizeof(buf1));
        pnm_get(f, buf1, sizeof(buf1));
        s->width = atoi(buf1);
        pnm_get(f, buf1, sizeof(buf1));
        h = atoi(buf1);
        if (s->img_fmt == IMGFMT_PGMYUV)
            h = (h * 2) / 3;
        s->height = h;
        if (s->width <= 0 ||
            s->height <= 0 ||
            (s->width % 2) != 0 ||
            (s->height % 2) != 0) {
            goto fail1;
        }
        break;
    case IMGFMT_YUV:
        /* infer size by using the file size. */
        {
            int img_size;
            URLContext *h;

            /* XXX: hack hack */
            h = url_fileno(f);
            img_size = url_seek(h, 0, SEEK_END);
            if (infer_size(&s->width, &s->height, img_size) < 0) {
                goto fail1;
            }
        }
        break;
    }

    if (!s->is_pipe) {
        url_fclose(f);
    } else {
        url_fseek(f, 0, SEEK_SET);
    }

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = s->width;
    st->codec.height = s->height;
    if (s->img_fmt == IMGFMT_PPM) {
        st->codec.pix_fmt = PIX_FMT_RGB24;
        s->img_size = (s->width * s->height * 3);
    } else {
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        s->img_size = (s->width * s->height * 3) / 2;
    }
    if (!ap || !ap->frame_rate)
        st->codec.frame_rate = 25 * FRAME_RATE_BASE;
    else
        st->codec.frame_rate = ap->frame_rate;
    return 0;

 fail1:
    if (!s->is_pipe)
        url_fclose(f);
 fail:
    av_free(s);
    return -EIO;
}

static int img_read_close(AVFormatContext *s1)
{
    return 0;
}

/******************************************************/
/* image output */
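
/* write one frame as a P5 (PGM) image; with is_yuv set, the header height is
   1.5x the frame height and the half-size U and V rows are written
   interleaved below the luma plane */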
static int pgm_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int is_yuv)
{
    int i, h;
    char buf[100];
    UINT8 *ptr, *ptr1, *ptr2;

    h = height;
    if (is_yuv)
        h = (height * 3) / 2;
    snprintf(buf, sizeof(buf),
             "P5\n%d %d\n%d\n",
             width, h, 255);
    put_buffer(pb, buf, strlen(buf));

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width);
        ptr += picture->linesize[0];
    }
    if (is_yuv) {
        height >>= 1;
        width >>= 1;
        ptr1 = picture->data[1];
        ptr2 = picture->data[2];
        for(i=0;i<height;i++) {
            put_buffer(pb, ptr1, width);
            put_buffer(pb, ptr2, width);
            ptr1 += picture->linesize[1];
            ptr2 += picture->linesize[2];
        }
    }
    put_flush_packet(pb);
    return 0;
}

static int ppm_save(AVPicture *picture, int width, int height, ByteIOContext *pb)
{
    int i;
    char buf[100];
    UINT8 *ptr;

    snprintf(buf, sizeof(buf),
             "P6\n%d %d\n%d\n",
             width, height, 255);
    put_buffer(pb, buf, strlen(buf));

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width * 3);
        ptr += picture->linesize[0];
    }
    put_flush_packet(pb);
    return 0;
}
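
/* write one frame as three separate planar files, replacing the first letter
   of the filename's extension with 'Y', 'U' and 'V' */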
static int yuv_save(AVPicture *picture, int width, int height, const char *filename)
{
    ByteIOContext pb1, *pb = &pb1;
    char fname[1024], *p;
    int i, j;
    UINT8 *ptr;
    static char *ext = "YUV";

    strcpy(fname, filename);
    p = strrchr(fname, '.');
    if (!p || p[1] != 'Y')
        return -EIO;

    for(i=0;i<3;i++) {
        if (i == 1) {
            width >>= 1;
            height >>= 1;
        }
        p[1] = ext[i];
        if (url_fopen(pb, fname, URL_WRONLY) < 0)
            return -EIO;

        ptr = picture->data[i];
        for(j=0;j<height;j++) {
            put_buffer(pb, ptr, width);
            ptr += picture->linesize[i];
        }
        put_flush_packet(pb);
        url_fclose(pb);
    }
    return 0;
}
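
/* write one YUV4MPEG frame; the stream header (magic, size, frame rate,
   interlacing, aspect ratio) is emitted only before the first frame */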
static int yuv4mpeg_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int need_stream_header,
                         int is_yuv, int raten, int rated, int aspectn, int aspectd)
{
    int i, n, m;
    char buf[Y4M_LINE_MAX+1], buf1[20];
    UINT8 *ptr, *ptr1, *ptr2;

    /* construct stream header, if this is the first frame */
    if (need_stream_header) {
        n = snprintf(buf, sizeof(buf), "%s W%d H%d F%d:%d I%s A%d:%d\n",
                     Y4M_MAGIC,
                     width,
                     height,
                     raten, rated,
                     "p", /* ffmpeg seems to only output progressive video */
                     aspectn, aspectd);
        if (n < 0) {
            fprintf(stderr, "Error. YUV4MPEG stream header write failed.\n");
        } else {
            fprintf(stderr, "YUV4MPEG stream header written. FPS is %d\n", raten);
            put_buffer(pb, buf, strlen(buf));
        }
    }

    /* construct frame header */
    m = snprintf(buf1, sizeof(buf1), "%s \n", Y4M_FRAME_MAGIC);
    if (m < 0) {
        fprintf(stderr, "Error. YUV4MPEG frame header write failed.\n");
    } else {
        /* fprintf(stderr, "YUV4MPEG frame header written.\n"); */
        put_buffer(pb, buf1, strlen(buf1));
    }

    ptr = picture->data[0];
    for(i=0;i<height;i++) {
        put_buffer(pb, ptr, width);
        ptr += picture->linesize[0];
    }

    if (is_yuv) {
        height >>= 1;
        width >>= 1;
        ptr1 = picture->data[1];
        ptr2 = picture->data[2];
        for(i=0;i<height;i++) { /* Cb */
            put_buffer(pb, ptr1, width);
            ptr1 += picture->linesize[1];
        }
        for(i=0;i<height;i++) { /* Cr */
            put_buffer(pb, ptr2, width);
            ptr2 += picture->linesize[2];
        }
    }
    put_flush_packet(pb);
    return 0;
}

static int img_write_header(AVFormatContext *s)
{
    VideoData *img = s->priv_data;

    img->img_number = 1;
    strcpy(img->path, s->filename);

    /* find format */
    if (s->oformat->flags & AVFMT_NOFILE)
        img->is_pipe = 0;
    else
        img->is_pipe = 1;

    if (s->oformat == &pgmyuvpipe_oformat ||
        s->oformat == &pgmyuv_oformat) {
        img->img_fmt = IMGFMT_PGMYUV;
    } else if (s->oformat == &pgmpipe_oformat ||
               s->oformat == &pgm_oformat) {
        img->img_fmt = IMGFMT_PGM;
    } else if (s->oformat == &imgyuv_oformat) {
        img->img_fmt = IMGFMT_YUV;
    } else if (s->oformat == &ppmpipe_oformat ||
               s->oformat == &ppm_oformat) {
        img->img_fmt = IMGFMT_PPM;
    } else if (s->oformat == &yuv4mpegpipe_oformat) {
        img->img_fmt = IMGFMT_YUV4MPEG;
        img->header_written = 0;
    } else {
        goto fail;
    }
    return 0;
 fail:
    av_free(img);
    return -EIO;
}
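
/* write one frame: map the packet buffer to an AVPicture, pick a frame rate
   ratio acceptable to mpeg2enc, then dispatch to the per-format save routine */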
static int img_write_packet(AVFormatContext *s, int stream_index,
                            UINT8 *buf, int size, int force_pts)
{
    VideoData *img = s->priv_data;
    AVStream *st = s->streams[stream_index];
    ByteIOContext pb1, *pb;
    AVPicture picture;
    int width, height, need_stream_header, ret, size1, raten, rated, aspectn, aspectd, fps, fps1;
    char filename[1024];

    width = st->codec.width;
    height = st->codec.height;

    if (img->img_number == 1) {
        need_stream_header = 1;
    } else {
        need_stream_header = 0;
    }

    fps = st->codec.frame_rate;
    fps1 = (((float)fps / FRAME_RATE_BASE) * 1000);

    /* Sorry about this messy code, but mpeg2enc is very picky about
     * the framerates it accepts. */
    switch(fps1) {
    case 23976:
        raten = 24000; /* turn the framerate into a ratio */
        rated = 1001;
        break;
    case 29970:
        raten = 30000;
        rated = 1001;
        break;
    case 25000:
        raten = 25;
        rated = 1;
        break;
    case 30000:
        raten = 30;
        rated = 1;
        break;
    case 24000:
        raten = 24;
        rated = 1;
        break;
    case 50000:
        raten = 50;
        rated = 1;
        break;
    case 59940:
        raten = 60000;
        rated = 1001;
        break;
    case 60000:
        raten = 60;
        rated = 1;
        break;
    default:
        raten = fps1; /* this setting should work, but often doesn't */
        rated = 1000;
        break;
    }

    aspectn = 1;
    aspectd = 1; /* ffmpeg always uses a 1:1 aspect ratio */

    switch(st->codec.pix_fmt) {
    case PIX_FMT_YUV420P:
        size1 = (width * height * 3) / 2;
        if (size != size1)
            return -EIO;

        picture.data[0] = buf;
        picture.data[1] = picture.data[0] + width * height;
        picture.data[2] = picture.data[1] + (width * height) / 4;
        picture.linesize[0] = width;
        picture.linesize[1] = width >> 1;
        picture.linesize[2] = width >> 1;
        break;
    case PIX_FMT_RGB24:
        size1 = (width * height * 3);
        if (size != size1)
            return -EIO;

        picture.data[0] = buf;
        picture.linesize[0] = width * 3;
        break;
    default:
        return -EIO;
    }

    /*
      This if-statement destroys pipes - I do not see why it is necessary
    if (get_frame_filename(filename, sizeof(filename),
                           img->path, img->img_number) < 0)
        return -EIO;
    */
    get_frame_filename(filename, sizeof(filename),
                       img->path, img->img_number);

    if (!img->is_pipe) {
        pb = &pb1;
        if (url_fopen(pb, filename, URL_WRONLY) < 0)
            return -EIO;
    } else {
        pb = &s->pb;
    }

    switch(img->img_fmt) {
    case IMGFMT_PGMYUV:
        ret = pgm_save(&picture, width, height, pb, 1);
        break;
    case IMGFMT_PGM:
        ret = pgm_save(&picture, width, height, pb, 0);
        break;
    case IMGFMT_YUV:
        ret = yuv_save(&picture, width, height, filename);
        break;
    case IMGFMT_PPM:
        ret = ppm_save(&picture, width, height, pb);
        break;
    case IMGFMT_YUV4MPEG:
        ret = yuv4mpeg_save(&picture, width, height, pb,
                            need_stream_header, 1, raten, rated, aspectn, aspectd);
        break;
    }
    if (!img->is_pipe) {
        url_fclose(pb);
    }

    img->img_number++;
    return 0;
}

static int img_write_trailer(AVFormatContext *s)
{
    return 0;
}

static AVInputFormat pgm_iformat = {
    "pgm",
    "pgm image format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
    .extensions = "pgm",
};

static AVOutputFormat pgm_oformat = {
    "pgm",
    "pgm image format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat pgmyuv_iformat = {
    "pgmyuv",
    "pgm with YUV content image format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVOutputFormat pgmyuv_oformat = {
    "pgmyuv",
    "pgm with YUV content image format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat ppm_iformat = {
    "ppm",
    "ppm image format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
    .extensions = "ppm",
};

static AVOutputFormat ppm_oformat = {
    "ppm",
    "ppm image format",
    "",
    "ppm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
};

static AVInputFormat imgyuv_iformat = {
    ".Y.U.V",
    ".Y.U.V format",
    sizeof(VideoData),
    NULL,
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
    .extensions = "Y",
};

static AVOutputFormat imgyuv_oformat = {
    ".Y.U.V",
    ".Y.U.V format",
    "",
    "Y",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};

static AVInputFormat pgmpipe_iformat = {
    "pgmpipe",
    "PGM pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
};

static AVOutputFormat pgmpipe_oformat = {
    "pgmpipe",
    "PGM pipe format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

static AVInputFormat pgmyuvpipe_iformat = {
    "pgmyuvpipe",
    "PGM YUV pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
};

static AVOutputFormat pgmyuvpipe_oformat = {
    "pgmyuvpipe",
    "PGM YUV pipe format",
    "",
    "pgm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

static AVInputFormat ppmpipe_iformat = {
    "ppmpipe",
    "PPM pipe format",
    sizeof(VideoData),
    NULL, /* no probe */
    img_read_header,
    img_read_packet,
    img_read_close,
    NULL,
    .flags = AVFMT_RGB24,
};

static AVOutputFormat ppmpipe_oformat = {
    "ppmpipe",
    "PPM pipe format",
    "",
    "ppm",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
    .flags = AVFMT_RGB24,
};

static AVOutputFormat yuv4mpegpipe_oformat = {
    "yuv4mpegpipe",
    "YUV4MPEG pipe format",
    "",
    "yuv4mpeg",
    sizeof(VideoData),
    CODEC_ID_NONE,
    CODEC_ID_RAWVIDEO,
    img_write_header,
    img_write_packet,
    img_write_trailer,
};

int img_init(void)
{
    av_register_input_format(&pgm_iformat);
    av_register_output_format(&pgm_oformat);

    av_register_input_format(&pgmyuv_iformat);
    av_register_output_format(&pgmyuv_oformat);

    av_register_input_format(&ppm_iformat);
    av_register_output_format(&ppm_oformat);

    av_register_input_format(&imgyuv_iformat);
    av_register_output_format(&imgyuv_oformat);

    av_register_input_format(&pgmpipe_iformat);
    av_register_output_format(&pgmpipe_oformat);

    av_register_input_format(&pgmyuvpipe_iformat);
    av_register_output_format(&pgmyuvpipe_oformat);

    av_register_input_format(&ppmpipe_iformat);
    av_register_output_format(&ppmpipe_oformat);

    av_register_output_format(&yuv4mpegpipe_oformat);
    return 0;
}