/*
 * PNM image format
 * Copyright (c) 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "bytestream.h"
#include "pnm.h"
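
/* Shared init for all PNM-family codecs: reset the AVFrame embedded in the
 * private PNMContext and publish it as the codec's coded_frame. */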
static av_cold int common_init(AVCodecContext *avctx)
{
    PNMContext *s = avctx->priv_data;

    avcodec_get_frame_defaults((AVFrame*)&s->picture);
    avctx->coded_frame = (AVFrame*)&s->picture;

    return 0;
}
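
/* Decode a single PBM/PGM/PPM/PAM image: parse the ASCII header with
 * ff_pnm_decode_header(), get a buffer for the frame, then copy (and, for
 * small maxval, rescale) the raw samples. Returns the number of bytes
 * consumed from the input. */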
static int pnm_decode_frame(AVCodecContext *avctx,
                            void *data, int *data_size,
                            const uint8_t *buf, int buf_size)
{
    PNMContext * const s = avctx->priv_data;
    AVFrame *picture     = data;
    AVFrame * const p    = (AVFrame*)&s->picture;
    int i, n, linesize, h, upgrade = 0;
    unsigned char *ptr;

    s->bytestream_start =
    s->bytestream       = buf;
    s->bytestream_end   = buf + buf_size;

    if (ff_pnm_decode_header(avctx, s) < 0)
        return -1;

    if (p->data[0])
        avctx->release_buffer(avctx, p);

    p->reference = 0;
    if (avctx->get_buffer(avctx, p) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    p->pict_type = FF_I_TYPE;
    p->key_frame = 1;

    switch (avctx->pix_fmt) {
    default:
        return -1;
    case PIX_FMT_RGB48BE:
        n = avctx->width * 6;
        goto do_read;
    case PIX_FMT_RGB24:
        n = avctx->width * 3;
        goto do_read;
    case PIX_FMT_GRAY8:
        n = avctx->width;
        if (s->maxval < 255)
            upgrade = 1;
        goto do_read;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
        n = avctx->width * 2;
        if (s->maxval < 65535)
            upgrade = 2;
        goto do_read;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        n = (avctx->width + 7) >> 3;
    do_read:
        ptr      = p->data[0];
        linesize = p->linesize[0];
        if (s->bytestream + n*avctx->height > s->bytestream_end)
            return -1;
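        /* Copy the picture one row at a time. When the source maxval is
         * smaller than the full range of the destination format, rescale
         * each sample to the full 8- or 16-bit range with a rounded
         * fixed-point multiply ("upgrade"). */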
        for (i = 0; i < avctx->height; i++) {
            if (!upgrade)
                memcpy(ptr, s->bytestream, n);
            else if (upgrade == 1) {
                unsigned int j, f = (255*128 + s->maxval/2) / s->maxval;
                for (j = 0; j < n; j++)
                    ptr[j] = (s->bytestream[j] * f + 64) >> 7;
            } else if (upgrade == 2) {
                unsigned int j, v, f = (65535*32768 + s->maxval/2) / s->maxval;
                for (j = 0; j < n/2; j++) {
                    v = be2me_16(((uint16_t *)s->bytestream)[j]);
                    ((uint16_t *)ptr)[j] = (v * f + 16384) >> 15;
                }
            }
            s->bytestream += n;
            ptr           += linesize;
        }
        break;
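    /* PGMYUV: the full-size luma plane is followed by h/2 extra rows, each
     * holding half a line of U and then half a line of V. */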
    case PIX_FMT_YUV420P:
        {
            unsigned char *ptr1, *ptr2;

            n        = avctx->width;
            ptr      = p->data[0];
            linesize = p->linesize[0];
            if (s->bytestream + n*avctx->height*3/2 > s->bytestream_end)
                return -1;
            for (i = 0; i < avctx->height; i++) {
                memcpy(ptr, s->bytestream, n);
                s->bytestream += n;
                ptr           += linesize;
            }
            ptr1 = p->data[1];
            ptr2 = p->data[2];
            n  >>= 1;
            h    = avctx->height >> 1;
            for (i = 0; i < h; i++) {
                memcpy(ptr1, s->bytestream, n);
                s->bytestream += n;
                memcpy(ptr2, s->bytestream, n);
                s->bytestream += n;
                ptr1 += p->linesize[1];
                ptr2 += p->linesize[2];
            }
        }
        break;
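    /* PAM with an alpha channel: samples arrive as R, G, B, A bytes and are
     * packed into 32-bit ARGB words for PIX_FMT_RGB32. */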
    case PIX_FMT_RGB32:
        ptr      = p->data[0];
        linesize = p->linesize[0];
        if (s->bytestream + avctx->width*avctx->height*4 > s->bytestream_end)
            return -1;
        for (i = 0; i < avctx->height; i++) {
            int j, r, g, b, a;

            for (j = 0; j < avctx->width; j++) {
                r = *s->bytestream++;
                g = *s->bytestream++;
                b = *s->bytestream++;
                a = *s->bytestream++;
                ((uint32_t *)ptr)[j] = (a << 24) | (r << 16) | (g << 8) | b;
            }
            ptr += linesize;
        }
        break;
    }
    *picture   = *(AVFrame*)&s->picture;
    *data_size = sizeof(AVPicture);

    return s->bytestream - s->bytestream_start;
}
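
/* Encode one frame as a binary ("raw") PBM/PGM/PPM or PGMYUV image: an ASCII
 * header followed by the raw rows copied from the picture planes. */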
static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data)
{
    PNMContext *s     = avctx->priv_data;
    AVFrame *pict     = data;
    AVFrame * const p = (AVFrame*)&s->picture;
    int i, h, h1, c, n, linesize;
    uint8_t *ptr, *ptr1, *ptr2;

    if (buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200) {
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p           = *pict;
    p->pict_type = FF_I_TYPE;
    p->key_frame = 1;

    s->bytestream_start =
    s->bytestream       = outbuf;
    s->bytestream_end   = outbuf + buf_size;

    h  = avctx->height;
    h1 = h;
    switch (avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        c  = '4';
        n  = (avctx->width + 7) >> 3;
        break;
    case PIX_FMT_GRAY8:
        c  = '5';
        n  = avctx->width;
        break;
    case PIX_FMT_GRAY16BE:
        c  = '5';
        n  = avctx->width * 2;
        break;
    case PIX_FMT_RGB24:
        c  = '6';
        n  = avctx->width * 3;
        break;
    case PIX_FMT_RGB48BE:
        c  = '6';
        n  = avctx->width * 6;
        break;
    case PIX_FMT_YUV420P:
        c  = '5';
        n  = avctx->width;
        h1 = (h * 3) / 2;
        break;
    default:
        return -1;
    }
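    /* ASCII header: magic number and dimensions, plus the maximum sample
     * value for everything except the bitmap format. */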
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P%c\n%d %d\n", c, avctx->width, h1);
    s->bytestream += strlen(s->bytestream);
    if (avctx->pix_fmt != PIX_FMT_MONOWHITE) {
        snprintf(s->bytestream, s->bytestream_end - s->bytestream,
                 "%d\n", (avctx->pix_fmt != PIX_FMT_GRAY16BE && avctx->pix_fmt != PIX_FMT_RGB48BE) ? 255 : 65535);
        s->bytestream += strlen(s->bytestream);
    }

    ptr      = p->data[0];
    linesize = p->linesize[0];
    for (i = 0; i < h; i++) {
        memcpy(s->bytestream, ptr, n);
        s->bytestream += n;
        ptr           += linesize;
    }

    if (avctx->pix_fmt == PIX_FMT_YUV420P) {
        h >>= 1;
        n >>= 1;
        ptr1 = p->data[1];
        ptr2 = p->data[2];
        for (i = 0; i < h; i++) {
            memcpy(s->bytestream, ptr1, n);
            s->bytestream += n;
            memcpy(s->bytestream, ptr2, n);
            s->bytestream += n;
            ptr1 += p->linesize[1];
            ptr2 += p->linesize[2];
        }
    }
    return s->bytestream - s->bytestream_start;
}
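
/* Encode one frame as a PAM (P7) image: header fields up to ENDHDR, then the
 * raw rows; PIX_FMT_RGB32 input is unpacked from ARGB words back into
 * R, G, B, A byte order. */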
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data)
{
    PNMContext *s     = avctx->priv_data;
    AVFrame *pict     = data;
    AVFrame * const p = (AVFrame*)&s->picture;
    int i, h, w, n, linesize, depth, maxval;
    const char *tuple_type;
    uint8_t *ptr;

    if (buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200) {
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p           = *pict;
    p->pict_type = FF_I_TYPE;
    p->key_frame = 1;

    s->bytestream_start =
    s->bytestream       = outbuf;
    s->bytestream_end   = outbuf + buf_size;

    h = avctx->height;
    w = avctx->width;
    switch (avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        n          = (w + 7) >> 3;
        depth      = 1;
        maxval     = 1;
        tuple_type = "BLACKANDWHITE";
        break;
    case PIX_FMT_GRAY8:
        n          = w;
        depth      = 1;
        maxval     = 255;
        tuple_type = "GRAYSCALE";
        break;
    case PIX_FMT_RGB24:
        n          = w * 3;
        depth      = 3;
        maxval     = 255;
        tuple_type = "RGB";
        break;
    case PIX_FMT_RGB32:
        n          = w * 4;
        depth      = 4;
        maxval     = 255;
        tuple_type = "RGB_ALPHA";
        break;
    default:
        return -1;
    }
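    /* PAM header, terminated by ENDHDR and followed directly by the binary
     * sample data. */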
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\nTUPLETYPE %s\nENDHDR\n",
             w, h, depth, maxval, tuple_type);
    s->bytestream += strlen(s->bytestream);

    ptr      = p->data[0];
    linesize = p->linesize[0];

    if (avctx->pix_fmt == PIX_FMT_RGB32) {
        int j;
        unsigned int v;

        for (i = 0; i < h; i++) {
            for (j = 0; j < w; j++) {
                v = ((uint32_t *)ptr)[j];
                bytestream_put_be24(&s->bytestream, v);
                *s->bytestream++ = v >> 24;
            }
            ptr += linesize;
        }
    } else {
        for (i = 0; i < h; i++) {
            memcpy(s->bytestream, ptr, n);
            s->bytestream += n;
            ptr           += linesize;
        }
    }
    return s->bytestream - s->bytestream_start;
}
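
/* Old format-probing helpers, kept in the file but compiled out. */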
#if 0
static int pnm_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] >= '4' && p[1] <= '6' &&
        pnm_space(p[2]))
        return AVPROBE_SCORE_MAX - 1; /* to permit pgmyuv probe */
    else
        return 0;
}

static int pgmyuv_probe(AVProbeData *pd)
{
    if (match_ext(pd->filename, "pgmyuv"))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}

static int pam_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] == '7' &&
        p[2] == '\n')
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
#endif
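
/* Codec table entries. The decoders set only the init and decode callbacks;
 * the encoders set init and encode. */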
#if CONFIG_PGM_DECODER
AVCodec pgm_decoder = {
    "pgm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGM,
    sizeof(PNMContext),
    common_init,
    NULL,
    NULL,
    pnm_decode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif

#if CONFIG_PGM_ENCODER
AVCodec pgm_encoder = {
    "pgm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif // CONFIG_PGM_ENCODER

#if CONFIG_PGMYUV_DECODER
AVCodec pgmyuv_decoder = {
    "pgmyuv",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGMYUV,
    sizeof(PNMContext),
    common_init,
    NULL,
    NULL,
    pnm_decode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif

#if CONFIG_PGMYUV_ENCODER
AVCodec pgmyuv_encoder = {
    "pgmyuv",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGMYUV,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif // CONFIG_PGMYUV_ENCODER
#if CONFIG_PPM_DECODER
AVCodec ppm_decoder = {
    "ppm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PPM,
    sizeof(PNMContext),
    common_init,
    NULL,
    NULL,
    pnm_decode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif

#if CONFIG_PPM_ENCODER
AVCodec ppm_encoder = {
    "ppm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PPM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif // CONFIG_PPM_ENCODER

#if CONFIG_PBM_DECODER
AVCodec pbm_decoder = {
    "pbm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PBM,
    sizeof(PNMContext),
    common_init,
    NULL,
    NULL,
    pnm_decode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif

#if CONFIG_PBM_ENCODER
AVCodec pbm_encoder = {
    "pbm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PBM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif // CONFIG_PBM_ENCODER
#if CONFIG_PAM_DECODER
AVCodec pam_decoder = {
    "pam",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PAM,
    sizeof(PNMContext),
    common_init,
    NULL,
    NULL,
    pnm_decode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif

#if CONFIG_PAM_ENCODER
AVCodec pam_encoder = {
    "pam",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PAM,
    sizeof(PNMContext),
    common_init,
    pam_encode_frame,
    .pix_fmts  = (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name = NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif // CONFIG_PAM_ENCODER