/*
 * PNM image format
 * Copyright (c) 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "bytestream.h"
#include "pnm.h"

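/* Shared encoder/decoder init: expose the AVFrame embedded in PNMContext as coded_frame. */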
static av_cold int common_init(AVCodecContext *avctx){
    PNMContext *s = avctx->priv_data;

    avcodec_get_frame_defaults((AVFrame*)&s->picture);
    avctx->coded_frame= (AVFrame*)&s->picture;

    return 0;
}

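/* Decode one PNM-family frame: ff_pnm_decode_header() parses the text header and sets
 * avctx->pix_fmt, then the raster is copied (and rescaled if maxval is below full range)
 * into the output picture. Returns the number of bytes consumed, or -1 on error. */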
static int pnm_decode_frame(AVCodecContext *avctx,
                            void *data, int *data_size,
                            const uint8_t *buf, int buf_size)
{
    PNMContext * const s = avctx->priv_data;
    AVFrame *picture = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, n, linesize, h, upgrade = 0;
    unsigned char *ptr;

    s->bytestream_start=
    s->bytestream= buf;
    s->bytestream_end= buf + buf_size;

    if(ff_pnm_decode_header(avctx, s) < 0)
        return -1;

    if(p->data[0])
        avctx->release_buffer(avctx, p);

    p->reference= 0;
    if(avctx->get_buffer(avctx, p) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    /* n is the number of raster bytes per input line; upgrade selects sample
     * rescaling when the file's maxval is below the full 8- or 16-bit range */
    switch(avctx->pix_fmt) {
    default:
        return -1;
    case PIX_FMT_RGB24:
        n = avctx->width * 3;
        goto do_read;
    case PIX_FMT_GRAY8:
        n = avctx->width;
        if (s->maxval < 255)
            upgrade = 1;
        goto do_read;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
        n = avctx->width * 2;
        if (s->maxval < 65535)
            upgrade = 2;
        goto do_read;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        n = (avctx->width + 7) >> 3;
    do_read:
        ptr = p->data[0];
        linesize = p->linesize[0];
        if(s->bytestream + n*avctx->height > s->bytestream_end)
            return -1;
        for(i = 0; i < avctx->height; i++) {
            if (!upgrade)
                memcpy(ptr, s->bytestream, n);
            else if (upgrade == 1) {
                /* rescale 8-bit samples from [0, maxval] to [0, 255] with a
                 * rounded fixed-point factor f = 255*128/maxval */
                unsigned int j, f = (255*128 + s->maxval/2) / s->maxval;
                for (j=0; j<n; j++)
                    ptr[j] = (s->bytestream[j] * f + 64) >> 7;
            } else if (upgrade == 2) {
                /* same idea for big-endian 16-bit samples, expanded to [0, 65535] */
                unsigned int j, v, f = (65535*32768 + s->maxval/2) / s->maxval;
                for (j=0; j<n/2; j++) {
                    v = be2me_16(((uint16_t *)s->bytestream)[j]);
                    ((uint16_t *)ptr)[j] = (v * f + 16384) >> 15;
                }
            }
            s->bytestream += n;
            ptr += linesize;
        }
        break;
    /* PGMYUV: a full-resolution luma plane followed by two half-resolution chroma planes */
    case PIX_FMT_YUV420P:
        {
            unsigned char *ptr1, *ptr2;

            n = avctx->width;
            ptr = p->data[0];
            linesize = p->linesize[0];
            if(s->bytestream + n*avctx->height*3/2 > s->bytestream_end)
                return -1;
            for(i = 0; i < avctx->height; i++) {
                memcpy(ptr, s->bytestream, n);
                s->bytestream += n;
                ptr += linesize;
            }
            ptr1 = p->data[1];
            ptr2 = p->data[2];
            n >>= 1;
            h = avctx->height >> 1;
            for(i = 0; i < h; i++) {
                memcpy(ptr1, s->bytestream, n);
                s->bytestream += n;
                memcpy(ptr2, s->bytestream, n);
                s->bytestream += n;
                ptr1 += p->linesize[1];
                ptr2 += p->linesize[2];
            }
        }
        break;
    /* combine each R, G, B, A input byte quadruplet into one 32-bit pixel
     * word with alpha in the most significant byte */
    case PIX_FMT_RGB32:
        ptr = p->data[0];
        linesize = p->linesize[0];
        if(s->bytestream + avctx->width*avctx->height*4 > s->bytestream_end)
            return -1;
        for(i = 0; i < avctx->height; i++) {
            int j, r, g, b, a;

            for(j = 0;j < avctx->width; j++) {
                r = *s->bytestream++;
                g = *s->bytestream++;
                b = *s->bytestream++;
                a = *s->bytestream++;
                ((uint32_t *)ptr)[j] = (a << 24) | (r << 16) | (g << 8) | b;
            }
            ptr += linesize;
        }
        break;
    }
    *picture= *(AVFrame*)&s->picture;
    *data_size = sizeof(AVPicture);

    return s->bytestream - s->bytestream_start;
}

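/* Encode one frame as binary PBM (P4), PGM (P5), or PPM (P6). YUV420P input is
 * written as PGMYUV: a P5 header with 1.5x the image height, luma plane first,
 * then the two half-size chroma planes. Returns the number of bytes written. */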
static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
    PNMContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, h, h1, c, n, linesize;
    uint8_t *ptr, *ptr1, *ptr2;

    if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p = *pict;
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    s->bytestream_start=
    s->bytestream= outbuf;
    s->bytestream_end= outbuf+buf_size;

    h = avctx->height;
    h1 = h;
    /* pick the PNM magic number and the number of raster bytes per input line */
    switch(avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        c = '4';
        n = (avctx->width + 7) >> 3;
        break;
    case PIX_FMT_GRAY8:
        c = '5';
        n = avctx->width;
        break;
    case PIX_FMT_GRAY16BE:
        c = '5';
        n = avctx->width * 2;
        break;
    case PIX_FMT_RGB24:
        c = '6';
        n = avctx->width * 3;
        break;
    case PIX_FMT_YUV420P:
        c = '5';
        n = avctx->width;
        h1 = (h * 3) / 2;
        break;
    default:
        return -1;
    }
    /* write the text header: magic number, dimensions, and (except for PBM) the maxval line */
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P%c\n%d %d\n",
             c, avctx->width, h1);
    s->bytestream += strlen(s->bytestream);
    if (avctx->pix_fmt != PIX_FMT_MONOWHITE) {
        snprintf(s->bytestream, s->bytestream_end - s->bytestream,
                 "%d\n", (avctx->pix_fmt != PIX_FMT_GRAY16BE) ? 255 : 65535);
        s->bytestream += strlen(s->bytestream);
    }

    ptr = p->data[0];
    linesize = p->linesize[0];
    for(i=0;i<h;i++) {
        memcpy(s->bytestream, ptr, n);
        s->bytestream += n;
        ptr += linesize;
    }

    if (avctx->pix_fmt == PIX_FMT_YUV420P) {
        /* PGMYUV: append the two half-size chroma planes below the luma plane */
        h >>= 1;
        n >>= 1;
        ptr1 = p->data[1];
        ptr2 = p->data[2];
        for(i=0;i<h;i++) {
            memcpy(s->bytestream, ptr1, n);
            s->bytestream += n;
            memcpy(s->bytestream, ptr2, n);
            s->bytestream += n;
            ptr1 += p->linesize[1];
            ptr2 += p->linesize[2];
        }
    }
    return s->bytestream - s->bytestream_start;
}

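/* Encode one frame as PAM (P7): write the text header (WIDTH, HEIGHT, DEPTH, MAXVAL,
 * TUPLTYPE, ENDHDR), then the packed raster. Returns the number of bytes written. */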
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
    PNMContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, h, w, n, linesize, depth, maxval;
    const char *tuple_type;
    uint8_t *ptr;

    if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p = *pict;
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    s->bytestream_start=
    s->bytestream= outbuf;
    s->bytestream_end= outbuf+buf_size;

    h = avctx->height;
    w = avctx->width;
    switch(avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        n = (w + 7) >> 3;
        depth = 1;
        maxval = 1;
        tuple_type = "BLACKANDWHITE";
        break;
    case PIX_FMT_GRAY8:
        n = w;
        depth = 1;
        maxval = 255;
        tuple_type = "GRAYSCALE";
        break;
    case PIX_FMT_RGB24:
        n = w * 3;
        depth = 3;
        maxval = 255;
        tuple_type = "RGB";
        break;
    case PIX_FMT_RGB32:
        n = w * 4;
        depth = 4;
        maxval = 255;
        tuple_type = "RGB_ALPHA";
        break;
    default:
        return -1;
    }
    /* PAM text header; the tuple-type keyword is spelled TUPLTYPE in the PAM specification */
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\nTUPLTYPE %s\nENDHDR\n",
             w, h, depth, maxval, tuple_type);
    s->bytestream += strlen(s->bytestream);

    ptr = p->data[0];
    linesize = p->linesize[0];

    if (avctx->pix_fmt == PIX_FMT_RGB32) {
        /* split each 32-bit ARGB pixel back into R, G, B bytes followed by the alpha byte */
        int j;
        unsigned int v;

        for(i=0;i<h;i++) {
            for(j=0;j<w;j++) {
                v = ((uint32_t *)ptr)[j];
                bytestream_put_be24(&s->bytestream, v);
                *s->bytestream++ = v >> 24;
            }
            ptr += linesize;
        }
    } else {
        for(i=0;i<h;i++) {
            memcpy(s->bytestream, ptr, n);
            s->bytestream += n;
            ptr += linesize;
        }
    }
    return s->bytestream - s->bytestream_start;
}

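/* Format probing helpers, currently compiled out. */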
#if 0
static int pnm_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] >= '4' && p[1] <= '6' &&
        pnm_space(p[2]) )
        return AVPROBE_SCORE_MAX - 1; /* to permit pgmyuv probe */
    else
        return 0;
}

static int pgmyuv_probe(AVProbeData *pd)
{
    if (match_ext(pd->filename, "pgmyuv"))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}

static int pam_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] == '7' &&
        p[2] == '\n')
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
#endif

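/* Encoder declarations; each one is built only when the corresponding
 * CONFIG_*_ENCODER option is enabled. */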
#if CONFIG_PGM_ENCODER
AVCodec pgm_encoder = {
    "pgm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL, //encode_end,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif // CONFIG_PGM_ENCODER

#if CONFIG_PGMYUV_ENCODER
AVCodec pgmyuv_encoder = {
    "pgmyuv",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGMYUV,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL, //encode_end,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif // CONFIG_PGMYUV_ENCODER

#if CONFIG_PPM_ENCODER
AVCodec ppm_encoder = {
    "ppm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PPM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL, //encode_end,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif // CONFIG_PPM_ENCODER

#if CONFIG_PBM_ENCODER
AVCodec pbm_encoder = {
    "pbm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PBM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL, //encode_end,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif // CONFIG_PBM_ENCODER

#if CONFIG_PAM_ENCODER
AVCodec pam_encoder = {
    "pam",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PAM,
    sizeof(PNMContext),
    common_init,
    pam_encode_frame,
    NULL, //encode_end,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif // CONFIG_PAM_ENCODER