  1. /*
  2. * PNG image format
  3. * Copyright (c) 2003 Fabrice Bellard
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. //#define DEBUG
  22. #include "libavutil/avassert.h"
  23. #include "libavutil/bprint.h"
  24. #include "libavutil/crc.h"
  25. #include "libavutil/imgutils.h"
  26. #include "libavutil/intreadwrite.h"
  27. #include "libavutil/stereo3d.h"
  28. #include "libavutil/mastering_display_metadata.h"
  29. #include "avcodec.h"
  30. #include "bytestream.h"
  31. #include "internal.h"
  32. #include "apng.h"
  33. #include "png.h"
  34. #include "pngdsp.h"
  35. #include "thread.h"
  36. #include <zlib.h>
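/* Flags for header chunks that have been parsed so far. */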
  37. enum PNGHeaderState {
  38. PNG_IHDR = 1 << 0,
  39. PNG_PLTE = 1 << 1,
  40. };
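/* Flags for decoding progress on the current frame's pixel data. */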
  41. enum PNGImageState {
  42. PNG_IDAT = 1 << 0,
  43. PNG_ALLIMAGE = 1 << 1,
  44. };
  45. typedef struct PNGDecContext {
  46. PNGDSPContext dsp;
  47. AVCodecContext *avctx;
  48. GetByteContext gb;
  49. ThreadFrame last_picture;
  50. ThreadFrame picture;
  51. AVDictionary *frame_metadata;
  52. uint8_t iccp_name[82];
  53. uint8_t *iccp_data;
  54. size_t iccp_data_len;
  55. int stereo_mode;
  56. int have_chrm;
  57. uint32_t white_point[2];
  58. uint32_t display_primaries[3][2];
  59. enum PNGHeaderState hdr_state;
  60. enum PNGImageState pic_state;
  61. int width, height;
  62. int cur_w, cur_h;
  63. int last_w, last_h;
  64. int x_offset, y_offset;
  65. int last_x_offset, last_y_offset;
  66. uint8_t dispose_op, blend_op;
  67. uint8_t last_dispose_op;
  68. int bit_depth;
  69. int color_type;
  70. int compression_type;
  71. int interlace_type;
  72. int filter_type;
  73. int channels;
  74. int bits_per_pixel;
  75. int bpp;
  76. int has_trns;
  77. uint8_t transparent_color_be[6];
  78. uint8_t *background_buf;
  79. unsigned background_buf_allocated;
  80. uint32_t palette[256];
  81. uint8_t *crow_buf;
  82. uint8_t *last_row;
  83. unsigned int last_row_size;
  84. uint8_t *tmp_row;
  85. unsigned int tmp_row_size;
  86. uint8_t *buffer;
  87. int buffer_size;
  88. int pass;
  89. int crow_size; /* compressed row size (includes the filter type byte) */
  90. int row_size; /* decompressed row size */
  91. int pass_row_size; /* decompressed row size of the current pass */
  92. int y;
  93. z_stream zstream;
  94. } PNGDecContext;
  95. /* Mask to determine which pixels are valid in a pass */
  96. static const uint8_t png_pass_mask[NB_PASSES] = {
  97. 0x01, 0x01, 0x11, 0x11, 0x55, 0x55, 0xff,
  98. };
  99. /* Mask to determine which y pixels can be written in a pass */
  100. static const uint8_t png_pass_dsp_ymask[NB_PASSES] = {
  101. 0xff, 0xff, 0x0f, 0xff, 0x33, 0xff, 0x55,
  102. };
  103. /* Mask to determine which pixels to overwrite while displaying */
  104. static const uint8_t png_pass_dsp_mask[NB_PASSES] = {
  105. 0xff, 0x0f, 0xff, 0x33, 0xff, 0x55, 0xff
  106. };
  107. /* NOTE: we try to construct a good looking image at each pass. width
  108. * is the original image width. We also do pixel format conversion at
  109. * this stage */
  110. static void png_put_interlaced_row(uint8_t *dst, int width,
  111. int bits_per_pixel, int pass,
  112. int color_type, const uint8_t *src)
  113. {
  114. int x, mask, dsp_mask, j, src_x, b, bpp;
  115. uint8_t *d;
  116. const uint8_t *s;
  117. mask = png_pass_mask[pass];
  118. dsp_mask = png_pass_dsp_mask[pass];
  119. switch (bits_per_pixel) {
  120. case 1:
  121. src_x = 0;
  122. for (x = 0; x < width; x++) {
  123. j = (x & 7);
  124. if ((dsp_mask << j) & 0x80) {
  125. b = (src[src_x >> 3] >> (7 - (src_x & 7))) & 1;
  126. dst[x >> 3] &= 0xFF7F>>j;
  127. dst[x >> 3] |= b << (7 - j);
  128. }
  129. if ((mask << j) & 0x80)
  130. src_x++;
  131. }
  132. break;
  133. case 2:
  134. src_x = 0;
  135. for (x = 0; x < width; x++) {
  136. int j2 = 2 * (x & 3);
  137. j = (x & 7);
  138. if ((dsp_mask << j) & 0x80) {
  139. b = (src[src_x >> 2] >> (6 - 2*(src_x & 3))) & 3;
  140. dst[x >> 2] &= 0xFF3F>>j2;
  141. dst[x >> 2] |= b << (6 - j2);
  142. }
  143. if ((mask << j) & 0x80)
  144. src_x++;
  145. }
  146. break;
  147. case 4:
  148. src_x = 0;
  149. for (x = 0; x < width; x++) {
  150. int j2 = 4*(x&1);
  151. j = (x & 7);
  152. if ((dsp_mask << j) & 0x80) {
  153. b = (src[src_x >> 1] >> (4 - 4*(src_x & 1))) & 15;
  154. dst[x >> 1] &= 0xFF0F>>j2;
  155. dst[x >> 1] |= b << (4 - j2);
  156. }
  157. if ((mask << j) & 0x80)
  158. src_x++;
  159. }
  160. break;
  161. default:
  162. bpp = bits_per_pixel >> 3;
  163. d = dst;
  164. s = src;
  165. for (x = 0; x < width; x++) {
  166. j = x & 7;
  167. if ((dsp_mask << j) & 0x80) {
  168. memcpy(d, s, bpp);
  169. }
  170. d += bpp;
  171. if ((mask << j) & 0x80)
  172. s += bpp;
  173. }
  174. break;
  175. }
  176. }
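/* Paeth predictor: predict each byte from the left (a), above (b) and
 * upper-left (c) neighbours, picking whichever is closest to a + b - c,
 * then add the filtered byte from src. */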
  177. void ff_add_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
  178. int w, int bpp)
  179. {
  180. int i;
  181. for (i = 0; i < w; i++) {
  182. int a, b, c, p, pa, pb, pc;
  183. a = dst[i - bpp];
  184. b = top[i];
  185. c = top[i - bpp];
  186. p = b - c;
  187. pc = a - c;
  188. pa = abs(p);
  189. pb = abs(pc);
  190. pc = abs(p + pc);
  191. if (pa <= pb && pa <= pc)
  192. p = a;
  193. else if (pb <= pc)
  194. p = b;
  195. else
  196. p = c;
  197. dst[i] = p + src[i];
  198. }
  199. }
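/* Unrolled per-pixel filtering: keep the previous pixel's channels in
 * r/g/b/a so 'op' can use them without re-reading dst. */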
  200. #define UNROLL1(bpp, op) \
  201. { \
  202. r = dst[0]; \
  203. if (bpp >= 2) \
  204. g = dst[1]; \
  205. if (bpp >= 3) \
  206. b = dst[2]; \
  207. if (bpp >= 4) \
  208. a = dst[3]; \
  209. for (; i <= size - bpp; i += bpp) { \
  210. dst[i + 0] = r = op(r, src[i + 0], last[i + 0]); \
  211. if (bpp == 1) \
  212. continue; \
  213. dst[i + 1] = g = op(g, src[i + 1], last[i + 1]); \
  214. if (bpp == 2) \
  215. continue; \
  216. dst[i + 2] = b = op(b, src[i + 2], last[i + 2]); \
  217. if (bpp == 3) \
  218. continue; \
  219. dst[i + 3] = a = op(a, src[i + 3], last[i + 3]); \
  220. } \
  221. }
  222. #define UNROLL_FILTER(op) \
  223. if (bpp == 1) { \
  224. UNROLL1(1, op) \
  225. } else if (bpp == 2) { \
  226. UNROLL1(2, op) \
  227. } else if (bpp == 3) { \
  228. UNROLL1(3, op) \
  229. } else if (bpp == 4) { \
  230. UNROLL1(4, op) \
  231. } \
  232. for (; i < size; i++) { \
  233. dst[i] = op(dst[i - bpp], src[i], last[i]); \
  234. }
  235. /* NOTE: 'dst' can be equal to 'last' */
  236. void ff_png_filter_row(PNGDSPContext *dsp, uint8_t *dst, int filter_type,
  237. uint8_t *src, uint8_t *last, int size, int bpp)
  238. {
  239. int i, p, r, g, b, a;
  240. switch (filter_type) {
  241. case PNG_FILTER_VALUE_NONE:
  242. memcpy(dst, src, size);
  243. break;
  244. case PNG_FILTER_VALUE_SUB:
  245. for (i = 0; i < bpp; i++)
  246. dst[i] = src[i];
  247. if (bpp == 4) {
  248. p = *(int *)dst;
  249. for (; i < size; i += bpp) {
  250. unsigned s = *(int *)(src + i);
  251. p = ((s & 0x7f7f7f7f) + (p & 0x7f7f7f7f)) ^ ((s ^ p) & 0x80808080);
  252. *(int *)(dst + i) = p;
  253. }
  254. } else {
  255. #define OP_SUB(x, s, l) ((x) + (s))
  256. UNROLL_FILTER(OP_SUB);
  257. }
  258. break;
  259. case PNG_FILTER_VALUE_UP:
  260. dsp->add_bytes_l2(dst, src, last, size);
  261. break;
  262. case PNG_FILTER_VALUE_AVG:
  263. for (i = 0; i < bpp; i++) {
  264. p = (last[i] >> 1);
  265. dst[i] = p + src[i];
  266. }
  267. #define OP_AVG(x, s, l) (((((x) + (l)) >> 1) + (s)) & 0xff)
  268. UNROLL_FILTER(OP_AVG);
  269. break;
  270. case PNG_FILTER_VALUE_PAETH:
  271. for (i = 0; i < bpp; i++) {
  272. p = last[i];
  273. dst[i] = p + src[i];
  274. }
  275. if (bpp > 2 && size > 4) {
  276. /* would write off the end of the array if we let it process
  277. * the last pixel with bpp=3 */
  278. int w = (bpp & 3) ? size - 3 : size;
  279. if (w > i) {
  280. dsp->add_paeth_prediction(dst + i, src + i, last + i, w - i, bpp);
  281. i = w;
  282. }
  283. }
  284. ff_add_png_paeth_prediction(dst + i, src + i, last + i, size - i, bpp);
  285. break;
  286. }
  287. }
  288. /* This used to be called "deloco" in FFmpeg
  289. * and is actually an inverse reversible colorspace transformation */
  290. #define YUV2RGB(NAME, TYPE) \
  291. static void deloco_ ## NAME(TYPE *dst, int size, int alpha) \
  292. { \
  293. int i; \
  294. for (i = 0; i < size; i += 3 + alpha) { \
  295. int g = dst [i + 1]; \
  296. dst[i + 0] += g; \
  297. dst[i + 2] += g; \
  298. } \
  299. }
  300. YUV2RGB(rgb8, uint8_t)
  301. YUV2RGB(rgb16, uint16_t)
  302. static int percent_missing(PNGDecContext *s)
  303. {
  304. if (s->interlace_type) {
  305. return 100 - 100 * s->pass / (NB_PASSES - 1);
  306. } else {
  307. return 100 - 100 * s->y / s->cur_h;
  308. }
  309. }
  310. /* process exactly one decompressed row */
  311. static void png_handle_row(PNGDecContext *s, uint8_t *dst, ptrdiff_t dst_stride)
  312. {
  313. uint8_t *ptr, *last_row;
  314. int got_line;
  315. if (!s->interlace_type) {
  316. ptr = dst + dst_stride * (s->y + s->y_offset) + s->x_offset * s->bpp;
  317. if (s->y == 0)
  318. last_row = s->last_row;
  319. else
  320. last_row = ptr - dst_stride;
  321. ff_png_filter_row(&s->dsp, ptr, s->crow_buf[0], s->crow_buf + 1,
  322. last_row, s->row_size, s->bpp);
  323. /* loco lags by 1 row so that it doesn't interfere with top prediction */
  324. if (s->filter_type == PNG_FILTER_TYPE_LOCO && s->y > 0) {
  325. if (s->bit_depth == 16) {
  326. deloco_rgb16((uint16_t *)(ptr - dst_stride), s->row_size / 2,
  327. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
  328. } else {
  329. deloco_rgb8(ptr - dst_stride, s->row_size,
  330. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
  331. }
  332. }
  333. s->y++;
  334. if (s->y == s->cur_h) {
  335. s->pic_state |= PNG_ALLIMAGE;
  336. if (s->filter_type == PNG_FILTER_TYPE_LOCO) {
  337. if (s->bit_depth == 16) {
  338. deloco_rgb16((uint16_t *)ptr, s->row_size / 2,
  339. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
  340. } else {
  341. deloco_rgb8(ptr, s->row_size,
  342. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
  343. }
  344. }
  345. }
  346. } else {
  347. got_line = 0;
  348. for (;;) {
  349. ptr = dst + dst_stride * (s->y + s->y_offset) + s->x_offset * s->bpp;
  350. if ((ff_png_pass_ymask[s->pass] << (s->y & 7)) & 0x80) {
  351. /* if we already read one row, it is time to stop to
  352. * wait for the next one */
  353. if (got_line)
  354. break;
  355. ff_png_filter_row(&s->dsp, s->tmp_row, s->crow_buf[0], s->crow_buf + 1,
  356. s->last_row, s->pass_row_size, s->bpp);
  357. FFSWAP(uint8_t *, s->last_row, s->tmp_row);
  358. FFSWAP(unsigned int, s->last_row_size, s->tmp_row_size);
  359. got_line = 1;
  360. }
  361. if ((png_pass_dsp_ymask[s->pass] << (s->y & 7)) & 0x80) {
  362. png_put_interlaced_row(ptr, s->cur_w, s->bits_per_pixel, s->pass,
  363. s->color_type, s->last_row);
  364. }
  365. s->y++;
  366. if (s->y == s->cur_h) {
  367. memset(s->last_row, 0, s->row_size);
  368. for (;;) {
  369. if (s->pass == NB_PASSES - 1) {
  370. s->pic_state |= PNG_ALLIMAGE;
  371. goto the_end;
  372. } else {
  373. s->pass++;
  374. s->y = 0;
  375. s->pass_row_size = ff_png_pass_row_size(s->pass,
  376. s->bits_per_pixel,
  377. s->cur_w);
  378. s->crow_size = s->pass_row_size + 1;
  379. if (s->pass_row_size != 0)
  380. break;
  381. /* skip pass if empty row */
  382. }
  383. }
  384. }
  385. }
  386. the_end:;
  387. }
  388. }
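/* Feed the payload of one IDAT/fdAT chunk to zlib, handing each completed
 * row to png_handle_row() as it becomes available. */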
  389. static int png_decode_idat(PNGDecContext *s, int length,
  390. uint8_t *dst, ptrdiff_t dst_stride)
  391. {
  392. int ret;
  393. s->zstream.avail_in = FFMIN(length, bytestream2_get_bytes_left(&s->gb));
  394. s->zstream.next_in = s->gb.buffer;
  395. bytestream2_skip(&s->gb, length);
  396. /* decode one line if possible */
  397. while (s->zstream.avail_in > 0) {
  398. ret = inflate(&s->zstream, Z_PARTIAL_FLUSH);
  399. if (ret != Z_OK && ret != Z_STREAM_END) {
  400. av_log(s->avctx, AV_LOG_ERROR, "inflate returned error %d\n", ret);
  401. return AVERROR_EXTERNAL;
  402. }
  403. if (s->zstream.avail_out == 0) {
  404. if (!(s->pic_state & PNG_ALLIMAGE)) {
  405. png_handle_row(s, dst, dst_stride);
  406. }
  407. s->zstream.avail_out = s->crow_size;
  408. s->zstream.next_out = s->crow_buf;
  409. }
  410. if (ret == Z_STREAM_END && s->zstream.avail_in > 0) {
  411. av_log(s->avctx, AV_LOG_WARNING,
  412. "%d undecompressed bytes left in buffer\n", s->zstream.avail_in);
  413. return 0;
  414. }
  415. }
  416. return 0;
  417. }
  418. static int decode_zbuf(AVBPrint *bp, const uint8_t *data,
  419. const uint8_t *data_end)
  420. {
  421. z_stream zstream;
  422. unsigned char *buf;
  423. unsigned buf_size;
  424. int ret;
  425. zstream.zalloc = ff_png_zalloc;
  426. zstream.zfree = ff_png_zfree;
  427. zstream.opaque = NULL;
  428. if (inflateInit(&zstream) != Z_OK)
  429. return AVERROR_EXTERNAL;
  430. zstream.next_in = data;
  431. zstream.avail_in = data_end - data;
  432. av_bprint_init(bp, 0, AV_BPRINT_SIZE_UNLIMITED);
  433. while (zstream.avail_in > 0) {
  434. av_bprint_get_buffer(bp, 2, &buf, &buf_size);
  435. if (buf_size < 2) {
  436. ret = AVERROR(ENOMEM);
  437. goto fail;
  438. }
  439. zstream.next_out = buf;
  440. zstream.avail_out = buf_size - 1;
  441. ret = inflate(&zstream, Z_PARTIAL_FLUSH);
  442. if (ret != Z_OK && ret != Z_STREAM_END) {
  443. ret = AVERROR_EXTERNAL;
  444. goto fail;
  445. }
  446. bp->len += zstream.next_out - buf;
  447. if (ret == Z_STREAM_END)
  448. break;
  449. }
  450. inflateEnd(&zstream);
  451. bp->str[bp->len] = 0;
  452. return 0;
  453. fail:
  454. inflateEnd(&zstream);
  455. av_bprint_finalize(bp, NULL);
  456. return ret;
  457. }
  458. static uint8_t *iso88591_to_utf8(const uint8_t *in, size_t size_in)
  459. {
  460. size_t extra = 0, i;
  461. uint8_t *out, *q;
  462. for (i = 0; i < size_in; i++)
  463. extra += in[i] >= 0x80;
  464. if (size_in == SIZE_MAX || extra > SIZE_MAX - size_in - 1)
  465. return NULL;
  466. q = out = av_malloc(size_in + extra + 1);
  467. if (!out)
  468. return NULL;
  469. for (i = 0; i < size_in; i++) {
  470. if (in[i] >= 0x80) {
  471. *(q++) = 0xC0 | (in[i] >> 6);
  472. *(q++) = 0x80 | (in[i] & 0x3F);
  473. } else {
  474. *(q++) = in[i];
  475. }
  476. }
  477. *(q++) = 0;
  478. return out;
  479. }
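/* tEXt/zTXt: keyword and text are ISO 8859-1; convert both to UTF-8 and
 * store them as frame metadata. */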
  480. static int decode_text_chunk(PNGDecContext *s, uint32_t length, int compressed)
  481. {
  482. int ret, method;
  483. const uint8_t *data = s->gb.buffer;
  484. const uint8_t *data_end = data + length;
  485. const uint8_t *keyword = data;
  486. const uint8_t *keyword_end = memchr(keyword, 0, data_end - keyword);
  487. uint8_t *kw_utf8 = NULL, *text, *txt_utf8 = NULL;
  488. unsigned text_len;
  489. AVBPrint bp;
  490. if (!keyword_end)
  491. return AVERROR_INVALIDDATA;
  492. data = keyword_end + 1;
  493. if (compressed) {
  494. if (data == data_end)
  495. return AVERROR_INVALIDDATA;
  496. method = *(data++);
  497. if (method)
  498. return AVERROR_INVALIDDATA;
  499. if ((ret = decode_zbuf(&bp, data, data_end)) < 0)
  500. return ret;
  501. text_len = bp.len;
  502. ret = av_bprint_finalize(&bp, (char **)&text);
  503. if (ret < 0)
  504. return ret;
  505. } else {
  506. text = (uint8_t *)data;
  507. text_len = data_end - text;
  508. }
  509. kw_utf8 = iso88591_to_utf8(keyword, keyword_end - keyword);
  510. txt_utf8 = iso88591_to_utf8(text, text_len);
  511. if (text != data)
  512. av_free(text);
  513. if (!(kw_utf8 && txt_utf8)) {
  514. av_free(kw_utf8);
  515. av_free(txt_utf8);
  516. return AVERROR(ENOMEM);
  517. }
  518. av_dict_set(&s->frame_metadata, kw_utf8, txt_utf8,
  519. AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
  520. return 0;
  521. }
  522. static int decode_ihdr_chunk(AVCodecContext *avctx, PNGDecContext *s,
  523. uint32_t length)
  524. {
  525. if (length != 13)
  526. return AVERROR_INVALIDDATA;
  527. if (s->pic_state & PNG_IDAT) {
  528. av_log(avctx, AV_LOG_ERROR, "IHDR after IDAT\n");
  529. return AVERROR_INVALIDDATA;
  530. }
  531. if (s->hdr_state & PNG_IHDR) {
  532. av_log(avctx, AV_LOG_ERROR, "Multiple IHDR\n");
  533. return AVERROR_INVALIDDATA;
  534. }
  535. s->width = s->cur_w = bytestream2_get_be32(&s->gb);
  536. s->height = s->cur_h = bytestream2_get_be32(&s->gb);
  537. if (av_image_check_size(s->width, s->height, 0, avctx)) {
  538. s->cur_w = s->cur_h = s->width = s->height = 0;
  539. av_log(avctx, AV_LOG_ERROR, "Invalid image size\n");
  540. return AVERROR_INVALIDDATA;
  541. }
  542. s->bit_depth = bytestream2_get_byte(&s->gb);
  543. if (s->bit_depth != 1 && s->bit_depth != 2 && s->bit_depth != 4 &&
  544. s->bit_depth != 8 && s->bit_depth != 16) {
  545. av_log(avctx, AV_LOG_ERROR, "Invalid bit depth\n");
  546. goto error;
  547. }
  548. s->color_type = bytestream2_get_byte(&s->gb);
  549. s->compression_type = bytestream2_get_byte(&s->gb);
  550. if (s->compression_type) {
  551. av_log(avctx, AV_LOG_ERROR, "Invalid compression method %d\n", s->compression_type);
  552. goto error;
  553. }
  554. s->filter_type = bytestream2_get_byte(&s->gb);
  555. s->interlace_type = bytestream2_get_byte(&s->gb);
  556. bytestream2_skip(&s->gb, 4); /* crc */
  557. s->hdr_state |= PNG_IHDR;
  558. if (avctx->debug & FF_DEBUG_PICT_INFO)
  559. av_log(avctx, AV_LOG_DEBUG, "width=%d height=%d depth=%d color_type=%d "
  560. "compression_type=%d filter_type=%d interlace_type=%d\n",
  561. s->width, s->height, s->bit_depth, s->color_type,
  562. s->compression_type, s->filter_type, s->interlace_type);
  563. return 0;
  564. error:
  565. s->cur_w = s->cur_h = s->width = s->height = 0;
  566. s->bit_depth = 8;
  567. return AVERROR_INVALIDDATA;
  568. }
  569. static int decode_phys_chunk(AVCodecContext *avctx, PNGDecContext *s)
  570. {
  571. if (s->pic_state & PNG_IDAT) {
  572. av_log(avctx, AV_LOG_ERROR, "pHYs after IDAT\n");
  573. return AVERROR_INVALIDDATA;
  574. }
  575. avctx->sample_aspect_ratio.num = bytestream2_get_be32(&s->gb);
  576. avctx->sample_aspect_ratio.den = bytestream2_get_be32(&s->gb);
  577. if (avctx->sample_aspect_ratio.num < 0 || avctx->sample_aspect_ratio.den < 0)
  578. avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
  579. bytestream2_skip(&s->gb, 1); /* unit specifier */
  580. bytestream2_skip(&s->gb, 4); /* crc */
  581. return 0;
  582. }
  583. static int decode_idat_chunk(AVCodecContext *avctx, PNGDecContext *s,
  584. uint32_t length, AVFrame *p)
  585. {
  586. int ret;
  587. size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
  588. if (!(s->hdr_state & PNG_IHDR)) {
  589. av_log(avctx, AV_LOG_ERROR, "IDAT without IHDR\n");
  590. return AVERROR_INVALIDDATA;
  591. }
  592. if (!(s->pic_state & PNG_IDAT)) {
  593. /* init image info */
  594. ret = ff_set_dimensions(avctx, s->width, s->height);
  595. if (ret < 0)
  596. return ret;
  597. s->channels = ff_png_get_nb_channels(s->color_type);
  598. s->bits_per_pixel = s->bit_depth * s->channels;
  599. s->bpp = (s->bits_per_pixel + 7) >> 3;
  600. s->row_size = (s->cur_w * s->bits_per_pixel + 7) >> 3;
  601. if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
  602. s->color_type == PNG_COLOR_TYPE_RGB) {
  603. avctx->pix_fmt = AV_PIX_FMT_RGB24;
  604. } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
  605. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
  606. avctx->pix_fmt = AV_PIX_FMT_RGBA;
  607. } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
  608. s->color_type == PNG_COLOR_TYPE_GRAY) {
  609. avctx->pix_fmt = AV_PIX_FMT_GRAY8;
  610. } else if (s->bit_depth == 16 &&
  611. s->color_type == PNG_COLOR_TYPE_GRAY) {
  612. avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
  613. } else if (s->bit_depth == 16 &&
  614. s->color_type == PNG_COLOR_TYPE_RGB) {
  615. avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
  616. } else if (s->bit_depth == 16 &&
  617. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
  618. avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
  619. } else if ((s->bits_per_pixel == 1 || s->bits_per_pixel == 2 || s->bits_per_pixel == 4 || s->bits_per_pixel == 8) &&
  620. s->color_type == PNG_COLOR_TYPE_PALETTE) {
  621. avctx->pix_fmt = AV_PIX_FMT_PAL8;
  622. } else if (s->bit_depth == 1 && s->bits_per_pixel == 1 && avctx->codec_id != AV_CODEC_ID_APNG) {
  623. avctx->pix_fmt = AV_PIX_FMT_MONOBLACK;
  624. } else if (s->bit_depth == 8 &&
  625. s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
  626. avctx->pix_fmt = AV_PIX_FMT_YA8;
  627. } else if (s->bit_depth == 16 &&
  628. s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
  629. avctx->pix_fmt = AV_PIX_FMT_YA16BE;
  630. } else {
  631. avpriv_report_missing_feature(avctx,
  632. "Bit depth %d color type %d",
  633. s->bit_depth, s->color_type);
  634. return AVERROR_PATCHWELCOME;
  635. }
  636. if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
  637. switch (avctx->pix_fmt) {
  638. case AV_PIX_FMT_RGB24:
  639. avctx->pix_fmt = AV_PIX_FMT_RGBA;
  640. break;
  641. case AV_PIX_FMT_RGB48BE:
  642. avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
  643. break;
  644. case AV_PIX_FMT_GRAY8:
  645. avctx->pix_fmt = AV_PIX_FMT_YA8;
  646. break;
  647. case AV_PIX_FMT_GRAY16BE:
  648. avctx->pix_fmt = AV_PIX_FMT_YA16BE;
  649. break;
  650. default:
  651. avpriv_request_sample(avctx, "bit depth %d "
  652. "and color type %d with TRNS",
  653. s->bit_depth, s->color_type);
  654. return AVERROR_INVALIDDATA;
  655. }
  656. s->bpp += byte_depth;
  657. }
  658. ff_thread_release_buffer(avctx, &s->picture);
  659. if ((ret = ff_thread_get_buffer(avctx, &s->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
  660. return ret;
  661. p->pict_type = AV_PICTURE_TYPE_I;
  662. p->key_frame = 1;
  663. p->interlaced_frame = !!s->interlace_type;
  664. ff_thread_finish_setup(avctx);
  665. /* compute the compressed row size */
  666. if (!s->interlace_type) {
  667. s->crow_size = s->row_size + 1;
  668. } else {
  669. s->pass = 0;
  670. s->pass_row_size = ff_png_pass_row_size(s->pass,
  671. s->bits_per_pixel,
  672. s->cur_w);
  673. s->crow_size = s->pass_row_size + 1;
  674. }
  675. ff_dlog(avctx, "row_size=%d crow_size =%d\n",
  676. s->row_size, s->crow_size);
  677. /* copy the palette if needed */
  678. if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
  679. memcpy(p->data[1], s->palette, 256 * sizeof(uint32_t));
  680. /* empty row is used if differencing to the first row */
  681. av_fast_padded_mallocz(&s->last_row, &s->last_row_size, s->row_size);
  682. if (!s->last_row)
  683. return AVERROR_INVALIDDATA;
  684. if (s->interlace_type ||
  685. s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
  686. av_fast_padded_malloc(&s->tmp_row, &s->tmp_row_size, s->row_size);
  687. if (!s->tmp_row)
  688. return AVERROR_INVALIDDATA;
  689. }
  690. /* compressed row */
  691. av_fast_padded_malloc(&s->buffer, &s->buffer_size, s->row_size + 16);
  692. if (!s->buffer)
  693. return AVERROR(ENOMEM);
  694. /* we want crow_buf+1 to be 16-byte aligned */
  695. s->crow_buf = s->buffer + 15;
  696. s->zstream.avail_out = s->crow_size;
  697. s->zstream.next_out = s->crow_buf;
  698. }
  699. s->pic_state |= PNG_IDAT;
  700. /* set image to non-transparent bpp while decompressing */
  701. if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
  702. s->bpp -= byte_depth;
  703. ret = png_decode_idat(s, length, p->data[0], p->linesize[0]);
  704. if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
  705. s->bpp += byte_depth;
  706. if (ret < 0)
  707. return ret;
  708. bytestream2_skip(&s->gb, 4); /* crc */
  709. return 0;
  710. }
  711. static int decode_plte_chunk(AVCodecContext *avctx, PNGDecContext *s,
  712. uint32_t length)
  713. {
  714. int n, i, r, g, b;
  715. if ((length % 3) != 0 || length > 256 * 3)
  716. return AVERROR_INVALIDDATA;
  717. /* read the palette */
  718. n = length / 3;
  719. for (i = 0; i < n; i++) {
  720. r = bytestream2_get_byte(&s->gb);
  721. g = bytestream2_get_byte(&s->gb);
  722. b = bytestream2_get_byte(&s->gb);
  723. s->palette[i] = (0xFFU << 24) | (r << 16) | (g << 8) | b;
  724. }
  725. for (; i < 256; i++)
  726. s->palette[i] = (0xFFU << 24);
  727. s->hdr_state |= PNG_PLTE;
  728. bytestream2_skip(&s->gb, 4); /* crc */
  729. return 0;
  730. }
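/* tRNS: per-entry alpha for palette images, or the single transparent
 * colour for grayscale/RGB images. */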
  731. static int decode_trns_chunk(AVCodecContext *avctx, PNGDecContext *s,
  732. uint32_t length)
  733. {
  734. int v, i;
  735. if (!(s->hdr_state & PNG_IHDR)) {
  736. av_log(avctx, AV_LOG_ERROR, "trns before IHDR\n");
  737. return AVERROR_INVALIDDATA;
  738. }
  739. if (s->pic_state & PNG_IDAT) {
  740. av_log(avctx, AV_LOG_ERROR, "trns after IDAT\n");
  741. return AVERROR_INVALIDDATA;
  742. }
  743. if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
  744. if (length > 256 || !(s->hdr_state & PNG_PLTE))
  745. return AVERROR_INVALIDDATA;
  746. for (i = 0; i < length; i++) {
  747. unsigned v = bytestream2_get_byte(&s->gb);
  748. s->palette[i] = (s->palette[i] & 0x00ffffff) | (v << 24);
  749. }
  750. } else if (s->color_type == PNG_COLOR_TYPE_GRAY || s->color_type == PNG_COLOR_TYPE_RGB) {
  751. if ((s->color_type == PNG_COLOR_TYPE_GRAY && length != 2) ||
  752. (s->color_type == PNG_COLOR_TYPE_RGB && length != 6) ||
  753. s->bit_depth == 1)
  754. return AVERROR_INVALIDDATA;
  755. for (i = 0; i < length / 2; i++) {
  756. /* only use the least significant bits */
  757. v = av_mod_uintp2(bytestream2_get_be16(&s->gb), s->bit_depth);
  758. if (s->bit_depth > 8)
  759. AV_WB16(&s->transparent_color_be[2 * i], v);
  760. else
  761. s->transparent_color_be[i] = v;
  762. }
  763. } else {
  764. return AVERROR_INVALIDDATA;
  765. }
  766. bytestream2_skip(&s->gb, 4); /* crc */
  767. s->has_trns = 1;
  768. return 0;
  769. }
  770. static int decode_iccp_chunk(PNGDecContext *s, int length, AVFrame *f)
  771. {
  772. int ret, cnt = 0;
  773. AVBPrint bp;
  774. while ((s->iccp_name[cnt++] = bytestream2_get_byte(&s->gb)) && cnt < 81);
  775. if (cnt > 80) {
  776. av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid name!\n");
  777. ret = AVERROR_INVALIDDATA;
  778. goto fail;
  779. }
  780. length = FFMAX(length - cnt, 0);
  781. if (bytestream2_get_byte(&s->gb) != 0) {
  782. av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid compression!\n");
  783. ret = AVERROR_INVALIDDATA;
  784. goto fail;
  785. }
  786. length = FFMAX(length - 1, 0);
  787. if ((ret = decode_zbuf(&bp, s->gb.buffer, s->gb.buffer + length)) < 0)
  788. return ret;
  789. av_freep(&s->iccp_data);
  790. ret = av_bprint_finalize(&bp, (char **)&s->iccp_data);
  791. if (ret < 0)
  792. return ret;
  793. s->iccp_data_len = bp.len;
  794. /* ICC compressed data and CRC */
  795. bytestream2_skip(&s->gb, length + 4);
  796. return 0;
  797. fail:
  798. s->iccp_name[0] = 0;
  799. return ret;
  800. }
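/* Expand packed 1-, 2- and 4-bit rows in place to one byte per pixel;
 * grayscale values are additionally scaled up to the full 8-bit range. */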
  801. static void handle_small_bpp(PNGDecContext *s, AVFrame *p)
  802. {
  803. if (s->bits_per_pixel == 1 && s->color_type == PNG_COLOR_TYPE_PALETTE) {
  804. int i, j, k;
  805. uint8_t *pd = p->data[0];
  806. for (j = 0; j < s->height; j++) {
  807. i = s->width / 8;
  808. for (k = 7; k >= 1; k--)
  809. if ((s->width&7) >= k)
  810. pd[8*i + k - 1] = (pd[i]>>8-k) & 1;
  811. for (i--; i >= 0; i--) {
  812. pd[8*i + 7]= pd[i] & 1;
  813. pd[8*i + 6]= (pd[i]>>1) & 1;
  814. pd[8*i + 5]= (pd[i]>>2) & 1;
  815. pd[8*i + 4]= (pd[i]>>3) & 1;
  816. pd[8*i + 3]= (pd[i]>>4) & 1;
  817. pd[8*i + 2]= (pd[i]>>5) & 1;
  818. pd[8*i + 1]= (pd[i]>>6) & 1;
  819. pd[8*i + 0]= pd[i]>>7;
  820. }
  821. pd += p->linesize[0];
  822. }
  823. } else if (s->bits_per_pixel == 2) {
  824. int i, j;
  825. uint8_t *pd = p->data[0];
  826. for (j = 0; j < s->height; j++) {
  827. i = s->width / 4;
  828. if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
  829. if ((s->width&3) >= 3) pd[4*i + 2]= (pd[i] >> 2) & 3;
  830. if ((s->width&3) >= 2) pd[4*i + 1]= (pd[i] >> 4) & 3;
  831. if ((s->width&3) >= 1) pd[4*i + 0]= pd[i] >> 6;
  832. for (i--; i >= 0; i--) {
  833. pd[4*i + 3]= pd[i] & 3;
  834. pd[4*i + 2]= (pd[i]>>2) & 3;
  835. pd[4*i + 1]= (pd[i]>>4) & 3;
  836. pd[4*i + 0]= pd[i]>>6;
  837. }
  838. } else {
  839. if ((s->width&3) >= 3) pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
  840. if ((s->width&3) >= 2) pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
  841. if ((s->width&3) >= 1) pd[4*i + 0]= ( pd[i]>>6 )*0x55;
  842. for (i--; i >= 0; i--) {
  843. pd[4*i + 3]= ( pd[i] & 3)*0x55;
  844. pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
  845. pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
  846. pd[4*i + 0]= ( pd[i]>>6 )*0x55;
  847. }
  848. }
  849. pd += p->linesize[0];
  850. }
  851. } else if (s->bits_per_pixel == 4) {
  852. int i, j;
  853. uint8_t *pd = p->data[0];
  854. for (j = 0; j < s->height; j++) {
  855. i = s->width/2;
  856. if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
  857. if (s->width&1) pd[2*i+0]= pd[i]>>4;
  858. for (i--; i >= 0; i--) {
  859. pd[2*i + 1] = pd[i] & 15;
  860. pd[2*i + 0] = pd[i] >> 4;
  861. }
  862. } else {
  863. if (s->width & 1) pd[2*i + 0]= (pd[i] >> 4) * 0x11;
  864. for (i--; i >= 0; i--) {
  865. pd[2*i + 1] = (pd[i] & 15) * 0x11;
  866. pd[2*i + 0] = (pd[i] >> 4) * 0x11;
  867. }
  868. }
  869. pd += p->linesize[0];
  870. }
  871. }
  872. }
  873. static int decode_fctl_chunk(AVCodecContext *avctx, PNGDecContext *s,
  874. uint32_t length)
  875. {
  876. uint32_t sequence_number;
  877. int cur_w, cur_h, x_offset, y_offset, dispose_op, blend_op;
  878. if (length != 26)
  879. return AVERROR_INVALIDDATA;
  880. if (!(s->hdr_state & PNG_IHDR)) {
  881. av_log(avctx, AV_LOG_ERROR, "fctl before IHDR\n");
  882. return AVERROR_INVALIDDATA;
  883. }
  884. if (s->pic_state & PNG_IDAT) {
  885. av_log(avctx, AV_LOG_ERROR, "fctl after IDAT\n");
  886. return AVERROR_INVALIDDATA;
  887. }
  888. s->last_w = s->cur_w;
  889. s->last_h = s->cur_h;
  890. s->last_x_offset = s->x_offset;
  891. s->last_y_offset = s->y_offset;
  892. s->last_dispose_op = s->dispose_op;
  893. sequence_number = bytestream2_get_be32(&s->gb);
  894. cur_w = bytestream2_get_be32(&s->gb);
  895. cur_h = bytestream2_get_be32(&s->gb);
  896. x_offset = bytestream2_get_be32(&s->gb);
  897. y_offset = bytestream2_get_be32(&s->gb);
  898. bytestream2_skip(&s->gb, 4); /* delay_num (2), delay_den (2) */
  899. dispose_op = bytestream2_get_byte(&s->gb);
  900. blend_op = bytestream2_get_byte(&s->gb);
  901. bytestream2_skip(&s->gb, 4); /* crc */
  902. if (sequence_number == 0 &&
  903. (cur_w != s->width ||
  904. cur_h != s->height ||
  905. x_offset != 0 ||
  906. y_offset != 0) ||
  907. cur_w <= 0 || cur_h <= 0 ||
  908. x_offset < 0 || y_offset < 0 ||
  909. cur_w > s->width - x_offset|| cur_h > s->height - y_offset)
  910. return AVERROR_INVALIDDATA;
  911. if (blend_op != APNG_BLEND_OP_OVER && blend_op != APNG_BLEND_OP_SOURCE) {
  912. av_log(avctx, AV_LOG_ERROR, "Invalid blend_op %d\n", blend_op);
  913. return AVERROR_INVALIDDATA;
  914. }
  915. if ((sequence_number == 0 || !s->last_picture.f->data[0]) &&
  916. dispose_op == APNG_DISPOSE_OP_PREVIOUS) {
  917. // No previous frame to revert to for the first frame
  918. // Spec says to just treat it as an APNG_DISPOSE_OP_BACKGROUND
  919. dispose_op = APNG_DISPOSE_OP_BACKGROUND;
  920. }
  921. if (blend_op == APNG_BLEND_OP_OVER && !s->has_trns && (
  922. avctx->pix_fmt == AV_PIX_FMT_RGB24 ||
  923. avctx->pix_fmt == AV_PIX_FMT_RGB48BE ||
  924. avctx->pix_fmt == AV_PIX_FMT_PAL8 ||
  925. avctx->pix_fmt == AV_PIX_FMT_GRAY8 ||
  926. avctx->pix_fmt == AV_PIX_FMT_GRAY16BE ||
  927. avctx->pix_fmt == AV_PIX_FMT_MONOBLACK
  928. )) {
  929. // APNG_BLEND_OP_OVER is the same as APNG_BLEND_OP_SOURCE when there is no alpha channel
  930. blend_op = APNG_BLEND_OP_SOURCE;
  931. }
  932. s->cur_w = cur_w;
  933. s->cur_h = cur_h;
  934. s->x_offset = x_offset;
  935. s->y_offset = y_offset;
  936. s->dispose_op = dispose_op;
  937. s->blend_op = blend_op;
  938. return 0;
  939. }
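/* PNG P-frame: reconstruct by adding the previous frame byte by byte. */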
  940. static void handle_p_frame_png(PNGDecContext *s, AVFrame *p)
  941. {
  942. int i, j;
  943. uint8_t *pd = p->data[0];
  944. uint8_t *pd_last = s->last_picture.f->data[0];
  945. int ls = FFMIN(av_image_get_linesize(p->format, s->width, 0), s->width * s->bpp);
  946. ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
  947. for (j = 0; j < s->height; j++) {
  948. for (i = 0; i < ls; i++)
  949. pd[i] += pd_last[i];
  950. pd += p->linesize[0];
  951. pd_last += s->last_picture.f->linesize[0];
  952. }
  953. }
  954. // divide by 255 and round to nearest
  955. // apply a fast variant: (X+127)/255 = ((X+127)*257+257)>>16 = ((X+128)*257)>>16
  956. #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
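/* Compose an APNG frame: apply the previous frame's dispose_op, copy the
 * area outside the fcTL rectangle from the previous frame, and blend the
 * rectangle itself when blend_op is APNG_BLEND_OP_OVER. */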
  957. static int handle_p_frame_apng(AVCodecContext *avctx, PNGDecContext *s,
  958. AVFrame *p)
  959. {
  960. uint8_t *dst = p->data[0];
  961. ptrdiff_t dst_stride = p->linesize[0];
  962. const uint8_t *src = s->last_picture.f->data[0];
  963. ptrdiff_t src_stride = s->last_picture.f->linesize[0];
  964. size_t x, y;
  965. if (s->blend_op == APNG_BLEND_OP_OVER &&
  966. avctx->pix_fmt != AV_PIX_FMT_RGBA &&
  967. avctx->pix_fmt != AV_PIX_FMT_GRAY8A &&
  968. avctx->pix_fmt != AV_PIX_FMT_PAL8) {
  969. avpriv_request_sample(avctx, "Blending with pixel format %s",
  970. av_get_pix_fmt_name(avctx->pix_fmt));
  971. return AVERROR_PATCHWELCOME;
  972. }
  973. ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
  974. // need to reset a rectangle to background:
  975. if (s->last_dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
  976. av_fast_malloc(&s->background_buf, &s->background_buf_allocated,
  977. src_stride * p->height);
  978. if (!s->background_buf)
  979. return AVERROR(ENOMEM);
  980. memcpy(s->background_buf, src, src_stride * p->height);
  981. for (y = s->last_y_offset; y < s->last_y_offset + s->last_h; y++) {
  982. memset(s->background_buf + src_stride * y +
  983. s->bpp * s->last_x_offset, 0, s->bpp * s->last_w);
  984. }
  985. src = s->background_buf;
  986. }
  987. // copy unchanged rectangles from the last frame
  988. for (y = 0; y < s->y_offset; y++)
  989. memcpy(dst + y * dst_stride, src + y * src_stride, p->width * s->bpp);
  990. for (y = s->y_offset; y < s->y_offset + s->cur_h; y++) {
  991. memcpy(dst + y * dst_stride, src + y * src_stride, s->x_offset * s->bpp);
  992. memcpy(dst + y * dst_stride + (s->x_offset + s->cur_w) * s->bpp,
  993. src + y * src_stride + (s->x_offset + s->cur_w) * s->bpp,
  994. (p->width - s->cur_w - s->x_offset) * s->bpp);
  995. }
  996. for (y = s->y_offset + s->cur_h; y < p->height; y++)
  997. memcpy(dst + y * dst_stride, src + y * src_stride, p->width * s->bpp);
  998. if (s->blend_op == APNG_BLEND_OP_OVER) {
  999. // Perform blending
  1000. for (y = s->y_offset; y < s->y_offset + s->cur_h; ++y) {
  1001. uint8_t *foreground = dst + dst_stride * y + s->bpp * s->x_offset;
  1002. const uint8_t *background = src + src_stride * y + s->bpp * s->x_offset;
  1003. for (x = s->x_offset; x < s->x_offset + s->cur_w; ++x, foreground += s->bpp, background += s->bpp) {
  1004. size_t b;
  1005. uint8_t foreground_alpha, background_alpha, output_alpha;
  1006. uint8_t output[10];
  1007. // Since we might be blending alpha onto alpha, we use the following equations:
  1008. // output_alpha = foreground_alpha + (1 - foreground_alpha) * background_alpha
  1009. // output = (foreground_alpha * foreground + (1 - foreground_alpha) * background_alpha * background) / output_alpha
  1010. switch (avctx->pix_fmt) {
  1011. case AV_PIX_FMT_RGBA:
  1012. foreground_alpha = foreground[3];
  1013. background_alpha = background[3];
  1014. break;
  1015. case AV_PIX_FMT_GRAY8A:
  1016. foreground_alpha = foreground[1];
  1017. background_alpha = background[1];
  1018. break;
  1019. case AV_PIX_FMT_PAL8:
  1020. foreground_alpha = s->palette[foreground[0]] >> 24;
  1021. background_alpha = s->palette[background[0]] >> 24;
  1022. break;
  1023. }
  1024. if (foreground_alpha == 255)
  1025. continue;
  1026. if (foreground_alpha == 0) {
  1027. memcpy(foreground, background, s->bpp);
  1028. continue;
  1029. }
  1030. if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
  1031. // TODO: Alpha blending with PAL8 will likely need the entire image converted over to RGBA first
  1032. avpriv_request_sample(avctx, "Alpha blending palette samples");
  1033. continue;
  1034. }
  1035. output_alpha = foreground_alpha + FAST_DIV255((255 - foreground_alpha) * background_alpha);
  1036. av_assert0(s->bpp <= 10);
  1037. for (b = 0; b < s->bpp - 1; ++b) {
  1038. if (output_alpha == 0) {
  1039. output[b] = 0;
  1040. } else if (background_alpha == 255) {
  1041. output[b] = FAST_DIV255(foreground_alpha * foreground[b] + (255 - foreground_alpha) * background[b]);
  1042. } else {
  1043. output[b] = (255 * foreground_alpha * foreground[b] + (255 - foreground_alpha) * background_alpha * background[b]) / (255 * output_alpha);
  1044. }
  1045. }
  1046. output[b] = output_alpha;
  1047. memcpy(foreground, output, s->bpp);
  1048. }
  1049. }
  1050. }
  1051. return 0;
  1052. }
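/* Walk all chunks in the packet, dispatching each to the matching chunk
 * parser, then apply tRNS expansion and P-frame handling to the decoded image. */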
  1053. static int decode_frame_common(AVCodecContext *avctx, PNGDecContext *s,
  1054. AVFrame *p, const AVPacket *avpkt)
  1055. {
  1056. const AVCRC *crc_tab = av_crc_get_table(AV_CRC_32_IEEE_LE);
  1057. uint32_t tag, length;
  1058. int decode_next_dat = 0;
  1059. int i, ret;
  1060. for (;;) {
  1061. length = bytestream2_get_bytes_left(&s->gb);
  1062. if (length <= 0) {
  1063. if (avctx->codec_id == AV_CODEC_ID_PNG &&
  1064. avctx->skip_frame == AVDISCARD_ALL) {
  1065. return 0;
  1066. }
  1067. if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && length == 0) {
  1068. if (!(s->pic_state & PNG_IDAT))
  1069. return 0;
  1070. else
  1071. goto exit_loop;
  1072. }
  1073. av_log(avctx, AV_LOG_ERROR, "%d bytes left\n", length);
  1074. if ( s->pic_state & PNG_ALLIMAGE
  1075. && avctx->strict_std_compliance <= FF_COMPLIANCE_NORMAL)
  1076. goto exit_loop;
  1077. ret = AVERROR_INVALIDDATA;
  1078. goto fail;
  1079. }
  1080. length = bytestream2_get_be32(&s->gb);
  1081. if (length > 0x7fffffff || length + 8 > bytestream2_get_bytes_left(&s->gb)) {
  1082. av_log(avctx, AV_LOG_ERROR, "chunk too big\n");
  1083. ret = AVERROR_INVALIDDATA;
  1084. goto fail;
  1085. }
  1086. if (avctx->err_recognition & (AV_EF_CRCCHECK | AV_EF_IGNORE_ERR)) {
  1087. uint32_t crc_sig = AV_RB32(s->gb.buffer + length + 4);
  1088. uint32_t crc_cal = ~av_crc(crc_tab, UINT32_MAX, s->gb.buffer, length + 4);
  1089. if (crc_sig ^ crc_cal) {
  1090. av_log(avctx, AV_LOG_ERROR, "CRC mismatch in chunk");
  1091. if (avctx->err_recognition & AV_EF_EXPLODE) {
  1092. av_log(avctx, AV_LOG_ERROR, ", quitting\n");
  1093. ret = AVERROR_INVALIDDATA;
  1094. goto fail;
  1095. }
  1096. av_log(avctx, AV_LOG_ERROR, ", skipping\n");
  1097. bytestream2_skip(&s->gb, 4); /* tag */
  1098. goto skip_tag;
  1099. }
  1100. }
  1101. tag = bytestream2_get_le32(&s->gb);
  1102. if (avctx->debug & FF_DEBUG_STARTCODE)
  1103. av_log(avctx, AV_LOG_DEBUG, "png: tag=%s length=%u\n",
  1104. av_fourcc2str(tag), length);
  1105. if (avctx->codec_id == AV_CODEC_ID_PNG &&
  1106. avctx->skip_frame == AVDISCARD_ALL) {
  1107. switch(tag) {
  1108. case MKTAG('I', 'H', 'D', 'R'):
  1109. case MKTAG('p', 'H', 'Y', 's'):
  1110. case MKTAG('t', 'E', 'X', 't'):
  1111. case MKTAG('I', 'D', 'A', 'T'):
  1112. case MKTAG('t', 'R', 'N', 'S'):
  1113. break;
  1114. default:
  1115. goto skip_tag;
  1116. }
  1117. }
  1118. switch (tag) {
  1119. case MKTAG('I', 'H', 'D', 'R'):
  1120. if ((ret = decode_ihdr_chunk(avctx, s, length)) < 0)
  1121. goto fail;
  1122. break;
  1123. case MKTAG('p', 'H', 'Y', 's'):
  1124. if ((ret = decode_phys_chunk(avctx, s)) < 0)
  1125. goto fail;
  1126. break;
  1127. case MKTAG('f', 'c', 'T', 'L'):
  1128. if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
  1129. goto skip_tag;
  1130. if ((ret = decode_fctl_chunk(avctx, s, length)) < 0)
  1131. goto fail;
  1132. decode_next_dat = 1;
  1133. break;
  1134. case MKTAG('f', 'd', 'A', 'T'):
  1135. if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
  1136. goto skip_tag;
  1137. if (!decode_next_dat || length < 4) {
  1138. ret = AVERROR_INVALIDDATA;
  1139. goto fail;
  1140. }
  1141. bytestream2_get_be32(&s->gb);
  1142. length -= 4;
  1143. /* fallthrough */
  1144. case MKTAG('I', 'D', 'A', 'T'):
  1145. if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && !decode_next_dat)
  1146. goto skip_tag;
  1147. if ((ret = decode_idat_chunk(avctx, s, length, p)) < 0)
  1148. goto fail;
  1149. break;
  1150. case MKTAG('P', 'L', 'T', 'E'):
  1151. if (decode_plte_chunk(avctx, s, length) < 0)
  1152. goto skip_tag;
  1153. break;
  1154. case MKTAG('t', 'R', 'N', 'S'):
  1155. if (decode_trns_chunk(avctx, s, length) < 0)
  1156. goto skip_tag;
  1157. break;
  1158. case MKTAG('t', 'E', 'X', 't'):
  1159. if (decode_text_chunk(s, length, 0) < 0)
  1160. av_log(avctx, AV_LOG_WARNING, "Broken tEXt chunk\n");
  1161. bytestream2_skip(&s->gb, length + 4);
  1162. break;
  1163. case MKTAG('z', 'T', 'X', 't'):
  1164. if (decode_text_chunk(s, length, 1) < 0)
  1165. av_log(avctx, AV_LOG_WARNING, "Broken zTXt chunk\n");
  1166. bytestream2_skip(&s->gb, length + 4);
  1167. break;
  1168. case MKTAG('s', 'T', 'E', 'R'): {
  1169. int mode = bytestream2_get_byte(&s->gb);
  1170. if (mode == 0 || mode == 1) {
  1171. s->stereo_mode = mode;
  1172. } else {
  1173. av_log(avctx, AV_LOG_WARNING,
  1174. "Unknown value in sTER chunk (%d)\n", mode);
  1175. }
  1176. bytestream2_skip(&s->gb, 4); /* crc */
  1177. break;
  1178. }
  1179. case MKTAG('i', 'C', 'C', 'P'): {
  1180. if ((ret = decode_iccp_chunk(s, length, p)) < 0)
  1181. goto fail;
  1182. break;
  1183. }
  1184. case MKTAG('c', 'H', 'R', 'M'): {
  1185. s->have_chrm = 1;
  1186. s->white_point[0] = bytestream2_get_be32(&s->gb);
  1187. s->white_point[1] = bytestream2_get_be32(&s->gb);
  1188. /* RGB Primaries */
  1189. for (i = 0; i < 3; i++) {
  1190. s->display_primaries[i][0] = bytestream2_get_be32(&s->gb);
  1191. s->display_primaries[i][1] = bytestream2_get_be32(&s->gb);
  1192. }
  1193. bytestream2_skip(&s->gb, 4); /* crc */
  1194. break;
  1195. }
  1196. case MKTAG('g', 'A', 'M', 'A'): {
  1197. AVBPrint bp;
  1198. char *gamma_str;
  1199. int num = bytestream2_get_be32(&s->gb);
  1200. av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
  1201. av_bprintf(&bp, "%i/%i", num, 100000);
  1202. ret = av_bprint_finalize(&bp, &gamma_str);
  1203. if (ret < 0)
  1204. return ret;
  1205. av_dict_set(&s->frame_metadata, "gamma", gamma_str, AV_DICT_DONT_STRDUP_VAL);
  1206. bytestream2_skip(&s->gb, 4); /* crc */
  1207. break;
  1208. }
  1209. case MKTAG('I', 'E', 'N', 'D'):
  1210. if (!(s->pic_state & PNG_ALLIMAGE))
  1211. av_log(avctx, AV_LOG_ERROR, "IEND without all image\n");
  1212. if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
  1213. ret = AVERROR_INVALIDDATA;
  1214. goto fail;
  1215. }
  1216. bytestream2_skip(&s->gb, 4); /* crc */
  1217. goto exit_loop;
  1218. default:
  1219. /* skip tag */
  1220. skip_tag:
  1221. bytestream2_skip(&s->gb, length + 4);
  1222. break;
  1223. }
  1224. }
  1225. exit_loop:
  1226. if (avctx->codec_id == AV_CODEC_ID_PNG &&
  1227. avctx->skip_frame == AVDISCARD_ALL) {
  1228. return 0;
  1229. }
  1230. if (percent_missing(s) > avctx->discard_damaged_percentage)
  1231. return AVERROR_INVALIDDATA;
  1232. if (s->bits_per_pixel <= 4)
  1233. handle_small_bpp(s, p);
  1234. /* apply transparency if needed */
  1235. if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
  1236. size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
  1237. size_t raw_bpp = s->bpp - byte_depth;
  1238. unsigned x, y;
  1239. av_assert0(s->bit_depth > 1);
  1240. for (y = 0; y < s->height; ++y) {
  1241. uint8_t *row = &p->data[0][p->linesize[0] * y];
  1242. if (s->bpp == 2 && byte_depth == 1) {
  1243. uint8_t *pixel = &row[2 * s->width - 1];
  1244. uint8_t *rowp = &row[1 * s->width - 1];
  1245. int tcolor = s->transparent_color_be[0];
  1246. for (x = s->width; x > 0; --x) {
  1247. *pixel-- = *rowp == tcolor ? 0 : 0xff;
  1248. *pixel-- = *rowp--;
  1249. }
  1250. } else if (s->bpp == 4 && byte_depth == 1) {
  1251. uint8_t *pixel = &row[4 * s->width - 1];
  1252. uint8_t *rowp = &row[3 * s->width - 1];
  1253. int tcolor = AV_RL24(s->transparent_color_be);
  1254. for (x = s->width; x > 0; --x) {
  1255. *pixel-- = AV_RL24(rowp-2) == tcolor ? 0 : 0xff;
  1256. *pixel-- = *rowp--;
  1257. *pixel-- = *rowp--;
  1258. *pixel-- = *rowp--;
  1259. }
  1260. } else {
  1261. /* since we're updating in-place, we have to go from right to left */
  1262. for (x = s->width; x > 0; --x) {
  1263. uint8_t *pixel = &row[s->bpp * (x - 1)];
  1264. memmove(pixel, &row[raw_bpp * (x - 1)], raw_bpp);
  1265. if (!memcmp(pixel, s->transparent_color_be, raw_bpp)) {
  1266. memset(&pixel[raw_bpp], 0, byte_depth);
  1267. } else {
  1268. memset(&pixel[raw_bpp], 0xff, byte_depth);
  1269. }
  1270. }
  1271. }
  1272. }
  1273. }
  1274. /* handle P-frames only if a predecessor frame is available */
  1275. if (s->last_picture.f->data[0]) {
  1276. if ( !(avpkt->flags & AV_PKT_FLAG_KEY) && avctx->codec_tag != AV_RL32("MPNG")
  1277. && s->last_picture.f->width == p->width
  1278. && s->last_picture.f->height== p->height
  1279. && s->last_picture.f->format== p->format
  1280. ) {
  1281. if (CONFIG_PNG_DECODER && avctx->codec_id != AV_CODEC_ID_APNG)
  1282. handle_p_frame_png(s, p);
  1283. else if (CONFIG_APNG_DECODER &&
  1284. avctx->codec_id == AV_CODEC_ID_APNG &&
  1285. (ret = handle_p_frame_apng(avctx, s, p)) < 0)
  1286. goto fail;
  1287. }
  1288. }
  1289. ff_thread_report_progress(&s->picture, INT_MAX, 0);
  1290. return 0;
  1291. fail:
  1292. ff_thread_report_progress(&s->picture, INT_MAX, 0);
  1293. return ret;
  1294. }
  1295. static void clear_frame_metadata(PNGDecContext *s)
  1296. {
  1297. av_freep(&s->iccp_data);
  1298. s->iccp_data_len = 0;
  1299. s->iccp_name[0] = 0;
  1300. s->stereo_mode = -1;
  1301. s->have_chrm = 0;
  1302. av_dict_free(&s->frame_metadata);
  1303. }
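/* Reference the decoded picture into 'f' and attach collected side data
 * (ICC profile, stereo3d, mastering display metadata) plus frame metadata. */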
  1304. static int output_frame(PNGDecContext *s, AVFrame *f,
  1305. const AVFrame *src)
  1306. {
  1307. int ret;
  1308. ret = av_frame_ref(f, src);
  1309. if (ret < 0)
  1310. return ret;
  1311. if (s->iccp_data) {
  1312. AVFrameSideData *sd = av_frame_new_side_data(f, AV_FRAME_DATA_ICC_PROFILE, s->iccp_data_len);
  1313. if (!sd) {
  1314. ret = AVERROR(ENOMEM);
  1315. goto fail;
  1316. }
  1317. memcpy(sd->data, s->iccp_data, s->iccp_data_len);
  1318. av_dict_set(&sd->metadata, "name", s->iccp_name, 0);
  1319. }
  1320. if (s->stereo_mode >= 0) {
  1321. AVStereo3D *stereo3d = av_stereo3d_create_side_data(f);
  1322. if (!stereo3d) {
  1323. ret = AVERROR(ENOMEM);
  1324. goto fail;
  1325. }
  1326. stereo3d->type = AV_STEREO3D_SIDEBYSIDE;
  1327. stereo3d->flags = s->stereo_mode ? 0 : AV_STEREO3D_FLAG_INVERT;
  1328. }
  1329. if (s->have_chrm) {
  1330. AVMasteringDisplayMetadata *mdm = av_mastering_display_metadata_create_side_data(f);
  1331. if (!mdm) {
  1332. ret = AVERROR(ENOMEM);
  1333. goto fail;
  1334. }
  1335. mdm->white_point[0] = av_make_q(s->white_point[0], 100000);
  1336. mdm->white_point[1] = av_make_q(s->white_point[1], 100000);
  1337. /* RGB Primaries */
  1338. for (int i = 0; i < 3; i++) {
  1339. mdm->display_primaries[i][0] = av_make_q(s->display_primaries[i][0], 100000);
  1340. mdm->display_primaries[i][1] = av_make_q(s->display_primaries[i][1], 100000);
  1341. }
  1342. mdm->has_primaries = 1;
  1343. }
  1344. FFSWAP(AVDictionary*, f->metadata, s->frame_metadata);
  1345. return 0;
  1346. fail:
  1347. av_frame_unref(f);
  1348. return ret;
  1349. }
  1350. #if CONFIG_PNG_DECODER
  1351. static int decode_frame_png(AVCodecContext *avctx,
  1352. void *data, int *got_frame,
  1353. AVPacket *avpkt)
  1354. {
  1355. PNGDecContext *const s = avctx->priv_data;
  1356. const uint8_t *buf = avpkt->data;
  1357. int buf_size = avpkt->size;
  1358. AVFrame *dst_frame = data;
  1359. AVFrame *p = s->picture.f;
  1360. int64_t sig;
  1361. int ret;
  1362. clear_frame_metadata(s);
  1363. bytestream2_init(&s->gb, buf, buf_size);
  1364. /* check signature */
  1365. sig = bytestream2_get_be64(&s->gb);
  1366. if (sig != PNGSIG &&
  1367. sig != MNGSIG) {
  1368. av_log(avctx, AV_LOG_ERROR, "Invalid PNG signature 0x%08"PRIX64".\n", sig);
  1369. return AVERROR_INVALIDDATA;
  1370. }
  1371. s->y = s->has_trns = 0;
  1372. s->hdr_state = 0;
  1373. s->pic_state = 0;
  1374. /* init the zlib */
  1375. s->zstream.zalloc = ff_png_zalloc;
  1376. s->zstream.zfree = ff_png_zfree;
  1377. s->zstream.opaque = NULL;
  1378. ret = inflateInit(&s->zstream);
  1379. if (ret != Z_OK) {
  1380. av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
  1381. return AVERROR_EXTERNAL;
  1382. }
  1383. if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
  1384. goto the_end;
  1385. if (avctx->skip_frame == AVDISCARD_ALL) {
  1386. *got_frame = 0;
  1387. ret = bytestream2_tell(&s->gb);
  1388. goto the_end;
  1389. }
  1390. ret = output_frame(s, dst_frame, s->picture.f);
  1391. if (ret < 0)
  1392. goto the_end;
  1393. if (!(avctx->active_thread_type & FF_THREAD_FRAME)) {
  1394. ff_thread_release_buffer(avctx, &s->last_picture);
  1395. FFSWAP(ThreadFrame, s->picture, s->last_picture);
  1396. }
  1397. *got_frame = 1;
  1398. ret = bytestream2_tell(&s->gb);
  1399. the_end:
  1400. inflateEnd(&s->zstream);
  1401. s->crow_buf = NULL;
  1402. return ret;
  1403. }
  1404. #endif
  1405. #if CONFIG_APNG_DECODER
  1406. static int decode_frame_apng(AVCodecContext *avctx,
  1407. void *data, int *got_frame,
  1408. AVPacket *avpkt)
  1409. {
  1410. PNGDecContext *const s = avctx->priv_data;
  1411. AVFrame *dst_frame = data;
  1412. int ret;
  1413. AVFrame *p = s->picture.f;
  1414. clear_frame_metadata(s);
  1415. if (!(s->hdr_state & PNG_IHDR)) {
  1416. if (!avctx->extradata_size)
  1417. return AVERROR_INVALIDDATA;
  1418. /* only init fields, there is no zlib use in extradata */
  1419. s->zstream.zalloc = ff_png_zalloc;
  1420. s->zstream.zfree = ff_png_zfree;
  1421. bytestream2_init(&s->gb, avctx->extradata, avctx->extradata_size);
  1422. if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
  1423. goto end;
  1424. }
  1425. /* reset state for a new frame */
  1426. if ((ret = inflateInit(&s->zstream)) != Z_OK) {
  1427. av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
  1428. ret = AVERROR_EXTERNAL;
  1429. goto end;
  1430. }
  1431. s->y = 0;
  1432. s->pic_state = 0;
  1433. bytestream2_init(&s->gb, avpkt->data, avpkt->size);
  1434. if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
  1435. goto end;
  1436. if (!(s->pic_state & PNG_ALLIMAGE))
  1437. av_log(avctx, AV_LOG_WARNING, "Frame did not contain a complete image\n");
  1438. if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
  1439. ret = AVERROR_INVALIDDATA;
  1440. goto end;
  1441. }
  1442. ret = output_frame(s, dst_frame, s->picture.f);
  1443. if (ret < 0)
  1444. goto end;
  1445. if (!(avctx->active_thread_type & FF_THREAD_FRAME)) {
  1446. if (s->dispose_op == APNG_DISPOSE_OP_PREVIOUS) {
  1447. ff_thread_release_buffer(avctx, &s->picture);
  1448. } else if (s->dispose_op == APNG_DISPOSE_OP_NONE) {
  1449. ff_thread_release_buffer(avctx, &s->last_picture);
  1450. FFSWAP(ThreadFrame, s->picture, s->last_picture);
  1451. }
  1452. }
  1453. *got_frame = 1;
  1454. ret = bytestream2_tell(&s->gb);
  1455. end:
  1456. inflateEnd(&s->zstream);
  1457. return ret;
  1458. }
  1459. #endif
  1460. #if HAVE_THREADS
  1461. static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
  1462. {
  1463. PNGDecContext *psrc = src->priv_data;
  1464. PNGDecContext *pdst = dst->priv_data;
  1465. ThreadFrame *src_frame = NULL;
  1466. int ret;
  1467. if (dst == src)
  1468. return 0;
  1469. if (CONFIG_APNG_DECODER && dst->codec_id == AV_CODEC_ID_APNG) {
  1470. pdst->width = psrc->width;
  1471. pdst->height = psrc->height;
  1472. pdst->bit_depth = psrc->bit_depth;
  1473. pdst->color_type = psrc->color_type;
  1474. pdst->compression_type = psrc->compression_type;
  1475. pdst->interlace_type = psrc->interlace_type;
  1476. pdst->filter_type = psrc->filter_type;
  1477. pdst->cur_w = psrc->cur_w;
  1478. pdst->cur_h = psrc->cur_h;
  1479. pdst->x_offset = psrc->x_offset;
  1480. pdst->y_offset = psrc->y_offset;
  1481. pdst->has_trns = psrc->has_trns;
  1482. memcpy(pdst->transparent_color_be, psrc->transparent_color_be, sizeof(pdst->transparent_color_be));
  1483. pdst->dispose_op = psrc->dispose_op;
  1484. memcpy(pdst->palette, psrc->palette, sizeof(pdst->palette));
  1485. pdst->hdr_state |= psrc->hdr_state;
  1486. }
  1487. src_frame = psrc->dispose_op == APNG_DISPOSE_OP_NONE ?
  1488. &psrc->picture : &psrc->last_picture;
  1489. ff_thread_release_buffer(dst, &pdst->last_picture);
  1490. if (src_frame && src_frame->f->data[0]) {
  1491. ret = ff_thread_ref_frame(&pdst->last_picture, src_frame);
  1492. if (ret < 0)
  1493. return ret;
  1494. }
  1495. return 0;
  1496. }
  1497. #endif
  1498. static av_cold int png_dec_init(AVCodecContext *avctx)
  1499. {
  1500. PNGDecContext *s = avctx->priv_data;
  1501. avctx->color_range = AVCOL_RANGE_JPEG;
  1502. s->avctx = avctx;
  1503. s->last_picture.f = av_frame_alloc();
  1504. s->picture.f = av_frame_alloc();
  1505. if (!s->last_picture.f || !s->picture.f) {
  1506. av_frame_free(&s->last_picture.f);
  1507. av_frame_free(&s->picture.f);
  1508. return AVERROR(ENOMEM);
  1509. }
  1510. ff_pngdsp_init(&s->dsp);
  1511. return 0;
  1512. }
  1513. static av_cold int png_dec_end(AVCodecContext *avctx)
  1514. {
  1515. PNGDecContext *s = avctx->priv_data;
  1516. ff_thread_release_buffer(avctx, &s->last_picture);
  1517. av_frame_free(&s->last_picture.f);
  1518. ff_thread_release_buffer(avctx, &s->picture);
  1519. av_frame_free(&s->picture.f);
  1520. av_freep(&s->buffer);
  1521. s->buffer_size = 0;
  1522. av_freep(&s->last_row);
  1523. s->last_row_size = 0;
  1524. av_freep(&s->tmp_row);
  1525. s->tmp_row_size = 0;
  1526. av_freep(&s->background_buf);
  1527. av_freep(&s->iccp_data);
  1528. av_dict_free(&s->frame_metadata);
  1529. return 0;
  1530. }
  1531. #if CONFIG_APNG_DECODER
  1532. AVCodec ff_apng_decoder = {
  1533. .name = "apng",
  1534. .long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
  1535. .type = AVMEDIA_TYPE_VIDEO,
  1536. .id = AV_CODEC_ID_APNG,
  1537. .priv_data_size = sizeof(PNGDecContext),
  1538. .init = png_dec_init,
  1539. .close = png_dec_end,
  1540. .decode = decode_frame_apng,
  1541. .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
  1542. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
  1543. .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE |
  1544. FF_CODEC_CAP_ALLOCATE_PROGRESS,
  1545. };
  1546. #endif
  1547. #if CONFIG_PNG_DECODER
  1548. AVCodec ff_png_decoder = {
  1549. .name = "png",
  1550. .long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
  1551. .type = AVMEDIA_TYPE_VIDEO,
  1552. .id = AV_CODEC_ID_PNG,
  1553. .priv_data_size = sizeof(PNGDecContext),
  1554. .init = png_dec_init,
  1555. .close = png_dec_end,
  1556. .decode = decode_frame_png,
  1557. .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
  1558. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
  1559. .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM | FF_CODEC_CAP_INIT_THREADSAFE |
  1560. FF_CODEC_CAP_ALLOCATE_PROGRESS,
  1561. };
  1562. #endif