You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1705 lines
62KB

  1. /*
  2. * JPEG 2000 image decoder
  3. * Copyright (c) 2007 Kamil Nowosad
  4. * Copyright (c) 2013 Nicolas Bertrand <nicoinattendu@gmail.com>
  5. *
  6. * This file is part of FFmpeg.
  7. *
  8. * FFmpeg is free software; you can redistribute it and/or
  9. * modify it under the terms of the GNU Lesser General Public
  10. * License as published by the Free Software Foundation; either
  11. * version 2.1 of the License, or (at your option) any later version.
  12. *
  13. * FFmpeg is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  16. * Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FFmpeg; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  21. */
  22. /**
  23. * @file
  24. * JPEG 2000 image decoder
  25. */
  26. #include "libavutil/avassert.h"
  27. #include "libavutil/common.h"
  28. #include "libavutil/opt.h"
  29. #include "libavutil/pixdesc.h"
  30. #include "avcodec.h"
  31. #include "bytestream.h"
  32. #include "internal.h"
  33. #include "thread.h"
  34. #include "jpeg2000.h"
  35. #define JP2_SIG_TYPE 0x6A502020
  36. #define JP2_SIG_VALUE 0x0D0A870A
  37. #define JP2_CODESTREAM 0x6A703263
  38. #define JP2_HEADER 0x6A703268
  39. #define HAD_COC 0x01
  40. #define HAD_QCC 0x02
/* State for one tile-part (SOT segment) of a tile. */
typedef struct Jpeg2000TilePart {
    uint8_t tile_index;           // Tile index who refers the tile-part
    const uint8_t *tp_end;        // one past the last byte of this tile-part
                                  // (set from Psot in get_sot())
    GetByteContext tpg;           // bit stream in tile-part
} Jpeg2000TilePart;
/* RMK: For JPEG2000 DCINEMA 3 tile-parts in a tile
 * one per component, so tile_part elements have a size of 3 */
typedef struct Jpeg2000Tile {
    Jpeg2000Component   *comp;          // per-component data, ncomponents entries
    uint8_t             properties[4];  // HAD_COC / HAD_QCC flags per component
    Jpeg2000CodingStyle codsty[4];      // coding style per component
    Jpeg2000QuantStyle  qntsty[4];      // quantization style per component
    Jpeg2000TilePart    tile_part[4];
    uint16_t            tp_idx;         // Tile-part index
} Jpeg2000Tile;
/* Decoder state, filled in while parsing the main-header marker segments. */
typedef struct Jpeg2000DecoderContext {
    AVClass         *class;
    AVCodecContext  *avctx;
    GetByteContext  g;                  // the codestream being parsed

    int             width, height;      // Xsiz/Ysiz from the SIZ marker
    int             image_offset_x, image_offset_y;  // X0Siz/Y0Siz
    int             tile_offset_x, tile_offset_y;    // XT0Siz/YT0Siz
    uint8_t         cbps[4];            // bits per sample in particular components
    uint8_t         sgnd[4];            // if a component is signed
    uint8_t         properties[4];      // HAD_COC / HAD_QCC flags per component
    int             cdx[4], cdy[4];     // component sample separation (XRsiz/YRsiz)
    int             precision;          // max of cbps[] over all components
    int             ncomponents;
    int             colour_space;       // from the colour specification box
    uint32_t        palette[256];
    int8_t          pal8;               // nonzero when a PAL8 output is requested
    int             cdef[4];
    int             tile_width, tile_height;  // XTSiz/YTSiz
    unsigned        numXtiles, numYtiles;
    int             maxtilelen;

    Jpeg2000CodingStyle codsty[4];      // main-header defaults, copied per tile
    Jpeg2000QuantStyle  qntsty[4];      // main-header defaults, copied per tile

    int             bit_index;          // bit position used by get_bits()

    int             curtileno;          // tile currently being parsed

    Jpeg2000Tile    *tile;              // numXtiles * numYtiles entries

    /*options parameters*/
    int             reduction_factor;
} Jpeg2000DecoderContext;
  84. /* get_bits functions for JPEG2000 packet bitstream
  85. * It is a get_bit function with a bit-stuffing routine. If the value of the
  86. * byte is 0xFF, the next byte includes an extra zero bit stuffed into the MSB.
  87. * cf. ISO-15444-1:2002 / B.10.1 Bit-stuffing routine */
  88. static int get_bits(Jpeg2000DecoderContext *s, int n)
  89. {
  90. int res = 0;
  91. while (--n >= 0) {
  92. res <<= 1;
  93. if (s->bit_index == 0) {
  94. s->bit_index = 7 + (bytestream2_get_byte(&s->g) != 0xFFu);
  95. }
  96. s->bit_index--;
  97. res |= (bytestream2_peek_byte(&s->g) >> s->bit_index) & 1;
  98. }
  99. return res;
  100. }
  101. static void jpeg2000_flush(Jpeg2000DecoderContext *s)
  102. {
  103. if (bytestream2_get_byte(&s->g) == 0xff)
  104. bytestream2_skip(&s->g, 1);
  105. s->bit_index = 8;
  106. }
/* Decode the value stored in node (tag tree decoding,
 * ISO-15444-1:2002 / B.10.2).  Walks from the leaf up to the first
 * visited ancestor, then refines values back down toward the leaf,
 * reading one bit at a time until the threshold is reached or a
 * 1-bit terminates the current node. */
static int tag_tree_decode(Jpeg2000DecoderContext *s, Jpeg2000TgtNode *node,
                           int threshold)
{
    Jpeg2000TgtNode *stack[30];
    int sp = -1, curval = 0;

    if (!node)
        return AVERROR_INVALIDDATA;

    /* Push unvisited ancestors; stop at the first visited one (or root). */
    while (node && !node->vis) {
        stack[++sp] = node;
        node = node->parent;
    }

    /* Start from the visited ancestor's value, or the topmost pushed node. */
    if (node)
        curval = node->val;
    else
        curval = stack[sp]->val;

    /* Refine each stacked node in turn until threshold is reached. */
    while (curval < threshold && sp >= 0) {
        if (curval < stack[sp]->val)
            curval = stack[sp]->val;
        while (curval < threshold) {
            int ret;
            if ((ret = get_bits(s, 1)) > 0) {
                /* 1-bit: this node's value is now known */
                stack[sp]->vis++;
                break;
            } else if (!ret)
                /* 0-bit: value is larger, keep counting */
                curval++;
            else
                return ret;
        }
        stack[sp]->val = curval;
        sp--;
    }
    return curval;
}
/* Check whether pix_fmt is usable for the parsed image parameters:
 * same component count, per-component depth at least bpc, and chroma
 * subsampling matching log2_chroma_wh (2 bits per dimension per
 * component, packed by get_siz()).  The switch cases fall through on
 * purpose so every component from the highest index down is checked. */
static int pix_fmt_match(enum AVPixelFormat pix_fmt, int components,
                         int bpc, uint32_t log2_chroma_wh, int pal8)
{
    int match = 1;

    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);

    if (desc->nb_components != components) {
        return 0;
    }

    switch (components) {
    case 4:
        match = match && desc->comp[3].depth_minus1 + 1 >= bpc &&
                         (log2_chroma_wh >> 14 & 3) == 0 &&
                         (log2_chroma_wh >> 12 & 3) == 0;
        /* fall through */
    case 3:
        match = match && desc->comp[2].depth_minus1 + 1 >= bpc &&
                         (log2_chroma_wh >> 10 & 3) == desc->log2_chroma_w &&
                         (log2_chroma_wh >>  8 & 3) == desc->log2_chroma_h;
        /* fall through */
    case 2:
        match = match && desc->comp[1].depth_minus1 + 1 >= bpc &&
                         (log2_chroma_wh >> 6 & 3) == desc->log2_chroma_w &&
                         (log2_chroma_wh >> 4 & 3) == desc->log2_chroma_h;
        /* fall through */
    case 1:
        match = match && desc->comp[0].depth_minus1 + 1 >= bpc &&
                         (log2_chroma_wh >> 2 & 3) == 0 &&
                         (log2_chroma_wh      & 3) == 0 &&
                         (desc->flags & AV_PIX_FMT_FLAG_PAL) == pal8 * AV_PIX_FMT_FLAG_PAL;
    }
    return match;
}
// pix_fmts with lower bpp have to be listed before
// similar pix_fmts with higher bpp.
#define RGB_PIXEL_FORMATS  AV_PIX_FMT_PAL8, AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA, \
                           AV_PIX_FMT_RGB48, AV_PIX_FMT_RGBA64

#define GRAY_PIXEL_FORMATS AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A, AV_PIX_FMT_GRAY16

#define YUV_PIXEL_FORMATS  AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUVA420P, \
                           AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA422P, \
                           AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVA444P, \
                           AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9, \
                           AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9, \
                           AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10, \
                           AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10, \
                           AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12, \
                           AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14, \
                           AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16, \
                           AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16

#define XYZ_PIXEL_FORMATS  AV_PIX_FMT_XYZ12

/* Candidate output formats per colour space; searched in order by get_siz(). */
static const enum AVPixelFormat rgb_pix_fmts[]  = {RGB_PIXEL_FORMATS};
static const enum AVPixelFormat gray_pix_fmts[] = {GRAY_PIXEL_FORMATS};
static const enum AVPixelFormat yuv_pix_fmts[]  = {YUV_PIXEL_FORMATS};
static const enum AVPixelFormat xyz_pix_fmts[]  = {XYZ_PIXEL_FORMATS};
static const enum AVPixelFormat all_pix_fmts[]  = {RGB_PIXEL_FORMATS,
                                                   GRAY_PIXEL_FORMATS,
                                                   YUV_PIXEL_FORMATS,
                                                   XYZ_PIXEL_FORMATS};
  194. /* marker segments */
  195. /* get sizes and offsets of image, tiles; number of components */
  196. static int get_siz(Jpeg2000DecoderContext *s)
  197. {
  198. int i;
  199. int ncomponents;
  200. uint32_t log2_chroma_wh = 0;
  201. const enum AVPixelFormat *possible_fmts = NULL;
  202. int possible_fmts_nb = 0;
  203. if (bytestream2_get_bytes_left(&s->g) < 36)
  204. return AVERROR_INVALIDDATA;
  205. s->avctx->profile = bytestream2_get_be16u(&s->g); // Rsiz
  206. s->width = bytestream2_get_be32u(&s->g); // Width
  207. s->height = bytestream2_get_be32u(&s->g); // Height
  208. s->image_offset_x = bytestream2_get_be32u(&s->g); // X0Siz
  209. s->image_offset_y = bytestream2_get_be32u(&s->g); // Y0Siz
  210. s->tile_width = bytestream2_get_be32u(&s->g); // XTSiz
  211. s->tile_height = bytestream2_get_be32u(&s->g); // YTSiz
  212. s->tile_offset_x = bytestream2_get_be32u(&s->g); // XT0Siz
  213. s->tile_offset_y = bytestream2_get_be32u(&s->g); // YT0Siz
  214. ncomponents = bytestream2_get_be16u(&s->g); // CSiz
  215. if (ncomponents <= 0) {
  216. av_log(s->avctx, AV_LOG_ERROR, "Invalid number of components: %d\n",
  217. s->ncomponents);
  218. return AVERROR_INVALIDDATA;
  219. }
  220. if (ncomponents > 4) {
  221. avpriv_request_sample(s->avctx, "Support for %d components",
  222. s->ncomponents);
  223. return AVERROR_PATCHWELCOME;
  224. }
  225. s->ncomponents = ncomponents;
  226. if (s->tile_width <= 0 || s->tile_height <= 0) {
  227. av_log(s->avctx, AV_LOG_ERROR, "Invalid tile dimension %dx%d.\n",
  228. s->tile_width, s->tile_height);
  229. return AVERROR_INVALIDDATA;
  230. }
  231. if (bytestream2_get_bytes_left(&s->g) < 3 * s->ncomponents)
  232. return AVERROR_INVALIDDATA;
  233. for (i = 0; i < s->ncomponents; i++) { // Ssiz_i XRsiz_i, YRsiz_i
  234. uint8_t x = bytestream2_get_byteu(&s->g);
  235. s->cbps[i] = (x & 0x7f) + 1;
  236. s->precision = FFMAX(s->cbps[i], s->precision);
  237. s->sgnd[i] = !!(x & 0x80);
  238. s->cdx[i] = bytestream2_get_byteu(&s->g);
  239. s->cdy[i] = bytestream2_get_byteu(&s->g);
  240. if ( !s->cdx[i] || s->cdx[i] == 3 || s->cdx[i] > 4
  241. || !s->cdy[i] || s->cdy[i] == 3 || s->cdy[i] > 4) {
  242. av_log(s->avctx, AV_LOG_ERROR, "Invalid sample separation %d/%d\n", s->cdx[i], s->cdy[i]);
  243. return AVERROR_INVALIDDATA;
  244. }
  245. log2_chroma_wh |= s->cdy[i] >> 1 << i * 4 | s->cdx[i] >> 1 << i * 4 + 2;
  246. }
  247. s->numXtiles = ff_jpeg2000_ceildiv(s->width - s->tile_offset_x, s->tile_width);
  248. s->numYtiles = ff_jpeg2000_ceildiv(s->height - s->tile_offset_y, s->tile_height);
  249. if (s->numXtiles * (uint64_t)s->numYtiles > INT_MAX/sizeof(*s->tile)) {
  250. s->numXtiles = s->numYtiles = 0;
  251. return AVERROR(EINVAL);
  252. }
  253. s->tile = av_mallocz_array(s->numXtiles * s->numYtiles, sizeof(*s->tile));
  254. if (!s->tile) {
  255. s->numXtiles = s->numYtiles = 0;
  256. return AVERROR(ENOMEM);
  257. }
  258. for (i = 0; i < s->numXtiles * s->numYtiles; i++) {
  259. Jpeg2000Tile *tile = s->tile + i;
  260. tile->comp = av_mallocz(s->ncomponents * sizeof(*tile->comp));
  261. if (!tile->comp)
  262. return AVERROR(ENOMEM);
  263. }
  264. /* compute image size with reduction factor */
  265. s->avctx->width = ff_jpeg2000_ceildivpow2(s->width - s->image_offset_x,
  266. s->reduction_factor);
  267. s->avctx->height = ff_jpeg2000_ceildivpow2(s->height - s->image_offset_y,
  268. s->reduction_factor);
  269. if (s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_2K ||
  270. s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_4K) {
  271. possible_fmts = xyz_pix_fmts;
  272. possible_fmts_nb = FF_ARRAY_ELEMS(xyz_pix_fmts);
  273. } else {
  274. switch (s->colour_space) {
  275. case 16:
  276. possible_fmts = rgb_pix_fmts;
  277. possible_fmts_nb = FF_ARRAY_ELEMS(rgb_pix_fmts);
  278. break;
  279. case 17:
  280. possible_fmts = gray_pix_fmts;
  281. possible_fmts_nb = FF_ARRAY_ELEMS(gray_pix_fmts);
  282. break;
  283. case 18:
  284. possible_fmts = yuv_pix_fmts;
  285. possible_fmts_nb = FF_ARRAY_ELEMS(yuv_pix_fmts);
  286. break;
  287. default:
  288. possible_fmts = all_pix_fmts;
  289. possible_fmts_nb = FF_ARRAY_ELEMS(all_pix_fmts);
  290. break;
  291. }
  292. }
  293. for (i = 0; i < possible_fmts_nb; ++i) {
  294. if (pix_fmt_match(possible_fmts[i], ncomponents, s->precision, log2_chroma_wh, s->pal8)) {
  295. s->avctx->pix_fmt = possible_fmts[i];
  296. break;
  297. }
  298. }
  299. if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
  300. av_log(s->avctx, AV_LOG_ERROR,
  301. "Unknown pix_fmt, profile: %d, colour_space: %d, "
  302. "components: %d, precision: %d, "
  303. "cdx[1]: %d, cdy[1]: %d, cdx[2]: %d, cdy[2]: %d\n",
  304. s->avctx->profile, s->colour_space, ncomponents, s->precision,
  305. ncomponents > 2 ? s->cdx[1] : 0,
  306. ncomponents > 2 ? s->cdy[1] : 0,
  307. ncomponents > 2 ? s->cdx[2] : 0,
  308. ncomponents > 2 ? s->cdy[2] : 0);
  309. }
  310. s->avctx->bits_per_raw_sample = s->precision;
  311. return 0;
  312. }
  313. /* get common part for COD and COC segments */
  314. static int get_cox(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c)
  315. {
  316. uint8_t byte;
  317. if (bytestream2_get_bytes_left(&s->g) < 5)
  318. return AVERROR_INVALIDDATA;
  319. /* nreslevels = number of resolution levels
  320. = number of decomposition level +1 */
  321. c->nreslevels = bytestream2_get_byteu(&s->g) + 1;
  322. if (c->nreslevels >= JPEG2000_MAX_RESLEVELS) {
  323. av_log(s->avctx, AV_LOG_ERROR, "nreslevels %d is invalid\n", c->nreslevels);
  324. return AVERROR_INVALIDDATA;
  325. }
  326. /* compute number of resolution levels to decode */
  327. if (c->nreslevels < s->reduction_factor)
  328. c->nreslevels2decode = 1;
  329. else
  330. c->nreslevels2decode = c->nreslevels - s->reduction_factor;
  331. c->log2_cblk_width = (bytestream2_get_byteu(&s->g) & 15) + 2; // cblk width
  332. c->log2_cblk_height = (bytestream2_get_byteu(&s->g) & 15) + 2; // cblk height
  333. if (c->log2_cblk_width > 10 || c->log2_cblk_height > 10 ||
  334. c->log2_cblk_width + c->log2_cblk_height > 12) {
  335. av_log(s->avctx, AV_LOG_ERROR, "cblk size invalid\n");
  336. return AVERROR_INVALIDDATA;
  337. }
  338. if (c->log2_cblk_width > 6 || c->log2_cblk_height > 6) {
  339. avpriv_request_sample(s->avctx, "cblk size > 64");
  340. return AVERROR_PATCHWELCOME;
  341. }
  342. c->cblk_style = bytestream2_get_byteu(&s->g);
  343. if (c->cblk_style != 0) { // cblk style
  344. av_log(s->avctx, AV_LOG_WARNING, "extra cblk styles %X\n", c->cblk_style);
  345. }
  346. c->transform = bytestream2_get_byteu(&s->g); // DWT transformation type
  347. /* set integer 9/7 DWT in case of BITEXACT flag */
  348. if ((s->avctx->flags & CODEC_FLAG_BITEXACT) && (c->transform == FF_DWT97))
  349. c->transform = FF_DWT97_INT;
  350. if (c->csty & JPEG2000_CSTY_PREC) {
  351. int i;
  352. for (i = 0; i < c->nreslevels; i++) {
  353. byte = bytestream2_get_byte(&s->g);
  354. c->log2_prec_widths[i] = byte & 0x0F; // precinct PPx
  355. c->log2_prec_heights[i] = (byte >> 4) & 0x0F; // precinct PPy
  356. }
  357. } else {
  358. memset(c->log2_prec_widths , 15, sizeof(c->log2_prec_widths ));
  359. memset(c->log2_prec_heights, 15, sizeof(c->log2_prec_heights));
  360. }
  361. return 0;
  362. }
  363. /* get coding parameters for a particular tile or whole image*/
  364. static int get_cod(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c,
  365. uint8_t *properties)
  366. {
  367. Jpeg2000CodingStyle tmp;
  368. int compno, ret;
  369. if (bytestream2_get_bytes_left(&s->g) < 5)
  370. return AVERROR_INVALIDDATA;
  371. tmp.csty = bytestream2_get_byteu(&s->g);
  372. // get progression order
  373. tmp.prog_order = bytestream2_get_byteu(&s->g);
  374. tmp.nlayers = bytestream2_get_be16u(&s->g);
  375. tmp.mct = bytestream2_get_byteu(&s->g); // multiple component transformation
  376. if (tmp.mct && s->ncomponents < 3) {
  377. av_log(s->avctx, AV_LOG_ERROR,
  378. "MCT %d with too few components (%d)\n",
  379. tmp.mct, s->ncomponents);
  380. return AVERROR_INVALIDDATA;
  381. }
  382. if ((ret = get_cox(s, &tmp)) < 0)
  383. return ret;
  384. for (compno = 0; compno < s->ncomponents; compno++)
  385. if (!(properties[compno] & HAD_COC))
  386. memcpy(c + compno, &tmp, sizeof(tmp));
  387. return 0;
  388. }
  389. /* Get coding parameters for a component in the whole image or a
  390. * particular tile. */
  391. static int get_coc(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c,
  392. uint8_t *properties)
  393. {
  394. int compno, ret;
  395. if (bytestream2_get_bytes_left(&s->g) < 2)
  396. return AVERROR_INVALIDDATA;
  397. compno = bytestream2_get_byteu(&s->g);
  398. if (compno >= s->ncomponents) {
  399. av_log(s->avctx, AV_LOG_ERROR,
  400. "Invalid compno %d. There are %d components in the image.\n",
  401. compno, s->ncomponents);
  402. return AVERROR_INVALIDDATA;
  403. }
  404. c += compno;
  405. c->csty = bytestream2_get_byteu(&s->g);
  406. if ((ret = get_cox(s, c)) < 0)
  407. return ret;
  408. properties[compno] |= HAD_COC;
  409. return 0;
  410. }
/* Get common part for QCD and QCC segments: guard bits, quantization
 * style and per-subband exponents/mantissas.  n is the number of bytes
 * remaining in the marker segment. */
static int get_qcx(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q)
{
    int i, x;

    if (bytestream2_get_bytes_left(&s->g) < 1)
        return AVERROR_INVALIDDATA;

    x = bytestream2_get_byteu(&s->g); // Sqcd

    q->nguardbits = x >> 5;
    q->quantsty   = x & 0x1f;

    if (q->quantsty == JPEG2000_QSTY_NONE) {
        /* no quantization: one exponent byte per subband */
        n -= 3;
        if (bytestream2_get_bytes_left(&s->g) < n ||
            n > JPEG2000_MAX_DECLEVELS*3)
            return AVERROR_INVALIDDATA;
        for (i = 0; i < n; i++)
            q->expn[i] = bytestream2_get_byteu(&s->g) >> 3;
    } else if (q->quantsty == JPEG2000_QSTY_SI) {
        /* scalar implicit: one (exponent, mantissa) pair, derived for the
         * remaining subbands */
        if (bytestream2_get_bytes_left(&s->g) < 2)
            return AVERROR_INVALIDDATA;
        x = bytestream2_get_be16u(&s->g);
        q->expn[0] = x >> 11;
        q->mant[0] = x & 0x7ff;
        for (i = 1; i < JPEG2000_MAX_DECLEVELS * 3; i++) {
            int curexpn = FFMAX(0, q->expn[0] - (i - 1) / 3);
            q->expn[i] = curexpn;
            q->mant[i] = q->mant[0];
        }
    } else {
        /* scalar explicit: one 16-bit (exponent, mantissa) pair per subband */
        n = (n - 3) >> 1;
        if (bytestream2_get_bytes_left(&s->g) < 2 * n ||
            n > JPEG2000_MAX_DECLEVELS*3)
            return AVERROR_INVALIDDATA;
        for (i = 0; i < n; i++) {
            x = bytestream2_get_be16u(&s->g);
            q->expn[i] = x >> 11;
            q->mant[i] = x & 0x7ff;
        }
    }
    return 0;
}
  451. /* Get quantization parameters for a particular tile or a whole image. */
  452. static int get_qcd(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q,
  453. uint8_t *properties)
  454. {
  455. Jpeg2000QuantStyle tmp;
  456. int compno, ret;
  457. if ((ret = get_qcx(s, n, &tmp)) < 0)
  458. return ret;
  459. for (compno = 0; compno < s->ncomponents; compno++)
  460. if (!(properties[compno] & HAD_QCC))
  461. memcpy(q + compno, &tmp, sizeof(tmp));
  462. return 0;
  463. }
  464. /* Get quantization parameters for a component in the whole image
  465. * on in a particular tile. */
  466. static int get_qcc(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q,
  467. uint8_t *properties)
  468. {
  469. int compno;
  470. if (bytestream2_get_bytes_left(&s->g) < 1)
  471. return AVERROR_INVALIDDATA;
  472. compno = bytestream2_get_byteu(&s->g);
  473. if (compno >= s->ncomponents) {
  474. av_log(s->avctx, AV_LOG_ERROR,
  475. "Invalid compno %d. There are %d components in the image.\n",
  476. compno, s->ncomponents);
  477. return AVERROR_INVALIDDATA;
  478. }
  479. properties[compno] |= HAD_QCC;
  480. return get_qcx(s, n - 1, q + compno);
  481. }
  482. /* Get start of tile segment. */
  483. static int get_sot(Jpeg2000DecoderContext *s, int n)
  484. {
  485. Jpeg2000TilePart *tp;
  486. uint16_t Isot;
  487. uint32_t Psot;
  488. uint8_t TPsot;
  489. if (bytestream2_get_bytes_left(&s->g) < 8)
  490. return AVERROR_INVALIDDATA;
  491. s->curtileno = 0;
  492. Isot = bytestream2_get_be16u(&s->g); // Isot
  493. if (Isot >= s->numXtiles * s->numYtiles)
  494. return AVERROR_INVALIDDATA;
  495. s->curtileno = Isot;
  496. Psot = bytestream2_get_be32u(&s->g); // Psot
  497. TPsot = bytestream2_get_byteu(&s->g); // TPsot
  498. /* Read TNSot but not used */
  499. bytestream2_get_byteu(&s->g); // TNsot
  500. if (Psot > bytestream2_get_bytes_left(&s->g) + n + 2) {
  501. av_log(s->avctx, AV_LOG_ERROR, "Psot %d too big\n", Psot);
  502. return AVERROR_INVALIDDATA;
  503. }
  504. if (TPsot >= FF_ARRAY_ELEMS(s->tile[Isot].tile_part)) {
  505. avpriv_request_sample(s->avctx, "Support for %d components", TPsot);
  506. return AVERROR_PATCHWELCOME;
  507. }
  508. s->tile[Isot].tp_idx = TPsot;
  509. tp = s->tile[Isot].tile_part + TPsot;
  510. tp->tile_index = Isot;
  511. tp->tp_end = s->g.buffer + Psot - n - 2;
  512. if (!TPsot) {
  513. Jpeg2000Tile *tile = s->tile + s->curtileno;
  514. /* copy defaults */
  515. memcpy(tile->codsty, s->codsty, s->ncomponents * sizeof(Jpeg2000CodingStyle));
  516. memcpy(tile->qntsty, s->qntsty, s->ncomponents * sizeof(Jpeg2000QuantStyle));
  517. }
  518. return 0;
  519. }
/* Tile-part lengths: see ISO 15444-1:2002, section A.7.1
 * Used to know the number of tile parts and lengths.
 * There may be multiple TLMs in the header.
 * TODO: The function is not used for tile-parts management, nor anywhere else.
 * It can be useful to allocate memory for tile parts, before managing the SOT
 * markers. Parsing the TLM header is needed to increment the input header
 * buffer.
 * This marker is mandatory for DCI. */
static uint8_t get_tlm(Jpeg2000DecoderContext *s, int n)
{
    uint8_t Stlm, ST, SP, tile_tlm, i;

    bytestream2_get_byte(&s->g);               /* Ztlm: skipped */
    Stlm = bytestream2_get_byte(&s->g);

    /* ST: size in bytes of each Ttlm (tile index) field */
    // too complex ? ST = ((Stlm >> 4) & 0x01) + ((Stlm >> 4) & 0x02);
    ST = (Stlm >> 4) & 0x03;
    // TODO: Manage case of ST = 0b11 --> raise error

    /* SP: 0 -> 16-bit Ptlm lengths, 1 -> 32-bit */
    SP       = (Stlm >> 6) & 0x01;
    /* number of (Ttlm, Ptlm) records in this marker */
    tile_tlm = (n - 4) / ((SP + 1) * 2 + ST);

    /* the records are only skipped, not stored (see TODO above) */
    for (i = 0; i < tile_tlm; i++) {
        switch (ST) {
        case 0:
            break;
        case 1:
            bytestream2_get_byte(&s->g);
            break;
        case 2:
            bytestream2_get_be16(&s->g);
            break;
        case 3:
            bytestream2_get_be32(&s->g);
            break;
        }
        if (SP == 0) {
            bytestream2_get_be16(&s->g);
        } else {
            bytestream2_get_be32(&s->g);
        }
    }
    return 0;
}
/* Initialize one tile: compute each component's coordinates on the
 * reference grid (clipped to the image area), apply the reduction
 * factor and initialize the component's resolution levels/bands.
 * Returns 0 on success, a negative AVERROR code on failure. */
static int init_tile(Jpeg2000DecoderContext *s, int tileno)
{
    int compno;
    int tilex = tileno % s->numXtiles;
    int tiley = tileno / s->numXtiles;
    Jpeg2000Tile *tile = s->tile + tileno;

    if (!tile->comp)
        return AVERROR(ENOMEM);

    for (compno = 0; compno < s->ncomponents; compno++) {
        Jpeg2000Component *comp = tile->comp + compno;
        Jpeg2000CodingStyle *codsty = tile->codsty + compno;
        Jpeg2000QuantStyle  *qntsty = tile->qntsty + compno;
        int ret; // global bandno

        /* tile extents on the reference grid, clipped to the image area */
        comp->coord_o[0][0] = FFMAX(tilex       * s->tile_width  + s->tile_offset_x, s->image_offset_x);
        comp->coord_o[0][1] = FFMIN((tilex + 1) * s->tile_width  + s->tile_offset_x, s->width);
        comp->coord_o[1][0] = FFMAX(tiley       * s->tile_height + s->tile_offset_y, s->image_offset_y);
        comp->coord_o[1][1] = FFMIN((tiley + 1) * s->tile_height + s->tile_offset_y, s->height);

        /* extents after the requested resolution reduction */
        comp->coord[0][0] = ff_jpeg2000_ceildivpow2(comp->coord_o[0][0], s->reduction_factor);
        comp->coord[0][1] = ff_jpeg2000_ceildivpow2(comp->coord_o[0][1], s->reduction_factor);
        comp->coord[1][0] = ff_jpeg2000_ceildivpow2(comp->coord_o[1][0], s->reduction_factor);
        comp->coord[1][1] = ff_jpeg2000_ceildivpow2(comp->coord_o[1][1], s->reduction_factor);

        if (ret = ff_jpeg2000_init_component(comp, codsty, qntsty,
                                             s->cbps[compno], s->cdx[compno],
                                             s->cdy[compno], s->avctx))
            return ret;
    }
    return 0;
}
  588. /* Read the number of coding passes. */
  589. static int getnpasses(Jpeg2000DecoderContext *s)
  590. {
  591. int num;
  592. if (!get_bits(s, 1))
  593. return 1;
  594. if (!get_bits(s, 1))
  595. return 2;
  596. if ((num = get_bits(s, 2)) != 3)
  597. return num < 0 ? num : 3 + num;
  598. if ((num = get_bits(s, 5)) != 31)
  599. return num < 0 ? num : 6 + num;
  600. num = get_bits(s, 7);
  601. return num < 0 ? num : 37 + num;
  602. }
  603. static int getlblockinc(Jpeg2000DecoderContext *s)
  604. {
  605. int res = 0, ret;
  606. while (ret = get_bits(s, 1)) {
  607. if (ret < 0)
  608. return ret;
  609. res++;
  610. }
  611. return res;
  612. }
/* Decode one packet header and copy the code-block payload bytes into
 * the per-code-block buffers (ISO-15444-1:2002 / B.10): inclusion,
 * zero-bitplane and pass counts first, then the actual data.
 * Returns 0 on success, a negative AVERROR code on failure. */
static int jpeg2000_decode_packet(Jpeg2000DecoderContext *s,
                                  Jpeg2000CodingStyle *codsty,
                                  Jpeg2000ResLevel *rlevel, int precno,
                                  int layno, uint8_t *expn, int numgbits)
{
    int bandno, cblkno, ret, nb_code_blocks;

    /* zero-length packet signalled by a leading 0-bit */
    if (!(ret = get_bits(s, 1))) {
        jpeg2000_flush(s);
        return 0;
    } else if (ret < 0)
        return ret;

    for (bandno = 0; bandno < rlevel->nbands; bandno++) {
        Jpeg2000Band *band = rlevel->band + bandno;
        Jpeg2000Prec *prec = band->prec + precno;

        /* skip empty bands */
        if (band->coord[0][0] == band->coord[0][1] ||
            band->coord[1][0] == band->coord[1][1])
            continue;
        nb_code_blocks = prec->nb_codeblocks_height *
                         prec->nb_codeblocks_width;
        for (cblkno = 0; cblkno < nb_code_blocks; cblkno++) {
            Jpeg2000Cblk *cblk = prec->cblk + cblkno;
            int incl, newpasses, llen;

            /* inclusion: one bit once the block has been seen, otherwise
             * via the inclusion tag tree */
            if (cblk->npasses)
                incl = get_bits(s, 1);
            else
                incl = tag_tree_decode(s, prec->cblkincl + cblkno, layno + 1) == layno;
            if (!incl)
                continue;
            else if (incl < 0)
                return incl;

            /* first inclusion: decode the number of zero bitplanes */
            if (!cblk->npasses) {
                int v = expn[bandno] + numgbits - 1 -
                        tag_tree_decode(s, prec->zerobits + cblkno, 100);
                if (v < 0) {
                    av_log(s->avctx, AV_LOG_ERROR,
                           "nonzerobits %d invalid\n", v);
                    return AVERROR_INVALIDDATA;
                }
                cblk->nonzerobits = v;
            }
            if ((newpasses = getnpasses(s)) < 0)
                return newpasses;
            if ((llen = getlblockinc(s)) < 0)
                return llen;
            cblk->lblock += llen;
            /* length of the new data for this code-block, coded with
             * av_log2(newpasses) + lblock bits */
            if ((ret = get_bits(s, av_log2(newpasses) + cblk->lblock)) < 0)
                return ret;
            if (ret > sizeof(cblk->data)) {
                avpriv_request_sample(s->avctx,
                                      "Block with lengthinc greater than %zu",
                                      sizeof(cblk->data));
                return AVERROR_PATCHWELCOME;
            }
            cblk->lengthinc = ret;
            cblk->npasses  += newpasses;
        }
    }
    jpeg2000_flush(s);

    if (codsty->csty & JPEG2000_CSTY_EPH) {
        if (bytestream2_peek_be16(&s->g) == JPEG2000_EPH)
            bytestream2_skip(&s->g, 2);
        else
            av_log(s->avctx, AV_LOG_ERROR, "EPH marker not found.\n");
    }

    /* second sweep: copy the announced payload bytes for each block */
    for (bandno = 0; bandno < rlevel->nbands; bandno++) {
        Jpeg2000Band *band = rlevel->band + bandno;
        Jpeg2000Prec *prec = band->prec + precno;

        nb_code_blocks = prec->nb_codeblocks_height * prec->nb_codeblocks_width;
        for (cblkno = 0; cblkno < nb_code_blocks; cblkno++) {
            Jpeg2000Cblk *cblk = prec->cblk + cblkno;
            /* the "+ 2" keeps room for the termination bytes appended by
             * the tier-1 decoder */
            if (   bytestream2_get_bytes_left(&s->g) < cblk->lengthinc
                || sizeof(cblk->data) < cblk->length + cblk->lengthinc + 2
            )
                return AVERROR_INVALIDDATA;
            bytestream2_get_bufferu(&s->g, cblk->data + cblk->length, cblk->lengthinc);
            cblk->length   += cblk->lengthinc;
            cblk->lengthinc = 0;
        }
    }
    return 0;
}
/* Decode all packets of one tile, iterating in the tile's progression
 * order (only LRCP and CPRL are implemented; RLCP falls back to LRCP
 * iteration after requesting a sample).
 * Returns 0 on success, a negative AVERROR code on failure. */
static int jpeg2000_decode_packets(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile)
{
    int ret = 0;
    int layno, reslevelno, compno, precno, ok_reslevel;
    int x, y;

    s->bit_index = 8;
    switch (tile->codsty[0].prog_order) {
    case JPEG2000_PGOD_RLCP:
        avpriv_request_sample(s->avctx, "Progression order RLCP");
        /* fall through: decoded with LRCP iteration below */

    case JPEG2000_PGOD_LRCP:
        /* layer -> resolution level -> component -> precinct */
        for (layno = 0; layno < tile->codsty[0].nlayers; layno++) {
            ok_reslevel = 1;
            for (reslevelno = 0; ok_reslevel; reslevelno++) {
                ok_reslevel = 0;
                for (compno = 0; compno < s->ncomponents; compno++) {
                    Jpeg2000CodingStyle *codsty = tile->codsty + compno;
                    Jpeg2000QuantStyle *qntsty  = tile->qntsty + compno;
                    if (reslevelno < codsty->nreslevels) {
                        Jpeg2000ResLevel *rlevel = tile->comp[compno].reslevel +
                                                   reslevelno;
                        ok_reslevel = 1;
                        for (precno = 0; precno < rlevel->num_precincts_x * rlevel->num_precincts_y; precno++)
                            if ((ret = jpeg2000_decode_packet(s,
                                                              codsty, rlevel,
                                                              precno, layno,
                                                              qntsty->expn + (reslevelno ? 3 * (reslevelno - 1) + 1 : 0),
                                                              qntsty->nguardbits)) < 0)
                                return ret;
                    }
                }
            }
        }
        break;
    case JPEG2000_PGOD_CPRL:
        /* component -> position -> resolution level -> layer */
        for (compno = 0; compno < s->ncomponents; compno++) {
            Jpeg2000CodingStyle *codsty = tile->codsty + compno;
            Jpeg2000QuantStyle *qntsty  = tile->qntsty + compno;

            /* Set bit stream buffer address according to tile-part.
             * For DCinema one tile-part per component, so can be
             * indexed by component. */
            s->g = tile->tile_part[compno].tpg;

            /* Position loop (y axis)
             * TODO: Automate computing of step 256.
             * Fixed here, but to be computed before entering here. */
            for (y = 0; y < s->height; y += 256) {
                /* Position loop (x axis)
                 * TODO: automate computing of step 256.
                 * Fixed here, but to be computed before entering here. */
                for (x = 0; x < s->width; x += 256) {
                    for (reslevelno = 0; reslevelno < codsty->nreslevels; reslevelno++) {
                        uint16_t prcx, prcy;
                        uint8_t reducedresno = codsty->nreslevels - 1 -reslevelno; //  ==> N_L - r
                        Jpeg2000ResLevel *rlevel = tile->comp[compno].reslevel + reslevelno;

                        /* only positions on a precinct boundary at this
                         * resolution produce a packet */
                        if (!((y % (1 << (rlevel->log2_prec_height + reducedresno)) == 0) ||
                              (y == 0))) // TODO: 2nd condition simplified as try0 always =0 for dcinema
                            continue;

                        if (!((x % (1 << (rlevel->log2_prec_width + reducedresno)) == 0) ||
                              (x == 0))) // TODO: 2nd condition simplified as try0 always =0 for dcinema
                            continue;

                        // check if a precinct exists
                        prcx   = ff_jpeg2000_ceildivpow2(x, reducedresno) >> rlevel->log2_prec_width;
                        prcy   = ff_jpeg2000_ceildivpow2(y, reducedresno) >> rlevel->log2_prec_height;
                        precno = prcx + rlevel->num_precincts_x * prcy;
                        for (layno = 0; layno < tile->codsty[0].nlayers; layno++) {
                            if ((ret = jpeg2000_decode_packet(s, codsty, rlevel,
                                                              precno, layno,
                                                              qntsty->expn + (reslevelno ? 3 * (reslevelno - 1) + 1 : 0),
                                                              qntsty->nguardbits)) < 0)
                                return ret;
                        }
                    }
                }
            }
        }
        break;
    case JPEG2000_PGOD_RPCL:
        avpriv_request_sample(s->avctx, "Progression order RPCL");
        ret = AVERROR_PATCHWELCOME;
        break;
    case JPEG2000_PGOD_PCRL:
        avpriv_request_sample(s->avctx, "Progression order PCRL");
        ret = AVERROR_PATCHWELCOME;
        break;
    default:
        break;
    }

    /* EOC marker reached */
    bytestream2_skip(&s->g, 2);

    return ret;
}
/* TIER-1 routines */

/* Significance propagation pass (ISO-15444-1:2002 / D.3.1): for each
 * not-yet-significant coefficient with at least one significant
 * neighbour, decode its significance bit (and, if it becomes
 * significant, its sign) with the MQ arithmetic coder. */
static void decode_sigpass(Jpeg2000T1Context *t1, int width, int height,
                           int bpno, int bandno, int bpass_csty_symbol,
                           int vert_causal_ctx_csty_symbol)
{
    int mask = 3 << (bpno - 1), y0, x, y;

    /* code-blocks are scanned in stripes of 4 rows, column by column */
    for (y0 = 0; y0 < height; y0 += 4)
        for (x = 0; x < width; x++)
            for (y = y0; y < height && y < y0 + 4; y++) {
                if ((t1->flags[y+1][x+1] & JPEG2000_T1_SIG_NB)
                    && !(t1->flags[y+1][x+1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS))) {
                    int flags_mask = -1;
                    /* vertically causal context: ignore neighbours from
                     * the next stripe on the last row of this stripe */
                    if (vert_causal_ctx_csty_symbol && y == y0 + 3)
                        flags_mask &= ~(JPEG2000_T1_SIG_S | JPEG2000_T1_SIG_SW | JPEG2000_T1_SIG_SE);
                    if (ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ff_jpeg2000_getsigctxno(t1->flags[y+1][x+1] & flags_mask, bandno))) {
                        /* coefficient became significant: decode the sign */
                        int xorbit, ctxno = ff_jpeg2000_getsgnctxno(t1->flags[y+1][x+1], &xorbit);
                        if (bpass_csty_symbol)
                             t1->data[y][x] = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ctxno) ? -mask : mask;
                        else
                             t1->data[y][x] = (ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ctxno) ^ xorbit) ?
                                              -mask : mask;

                        ff_jpeg2000_set_significance(t1, x, y,
                                                     t1->data[y][x] < 0);
                    }
                    /* mark as visited so later passes skip it this bitplane */
                    t1->flags[y + 1][x + 1] |= JPEG2000_T1_VIS;
                }
            }
}
  812. static void decode_refpass(Jpeg2000T1Context *t1, int width, int height,
  813. int bpno)
  814. {
  815. int phalf, nhalf;
  816. int y0, x, y;
  817. phalf = 1 << (bpno - 1);
  818. nhalf = -phalf;
  819. for (y0 = 0; y0 < height; y0 += 4)
  820. for (x = 0; x < width; x++)
  821. for (y = y0; y < height && y < y0 + 4; y++)
  822. if ((t1->flags[y + 1][x + 1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS)) == JPEG2000_T1_SIG) {
  823. int ctxno = ff_jpeg2000_getrefctxno(t1->flags[y + 1][x + 1]);
  824. int r = ff_mqc_decode(&t1->mqc,
  825. t1->mqc.cx_states + ctxno)
  826. ? phalf : nhalf;
  827. t1->data[y][x] += t1->data[y][x] < 0 ? -r : r;
  828. t1->flags[y + 1][x + 1] |= JPEG2000_T1_REF;
  829. }
  830. }
/* TIER-1 clean-up pass (CUP): decodes every coefficient not handled by the
 * significance or refinement pass of this bit-plane.  Fully insignificant
 * 4-sample columns are run-length coded.  See ISO/IEC 15444-1:2002 D.3.3. */
static void decode_clnpass(Jpeg2000DecoderContext *s, Jpeg2000T1Context *t1,
                           int width, int height, int bpno, int bandno,
                           int seg_symbols, int vert_causal_ctx_csty_symbol)
{
    int mask = 3 << (bpno - 1), y0, x, y, runlen, dec;

    for (y0 = 0; y0 < height; y0 += 4) {
        for (x = 0; x < width; x++) {
            /* a full 4-row column with no significant/visited samples and
             * no significant neighbours is run-length coded */
            if (y0 + 3 < height &&
                !((t1->flags[y0 + 1][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
                  (t1->flags[y0 + 2][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
                  (t1->flags[y0 + 3][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
                  (t1->flags[y0 + 4][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)))) {
                /* run-length bit = 0: the whole column stays zero */
                if (!ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_RL))
                    continue;
                /* 2-bit index of the first significant sample in the run */
                runlen = ff_mqc_decode(&t1->mqc,
                                       t1->mqc.cx_states + MQC_CX_UNI);
                runlen = (runlen << 1) | ff_mqc_decode(&t1->mqc,
                                                       t1->mqc.cx_states +
                                                       MQC_CX_UNI);
                dec = 1;  /* that first sample is known significant */
            } else {
                runlen = 0;
                dec = 0;
            }

            for (y = y0 + runlen; y < y0 + 4 && y < height; y++) {
                if (!dec) {
                    if (!(t1->flags[y+1][x+1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS))) {
                        int flags_mask = -1;
                        /* vertically-causal context restriction on the
                         * last row of the stripe */
                        if (vert_causal_ctx_csty_symbol && y == y0 + 3)
                            flags_mask &= ~(JPEG2000_T1_SIG_S | JPEG2000_T1_SIG_SW | JPEG2000_T1_SIG_SE);
                        dec = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ff_jpeg2000_getsigctxno(t1->flags[y+1][x+1] & flags_mask,
                                                                                                  bandno));
                    }
                }
                if (dec) {
                    /* decode sign and mark the sample significant */
                    int xorbit;
                    int ctxno = ff_jpeg2000_getsgnctxno(t1->flags[y + 1][x + 1],
                                                        &xorbit);
                    t1->data[y][x] = (ff_mqc_decode(&t1->mqc,
                                                    t1->mqc.cx_states + ctxno) ^
                                      xorbit)
                                     ? -mask : mask;
                    ff_jpeg2000_set_significance(t1, x, y, t1->data[y][x] < 0);
                }
                dec = 0;
                /* clear the visited flag for the next bit-plane */
                t1->flags[y + 1][x + 1] &= ~JPEG2000_T1_VIS;
            }
        }
    }
    /* optional segmentation symbol "1010" terminates the pass */
    if (seg_symbols) {
        int val;
        val = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
        val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
        val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
        val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
        if (val != 0xa)
            av_log(s->avctx, AV_LOG_ERROR,
                   "Segmentation symbol value incorrect\n");
    }
}
/* Decode one code-block: run the three TIER-1 coding passes
 * (significance, refinement, clean-up) over successive bit-planes,
 * starting from the most significant non-zero bit-plane.
 * Returns 0 (also for an empty code-block). */
static int decode_cblk(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *codsty,
                       Jpeg2000T1Context *t1, Jpeg2000Cblk *cblk,
                       int width, int height, int bandpos)
{
    /* pass_t cycles 0 (sig) -> 1 (ref) -> 2 (cln); the very first pass of
     * a code-block is always a clean-up pass, hence pass_t starts at 2 */
    int passno = cblk->npasses, pass_t = 2, bpno = cblk->nonzerobits - 1, y;
    int clnpass_cnt = 0;
    int bpass_csty_symbol = codsty->cblk_style & JPEG2000_CBLK_BYPASS;
    int vert_causal_ctx_csty_symbol = codsty->cblk_style & JPEG2000_CBLK_VSC;

    av_assert0(width <= JPEG2000_MAX_CBLKW);
    av_assert0(height <= JPEG2000_MAX_CBLKH);

    for (y = 0; y < height; y++)
        memset(t1->data[y], 0, width * sizeof(**t1->data));

    /* If code-block contains no compressed data: nothing to do. */
    if (!cblk->length)
        return 0;
    for (y = 0; y < height + 2; y++)
        memset(t1->flags[y], 0, (width + 2) * sizeof(**t1->flags));

    /* pad the byte stream with the 0xFFFF terminator the MQ decoder
     * expects (buffer is over-allocated for this) */
    cblk->data[cblk->length] = 0xff;
    cblk->data[cblk->length+1] = 0xff;
    ff_mqc_initdec(&t1->mqc, cblk->data);

    while (passno--) {
        switch(pass_t) {
        case 0:
            decode_sigpass(t1, width, height, bpno + 1, bandpos,
                           bpass_csty_symbol && (clnpass_cnt >= 4),
                           vert_causal_ctx_csty_symbol);
            break;
        case 1:
            decode_refpass(t1, width, height, bpno + 1);
            /* BYPASS style: restart the MQ decoder once 4 clean-up
             * passes have been decoded */
            if (bpass_csty_symbol && clnpass_cnt >= 4)
                ff_mqc_initdec(&t1->mqc, cblk->data);
            break;
        case 2:
            decode_clnpass(s, t1, width, height, bpno + 1, bandpos,
                           codsty->cblk_style & JPEG2000_CBLK_SEGSYM,
                           vert_causal_ctx_csty_symbol);
            clnpass_cnt = clnpass_cnt + 1;
            if (bpass_csty_symbol && clnpass_cnt >= 4)
                ff_mqc_initdec(&t1->mqc, cblk->data);
            break;
        }

        pass_t++;
        if (pass_t == 3) {
            /* all three passes done: step down to the next bit-plane */
            bpno--;
            pass_t = 0;
        }
    }
    return 0;
}
  940. /* TODO: Verify dequantization for lossless case
  941. * comp->data can be float or int
  942. * band->stepsize can be float or int
  943. * depending on the type of DWT transformation.
  944. * see ISO/IEC 15444-1:2002 A.6.1 */
  945. /* Float dequantization of a codeblock.*/
  946. static void dequantization_float(int x, int y, Jpeg2000Cblk *cblk,
  947. Jpeg2000Component *comp,
  948. Jpeg2000T1Context *t1, Jpeg2000Band *band)
  949. {
  950. int i, j;
  951. int w = cblk->coord[0][1] - cblk->coord[0][0];
  952. for (j = 0; j < (cblk->coord[1][1] - cblk->coord[1][0]); ++j) {
  953. float *datap = &comp->f_data[(comp->coord[0][1] - comp->coord[0][0]) * (y + j) + x];
  954. int *src = t1->data[j];
  955. for (i = 0; i < w; ++i)
  956. datap[i] = src[i] * band->f_stepsize;
  957. }
  958. }
  959. /* Integer dequantization of a codeblock.*/
  960. static void dequantization_int(int x, int y, Jpeg2000Cblk *cblk,
  961. Jpeg2000Component *comp,
  962. Jpeg2000T1Context *t1, Jpeg2000Band *band)
  963. {
  964. int i, j;
  965. int w = cblk->coord[0][1] - cblk->coord[0][0];
  966. for (j = 0; j < (cblk->coord[1][1] - cblk->coord[1][0]); ++j) {
  967. int32_t *datap = &comp->i_data[(comp->coord[0][1] - comp->coord[0][0]) * (y + j) + x];
  968. int *src = t1->data[j];
  969. for (i = 0; i < w; ++i)
  970. datap[i] = (src[i] * band->i_stepsize + (1 << 14)) >> 15;
  971. }
  972. }
  973. /* Inverse ICT parameters in float and integer.
  974. * int value = (float value) * (1<<16) */
/* ICT coefficients for YCbCr -> RGB, float form:
 * { Cr->R, Cb->G, Cr->G, Cb->B } per ISO/IEC 15444-1:2002 G.3. */
static const float f_ict_params[4] = {
    1.402f,
    0.34413f,
    0.71414f,
    1.772f
};

/* Same coefficients in 16.16 fixed point (value * (1 << 16)). */
static const int i_ict_params[4] = {
    91881,
    22553,
    46802,
    116130
};
  987. static void mct_decode(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile)
  988. {
  989. int i, csize = 1;
  990. int32_t *src[3], i0, i1, i2;
  991. float *srcf[3], i0f, i1f, i2f;
  992. for (i = 0; i < 3; i++)
  993. if (tile->codsty[0].transform == FF_DWT97)
  994. srcf[i] = tile->comp[i].f_data;
  995. else
  996. src [i] = tile->comp[i].i_data;
  997. for (i = 0; i < 2; i++)
  998. csize *= tile->comp[0].coord[i][1] - tile->comp[0].coord[i][0];
  999. switch (tile->codsty[0].transform) {
  1000. case FF_DWT97:
  1001. for (i = 0; i < csize; i++) {
  1002. i0f = *srcf[0] + (f_ict_params[0] * *srcf[2]);
  1003. i1f = *srcf[0] - (f_ict_params[1] * *srcf[1])
  1004. - (f_ict_params[2] * *srcf[2]);
  1005. i2f = *srcf[0] + (f_ict_params[3] * *srcf[1]);
  1006. *srcf[0]++ = i0f;
  1007. *srcf[1]++ = i1f;
  1008. *srcf[2]++ = i2f;
  1009. }
  1010. break;
  1011. case FF_DWT97_INT:
  1012. for (i = 0; i < csize; i++) {
  1013. i0 = *src[0] + (((i_ict_params[0] * *src[2]) + (1 << 15)) >> 16);
  1014. i1 = *src[0] - (((i_ict_params[1] * *src[1]) + (1 << 15)) >> 16)
  1015. - (((i_ict_params[2] * *src[2]) + (1 << 15)) >> 16);
  1016. i2 = *src[0] + (((i_ict_params[3] * *src[1]) + (1 << 15)) >> 16);
  1017. *src[0]++ = i0;
  1018. *src[1]++ = i1;
  1019. *src[2]++ = i2;
  1020. }
  1021. break;
  1022. case FF_DWT53:
  1023. for (i = 0; i < csize; i++) {
  1024. i1 = *src[0] - (*src[2] + *src[1] >> 2);
  1025. i0 = i1 + *src[2];
  1026. i2 = i1 + *src[1];
  1027. *src[0]++ = i0;
  1028. *src[1]++ = i1;
  1029. *src[2]++ = i2;
  1030. }
  1031. break;
  1032. }
  1033. }
/* Decode one tile into the output picture: TIER-1 entropy-decode and
 * dequantize every code-block, run the inverse DWT per component and the
 * inverse MCT across components, then DC-level-shift, clip and store the
 * samples (8-bit or 16-bit output path, planar or interleaved). */
static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
                                AVFrame *picture)
{
    int compno, reslevelno, bandno;
    int x, y;
    uint8_t *line;
    Jpeg2000T1Context t1;

    /* Loop on tile components */
    for (compno = 0; compno < s->ncomponents; compno++) {
        Jpeg2000Component *comp = tile->comp + compno;
        Jpeg2000CodingStyle *codsty = tile->codsty + compno;

        /* Loop on resolution levels */
        for (reslevelno = 0; reslevelno < codsty->nreslevels2decode; reslevelno++) {
            Jpeg2000ResLevel *rlevel = comp->reslevel + reslevelno;

            /* Loop on bands */
            for (bandno = 0; bandno < rlevel->nbands; bandno++) {
                int nb_precincts, precno;
                Jpeg2000Band *band = rlevel->band + bandno;
                int cblkno = 0, bandpos;

                /* band position selects the T1 context set (LL vs others) */
                bandpos = bandno + (reslevelno > 0);

                /* skip empty bands */
                if (band->coord[0][0] == band->coord[0][1] ||
                    band->coord[1][0] == band->coord[1][1])
                    continue;

                nb_precincts = rlevel->num_precincts_x * rlevel->num_precincts_y;
                /* Loop on precincts */
                for (precno = 0; precno < nb_precincts; precno++) {
                    Jpeg2000Prec *prec = band->prec + precno;

                    /* Loop on codeblocks */
                    for (cblkno = 0; cblkno < prec->nb_codeblocks_width * prec->nb_codeblocks_height; cblkno++) {
                        int x, y;
                        Jpeg2000Cblk *cblk = prec->cblk + cblkno;
                        decode_cblk(s, codsty, &t1, cblk,
                                    cblk->coord[0][1] - cblk->coord[0][0],
                                    cblk->coord[1][1] - cblk->coord[1][0],
                                    bandpos);

                        /* dequantize into the component plane at the
                         * code-block's top-left position */
                        x = cblk->coord[0][0];
                        y = cblk->coord[1][0];

                        if (codsty->transform == FF_DWT97)
                            dequantization_float(x, y, cblk, comp, &t1, band);
                        else
                            dequantization_int(x, y, cblk, comp, &t1, band);
                    } /* end cblk */
                } /*end prec */
            } /* end band */
        } /* end reslevel */

        /* inverse DWT */
        ff_dwt_decode(&comp->dwt, codsty->transform == FF_DWT97 ? (void*)comp->f_data : (void*)comp->i_data);
    } /*end comp */

    /* inverse MCT transformation */
    if (tile->codsty[0].mct)
        mct_decode(s, tile);

    /* no cdef atom seen: assign default channel order (identity, with the
     * alpha channel last for an even component count) */
    if (s->cdef[0] < 0) {
        for (x = 0; x < s->ncomponents; x++)
            s->cdef[x] = x + 1;
        if ((s->ncomponents & 1) == 0)
            s->cdef[s->ncomponents-1] = 0;
    }

    if (s->precision <= 8) {
        /* 8-bit output path */
        for (compno = 0; compno < s->ncomponents; compno++) {
            Jpeg2000Component *comp = tile->comp + compno;
            Jpeg2000CodingStyle *codsty = tile->codsty + compno;
            float *datap = comp->f_data;
            int32_t *i_datap = comp->i_data;
            int cbps = s->cbps[compno];
            int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
            int planar = !!picture->data[2];
            int pixelsize = planar ? 1 : s->ncomponents;
            int plane = 0;

            if (planar)
                plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);

            y = tile->comp[compno].coord[1][0] - s->image_offset_y;
            line = picture->data[plane] + y * picture->linesize[plane];
            for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
                uint8_t *dst;

                x = tile->comp[compno].coord[0][0] - s->image_offset_x;
                dst = line + x * pixelsize + compno*!planar;

                if (codsty->transform == FF_DWT97) {
                    for (; x < w; x += s->cdx[compno]) {
                        int val = lrintf(*datap) + (1 << (cbps - 1));
                        /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
                        val = av_clip(val, 0, (1 << cbps) - 1);
                        *dst = val << (8 - cbps);
                        datap++;
                        dst += pixelsize;
                    }
                } else {
                    for (; x < w; x += s->cdx[compno]) {
                        int val = *i_datap + (1 << (cbps - 1));
                        /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
                        val = av_clip(val, 0, (1 << cbps) - 1);
                        *dst = val << (8 - cbps);
                        i_datap++;
                        dst += pixelsize;
                    }
                }
                line += picture->linesize[plane];
            }
        }
    } else {
        /* 16-bit output path */
        for (compno = 0; compno < s->ncomponents; compno++) {
            Jpeg2000Component *comp = tile->comp + compno;
            Jpeg2000CodingStyle *codsty = tile->codsty + compno;
            float *datap = comp->f_data;
            int32_t *i_datap = comp->i_data;
            uint16_t *linel;
            int cbps = s->cbps[compno];
            int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
            int planar = !!picture->data[2];
            int pixelsize = planar ? 1 : s->ncomponents;
            int plane = 0;

            if (planar)
                plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);

            y = tile->comp[compno].coord[1][0] - s->image_offset_y;
            linel = (uint16_t *)picture->data[plane] + y * (picture->linesize[plane] >> 1);
            for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
                uint16_t *dst;

                x = tile->comp[compno].coord[0][0] - s->image_offset_x;
                dst = linel + (x * pixelsize + compno*!planar);
                if (codsty->transform == FF_DWT97) {
                    for (; x < w; x += s-> cdx[compno]) {
                        int val = lrintf(*datap) + (1 << (cbps - 1));
                        /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
                        val = av_clip(val, 0, (1 << cbps) - 1);
                        /* align 12 bit values in little-endian mode */
                        *dst = val << (16 - cbps);
                        datap++;
                        dst += pixelsize;
                    }
                } else {
                    for (; x < w; x += s-> cdx[compno]) {
                        int val = *i_datap + (1 << (cbps - 1));
                        /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
                        val = av_clip(val, 0, (1 << cbps) - 1);
                        /* align 12 bit values in little-endian mode */
                        *dst = val << (16 - cbps);
                        i_datap++;
                        dst += pixelsize;
                    }
                }
                linel += picture->linesize[plane] >> 1;
            }
        }
    }

    return 0;
}
  1179. static void jpeg2000_dec_cleanup(Jpeg2000DecoderContext *s)
  1180. {
  1181. int tileno, compno;
  1182. for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++) {
  1183. if (s->tile[tileno].comp) {
  1184. for (compno = 0; compno < s->ncomponents; compno++) {
  1185. Jpeg2000Component *comp = s->tile[tileno].comp + compno;
  1186. Jpeg2000CodingStyle *codsty = s->tile[tileno].codsty + compno;
  1187. ff_jpeg2000_cleanup(comp, codsty);
  1188. }
  1189. av_freep(&s->tile[tileno].comp);
  1190. }
  1191. }
  1192. av_freep(&s->tile);
  1193. s->numXtiles = s->numYtiles = 0;
  1194. }
/* Parse the codestream marker segments until EOC, dispatching each marker
 * to its handler.  After SOT the coding/quantization context switches to
 * the current tile's own copies; SOD hands the tile-part byte range over
 * to the tile's bitstream reader.  Returns 0 on success, negative AVERROR
 * on malformed input. */
static int jpeg2000_read_main_headers(Jpeg2000DecoderContext *s)
{
    Jpeg2000CodingStyle *codsty = s->codsty;
    Jpeg2000QuantStyle *qntsty = s->qntsty;
    uint8_t *properties = s->properties;

    for (;;) {
        int len, ret = 0;
        uint16_t marker;
        int oldpos;

        if (bytestream2_get_bytes_left(&s->g) < 2) {
            av_log(s->avctx, AV_LOG_ERROR, "Missing EOC\n");
            break;
        }

        marker = bytestream2_get_be16u(&s->g);
        oldpos = bytestream2_tell(&s->g);

        if (marker == JPEG2000_SOD) {
            /* start of data: the rest of the tile-part is packet bitstream */
            Jpeg2000Tile *tile;
            Jpeg2000TilePart *tp;

            if (!s->tile) {
                av_log(s->avctx, AV_LOG_ERROR, "Missing SIZ\n");
                return AVERROR_INVALIDDATA;
            }
            if (s->curtileno < 0) {
                av_log(s->avctx, AV_LOG_ERROR, "Missing SOT\n");
                return AVERROR_INVALIDDATA;
            }

            tile = s->tile + s->curtileno;
            tp = tile->tile_part + tile->tp_idx;
            if (tp->tp_end < s->g.buffer) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid tpend\n");
                return AVERROR_INVALIDDATA;
            }
            /* hand the tile-part bytes to the tile and skip past them */
            bytestream2_init(&tp->tpg, s->g.buffer, tp->tp_end - s->g.buffer);
            bytestream2_skip(&s->g, tp->tp_end - s->g.buffer);

            continue;
        }
        if (marker == JPEG2000_EOC)
            break;

        /* all remaining markers carry a 16-bit segment length (incl. itself) */
        len = bytestream2_get_be16(&s->g);
        if (len < 2 || bytestream2_get_bytes_left(&s->g) < len - 2)
            return AVERROR_INVALIDDATA;

        switch (marker) {
        case JPEG2000_SIZ:
            ret = get_siz(s);
            if (!s->tile)
                s->numXtiles = s->numYtiles = 0;
            break;
        case JPEG2000_COC:
            ret = get_coc(s, codsty, properties);
            break;
        case JPEG2000_COD:
            ret = get_cod(s, codsty, properties);
            break;
        case JPEG2000_QCC:
            ret = get_qcc(s, len, qntsty, properties);
            break;
        case JPEG2000_QCD:
            ret = get_qcd(s, len, qntsty, properties);
            break;
        case JPEG2000_SOT:
            if (!(ret = get_sot(s, len))) {
                av_assert1(s->curtileno >= 0);
                /* subsequent markers apply to this tile, not the main header */
                codsty = s->tile[s->curtileno].codsty;
                qntsty = s->tile[s->curtileno].qntsty;
                properties = s->tile[s->curtileno].properties;
            }
            break;
        case JPEG2000_COM:
            // the comment is ignored
            bytestream2_skip(&s->g, len - 2);
            break;
        case JPEG2000_TLM:
            // Tile-part lengths
            ret = get_tlm(s, len);
            break;
        default:
            av_log(s->avctx, AV_LOG_ERROR,
                   "unsupported marker 0x%.4X at pos 0x%X\n",
                   marker, bytestream2_tell(&s->g) - 4);
            bytestream2_skip(&s->g, len - 2);
            break;
        }
        /* every handler must consume exactly len bytes */
        if (bytestream2_tell(&s->g) - oldpos != len || ret) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "error during processing marker segment %.4x\n", marker);
            return ret ? ret : -1;
        }
    }
    return 0;
}
  1285. /* Read bit stream packets --> T2 operation. */
  1286. static int jpeg2000_read_bitstream_packets(Jpeg2000DecoderContext *s)
  1287. {
  1288. int ret = 0;
  1289. int tileno;
  1290. for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++) {
  1291. Jpeg2000Tile *tile = s->tile + tileno;
  1292. if (ret = init_tile(s, tileno))
  1293. return ret;
  1294. s->g = tile->tile_part[0].tpg;
  1295. if (ret = jpeg2000_decode_packets(s, tile))
  1296. return ret;
  1297. }
  1298. return 0;
  1299. }
  1300. static int jp2_find_codestream(Jpeg2000DecoderContext *s)
  1301. {
  1302. uint32_t atom_size, atom, atom_end;
  1303. int search_range = 10;
  1304. while (search_range
  1305. &&
  1306. bytestream2_get_bytes_left(&s->g) >= 8) {
  1307. atom_size = bytestream2_get_be32u(&s->g);
  1308. atom = bytestream2_get_be32u(&s->g);
  1309. atom_end = bytestream2_tell(&s->g) + atom_size - 8;
  1310. if (atom == JP2_CODESTREAM)
  1311. return 1;
  1312. if (bytestream2_get_bytes_left(&s->g) < atom_size || atom_end < atom_size)
  1313. return 0;
  1314. if (atom == JP2_HEADER &&
  1315. atom_size >= 16) {
  1316. uint32_t atom2_size, atom2, atom2_end;
  1317. do {
  1318. atom2_size = bytestream2_get_be32u(&s->g);
  1319. atom2 = bytestream2_get_be32u(&s->g);
  1320. atom2_end = bytestream2_tell(&s->g) + atom2_size - 8;
  1321. if (atom2_size < 8 || atom2_end > atom_end || atom2_end < atom2_size)
  1322. break;
  1323. if (atom2 == JP2_CODESTREAM) {
  1324. return 1;
  1325. } else if (atom2 == MKBETAG('c','o','l','r') && atom2_size >= 7) {
  1326. int method = bytestream2_get_byteu(&s->g);
  1327. bytestream2_skipu(&s->g, 2);
  1328. if (method == 1) {
  1329. s->colour_space = bytestream2_get_be32u(&s->g);
  1330. }
  1331. } else if (atom2 == MKBETAG('p','c','l','r') && atom2_size >= 6) {
  1332. int i, size, colour_count, colour_channels, colour_depth[3];
  1333. uint32_t r, g, b;
  1334. colour_count = bytestream2_get_be16u(&s->g);
  1335. colour_channels = bytestream2_get_byteu(&s->g);
  1336. // FIXME: Do not ignore channel_sign
  1337. colour_depth[0] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1338. colour_depth[1] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1339. colour_depth[2] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1340. size = (colour_depth[0] + 7 >> 3) * colour_count +
  1341. (colour_depth[1] + 7 >> 3) * colour_count +
  1342. (colour_depth[2] + 7 >> 3) * colour_count;
  1343. if (colour_count > 256 ||
  1344. colour_channels != 3 ||
  1345. colour_depth[0] > 16 ||
  1346. colour_depth[1] > 16 ||
  1347. colour_depth[2] > 16 ||
  1348. atom2_size < size) {
  1349. avpriv_request_sample(s->avctx, "Unknown palette");
  1350. bytestream2_seek(&s->g, atom2_end, SEEK_SET);
  1351. continue;
  1352. }
  1353. s->pal8 = 1;
  1354. for (i = 0; i < colour_count; i++) {
  1355. if (colour_depth[0] <= 8) {
  1356. r = bytestream2_get_byteu(&s->g) << 8 - colour_depth[0];
  1357. r |= r >> colour_depth[0];
  1358. } else {
  1359. r = bytestream2_get_be16u(&s->g) >> colour_depth[0] - 8;
  1360. }
  1361. if (colour_depth[1] <= 8) {
  1362. g = bytestream2_get_byteu(&s->g) << 8 - colour_depth[1];
  1363. r |= r >> colour_depth[1];
  1364. } else {
  1365. g = bytestream2_get_be16u(&s->g) >> colour_depth[1] - 8;
  1366. }
  1367. if (colour_depth[2] <= 8) {
  1368. b = bytestream2_get_byteu(&s->g) << 8 - colour_depth[2];
  1369. r |= r >> colour_depth[2];
  1370. } else {
  1371. b = bytestream2_get_be16u(&s->g) >> colour_depth[2] - 8;
  1372. }
  1373. s->palette[i] = 0xffu << 24 | r << 16 | g << 8 | b;
  1374. }
  1375. } else if (atom2 == MKBETAG('c','d','e','f') && atom2_size >= 2) {
  1376. int n = bytestream2_get_be16u(&s->g);
  1377. for (; n>0; n--) {
  1378. int cn = bytestream2_get_be16(&s->g);
  1379. int av_unused typ = bytestream2_get_be16(&s->g);
  1380. int asoc = bytestream2_get_be16(&s->g);
  1381. if (cn < 4 || asoc < 4)
  1382. s->cdef[cn] = asoc;
  1383. }
  1384. }
  1385. bytestream2_seek(&s->g, atom2_end, SEEK_SET);
  1386. } while (atom_end - atom2_end >= 8);
  1387. } else {
  1388. search_range--;
  1389. }
  1390. bytestream2_seek(&s->g, atom_end, SEEK_SET);
  1391. }
  1392. return 0;
  1393. }
/* Decode one JPEG 2000 frame: locate the codestream (raw or inside a JP2
 * container), parse the main headers, read the packets (T2), decode every
 * tile (T1 + dequantization + DWT + MCT) and write into the picture.
 * Returns the number of bytes consumed or a negative AVERROR. */
static int jpeg2000_decode_frame(AVCodecContext *avctx, void *data,
                                 int *got_frame, AVPacket *avpkt)
{
    Jpeg2000DecoderContext *s = avctx->priv_data;
    ThreadFrame frame = { .f = data };
    AVFrame *picture = data;
    int tileno, ret;

    s->avctx = avctx;
    bytestream2_init(&s->g, avpkt->data, avpkt->size);
    s->curtileno = -1;
    /* -1 == "no cdef atom seen"; decode_tile fills in defaults */
    memset(s->cdef, -1, sizeof(s->cdef));

    if (bytestream2_get_bytes_left(&s->g) < 2) {
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    // check if the image is in jp2 format
    if (bytestream2_get_bytes_left(&s->g) >= 12 &&
       (bytestream2_get_be32u(&s->g) == 12) &&
       (bytestream2_get_be32u(&s->g) == JP2_SIG_TYPE) &&
       (bytestream2_get_be32u(&s->g) == JP2_SIG_VALUE)) {
        if (!jp2_find_codestream(s)) {
            av_log(avctx, AV_LOG_ERROR,
                   "Could not find Jpeg2000 codestream atom.\n");
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    } else {
        bytestream2_seek(&s->g, 0, SEEK_SET);
    }

    /* resynchronize on the SOC (start of codestream) marker */
    while (bytestream2_get_bytes_left(&s->g) >= 3 && bytestream2_peek_be16(&s->g) != JPEG2000_SOC)
        bytestream2_skip(&s->g, 1);

    if (bytestream2_get_be16u(&s->g) != JPEG2000_SOC) {
        av_log(avctx, AV_LOG_ERROR, "SOC marker not present\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }
    if (ret = jpeg2000_read_main_headers(s))
        goto end;

    /* get picture buffer */
    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        goto end;
    picture->pict_type = AV_PICTURE_TYPE_I;
    picture->key_frame = 1;

    if (ret = jpeg2000_read_bitstream_packets(s))
        goto end;

    for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++)
        if (ret = jpeg2000_decode_tile(s, s->tile + tileno, picture))
            goto end;

    jpeg2000_dec_cleanup(s);

    *got_frame = 1;

    /* export the palette parsed from the JP2 'pclr' atom, if any */
    if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(picture->data[1], s->palette, 256 * sizeof(uint32_t));

    return bytestream2_tell(&s->g);

end:
    jpeg2000_dec_cleanup(s);
    return ret;
}
/* One-time global init: TIER-1 context lookup tables and MQ-coder
 * context tables (invoked once by libavcodec via init_static_data). */
static void jpeg2000_init_static_data(AVCodec *codec)
{
    ff_jpeg2000_init_tier1_luts();
    ff_mqc_init_context_tables();
}
#define OFFSET(x) offsetof(Jpeg2000DecoderContext, x)
#define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM

/* Decoder private options (exposed through the AVOption system). */
static const AVOption options[] = {
    { "lowres", "Lower the decoding resolution by a power of two",
        OFFSET(reduction_factor), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, JPEG2000_MAX_RESLEVELS - 1, VD },
    { NULL },
};

/* Profiles reported through AVCodecContext.profile. */
static const AVProfile profiles[] = {
    { FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0, "JPEG 2000 codestream restriction 0" },
    { FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1, "JPEG 2000 codestream restriction 1" },
    { FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION, "JPEG 2000 no codestream restrictions" },
    { FF_PROFILE_JPEG2000_DCINEMA_2K, "JPEG 2000 digital cinema 2K" },
    { FF_PROFILE_JPEG2000_DCINEMA_4K, "JPEG 2000 digital cinema 4K" },
    { FF_PROFILE_UNKNOWN },
};

static const AVClass jpeg2000_class = {
    .class_name = "jpeg2000",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

/* Decoder registration entry. */
AVCodec ff_jpeg2000_decoder = {
    .name             = "jpeg2000",
    .long_name        = NULL_IF_CONFIG_SMALL("JPEG 2000"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_JPEG2000,
    .capabilities     = CODEC_CAP_FRAME_THREADS,
    .priv_data_size   = sizeof(Jpeg2000DecoderContext),
    .init_static_data = jpeg2000_init_static_data,
    .decode           = jpeg2000_decode_frame,
    .priv_class       = &jpeg2000_class,
    .max_lowres       = 5,
    .profiles         = NULL_IF_CONFIG_SMALL(profiles)
};