  1. /*
  2. * JPEG 2000 image decoder
  3. * Copyright (c) 2007 Kamil Nowosad
  4. * Copyright (c) 2013 Nicolas Bertrand <nicoinattendu@gmail.com>
  5. *
  6. * This file is part of FFmpeg.
  7. *
  8. * FFmpeg is free software; you can redistribute it and/or
  9. * modify it under the terms of the GNU Lesser General Public
  10. * License as published by the Free Software Foundation; either
  11. * version 2.1 of the License, or (at your option) any later version.
  12. *
  13. * FFmpeg is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  16. * Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FFmpeg; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  21. */
  22. /**
  23. * @file
  24. * JPEG 2000 image decoder
  25. */
  26. #include "libavutil/avassert.h"
  27. #include "libavutil/common.h"
  28. #include "libavutil/opt.h"
  29. #include "libavutil/pixdesc.h"
  30. #include "avcodec.h"
  31. #include "bytestream.h"
  32. #include "internal.h"
  33. #include "thread.h"
  34. #include "jpeg2000.h"
  35. #define JP2_SIG_TYPE 0x6A502020
  36. #define JP2_SIG_VALUE 0x0D0A870A
  37. #define JP2_CODESTREAM 0x6A703263
  38. #define JP2_HEADER 0x6A703268
  39. #define HAD_COC 0x01
  40. #define HAD_QCC 0x02
  41. typedef struct Jpeg2000TilePart {
  42. uint8_t tile_index; // Index of the tile to which this tile-part belongs
  43. const uint8_t *tp_end;
  44. GetByteContext tpg; // bit stream in tile-part
  45. } Jpeg2000TilePart;
  46. /* RMK: For JPEG 2000 DCINEMA there are 3 tile-parts per tile,
  47. * one per component, so tile_part needs at least 3 elements. */
  48. typedef struct Jpeg2000Tile {
  49. Jpeg2000Component *comp;
  50. uint8_t properties[4];
  51. Jpeg2000CodingStyle codsty[4];
  52. Jpeg2000QuantStyle qntsty[4];
  53. Jpeg2000TilePart tile_part[4];
  54. uint16_t tp_idx; // Tile-part index
  55. } Jpeg2000Tile;
  56. typedef struct Jpeg2000DecoderContext {
  57. AVClass *class;
  58. AVCodecContext *avctx;
  59. GetByteContext g;
  60. int width, height;
  61. int image_offset_x, image_offset_y;
  62. int tile_offset_x, tile_offset_y;
  63. uint8_t cbps[4]; // bits per sample in particular components
  64. uint8_t sgnd[4]; // if a component is signed
  65. uint8_t properties[4];
  66. int cdx[4], cdy[4];
  67. int precision;
  68. int ncomponents;
  69. int colour_space;
  70. uint32_t palette[256];
  71. int8_t pal8;
  72. int cdef[4];
  73. int tile_width, tile_height;
  74. unsigned numXtiles, numYtiles;
  75. int maxtilelen;
  76. Jpeg2000CodingStyle codsty[4];
  77. Jpeg2000QuantStyle qntsty[4];
  78. int bit_index;
  79. int curtileno;
  80. Jpeg2000Tile *tile;
  81. /*options parameters*/
  82. int reduction_factor;
  83. } Jpeg2000DecoderContext;
  84. /* get_bits function for the JPEG 2000 packet bitstream.
  85. * It performs bit unstuffing: if the value of a byte is 0xFF, the next
  86. * byte carries an extra zero bit stuffed into the MSB.
  87. * cf. ISO-15444-1:2002 / B.10.1 Bit-stuffing routine */
  88. static int get_bits(Jpeg2000DecoderContext *s, int n)
  89. {
  90. int res = 0;
  91. while (--n >= 0) {
  92. res <<= 1;
  93. if (s->bit_index == 0) {
  94. s->bit_index = 7 + (bytestream2_get_byte(&s->g) != 0xFFu);
  95. }
  96. s->bit_index--;
  97. res |= (bytestream2_peek_byte(&s->g) >> s->bit_index) & 1;
  98. }
  99. return res;
  100. }
  101. static void jpeg2000_flush(Jpeg2000DecoderContext *s)
  102. {
  103. if (bytestream2_get_byte(&s->g) == 0xff)
  104. bytestream2_skip(&s->g, 1);
  105. s->bit_index = 8;
  106. }
  107. /* decode the value stored in node */
  108. static int tag_tree_decode(Jpeg2000DecoderContext *s, Jpeg2000TgtNode *node,
  109. int threshold)
  110. {
  111. Jpeg2000TgtNode *stack[30];
  112. int sp = -1, curval = 0;
  113. if (!node)
  114. return AVERROR_INVALIDDATA;
  115. while (node && !node->vis) {
  116. stack[++sp] = node;
  117. node = node->parent;
  118. }
  119. if (node)
  120. curval = node->val;
  121. else
  122. curval = stack[sp]->val;
  123. while (curval < threshold && sp >= 0) {
  124. if (curval < stack[sp]->val)
  125. curval = stack[sp]->val;
  126. while (curval < threshold) {
  127. int ret;
  128. if ((ret = get_bits(s, 1)) > 0) {
  129. stack[sp]->vis++;
  130. break;
  131. } else if (!ret)
  132. curval++;
  133. else
  134. return ret;
  135. }
  136. stack[sp]->val = curval;
  137. sp--;
  138. }
  139. return curval;
  140. }
  141. static int pix_fmt_match(enum AVPixelFormat pix_fmt, int components,
  142. int bpc, uint32_t log2_chroma_wh, int pal8)
  143. {
  144. int match = 1;
  145. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
  146. if (desc->nb_components != components) {
  147. return 0;
  148. }
  149. switch (components) {
  150. case 4:
  151. match = match && desc->comp[3].depth_minus1 + 1 >= bpc &&
  152. (log2_chroma_wh >> 14 & 3) == 0 &&
  153. (log2_chroma_wh >> 12 & 3) == 0;
  154. case 3:
  155. match = match && desc->comp[2].depth_minus1 + 1 >= bpc &&
  156. (log2_chroma_wh >> 10 & 3) == desc->log2_chroma_w &&
  157. (log2_chroma_wh >> 8 & 3) == desc->log2_chroma_h;
  158. case 2:
  159. match = match && desc->comp[1].depth_minus1 + 1 >= bpc &&
  160. (log2_chroma_wh >> 6 & 3) == desc->log2_chroma_w &&
  161. (log2_chroma_wh >> 4 & 3) == desc->log2_chroma_h;
  162. case 1:
  163. match = match && desc->comp[0].depth_minus1 + 1 >= bpc &&
  164. (log2_chroma_wh >> 2 & 3) == 0 &&
  165. (log2_chroma_wh & 3) == 0 &&
  166. (desc->flags & AV_PIX_FMT_FLAG_PAL) == pal8 * AV_PIX_FMT_FLAG_PAL;
  167. }
  168. return match;
  169. }
  170. // pix_fmts with lower bpp have to be listed before
  171. // similar pix_fmts with higher bpp.
  172. #define RGB_PIXEL_FORMATS AV_PIX_FMT_PAL8,AV_PIX_FMT_RGB24,AV_PIX_FMT_RGBA,AV_PIX_FMT_RGB48,AV_PIX_FMT_RGBA64
  173. #define GRAY_PIXEL_FORMATS AV_PIX_FMT_GRAY8,AV_PIX_FMT_GRAY8A,AV_PIX_FMT_GRAY16
  174. #define YUV_PIXEL_FORMATS AV_PIX_FMT_YUV410P,AV_PIX_FMT_YUV411P,AV_PIX_FMT_YUVA420P, \
  175. AV_PIX_FMT_YUV420P,AV_PIX_FMT_YUV422P,AV_PIX_FMT_YUVA422P, \
  176. AV_PIX_FMT_YUV440P,AV_PIX_FMT_YUV444P,AV_PIX_FMT_YUVA444P, \
  177. AV_PIX_FMT_YUV420P9,AV_PIX_FMT_YUV422P9,AV_PIX_FMT_YUV444P9, \
  178. AV_PIX_FMT_YUVA420P9,AV_PIX_FMT_YUVA422P9,AV_PIX_FMT_YUVA444P9, \
  179. AV_PIX_FMT_YUV420P10,AV_PIX_FMT_YUV422P10,AV_PIX_FMT_YUV444P10, \
  180. AV_PIX_FMT_YUVA420P10,AV_PIX_FMT_YUVA422P10,AV_PIX_FMT_YUVA444P10, \
  181. AV_PIX_FMT_YUV420P12,AV_PIX_FMT_YUV422P12,AV_PIX_FMT_YUV444P12, \
  182. AV_PIX_FMT_YUV420P14,AV_PIX_FMT_YUV422P14,AV_PIX_FMT_YUV444P14, \
  183. AV_PIX_FMT_YUV420P16,AV_PIX_FMT_YUV422P16,AV_PIX_FMT_YUV444P16, \
  184. AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P16,AV_PIX_FMT_YUVA444P16
  185. #define XYZ_PIXEL_FORMATS AV_PIX_FMT_XYZ12
  186. static const enum AVPixelFormat rgb_pix_fmts[] = {RGB_PIXEL_FORMATS};
  187. static const enum AVPixelFormat gray_pix_fmts[] = {GRAY_PIXEL_FORMATS};
  188. static const enum AVPixelFormat yuv_pix_fmts[] = {YUV_PIXEL_FORMATS};
  189. static const enum AVPixelFormat xyz_pix_fmts[] = {XYZ_PIXEL_FORMATS};
  190. static const enum AVPixelFormat all_pix_fmts[] = {RGB_PIXEL_FORMATS,
  191. GRAY_PIXEL_FORMATS,
  192. YUV_PIXEL_FORMATS,
  193. XYZ_PIXEL_FORMATS};
  194. /* marker segments */
  195. /* get sizes and offsets of image, tiles; number of components */
  196. static int get_siz(Jpeg2000DecoderContext *s)
  197. {
  198. int i;
  199. int ncomponents;
  200. uint32_t log2_chroma_wh = 0;
  201. const enum AVPixelFormat *possible_fmts = NULL;
  202. int possible_fmts_nb = 0;
  203. if (bytestream2_get_bytes_left(&s->g) < 36)
  204. return AVERROR_INVALIDDATA;
  205. s->avctx->profile = bytestream2_get_be16u(&s->g); // Rsiz
  206. s->width = bytestream2_get_be32u(&s->g); // Width
  207. s->height = bytestream2_get_be32u(&s->g); // Height
  208. s->image_offset_x = bytestream2_get_be32u(&s->g); // X0Siz
  209. s->image_offset_y = bytestream2_get_be32u(&s->g); // Y0Siz
  210. s->tile_width = bytestream2_get_be32u(&s->g); // XTSiz
  211. s->tile_height = bytestream2_get_be32u(&s->g); // YTSiz
  212. s->tile_offset_x = bytestream2_get_be32u(&s->g); // XT0Siz
  213. s->tile_offset_y = bytestream2_get_be32u(&s->g); // YT0Siz
  214. ncomponents = bytestream2_get_be16u(&s->g); // CSiz
  215. if (ncomponents <= 0) {
  216. av_log(s->avctx, AV_LOG_ERROR, "Invalid number of components: %d\n",
  217. ncomponents);
  218. return AVERROR_INVALIDDATA;
  219. }
  220. if (ncomponents > 4) {
  221. avpriv_request_sample(s->avctx, "Support for %d components",
  222. ncomponents);
  223. return AVERROR_PATCHWELCOME;
  224. }
  225. s->ncomponents = ncomponents;
  226. if (s->tile_width <= 0 || s->tile_height <= 0) {
  227. av_log(s->avctx, AV_LOG_ERROR, "Invalid tile dimension %dx%d.\n",
  228. s->tile_width, s->tile_height);
  229. return AVERROR_INVALIDDATA;
  230. }
  231. if (bytestream2_get_bytes_left(&s->g) < 3 * s->ncomponents)
  232. return AVERROR_INVALIDDATA;
  233. for (i = 0; i < s->ncomponents; i++) { // Ssiz_i XRsiz_i, YRsiz_i
  234. uint8_t x = bytestream2_get_byteu(&s->g);
  235. s->cbps[i] = (x & 0x7f) + 1;
  236. s->precision = FFMAX(s->cbps[i], s->precision);
  237. s->sgnd[i] = !!(x & 0x80);
  238. s->cdx[i] = bytestream2_get_byteu(&s->g);
  239. s->cdy[i] = bytestream2_get_byteu(&s->g);
  240. if ( !s->cdx[i] || s->cdx[i] == 3 || s->cdx[i] > 4
  241. || !s->cdy[i] || s->cdy[i] == 3 || s->cdy[i] > 4) {
  242. av_log(s->avctx, AV_LOG_ERROR, "Invalid sample separation %d/%d\n", s->cdx[i], s->cdy[i]);
  243. return AVERROR_INVALIDDATA;
  244. }
  245. log2_chroma_wh |= s->cdy[i] >> 1 << i * 4 | s->cdx[i] >> 1 << i * 4 + 2;
  246. }
  247. s->numXtiles = ff_jpeg2000_ceildiv(s->width - s->tile_offset_x, s->tile_width);
  248. s->numYtiles = ff_jpeg2000_ceildiv(s->height - s->tile_offset_y, s->tile_height);
  249. if (s->numXtiles * (uint64_t)s->numYtiles > INT_MAX/sizeof(*s->tile)) {
  250. s->numXtiles = s->numYtiles = 0;
  251. return AVERROR(EINVAL);
  252. }
  253. s->tile = av_mallocz_array(s->numXtiles * s->numYtiles, sizeof(*s->tile));
  254. if (!s->tile) {
  255. s->numXtiles = s->numYtiles = 0;
  256. return AVERROR(ENOMEM);
  257. }
  258. for (i = 0; i < s->numXtiles * s->numYtiles; i++) {
  259. Jpeg2000Tile *tile = s->tile + i;
  260. tile->comp = av_mallocz(s->ncomponents * sizeof(*tile->comp));
  261. if (!tile->comp)
  262. return AVERROR(ENOMEM);
  263. }
  264. /* compute image size with reduction factor */
  265. s->avctx->width = ff_jpeg2000_ceildivpow2(s->width - s->image_offset_x,
  266. s->reduction_factor);
  267. s->avctx->height = ff_jpeg2000_ceildivpow2(s->height - s->image_offset_y,
  268. s->reduction_factor);
  269. if (s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_2K ||
  270. s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_4K) {
  271. possible_fmts = xyz_pix_fmts;
  272. possible_fmts_nb = FF_ARRAY_ELEMS(xyz_pix_fmts);
  273. } else {
  274. switch (s->colour_space) {
  275. case 16:
  276. possible_fmts = rgb_pix_fmts;
  277. possible_fmts_nb = FF_ARRAY_ELEMS(rgb_pix_fmts);
  278. break;
  279. case 17:
  280. possible_fmts = gray_pix_fmts;
  281. possible_fmts_nb = FF_ARRAY_ELEMS(gray_pix_fmts);
  282. break;
  283. case 18:
  284. possible_fmts = yuv_pix_fmts;
  285. possible_fmts_nb = FF_ARRAY_ELEMS(yuv_pix_fmts);
  286. break;
  287. default:
  288. possible_fmts = all_pix_fmts;
  289. possible_fmts_nb = FF_ARRAY_ELEMS(all_pix_fmts);
  290. break;
  291. }
  292. }
  293. for (i = 0; i < possible_fmts_nb; ++i) {
  294. if (pix_fmt_match(possible_fmts[i], ncomponents, s->precision, log2_chroma_wh, s->pal8)) {
  295. s->avctx->pix_fmt = possible_fmts[i];
  296. break;
  297. }
  298. }
  299. if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
  300. av_log(s->avctx, AV_LOG_ERROR,
  301. "Unknown pix_fmt, profile: %d, colour_space: %d, "
  302. "components: %d, precision: %d, "
  303. "cdx[1]: %d, cdy[1]: %d, cdx[2]: %d, cdy[2]: %d\n",
  304. s->avctx->profile, s->colour_space, ncomponents, s->precision,
  305. ncomponents > 2 ? s->cdx[1] : 0,
  306. ncomponents > 2 ? s->cdy[1] : 0,
  307. ncomponents > 2 ? s->cdx[2] : 0,
  308. ncomponents > 2 ? s->cdy[2] : 0);
  309. }
  310. s->avctx->bits_per_raw_sample = s->precision;
  311. return 0;
  312. }
  313. /* get common part for COD and COC segments */
  314. static int get_cox(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c)
  315. {
  316. uint8_t byte;
  317. if (bytestream2_get_bytes_left(&s->g) < 5)
  318. return AVERROR_INVALIDDATA;
  319. /* nreslevels = number of resolution levels
  320. = number of decomposition levels + 1 */
  321. c->nreslevels = bytestream2_get_byteu(&s->g) + 1;
  322. if (c->nreslevels >= JPEG2000_MAX_RESLEVELS) {
  323. av_log(s->avctx, AV_LOG_ERROR, "nreslevels %d is invalid\n", c->nreslevels);
  324. return AVERROR_INVALIDDATA;
  325. }
  326. /* compute number of resolution levels to decode */
  327. if (c->nreslevels < s->reduction_factor)
  328. c->nreslevels2decode = 1;
  329. else
  330. c->nreslevels2decode = c->nreslevels - s->reduction_factor;
  331. c->log2_cblk_width = (bytestream2_get_byteu(&s->g) & 15) + 2; // cblk width
  332. c->log2_cblk_height = (bytestream2_get_byteu(&s->g) & 15) + 2; // cblk height
  333. if (c->log2_cblk_width > 10 || c->log2_cblk_height > 10 ||
  334. c->log2_cblk_width + c->log2_cblk_height > 12) {
  335. av_log(s->avctx, AV_LOG_ERROR, "cblk size invalid\n");
  336. return AVERROR_INVALIDDATA;
  337. }
  338. if (c->log2_cblk_width > 6 || c->log2_cblk_height > 6) {
  339. avpriv_request_sample(s->avctx, "cblk size > 64");
  340. return AVERROR_PATCHWELCOME;
  341. }
  342. c->cblk_style = bytestream2_get_byteu(&s->g);
  343. if (c->cblk_style != 0) { // cblk style
  344. av_log(s->avctx, AV_LOG_WARNING, "extra cblk styles %X\n", c->cblk_style);
  345. }
  346. c->transform = bytestream2_get_byteu(&s->g); // DWT transformation type
  347. /* set integer 9/7 DWT in case of BITEXACT flag */
  348. if ((s->avctx->flags & CODEC_FLAG_BITEXACT) && (c->transform == FF_DWT97))
  349. c->transform = FF_DWT97_INT;
  350. if (c->csty & JPEG2000_CSTY_PREC) {
  351. int i;
  352. for (i = 0; i < c->nreslevels; i++) {
  353. byte = bytestream2_get_byte(&s->g);
  354. c->log2_prec_widths[i] = byte & 0x0F; // precinct PPx
  355. c->log2_prec_heights[i] = (byte >> 4) & 0x0F; // precinct PPy
  356. }
  357. } else {
  358. memset(c->log2_prec_widths , 15, sizeof(c->log2_prec_widths ));
  359. memset(c->log2_prec_heights, 15, sizeof(c->log2_prec_heights));
  360. }
  361. return 0;
  362. }
  363. /* get coding parameters for a particular tile or whole image */
  364. static int get_cod(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c,
  365. uint8_t *properties)
  366. {
  367. Jpeg2000CodingStyle tmp;
  368. int compno, ret;
  369. if (bytestream2_get_bytes_left(&s->g) < 5)
  370. return AVERROR_INVALIDDATA;
  371. tmp.csty = bytestream2_get_byteu(&s->g);
  372. // get progression order
  373. tmp.prog_order = bytestream2_get_byteu(&s->g);
  374. tmp.nlayers = bytestream2_get_be16u(&s->g);
  375. tmp.mct = bytestream2_get_byteu(&s->g); // multiple component transformation
  376. if (tmp.mct && s->ncomponents < 3) {
  377. av_log(s->avctx, AV_LOG_ERROR,
  378. "MCT %d with too few components (%d)\n",
  379. tmp.mct, s->ncomponents);
  380. return AVERROR_INVALIDDATA;
  381. }
  382. if ((ret = get_cox(s, &tmp)) < 0)
  383. return ret;
  384. for (compno = 0; compno < s->ncomponents; compno++)
  385. if (!(properties[compno] & HAD_COC))
  386. memcpy(c + compno, &tmp, sizeof(tmp));
  387. return 0;
  388. }
  389. /* Get coding parameters for a component in the whole image or a
  390. * particular tile. */
  391. static int get_coc(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *c,
  392. uint8_t *properties)
  393. {
  394. int compno, ret;
  395. if (bytestream2_get_bytes_left(&s->g) < 2)
  396. return AVERROR_INVALIDDATA;
  397. compno = bytestream2_get_byteu(&s->g);
  398. if (compno >= s->ncomponents) {
  399. av_log(s->avctx, AV_LOG_ERROR,
  400. "Invalid compno %d. There are %d components in the image.\n",
  401. compno, s->ncomponents);
  402. return AVERROR_INVALIDDATA;
  403. }
  404. c += compno;
  405. c->csty = bytestream2_get_byteu(&s->g);
  406. if ((ret = get_cox(s, c)) < 0)
  407. return ret;
  408. properties[compno] |= HAD_COC;
  409. return 0;
  410. }
  411. /* Get common part for QCD and QCC segments. */
  412. static int get_qcx(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q)
  413. {
  414. int i, x;
  415. if (bytestream2_get_bytes_left(&s->g) < 1)
  416. return AVERROR_INVALIDDATA;
  417. x = bytestream2_get_byteu(&s->g); // Sqcd
  418. q->nguardbits = x >> 5;
  419. q->quantsty = x & 0x1f;
  420. if (q->quantsty == JPEG2000_QSTY_NONE) {
  421. n -= 3;
  422. if (bytestream2_get_bytes_left(&s->g) < n ||
  423. n > JPEG2000_MAX_DECLEVELS*3)
  424. return AVERROR_INVALIDDATA;
  425. for (i = 0; i < n; i++)
  426. q->expn[i] = bytestream2_get_byteu(&s->g) >> 3;
  427. } else if (q->quantsty == JPEG2000_QSTY_SI) {
  428. if (bytestream2_get_bytes_left(&s->g) < 2)
  429. return AVERROR_INVALIDDATA;
  430. x = bytestream2_get_be16u(&s->g);
  431. q->expn[0] = x >> 11;
  432. q->mant[0] = x & 0x7ff;
  433. for (i = 1; i < JPEG2000_MAX_DECLEVELS * 3; i++) {
  434. int curexpn = FFMAX(0, q->expn[0] - (i - 1) / 3);
  435. q->expn[i] = curexpn;
  436. q->mant[i] = q->mant[0];
  437. }
  438. } else {
  439. n = (n - 3) >> 1;
  440. if (bytestream2_get_bytes_left(&s->g) < 2 * n ||
  441. n > JPEG2000_MAX_DECLEVELS*3)
  442. return AVERROR_INVALIDDATA;
  443. for (i = 0; i < n; i++) {
  444. x = bytestream2_get_be16u(&s->g);
  445. q->expn[i] = x >> 11;
  446. q->mant[i] = x & 0x7ff;
  447. }
  448. }
  449. return 0;
  450. }
  451. /* Get quantization parameters for a particular tile or a whole image. */
  452. static int get_qcd(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q,
  453. uint8_t *properties)
  454. {
  455. Jpeg2000QuantStyle tmp;
  456. int compno, ret;
  457. if ((ret = get_qcx(s, n, &tmp)) < 0)
  458. return ret;
  459. for (compno = 0; compno < s->ncomponents; compno++)
  460. if (!(properties[compno] & HAD_QCC))
  461. memcpy(q + compno, &tmp, sizeof(tmp));
  462. return 0;
  463. }
  464. /* Get quantization parameters for a component in the whole image
  465. * or in a particular tile. */
  466. static int get_qcc(Jpeg2000DecoderContext *s, int n, Jpeg2000QuantStyle *q,
  467. uint8_t *properties)
  468. {
  469. int compno;
  470. if (bytestream2_get_bytes_left(&s->g) < 1)
  471. return AVERROR_INVALIDDATA;
  472. compno = bytestream2_get_byteu(&s->g);
  473. if (compno >= s->ncomponents) {
  474. av_log(s->avctx, AV_LOG_ERROR,
  475. "Invalid compno %d. There are %d components in the image.\n",
  476. compno, s->ncomponents);
  477. return AVERROR_INVALIDDATA;
  478. }
  479. properties[compno] |= HAD_QCC;
  480. return get_qcx(s, n - 1, q + compno);
  481. }
  482. /* Get start of tile segment. */
  483. static int get_sot(Jpeg2000DecoderContext *s, int n)
  484. {
  485. Jpeg2000TilePart *tp;
  486. uint16_t Isot;
  487. uint32_t Psot;
  488. uint8_t TPsot;
  489. if (bytestream2_get_bytes_left(&s->g) < 8)
  490. return AVERROR_INVALIDDATA;
  491. s->curtileno = 0;
  492. Isot = bytestream2_get_be16u(&s->g); // Isot
  493. if (Isot >= s->numXtiles * s->numYtiles)
  494. return AVERROR_INVALIDDATA;
  495. s->curtileno = Isot;
  496. Psot = bytestream2_get_be32u(&s->g); // Psot
  497. TPsot = bytestream2_get_byteu(&s->g); // TPsot
  498. /* TNsot is read but not used */
  499. bytestream2_get_byteu(&s->g); // TNsot
  500. if (Psot > bytestream2_get_bytes_left(&s->g) + n + 2) {
  501. av_log(s->avctx, AV_LOG_ERROR, "Psot %"PRIu32" too big\n", Psot);
  502. return AVERROR_INVALIDDATA;
  503. }
  504. if (TPsot >= FF_ARRAY_ELEMS(s->tile[Isot].tile_part)) {
  505. avpriv_request_sample(s->avctx, "Support for %d tile parts", TPsot);
  506. return AVERROR_PATCHWELCOME;
  507. }
  508. s->tile[Isot].tp_idx = TPsot;
  509. tp = s->tile[Isot].tile_part + TPsot;
  510. tp->tile_index = Isot;
  511. tp->tp_end = s->g.buffer + Psot - n - 2;
  512. if (!TPsot) {
  513. Jpeg2000Tile *tile = s->tile + s->curtileno;
  514. /* copy defaults */
  515. memcpy(tile->codsty, s->codsty, s->ncomponents * sizeof(Jpeg2000CodingStyle));
  516. memcpy(tile->qntsty, s->qntsty, s->ncomponents * sizeof(Jpeg2000QuantStyle));
  517. }
  518. return 0;
  519. }
  520. /* Tile-part lengths (TLM): see ISO/IEC 15444-1:2002, section A.7.1.
  521. * Gives the number of tile-parts and their lengths.
  522. * There may be multiple TLM markers in the header.
  523. * TODO: The information is not used for tile-part management, nor anywhere
  524. * else. It could be used to allocate memory for tile-parts before handling
  525. * the SOT markers. For now the segment is parsed only so that the input
  526. * buffer is advanced past it.
  527. * This marker is mandatory for DCI. */
  528. static uint8_t get_tlm(Jpeg2000DecoderContext *s, int n)
  529. {
  530. uint8_t Stlm, ST, SP, tile_tlm, i;
  531. bytestream2_get_byte(&s->g); /* Ztlm: skipped */
  532. Stlm = bytestream2_get_byte(&s->g);
  533. // too complex ? ST = ((Stlm >> 4) & 0x01) + ((Stlm >> 4) & 0x02);
  534. ST = (Stlm >> 4) & 0x03;
  535. // TODO: Manage case of ST = 0b11 --> raise error
  536. SP = (Stlm >> 6) & 0x01;
  537. tile_tlm = (n - 4) / ((SP + 1) * 2 + ST);
  538. for (i = 0; i < tile_tlm; i++) {
  539. switch (ST) {
  540. case 0:
  541. break;
  542. case 1:
  543. bytestream2_get_byte(&s->g);
  544. break;
  545. case 2:
  546. bytestream2_get_be16(&s->g);
  547. break;
  548. case 3:
  549. bytestream2_get_be32(&s->g);
  550. break;
  551. }
  552. if (SP == 0) {
  553. bytestream2_get_be16(&s->g);
  554. } else {
  555. bytestream2_get_be32(&s->g);
  556. }
  557. }
  558. return 0;
  559. }
  560. static int init_tile(Jpeg2000DecoderContext *s, int tileno)
  561. {
  562. int compno;
  563. int tilex = tileno % s->numXtiles;
  564. int tiley = tileno / s->numXtiles;
  565. Jpeg2000Tile *tile = s->tile + tileno;
  566. if (!tile->comp)
  567. return AVERROR(ENOMEM);
  568. for (compno = 0; compno < s->ncomponents; compno++) {
  569. Jpeg2000Component *comp = tile->comp + compno;
  570. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  571. Jpeg2000QuantStyle *qntsty = tile->qntsty + compno;
  572. int ret;
  573. comp->coord_o[0][0] = FFMAX(tilex * s->tile_width + s->tile_offset_x, s->image_offset_x);
  574. comp->coord_o[0][1] = FFMIN((tilex + 1) * s->tile_width + s->tile_offset_x, s->width);
  575. comp->coord_o[1][0] = FFMAX(tiley * s->tile_height + s->tile_offset_y, s->image_offset_y);
  576. comp->coord_o[1][1] = FFMIN((tiley + 1) * s->tile_height + s->tile_offset_y, s->height);
  577. comp->coord[0][0] = ff_jpeg2000_ceildivpow2(comp->coord_o[0][0], s->reduction_factor);
  578. comp->coord[0][1] = ff_jpeg2000_ceildivpow2(comp->coord_o[0][1], s->reduction_factor);
  579. comp->coord[1][0] = ff_jpeg2000_ceildivpow2(comp->coord_o[1][0], s->reduction_factor);
  580. comp->coord[1][1] = ff_jpeg2000_ceildivpow2(comp->coord_o[1][1], s->reduction_factor);
  581. if (ret = ff_jpeg2000_init_component(comp, codsty, qntsty,
  582. s->cbps[compno], s->cdx[compno],
  583. s->cdy[compno], s->avctx))
  584. return ret;
  585. }
  586. return 0;
  587. }
  588. /* Read the number of coding passes. */
  589. static int getnpasses(Jpeg2000DecoderContext *s)
  590. {
  591. int num;
  592. if (!get_bits(s, 1))
  593. return 1;
  594. if (!get_bits(s, 1))
  595. return 2;
  596. if ((num = get_bits(s, 2)) != 3)
  597. return num < 0 ? num : 3 + num;
  598. if ((num = get_bits(s, 5)) != 31)
  599. return num < 0 ? num : 6 + num;
  600. num = get_bits(s, 7);
  601. return num < 0 ? num : 37 + num;
  602. }
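  /* Read the Lblock length-indicator increment for a code-block: it is
   * signalled as a run of 1 bits terminated by a 0 bit, and the caller adds
   * the run length to cblk->lblock (cf. ISO/IEC 15444-1, B.10 packet headers). */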
  603. static int getlblockinc(Jpeg2000DecoderContext *s)
  604. {
  605. int res = 0, ret;
  606. while (ret = get_bits(s, 1)) {
  607. if (ret < 0)
  608. return ret;
  609. res++;
  610. }
  611. return res;
  612. }
  613. static int jpeg2000_decode_packet(Jpeg2000DecoderContext *s,
  614. Jpeg2000CodingStyle *codsty,
  615. Jpeg2000ResLevel *rlevel, int precno,
  616. int layno, uint8_t *expn, int numgbits)
  617. {
  618. int bandno, cblkno, ret, nb_code_blocks;
  619. if (!(ret = get_bits(s, 1))) {
  620. jpeg2000_flush(s);
  621. return 0;
  622. } else if (ret < 0)
  623. return ret;
  624. for (bandno = 0; bandno < rlevel->nbands; bandno++) {
  625. Jpeg2000Band *band = rlevel->band + bandno;
  626. Jpeg2000Prec *prec = band->prec + precno;
  627. if (band->coord[0][0] == band->coord[0][1] ||
  628. band->coord[1][0] == band->coord[1][1])
  629. continue;
  630. nb_code_blocks = prec->nb_codeblocks_height *
  631. prec->nb_codeblocks_width;
  632. for (cblkno = 0; cblkno < nb_code_blocks; cblkno++) {
  633. Jpeg2000Cblk *cblk = prec->cblk + cblkno;
  634. int incl, newpasses, llen;
  635. if (cblk->npasses)
  636. incl = get_bits(s, 1);
  637. else
  638. incl = tag_tree_decode(s, prec->cblkincl + cblkno, layno + 1) == layno;
  639. if (!incl)
  640. continue;
  641. else if (incl < 0)
  642. return incl;
  643. if (!cblk->npasses) {
  644. int v = expn[bandno] + numgbits - 1 -
  645. tag_tree_decode(s, prec->zerobits + cblkno, 100);
  646. if (v < 0) {
  647. av_log(s->avctx, AV_LOG_ERROR,
  648. "nonzerobits %d invalid\n", v);
  649. return AVERROR_INVALIDDATA;
  650. }
  651. cblk->nonzerobits = v;
  652. }
  653. if ((newpasses = getnpasses(s)) < 0)
  654. return newpasses;
  655. if ((llen = getlblockinc(s)) < 0)
  656. return llen;
  657. cblk->lblock += llen;
  658. if ((ret = get_bits(s, av_log2(newpasses) + cblk->lblock)) < 0)
  659. return ret;
  660. if (ret > sizeof(cblk->data)) {
  661. avpriv_request_sample(s->avctx,
  662. "Block with lengthinc greater than %zu",
  663. sizeof(cblk->data));
  664. return AVERROR_PATCHWELCOME;
  665. }
  666. cblk->lengthinc = ret;
  667. cblk->npasses += newpasses;
  668. }
  669. }
  670. jpeg2000_flush(s);
  671. if (codsty->csty & JPEG2000_CSTY_EPH) {
  672. if (bytestream2_peek_be16(&s->g) == JPEG2000_EPH)
  673. bytestream2_skip(&s->g, 2);
  674. else
  675. av_log(s->avctx, AV_LOG_ERROR, "EPH marker not found.\n");
  676. }
  677. for (bandno = 0; bandno < rlevel->nbands; bandno++) {
  678. Jpeg2000Band *band = rlevel->band + bandno;
  679. Jpeg2000Prec *prec = band->prec + precno;
  680. nb_code_blocks = prec->nb_codeblocks_height * prec->nb_codeblocks_width;
  681. for (cblkno = 0; cblkno < nb_code_blocks; cblkno++) {
  682. Jpeg2000Cblk *cblk = prec->cblk + cblkno;
  683. if ( bytestream2_get_bytes_left(&s->g) < cblk->lengthinc
  684. || sizeof(cblk->data) < cblk->length + cblk->lengthinc + 2
  685. ) {
  686. av_log(s->avctx, AV_LOG_ERROR,
  687. "Block length %d or lengthinc %d is too large\n",
  688. cblk->length, cblk->lengthinc);
  689. return AVERROR_INVALIDDATA;
  690. }
  691. bytestream2_get_bufferu(&s->g, cblk->data + cblk->length, cblk->lengthinc);
  692. cblk->length += cblk->lengthinc;
  693. cblk->lengthinc = 0;
  694. }
  695. }
  696. return 0;
  697. }
  698. static int jpeg2000_decode_packets(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile)
  699. {
  700. int ret = 0;
  701. int layno, reslevelno, compno, precno, ok_reslevel;
  702. int x, y;
  703. s->bit_index = 8;
  704. switch (tile->codsty[0].prog_order) {
  705. case JPEG2000_PGOD_RLCP:
  706. avpriv_request_sample(s->avctx, "Progression order RLCP");
  707. case JPEG2000_PGOD_LRCP:
  708. for (layno = 0; layno < tile->codsty[0].nlayers; layno++) {
  709. ok_reslevel = 1;
  710. for (reslevelno = 0; ok_reslevel; reslevelno++) {
  711. ok_reslevel = 0;
  712. for (compno = 0; compno < s->ncomponents; compno++) {
  713. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  714. Jpeg2000QuantStyle *qntsty = tile->qntsty + compno;
  715. if (reslevelno < codsty->nreslevels) {
  716. Jpeg2000ResLevel *rlevel = tile->comp[compno].reslevel +
  717. reslevelno;
  718. ok_reslevel = 1;
  719. for (precno = 0; precno < rlevel->num_precincts_x * rlevel->num_precincts_y; precno++)
  720. if ((ret = jpeg2000_decode_packet(s,
  721. codsty, rlevel,
  722. precno, layno,
  723. qntsty->expn + (reslevelno ? 3 * (reslevelno - 1) + 1 : 0),
  724. qntsty->nguardbits)) < 0)
  725. return ret;
  726. }
  727. }
  728. }
  729. }
  730. break;
  731. case JPEG2000_PGOD_CPRL:
  732. for (compno = 0; compno < s->ncomponents; compno++) {
  733. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  734. Jpeg2000QuantStyle *qntsty = tile->qntsty + compno;
  735. /* Set bit stream buffer address according to tile-part.
  736. * For DCinema one tile-part per component, so can be
  737. * indexed by component. */
  738. s->g = tile->tile_part[compno].tpg;
  739. /* Position loop (y axis).
  740. * TODO: Automate computing of the step. It is fixed to 256 here,
  741. * but should be computed before entering this loop. */
  742. for (y = 0; y < s->height; y += 256) {
  743. /* Position loop (x axis).
  744. * TODO: Automate computing of the step. It is fixed to 256 here,
  745. * but should be computed before entering this loop. */
  746. for (x = 0; x < s->width; x += 256) {
  747. for (reslevelno = 0; reslevelno < codsty->nreslevels; reslevelno++) {
  748. uint16_t prcx, prcy;
  749. uint8_t reducedresno = codsty->nreslevels - 1 -reslevelno; // ==> N_L - r
  750. Jpeg2000ResLevel *rlevel = tile->comp[compno].reslevel + reslevelno;
  751. if (!((y % (1 << (rlevel->log2_prec_height + reducedresno)) == 0) ||
  752. (y == 0))) // TODO: 2nd condition simplified as try0 always =0 for dcinema
  753. continue;
  754. if (!((x % (1 << (rlevel->log2_prec_width + reducedresno)) == 0) ||
  755. (x == 0))) // TODO: 2nd condition simplified as try0 always =0 for dcinema
  756. continue;
  757. // check if a precinct exists
  758. prcx = ff_jpeg2000_ceildivpow2(x, reducedresno) >> rlevel->log2_prec_width;
  759. prcy = ff_jpeg2000_ceildivpow2(y, reducedresno) >> rlevel->log2_prec_height;
  760. precno = prcx + rlevel->num_precincts_x * prcy;
  761. for (layno = 0; layno < tile->codsty[0].nlayers; layno++) {
  762. if ((ret = jpeg2000_decode_packet(s, codsty, rlevel,
  763. precno, layno,
  764. qntsty->expn + (reslevelno ? 3 * (reslevelno - 1) + 1 : 0),
  765. qntsty->nguardbits)) < 0)
  766. return ret;
  767. }
  768. }
  769. }
  770. }
  771. }
  772. break;
  773. case JPEG2000_PGOD_RPCL:
  774. avpriv_request_sample(s->avctx, "Progression order RPCL");
  775. ret = AVERROR_PATCHWELCOME;
  776. break;
  777. case JPEG2000_PGOD_PCRL:
  778. avpriv_request_sample(s->avctx, "Progression order PCRL");
  779. ret = AVERROR_PATCHWELCOME;
  780. break;
  781. default:
  782. break;
  783. }
  784. /* EOC marker reached */
  785. bytestream2_skip(&s->g, 2);
  786. return ret;
  787. }
  788. /* TIER-1 routines */
  789. static void decode_sigpass(Jpeg2000T1Context *t1, int width, int height,
  790. int bpno, int bandno, int bpass_csty_symbol,
  791. int vert_causal_ctx_csty_symbol)
  792. {
  793. int mask = 3 << (bpno - 1), y0, x, y;
  794. for (y0 = 0; y0 < height; y0 += 4)
  795. for (x = 0; x < width; x++)
  796. for (y = y0; y < height && y < y0 + 4; y++) {
  797. if ((t1->flags[y+1][x+1] & JPEG2000_T1_SIG_NB)
  798. && !(t1->flags[y+1][x+1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS))) {
  799. int flags_mask = -1;
  800. if (vert_causal_ctx_csty_symbol && y == y0 + 3)
  801. flags_mask &= ~(JPEG2000_T1_SIG_S | JPEG2000_T1_SIG_SW | JPEG2000_T1_SIG_SE);
  802. if (ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ff_jpeg2000_getsigctxno(t1->flags[y+1][x+1] & flags_mask, bandno))) {
  803. int xorbit, ctxno = ff_jpeg2000_getsgnctxno(t1->flags[y+1][x+1], &xorbit);
  804. if (bpass_csty_symbol)
  805. t1->data[y][x] = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ctxno) ? -mask : mask;
  806. else
  807. t1->data[y][x] = (ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ctxno) ^ xorbit) ?
  808. -mask : mask;
  809. ff_jpeg2000_set_significance(t1, x, y,
  810. t1->data[y][x] < 0);
  811. }
  812. t1->flags[y + 1][x + 1] |= JPEG2000_T1_VIS;
  813. }
  814. }
  815. }
  816. static void decode_refpass(Jpeg2000T1Context *t1, int width, int height,
  817. int bpno)
  818. {
  819. int phalf, nhalf;
  820. int y0, x, y;
  821. phalf = 1 << (bpno - 1);
  822. nhalf = -phalf;
  823. for (y0 = 0; y0 < height; y0 += 4)
  824. for (x = 0; x < width; x++)
  825. for (y = y0; y < height && y < y0 + 4; y++)
  826. if ((t1->flags[y + 1][x + 1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS)) == JPEG2000_T1_SIG) {
  827. int ctxno = ff_jpeg2000_getrefctxno(t1->flags[y + 1][x + 1]);
  828. int r = ff_mqc_decode(&t1->mqc,
  829. t1->mqc.cx_states + ctxno)
  830. ? phalf : nhalf;
  831. t1->data[y][x] += t1->data[y][x] < 0 ? -r : r;
  832. t1->flags[y + 1][x + 1] |= JPEG2000_T1_REF;
  833. }
  834. }
  835. static void decode_clnpass(Jpeg2000DecoderContext *s, Jpeg2000T1Context *t1,
  836. int width, int height, int bpno, int bandno,
  837. int seg_symbols, int vert_causal_ctx_csty_symbol)
  838. {
  839. int mask = 3 << (bpno - 1), y0, x, y, runlen, dec;
  840. for (y0 = 0; y0 < height; y0 += 4) {
  841. for (x = 0; x < width; x++) {
  842. if (y0 + 3 < height &&
  843. !((t1->flags[y0 + 1][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
  844. (t1->flags[y0 + 2][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
  845. (t1->flags[y0 + 3][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)) ||
  846. (t1->flags[y0 + 4][x + 1] & (JPEG2000_T1_SIG_NB | JPEG2000_T1_VIS | JPEG2000_T1_SIG)))) {
  847. if (!ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_RL))
  848. continue;
  849. runlen = ff_mqc_decode(&t1->mqc,
  850. t1->mqc.cx_states + MQC_CX_UNI);
  851. runlen = (runlen << 1) | ff_mqc_decode(&t1->mqc,
  852. t1->mqc.cx_states +
  853. MQC_CX_UNI);
  854. dec = 1;
  855. } else {
  856. runlen = 0;
  857. dec = 0;
  858. }
  859. for (y = y0 + runlen; y < y0 + 4 && y < height; y++) {
  860. if (!dec) {
  861. if (!(t1->flags[y+1][x+1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS))) {
  862. int flags_mask = -1;
  863. if (vert_causal_ctx_csty_symbol && y == y0 + 3)
  864. flags_mask &= ~(JPEG2000_T1_SIG_S | JPEG2000_T1_SIG_SW | JPEG2000_T1_SIG_SE);
  865. dec = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + ff_jpeg2000_getsigctxno(t1->flags[y+1][x+1] & flags_mask,
  866. bandno));
  867. }
  868. }
  869. if (dec) {
  870. int xorbit;
  871. int ctxno = ff_jpeg2000_getsgnctxno(t1->flags[y + 1][x + 1],
  872. &xorbit);
  873. t1->data[y][x] = (ff_mqc_decode(&t1->mqc,
  874. t1->mqc.cx_states + ctxno) ^
  875. xorbit)
  876. ? -mask : mask;
  877. ff_jpeg2000_set_significance(t1, x, y, t1->data[y][x] < 0);
  878. }
  879. dec = 0;
  880. t1->flags[y + 1][x + 1] &= ~JPEG2000_T1_VIS;
  881. }
  882. }
  883. }
  884. if (seg_symbols) {
  885. int val;
  886. val = ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
  887. val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
  888. val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
  889. val = (val << 1) + ff_mqc_decode(&t1->mqc, t1->mqc.cx_states + MQC_CX_UNI);
  890. if (val != 0xa)
  891. av_log(s->avctx, AV_LOG_ERROR,
  892. "Segmentation symbol value incorrect\n");
  893. }
  894. }
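  /* Decode one code-block with the EBCOT Tier-1 coder: starting with the
   * cleanup pass of the most significant non-zero bit-plane, each bit-plane
   * is decoded with the significance propagation, magnitude refinement and
   * cleanup passes, for as many coding passes as were signalled in the
   * packet headers (cblk->npasses). */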
  895. static int decode_cblk(Jpeg2000DecoderContext *s, Jpeg2000CodingStyle *codsty,
  896. Jpeg2000T1Context *t1, Jpeg2000Cblk *cblk,
  897. int width, int height, int bandpos)
  898. {
  899. int passno = cblk->npasses, pass_t = 2, bpno = cblk->nonzerobits - 1, y;
  900. int clnpass_cnt = 0;
  901. int bpass_csty_symbol = codsty->cblk_style & JPEG2000_CBLK_BYPASS;
  902. int vert_causal_ctx_csty_symbol = codsty->cblk_style & JPEG2000_CBLK_VSC;
  903. av_assert0(width <= JPEG2000_MAX_CBLKW);
  904. av_assert0(height <= JPEG2000_MAX_CBLKH);
  905. for (y = 0; y < height; y++)
  906. memset(t1->data[y], 0, width * sizeof(**t1->data));
  907. /* If code-block contains no compressed data: nothing to do. */
  908. if (!cblk->length)
  909. return 0;
  910. for (y = 0; y < height + 2; y++)
  911. memset(t1->flags[y], 0, (width + 2) * sizeof(**t1->flags));
  912. cblk->data[cblk->length] = 0xff;
  913. cblk->data[cblk->length+1] = 0xff;
  914. ff_mqc_initdec(&t1->mqc, cblk->data);
  915. while (passno--) {
  916. switch(pass_t) {
  917. case 0:
  918. decode_sigpass(t1, width, height, bpno + 1, bandpos,
  919. bpass_csty_symbol && (clnpass_cnt >= 4),
  920. vert_causal_ctx_csty_symbol);
  921. break;
  922. case 1:
  923. decode_refpass(t1, width, height, bpno + 1);
  924. if (bpass_csty_symbol && clnpass_cnt >= 4)
  925. ff_mqc_initdec(&t1->mqc, cblk->data);
  926. break;
  927. case 2:
  928. decode_clnpass(s, t1, width, height, bpno + 1, bandpos,
  929. codsty->cblk_style & JPEG2000_CBLK_SEGSYM,
  930. vert_causal_ctx_csty_symbol);
  931. clnpass_cnt = clnpass_cnt + 1;
  932. if (bpass_csty_symbol && clnpass_cnt >= 4)
  933. ff_mqc_initdec(&t1->mqc, cblk->data);
  934. break;
  935. }
  936. pass_t++;
  937. if (pass_t == 3) {
  938. bpno--;
  939. pass_t = 0;
  940. }
  941. }
  942. return 0;
  943. }
  944. /* TODO: Verify dequantization for lossless case
  945. * comp->data can be float or int
  946. * band->stepsize can be float or int
  947. * depending on the type of DWT transformation.
  948. * see ISO/IEC 15444-1:2002 A.6.1 */
  949. /* Float dequantization of a codeblock.*/
  950. static void dequantization_float(int x, int y, Jpeg2000Cblk *cblk,
  951. Jpeg2000Component *comp,
  952. Jpeg2000T1Context *t1, Jpeg2000Band *band)
  953. {
  954. int i, j;
  955. int w = cblk->coord[0][1] - cblk->coord[0][0];
  956. for (j = 0; j < (cblk->coord[1][1] - cblk->coord[1][0]); ++j) {
  957. float *datap = &comp->f_data[(comp->coord[0][1] - comp->coord[0][0]) * (y + j) + x];
  958. int *src = t1->data[j];
  959. for (i = 0; i < w; ++i)
  960. datap[i] = src[i] * band->f_stepsize;
  961. }
  962. }
  963. /* Integer dequantization of a codeblock.*/
  964. static void dequantization_int(int x, int y, Jpeg2000Cblk *cblk,
  965. Jpeg2000Component *comp,
  966. Jpeg2000T1Context *t1, Jpeg2000Band *band)
  967. {
  968. int i, j;
  969. int w = cblk->coord[0][1] - cblk->coord[0][0];
  970. for (j = 0; j < (cblk->coord[1][1] - cblk->coord[1][0]); ++j) {
  971. int32_t *datap = &comp->i_data[(comp->coord[0][1] - comp->coord[0][0]) * (y + j) + x];
  972. int *src = t1->data[j];
  973. for (i = 0; i < w; ++i)
  974. datap[i] = (src[i] * band->i_stepsize + (1 << 14)) >> 15;
  975. }
  976. }
  977. /* Inverse ICT parameters in float and integer.
  978. * int value = (float value) * (1<<16) */
  979. static const float f_ict_params[4] = {
  980. 1.402f,
  981. 0.34413f,
  982. 0.71414f,
  983. 1.772f
  984. };
  985. static const int i_ict_params[4] = {
  986. 91881,
  987. 22553,
  988. 46802,
  989. 116130
  990. };
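  /* Undo the multiple component transform over a whole tile: the inverse ICT
   * (using the parameters above, in float or fixed point) for the 9/7 wavelet
   * and the inverse RCT for the 5/3 wavelet. */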
  991. static void mct_decode(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile)
  992. {
  993. int i, csize = 1;
  994. int32_t *src[3], i0, i1, i2;
  995. float *srcf[3], i0f, i1f, i2f;
  996. for (i = 1; i < 3; i++)
  997. if (tile->codsty[0].transform != tile->codsty[i].transform) {
  998. av_log(s->avctx, AV_LOG_ERROR, "Transforms mismatch, MCT not supported\n");
  999. return;
  1000. }
  1001. for (i = 0; i < 3; i++)
  1002. if (tile->codsty[0].transform == FF_DWT97)
  1003. srcf[i] = tile->comp[i].f_data;
  1004. else
  1005. src [i] = tile->comp[i].i_data;
  1006. for (i = 0; i < 2; i++)
  1007. csize *= tile->comp[0].coord[i][1] - tile->comp[0].coord[i][0];
  1008. switch (tile->codsty[0].transform) {
  1009. case FF_DWT97:
  1010. for (i = 0; i < csize; i++) {
  1011. i0f = *srcf[0] + (f_ict_params[0] * *srcf[2]);
  1012. i1f = *srcf[0] - (f_ict_params[1] * *srcf[1])
  1013. - (f_ict_params[2] * *srcf[2]);
  1014. i2f = *srcf[0] + (f_ict_params[3] * *srcf[1]);
  1015. *srcf[0]++ = i0f;
  1016. *srcf[1]++ = i1f;
  1017. *srcf[2]++ = i2f;
  1018. }
  1019. break;
  1020. case FF_DWT97_INT:
  1021. for (i = 0; i < csize; i++) {
  1022. i0 = *src[0] + (((i_ict_params[0] * *src[2]) + (1 << 15)) >> 16);
  1023. i1 = *src[0] - (((i_ict_params[1] * *src[1]) + (1 << 15)) >> 16)
  1024. - (((i_ict_params[2] * *src[2]) + (1 << 15)) >> 16);
  1025. i2 = *src[0] + (((i_ict_params[3] * *src[1]) + (1 << 15)) >> 16);
  1026. *src[0]++ = i0;
  1027. *src[1]++ = i1;
  1028. *src[2]++ = i2;
  1029. }
  1030. break;
  1031. case FF_DWT53:
  1032. for (i = 0; i < csize; i++) {
  1033. i1 = *src[0] - (*src[2] + *src[1] >> 2);
  1034. i0 = i1 + *src[2];
  1035. i2 = i1 + *src[1];
  1036. *src[0]++ = i0;
  1037. *src[1]++ = i1;
  1038. *src[2]++ = i2;
  1039. }
  1040. break;
  1041. }
  1042. }
  1043. static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
  1044. AVFrame *picture)
  1045. {
  1046. int compno, reslevelno, bandno;
  1047. int x, y;
  1048. uint8_t *line;
  1049. Jpeg2000T1Context t1;
  1050. /* Loop on tile components */
  1051. for (compno = 0; compno < s->ncomponents; compno++) {
  1052. Jpeg2000Component *comp = tile->comp + compno;
  1053. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  1054. /* Loop on resolution levels */
  1055. for (reslevelno = 0; reslevelno < codsty->nreslevels2decode; reslevelno++) {
  1056. Jpeg2000ResLevel *rlevel = comp->reslevel + reslevelno;
  1057. /* Loop on bands */
  1058. for (bandno = 0; bandno < rlevel->nbands; bandno++) {
  1059. int nb_precincts, precno;
  1060. Jpeg2000Band *band = rlevel->band + bandno;
  1061. int cblkno = 0, bandpos;
  1062. bandpos = bandno + (reslevelno > 0);
  1063. if (band->coord[0][0] == band->coord[0][1] ||
  1064. band->coord[1][0] == band->coord[1][1])
  1065. continue;
  1066. nb_precincts = rlevel->num_precincts_x * rlevel->num_precincts_y;
  1067. /* Loop on precincts */
  1068. for (precno = 0; precno < nb_precincts; precno++) {
  1069. Jpeg2000Prec *prec = band->prec + precno;
  1070. /* Loop on codeblocks */
  1071. for (cblkno = 0; cblkno < prec->nb_codeblocks_width * prec->nb_codeblocks_height; cblkno++) {
  1072. int x, y;
  1073. Jpeg2000Cblk *cblk = prec->cblk + cblkno;
  1074. decode_cblk(s, codsty, &t1, cblk,
  1075. cblk->coord[0][1] - cblk->coord[0][0],
  1076. cblk->coord[1][1] - cblk->coord[1][0],
  1077. bandpos);
  1078. x = cblk->coord[0][0];
  1079. y = cblk->coord[1][0];
  1080. if (codsty->transform == FF_DWT97)
  1081. dequantization_float(x, y, cblk, comp, &t1, band);
  1082. else
  1083. dequantization_int(x, y, cblk, comp, &t1, band);
  1084. } /* end cblk */
  1085. } /*end prec */
  1086. } /* end band */
  1087. } /* end reslevel */
  1088. /* inverse DWT */
  1089. ff_dwt_decode(&comp->dwt, codsty->transform == FF_DWT97 ? (void*)comp->f_data : (void*)comp->i_data);
  1090. } /*end comp */
  1091. /* inverse MCT transformation */
  1092. if (tile->codsty[0].mct)
  1093. mct_decode(s, tile);
  1094. if (s->cdef[0] < 0) {
  1095. for (x = 0; x < s->ncomponents; x++)
  1096. s->cdef[x] = x + 1;
  1097. if ((s->ncomponents & 1) == 0)
  1098. s->cdef[s->ncomponents-1] = 0;
  1099. }
  1100. if (s->precision <= 8) {
  1101. for (compno = 0; compno < s->ncomponents; compno++) {
  1102. Jpeg2000Component *comp = tile->comp + compno;
  1103. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  1104. float *datap = comp->f_data;
  1105. int32_t *i_datap = comp->i_data;
  1106. int cbps = s->cbps[compno];
  1107. int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
  1108. int planar = !!picture->data[2];
  1109. int pixelsize = planar ? 1 : s->ncomponents;
  1110. int plane = 0;
  1111. if (planar)
  1112. plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);
  1113. y = tile->comp[compno].coord[1][0] - s->image_offset_y;
  1114. line = picture->data[plane] + y / s->cdy[compno] * picture->linesize[plane];
  1115. for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
  1116. uint8_t *dst;
  1117. x = tile->comp[compno].coord[0][0] - s->image_offset_x;
  1118. dst = line + x / s->cdx[compno] * pixelsize + compno*!planar;
  1119. if (codsty->transform == FF_DWT97) {
  1120. for (; x < w; x += s->cdx[compno]) {
  1121. int val = lrintf(*datap) + (1 << (cbps - 1));
  1122. /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
  1123. val = av_clip(val, 0, (1 << cbps) - 1);
  1124. *dst = val << (8 - cbps);
  1125. datap++;
  1126. dst += pixelsize;
  1127. }
  1128. } else {
  1129. for (; x < w; x += s->cdx[compno]) {
  1130. int val = *i_datap + (1 << (cbps - 1));
  1131. /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
  1132. val = av_clip(val, 0, (1 << cbps) - 1);
  1133. *dst = val << (8 - cbps);
  1134. i_datap++;
  1135. dst += pixelsize;
  1136. }
  1137. }
  1138. line += picture->linesize[plane];
  1139. }
  1140. }
  1141. } else {
  1142. for (compno = 0; compno < s->ncomponents; compno++) {
  1143. Jpeg2000Component *comp = tile->comp + compno;
  1144. Jpeg2000CodingStyle *codsty = tile->codsty + compno;
  1145. float *datap = comp->f_data;
  1146. int32_t *i_datap = comp->i_data;
  1147. uint16_t *linel;
  1148. int cbps = s->cbps[compno];
  1149. int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
  1150. int planar = !!picture->data[2];
  1151. int pixelsize = planar ? 1 : s->ncomponents;
  1152. int plane = 0;
  1153. if (planar)
  1154. plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);
  1155. y = tile->comp[compno].coord[1][0] - s->image_offset_y;
  1156. linel = (uint16_t *)picture->data[plane] + y / s->cdy[compno] * (picture->linesize[plane] >> 1);
  1157. for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
  1158. uint16_t *dst;
  1159. x = tile->comp[compno].coord[0][0] - s->image_offset_x;
  1160. dst = linel + (x / s->cdx[compno] * pixelsize + compno*!planar);
  1161. if (codsty->transform == FF_DWT97) {
  1162. for (; x < w; x += s->cdx[compno]) {
  1163. int val = lrintf(*datap) + (1 << (cbps - 1));
  1164. /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
  1165. val = av_clip(val, 0, (1 << cbps) - 1);
  1166. /* align 12 bit values in little-endian mode */
  1167. *dst = val << (16 - cbps);
  1168. datap++;
  1169. dst += pixelsize;
  1170. }
  1171. } else {
  1172. for (; x < w; x += s->cdx[compno]) {
  1173. int val = *i_datap + (1 << (cbps - 1));
  1174. /* DC level shift and clip see ISO 15444-1:2002 G.1.2 */
  1175. val = av_clip(val, 0, (1 << cbps) - 1);
  1176. /* align 12 bit values in little-endian mode */
  1177. *dst = val << (16 - cbps);
  1178. i_datap++;
  1179. dst += pixelsize;
  1180. }
  1181. }
  1182. linel += picture->linesize[plane] >> 1;
  1183. }
  1184. }
  1185. }
  1186. return 0;
  1187. }
  1188. static void jpeg2000_dec_cleanup(Jpeg2000DecoderContext *s)
  1189. {
  1190. int tileno, compno;
  1191. for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++) {
  1192. if (s->tile[tileno].comp) {
  1193. for (compno = 0; compno < s->ncomponents; compno++) {
  1194. Jpeg2000Component *comp = s->tile[tileno].comp + compno;
  1195. Jpeg2000CodingStyle *codsty = s->tile[tileno].codsty + compno;
  1196. ff_jpeg2000_cleanup(comp, codsty);
  1197. }
  1198. av_freep(&s->tile[tileno].comp);
  1199. }
  1200. }
  1201. av_freep(&s->tile);
  1202. memset(s->codsty, 0, sizeof(s->codsty));
  1203. memset(s->qntsty, 0, sizeof(s->qntsty));
  1204. s->numXtiles = s->numYtiles = 0;
  1205. }
  1206. static int jpeg2000_read_main_headers(Jpeg2000DecoderContext *s)
  1207. {
  1208. Jpeg2000CodingStyle *codsty = s->codsty;
  1209. Jpeg2000QuantStyle *qntsty = s->qntsty;
  1210. uint8_t *properties = s->properties;
  1211. for (;;) {
  1212. int len, ret = 0;
  1213. uint16_t marker;
  1214. int oldpos;
  1215. if (bytestream2_get_bytes_left(&s->g) < 2) {
  1216. av_log(s->avctx, AV_LOG_ERROR, "Missing EOC\n");
  1217. break;
  1218. }
  1219. marker = bytestream2_get_be16u(&s->g);
  1220. oldpos = bytestream2_tell(&s->g);
  1221. if (marker == JPEG2000_SOD) {
  1222. Jpeg2000Tile *tile;
  1223. Jpeg2000TilePart *tp;
  1224. if (!s->tile) {
  1225. av_log(s->avctx, AV_LOG_ERROR, "Missing SIZ\n");
  1226. return AVERROR_INVALIDDATA;
  1227. }
  1228. if (s->curtileno < 0) {
  1229. av_log(s->avctx, AV_LOG_ERROR, "Missing SOT\n");
  1230. return AVERROR_INVALIDDATA;
  1231. }
  1232. tile = s->tile + s->curtileno;
  1233. tp = tile->tile_part + tile->tp_idx;
  1234. if (tp->tp_end < s->g.buffer) {
  1235. av_log(s->avctx, AV_LOG_ERROR, "Invalid tpend\n");
  1236. return AVERROR_INVALIDDATA;
  1237. }
  1238. bytestream2_init(&tp->tpg, s->g.buffer, tp->tp_end - s->g.buffer);
  1239. bytestream2_skip(&s->g, tp->tp_end - s->g.buffer);
  1240. continue;
  1241. }
  1242. if (marker == JPEG2000_EOC)
  1243. break;
  1244. len = bytestream2_get_be16(&s->g);
  1245. if (len < 2 || bytestream2_get_bytes_left(&s->g) < len - 2)
  1246. return AVERROR_INVALIDDATA;
  1247. switch (marker) {
  1248. case JPEG2000_SIZ:
  1249. ret = get_siz(s);
  1250. if (!s->tile)
  1251. s->numXtiles = s->numYtiles = 0;
  1252. break;
  1253. case JPEG2000_COC:
  1254. ret = get_coc(s, codsty, properties);
  1255. break;
  1256. case JPEG2000_COD:
  1257. ret = get_cod(s, codsty, properties);
  1258. break;
  1259. case JPEG2000_QCC:
  1260. ret = get_qcc(s, len, qntsty, properties);
  1261. break;
  1262. case JPEG2000_QCD:
  1263. ret = get_qcd(s, len, qntsty, properties);
  1264. break;
  1265. case JPEG2000_SOT:
  1266. if (!(ret = get_sot(s, len))) {
  1267. av_assert1(s->curtileno >= 0);
  1268. codsty = s->tile[s->curtileno].codsty;
  1269. qntsty = s->tile[s->curtileno].qntsty;
  1270. properties = s->tile[s->curtileno].properties;
  1271. }
  1272. break;
  1273. case JPEG2000_COM:
  1274. // the comment is ignored
  1275. bytestream2_skip(&s->g, len - 2);
  1276. break;
  1277. case JPEG2000_TLM:
  1278. // Tile-part lengths
  1279. ret = get_tlm(s, len);
  1280. break;
  1281. default:
  1282. av_log(s->avctx, AV_LOG_ERROR,
  1283. "unsupported marker 0x%.4X at pos 0x%X\n",
  1284. marker, bytestream2_tell(&s->g) - 4);
  1285. bytestream2_skip(&s->g, len - 2);
  1286. break;
  1287. }
  1288. if (bytestream2_tell(&s->g) - oldpos != len || ret) {
  1289. av_log(s->avctx, AV_LOG_ERROR,
  1290. "error during processing marker segment %.4x\n", marker);
  1291. return ret ? ret : -1;
  1292. }
  1293. }
  1294. return 0;
  1295. }
  1296. /* Read bit stream packets --> T2 operation. */
  1297. static int jpeg2000_read_bitstream_packets(Jpeg2000DecoderContext *s)
  1298. {
  1299. int ret = 0;
  1300. int tileno;
  1301. for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++) {
  1302. Jpeg2000Tile *tile = s->tile + tileno;
  1303. if (ret = init_tile(s, tileno))
  1304. return ret;
  1305. s->g = tile->tile_part[0].tpg;
  1306. if (ret = jpeg2000_decode_packets(s, tile))
  1307. return ret;
  1308. }
  1309. return 0;
  1310. }
  1311. static int jp2_find_codestream(Jpeg2000DecoderContext *s)
  1312. {
  1313. uint32_t atom_size, atom, atom_end;
  1314. int search_range = 10;
  1315. while (search_range
  1316. &&
  1317. bytestream2_get_bytes_left(&s->g) >= 8) {
  1318. atom_size = bytestream2_get_be32u(&s->g);
  1319. atom = bytestream2_get_be32u(&s->g);
  1320. atom_end = bytestream2_tell(&s->g) + atom_size - 8;
  1321. if (atom == JP2_CODESTREAM)
  1322. return 1;
  1323. if (bytestream2_get_bytes_left(&s->g) < atom_size || atom_end < atom_size)
  1324. return 0;
  1325. if (atom == JP2_HEADER &&
  1326. atom_size >= 16) {
  1327. uint32_t atom2_size, atom2, atom2_end;
  1328. do {
  1329. atom2_size = bytestream2_get_be32u(&s->g);
  1330. atom2 = bytestream2_get_be32u(&s->g);
  1331. atom2_end = bytestream2_tell(&s->g) + atom2_size - 8;
  1332. if (atom2_size < 8 || atom2_end > atom_end || atom2_end < atom2_size)
  1333. break;
  1334. if (atom2 == JP2_CODESTREAM) {
  1335. return 1;
  1336. } else if (atom2 == MKBETAG('c','o','l','r') && atom2_size >= 7) {
  1337. int method = bytestream2_get_byteu(&s->g);
  1338. bytestream2_skipu(&s->g, 2);
  1339. if (method == 1) {
  1340. s->colour_space = bytestream2_get_be32u(&s->g);
  1341. }
  1342. } else if (atom2 == MKBETAG('p','c','l','r') && atom2_size >= 6) {
  1343. int i, size, colour_count, colour_channels, colour_depth[3];
  1344. uint32_t r, g, b;
  1345. colour_count = bytestream2_get_be16u(&s->g);
  1346. colour_channels = bytestream2_get_byteu(&s->g);
  1347. // FIXME: Do not ignore channel_sign
  1348. colour_depth[0] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1349. colour_depth[1] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1350. colour_depth[2] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
  1351. size = (colour_depth[0] + 7 >> 3) * colour_count +
  1352. (colour_depth[1] + 7 >> 3) * colour_count +
  1353. (colour_depth[2] + 7 >> 3) * colour_count;
  1354. if (colour_count > 256 ||
  1355. colour_channels != 3 ||
  1356. colour_depth[0] > 16 ||
  1357. colour_depth[1] > 16 ||
  1358. colour_depth[2] > 16 ||
  1359. atom2_size < size) {
  1360. avpriv_request_sample(s->avctx, "Unknown palette");
  1361. bytestream2_seek(&s->g, atom2_end, SEEK_SET);
  1362. continue;
  1363. }
  1364. s->pal8 = 1;
  1365. for (i = 0; i < colour_count; i++) {
  1366. if (colour_depth[0] <= 8) {
  1367. r = bytestream2_get_byteu(&s->g) << 8 - colour_depth[0];
  1368. r |= r >> colour_depth[0];
  1369. } else {
  1370. r = bytestream2_get_be16u(&s->g) >> colour_depth[0] - 8;
  1371. }
  1372. if (colour_depth[1] <= 8) {
  1373. g = bytestream2_get_byteu(&s->g) << 8 - colour_depth[1];
  1374. g |= g >> colour_depth[1];
  1375. } else {
  1376. g = bytestream2_get_be16u(&s->g) >> colour_depth[1] - 8;
  1377. }
  1378. if (colour_depth[2] <= 8) {
  1379. b = bytestream2_get_byteu(&s->g) << 8 - colour_depth[2];
  1380. b |= b >> colour_depth[2];
  1381. } else {
  1382. b = bytestream2_get_be16u(&s->g) >> colour_depth[2] - 8;
  1383. }
  1384. s->palette[i] = 0xffu << 24 | r << 16 | g << 8 | b;
  1385. }
  1386. } else if (atom2 == MKBETAG('c','d','e','f') && atom2_size >= 2) {
  1387. int n = bytestream2_get_be16u(&s->g);
  1388. for (; n>0; n--) {
  1389. int cn = bytestream2_get_be16(&s->g);
  1390. int av_unused typ = bytestream2_get_be16(&s->g);
  1391. int asoc = bytestream2_get_be16(&s->g);
  1392. if (cn < 4 && asoc < 4)
  1393. s->cdef[cn] = asoc;
  1394. }
  1395. }
  1396. bytestream2_seek(&s->g, atom2_end, SEEK_SET);
  1397. } while (atom_end - atom2_end >= 8);
  1398. } else {
  1399. search_range--;
  1400. }
  1401. bytestream2_seek(&s->g, atom_end, SEEK_SET);
  1402. }
  1403. return 0;
  1404. }
  1405. static int jpeg2000_decode_frame(AVCodecContext *avctx, void *data,
  1406. int *got_frame, AVPacket *avpkt)
  1407. {
  1408. Jpeg2000DecoderContext *s = avctx->priv_data;
  1409. ThreadFrame frame = { .f = data };
  1410. AVFrame *picture = data;
  1411. int tileno, ret;
  1412. s->avctx = avctx;
  1413. bytestream2_init(&s->g, avpkt->data, avpkt->size);
  1414. s->curtileno = -1;
  1415. memset(s->cdef, -1, sizeof(s->cdef));
  1416. if (bytestream2_get_bytes_left(&s->g) < 2) {
  1417. ret = AVERROR_INVALIDDATA;
  1418. goto end;
  1419. }
  1420. // check if the image is in jp2 format
  1421. if (bytestream2_get_bytes_left(&s->g) >= 12 &&
  1422. (bytestream2_get_be32u(&s->g) == 12) &&
  1423. (bytestream2_get_be32u(&s->g) == JP2_SIG_TYPE) &&
  1424. (bytestream2_get_be32u(&s->g) == JP2_SIG_VALUE)) {
  1425. if (!jp2_find_codestream(s)) {
  1426. av_log(avctx, AV_LOG_ERROR,
  1427. "Could not find Jpeg2000 codestream atom.\n");
  1428. ret = AVERROR_INVALIDDATA;
  1429. goto end;
  1430. }
  1431. } else {
  1432. bytestream2_seek(&s->g, 0, SEEK_SET);
  1433. }
  1434. while (bytestream2_get_bytes_left(&s->g) >= 3 && bytestream2_peek_be16(&s->g) != JPEG2000_SOC)
  1435. bytestream2_skip(&s->g, 1);
  1436. if (bytestream2_get_be16u(&s->g) != JPEG2000_SOC) {
  1437. av_log(avctx, AV_LOG_ERROR, "SOC marker not present\n");
  1438. ret = AVERROR_INVALIDDATA;
  1439. goto end;
  1440. }
  1441. if (ret = jpeg2000_read_main_headers(s))
  1442. goto end;
  1443. /* get picture buffer */
  1444. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  1445. goto end;
  1446. picture->pict_type = AV_PICTURE_TYPE_I;
  1447. picture->key_frame = 1;
  1448. if (ret = jpeg2000_read_bitstream_packets(s))
  1449. goto end;
  1450. for (tileno = 0; tileno < s->numXtiles * s->numYtiles; tileno++)
  1451. if (ret = jpeg2000_decode_tile(s, s->tile + tileno, picture))
  1452. goto end;
  1453. jpeg2000_dec_cleanup(s);
  1454. *got_frame = 1;
  1455. if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
  1456. memcpy(picture->data[1], s->palette, 256 * sizeof(uint32_t));
  1457. return bytestream2_tell(&s->g);
  1458. end:
  1459. jpeg2000_dec_cleanup(s);
  1460. return ret;
  1461. }
  1462. static void jpeg2000_init_static_data(AVCodec *codec)
  1463. {
  1464. ff_jpeg2000_init_tier1_luts();
  1465. ff_mqc_init_context_tables();
  1466. }
  1467. #define OFFSET(x) offsetof(Jpeg2000DecoderContext, x)
  1468. #define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
  1469. static const AVOption options[] = {
  1470. { "lowres", "Lower the decoding resolution by a power of two",
  1471. OFFSET(reduction_factor), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, JPEG2000_MAX_RESLEVELS - 1, VD },
  1472. { NULL },
  1473. };
  1474. static const AVProfile profiles[] = {
  1475. { FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0, "JPEG 2000 codestream restriction 0" },
  1476. { FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1, "JPEG 2000 codestream restriction 1" },
  1477. { FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION, "JPEG 2000 no codestream restrictions" },
  1478. { FF_PROFILE_JPEG2000_DCINEMA_2K, "JPEG 2000 digital cinema 2K" },
  1479. { FF_PROFILE_JPEG2000_DCINEMA_4K, "JPEG 2000 digital cinema 4K" },
  1480. { FF_PROFILE_UNKNOWN },
  1481. };
  1482. static const AVClass jpeg2000_class = {
  1483. .class_name = "jpeg2000",
  1484. .item_name = av_default_item_name,
  1485. .option = options,
  1486. .version = LIBAVUTIL_VERSION_INT,
  1487. };
  1488. AVCodec ff_jpeg2000_decoder = {
  1489. .name = "jpeg2000",
  1490. .long_name = NULL_IF_CONFIG_SMALL("JPEG 2000"),
  1491. .type = AVMEDIA_TYPE_VIDEO,
  1492. .id = AV_CODEC_ID_JPEG2000,
  1493. .capabilities = CODEC_CAP_FRAME_THREADS,
  1494. .priv_data_size = sizeof(Jpeg2000DecoderContext),
  1495. .init_static_data = jpeg2000_init_static_data,
  1496. .decode = jpeg2000_decode_frame,
  1497. .priv_class = &jpeg2000_class,
  1498. .max_lowres = 5,
  1499. .profiles = NULL_IF_CONFIG_SMALL(profiles)
  1500. };