/*
 * JPEG 2000 encoding support via OpenJPEG
 * Copyright (c) 2011 Michael Bradshaw <mjbshaw gmail com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * JPEG 2000 encoder using libopenjpeg
 */

#define OPJ_STATIC
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "internal.h"
#if HAVE_OPENJPEG_1_5_OPENJPEG_H
# include <openjpeg-1.5/openjpeg.h>
#else
# include <openjpeg.h>
#endif
typedef struct {
    AVClass *avclass;
    opj_image_t *image;
    opj_cio_t *stream;
    opj_cparameters_t enc_params;
    opj_cinfo_t *compress;
    opj_event_mgr_t event_mgr;
    int format;
    int profile;
    int prog_order;
    int cinema_mode;
    int numresolution;
    int numlayers;
    int disto_alloc;
    int fixed_alloc;
    int fixed_quality;
} LibOpenJPEGContext;
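
/* Forward OpenJPEG's error/warning/info messages to the FFmpeg logger; the
 * opaque data pointer is the AVCodecContext registered via opj_set_event_mgr(). */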
static void error_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_ERROR, "%s\n", msg);
}

static void warning_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_WARNING, "%s\n", msg);
}

static void info_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_DEBUG, "%s\n", msg);
}
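
/* Allocate an opj_image_t whose component layout (count, chroma subsampling,
 * bit depth) matches avctx->pix_fmt and size it against the reference grid
 * defined by the encoder parameters. Returns NULL on an unsupported pixel
 * format or allocation failure. */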
static opj_image_t *mj2_create_image(AVCodecContext *avctx, opj_cparameters_t *parameters)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    opj_image_cmptparm_t cmptparm[4] = {{0}};
    opj_image_t *img;
    int i;
    int sub_dx[4];
    int sub_dy[4];
    int numcomps;
    OPJ_COLOR_SPACE color_space = CLRSPC_UNKNOWN;

    sub_dx[0] = sub_dx[3] = 1;
    sub_dy[0] = sub_dy[3] = 1;
    sub_dx[1] = sub_dx[2] = 1 << desc->log2_chroma_w;
    sub_dy[1] = sub_dy[2] = 1 << desc->log2_chroma_h;

    numcomps = desc->nb_components;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YA8:
    case AV_PIX_FMT_GRAY16:
        color_space = CLRSPC_GRAY;
        break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
    case AV_PIX_FMT_XYZ12:
        color_space = CLRSPC_SRGB;
        break;
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUVA420P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA444P16:
        color_space = CLRSPC_SYCC;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The requested pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return NULL;
    }

    for (i = 0; i < numcomps; i++) {
        cmptparm[i].prec = desc->comp[i].depth_minus1 + 1;
        cmptparm[i].bpp  = desc->comp[i].depth_minus1 + 1;
        cmptparm[i].sgnd = 0;
        cmptparm[i].dx   = sub_dx[i];
        cmptparm[i].dy   = sub_dy[i];
        cmptparm[i].w    = (avctx->width  + sub_dx[i] - 1) / sub_dx[i];
        cmptparm[i].h    = (avctx->height + sub_dy[i] - 1) / sub_dy[i];
    }

    img = opj_image_create(numcomps, cmptparm, color_space);
    if (!img) {
        // guard against allocation failure before touching the image fields
        return NULL;
    }

    // x0, y0 is the top left corner of the image
    // x1, y1 is the width, height of the reference grid
    img->x0 = 0;
    img->y0 = 0;
    img->x1 = (avctx->width  - 1) * parameters->subsampling_dx + 1;
    img->y1 = (avctx->height - 1) * parameters->subsampling_dy + 1;

    return img;
}
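
/* One-time encoder setup: translate the AVOptions into opj_cparameters_t,
 * apply the digital-cinema constraints when a cinema mode is selected,
 * create the compressor, the opj image and the memory CIO stream, and
 * register the log callbacks. */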
static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int err = AVERROR(ENOMEM);

    opj_set_default_encoder_parameters(&ctx->enc_params);

    ctx->enc_params.cp_rsiz          = ctx->profile;
    ctx->enc_params.mode             = !!avctx->global_quality;
    ctx->enc_params.cp_cinema        = ctx->cinema_mode;
    ctx->enc_params.prog_order       = ctx->prog_order;
    ctx->enc_params.numresolution    = ctx->numresolution;
    ctx->enc_params.cp_disto_alloc   = ctx->disto_alloc;
    ctx->enc_params.cp_fixed_alloc   = ctx->fixed_alloc;
    ctx->enc_params.cp_fixed_quality = ctx->fixed_quality;
    ctx->enc_params.tcp_numlayers    = ctx->numlayers;
    ctx->enc_params.tcp_rates[0]     = FFMAX(avctx->compression_level, 0) * 2;

    if (ctx->cinema_mode > 0) {
        ctx->enc_params.irreversible = 1;
        ctx->enc_params.tcp_mct = 1;
        ctx->enc_params.tile_size_on = 0;
        /* no subsampling */
        ctx->enc_params.cp_tdx = 1;
        ctx->enc_params.cp_tdy = 1;
        ctx->enc_params.subsampling_dx = 1;
        ctx->enc_params.subsampling_dy = 1;
        /* Tile and Image shall be at (0,0) */
        ctx->enc_params.cp_tx0 = 0;
        ctx->enc_params.cp_ty0 = 0;
        ctx->enc_params.image_offset_x0 = 0;
        ctx->enc_params.image_offset_y0 = 0;
        /* Codeblock size = 32x32 */
        ctx->enc_params.cblockw_init = 32;
        ctx->enc_params.cblockh_init = 32;
        ctx->enc_params.csty |= 0x01;
        /* No ROI */
        ctx->enc_params.roi_compno = -1;

        if (ctx->enc_params.prog_order != CPRL) {
            av_log(avctx, AV_LOG_ERROR, "prog_order forced to CPRL\n");
            ctx->enc_params.prog_order = CPRL;
        }
        ctx->enc_params.tp_flag = 'C';
        ctx->enc_params.tp_on = 1;
    }

    ctx->compress = opj_create_compress(ctx->format);
    if (!ctx->compress) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the compressor\n");
        return AVERROR(ENOMEM);
    }

    ctx->image = mj2_create_image(avctx, &ctx->enc_params);
    if (!ctx->image) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the mj2 image\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    opj_setup_encoder(ctx->compress, &ctx->enc_params, ctx->image);

    ctx->stream = opj_cio_open((opj_common_ptr) ctx->compress, NULL, 0);
    if (!ctx->stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame) {
        av_log(avctx, AV_LOG_ERROR, "Error allocating coded frame\n");
        goto fail;
    }

    memset(&ctx->event_mgr, 0, sizeof(opj_event_mgr_t));
    ctx->event_mgr.info_handler    = info_callback;
    ctx->event_mgr.error_handler   = error_callback;
    ctx->event_mgr.warning_handler = warning_callback;
    opj_set_event_mgr((opj_common_ptr) ctx->compress, &ctx->event_mgr, avctx);

    return 0;

fail:
    opj_cio_close(ctx->stream);
    ctx->stream = NULL;
    opj_destroy_compress(ctx->compress);
    ctx->compress = NULL;
    opj_image_destroy(ctx->image);
    ctx->image = NULL;
    av_freep(&avctx->coded_frame);
    return err;
}
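
/* Copy helpers: each one de-interleaves (packed) or copies (planar) the
 * AVFrame payload into the per-component int arrays that OpenJPEG expects,
 * replicating the last column/row to pad up to the component dimensions.
 * They return 1 on success and 0 when the frame's linesize is too small. */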
static int libopenjpeg_copy_packed8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[0] + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame->data[0][frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - image->comps[compno].w];
            }
        }
    }

    return 1;
}
// for XYZ 12 bit
static int libopenjpeg_copy_packed12(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int *image_line;
    int frame_index;
    const int numcomps  = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index] >> 4;
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - image->comps[compno].w];
            }
        }
    }

    return 1;
}
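
/* Packed 16-bit variant (RGB48/RGBA64): same layout as above but reads
 * 16-bit samples and copies them through unshifted. */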
static int libopenjpeg_copy_packed16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int *image_line;
    int frame_index;
    const int numcomps  = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - image->comps[compno].w];
            }
        }
    }

    return 1;
}
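
/* Planar 8-bit variant: one source plane per component, honouring the
 * per-component subsampling factors dx/dy. */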
static int libopenjpeg_copy_unpacked8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int width;
    int height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width  = avctx->width  / image->comps[compno].dx;
        height = avctx->height / image->comps[compno].dy;
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[compno];
            for (x = 0; x < width; ++x)
                image_line[x] = frame->data[compno][frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - image->comps[compno].w];
            }
        }
    }

    return 1;
}
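
/* Planar 9- to 16-bit variant: identical to the 8-bit path but indexes the
 * planes as uint16_t, so linesize is halved when computing the row offset. */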
static int libopenjpeg_copy_unpacked16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int width;
    int height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width     = avctx->width  / image->comps[compno].dx;
        height    = avctx->height / image->comps[compno].dy;
        frame_ptr = (uint16_t *)frame->data[compno];
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[compno] / 2);
            for (x = 0; x < width; ++x)
                image_line[x] = frame_ptr[frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - image->comps[compno].w];
            }
        }
    }

    return 1;
}
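
/* Encode one frame: pick the copy helper that matches the pixel format
 * (GBR planar input is remapped to RGB order first), run opj_encode() into
 * the memory CIO stream and copy the resulting codestream into the packet. */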
static int libopenjpeg_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                    const AVFrame *frame, int *got_packet)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    opj_cinfo_t *compress   = ctx->compress;
    opj_image_t *image      = ctx->image;
    opj_cio_t *stream       = ctx->stream;
    int cpyresult = 0;
    int ret, len;
    AVFrame *gbrframe;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_YA8:
        cpyresult = libopenjpeg_copy_packed8(avctx, frame, image);
        break;
    case AV_PIX_FMT_XYZ12:
        cpyresult = libopenjpeg_copy_packed12(avctx, frame, image);
        break;
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
        cpyresult = libopenjpeg_copy_packed16(avctx, frame, image);
        break;
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        gbrframe = av_frame_clone(frame);
        if (!gbrframe)
            return AVERROR(ENOMEM);
        gbrframe->data[0] = frame->data[2]; // swap to be rgb
        gbrframe->data[1] = frame->data[0];
        gbrframe->data[2] = frame->data[1];
        gbrframe->linesize[0] = frame->linesize[2];
        gbrframe->linesize[1] = frame->linesize[0];
        gbrframe->linesize[2] = frame->linesize[1];
        if (avctx->pix_fmt == AV_PIX_FMT_GBR24P) {
            cpyresult = libopenjpeg_copy_unpacked8(avctx, gbrframe, image);
        } else {
            cpyresult = libopenjpeg_copy_unpacked16(avctx, gbrframe, image);
        }
        av_frame_free(&gbrframe);
        break;
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
        cpyresult = libopenjpeg_copy_unpacked8(avctx, frame, image);
        break;
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUVA444P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA420P16:
        cpyresult = libopenjpeg_copy_unpacked16(avctx, frame, image);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The frame's pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return AVERROR(EINVAL);
    }

    if (!cpyresult) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not copy the frame data to the internal image buffer\n");
        return -1;
    }

    cio_seek(stream, 0);
    if (!opj_encode(compress, stream, image, NULL)) {
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        return -1;
    }

    len = cio_tell(stream);
    if ((ret = ff_alloc_packet2(avctx, pkt, len)) < 0) {
        return ret;
    }

    memcpy(pkt->data, stream->buffer, len);
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    return 0;
}
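
/* Free the CIO stream, compressor, opj image and the coded_frame allocated
 * in init. */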
static av_cold int libopenjpeg_encode_close(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;

    opj_cio_close(ctx->stream);
    ctx->stream = NULL;
    opj_destroy_compress(ctx->compress);
    ctx->compress = NULL;
    opj_image_destroy(ctx->image);
    ctx->image = NULL;
    av_freep(&avctx->coded_frame);
    return 0;
}
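
/* Encoder private options, exposed through the AVOptions API (all are
 * encoding-only video parameters). */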
#define OFFSET(x) offsetof(LibOpenJPEGContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    { "format",        "Codec Format",      OFFSET(format),        AV_OPT_TYPE_INT,   { .i64 = CODEC_JP2   }, CODEC_J2K, CODEC_JP2,   VE, "format"      },
    { "j2k",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CODEC_J2K   }, 0,         0,           VE, "format"      },
    { "jp2",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CODEC_JP2   }, 0,         0,           VE, "format"      },
    { "profile",       NULL,                OFFSET(profile),       AV_OPT_TYPE_INT,   { .i64 = STD_RSIZ    }, STD_RSIZ,  CINEMA4K,    VE, "profile"     },
    { "jpeg2000",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = STD_RSIZ    }, 0,         0,           VE, "profile"     },
    { "cinema2k",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CINEMA2K    }, 0,         0,           VE, "profile"     },
    { "cinema4k",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CINEMA4K    }, 0,         0,           VE, "profile"     },
    { "cinema_mode",   "Digital Cinema",    OFFSET(cinema_mode),   AV_OPT_TYPE_INT,   { .i64 = OFF         }, OFF,       CINEMA4K_24, VE, "cinema_mode" },
    { "off",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OFF         }, 0,         0,           VE, "cinema_mode" },
    { "2k_24",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CINEMA2K_24 }, 0,         0,           VE, "cinema_mode" },
    { "2k_48",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CINEMA2K_48 }, 0,         0,           VE, "cinema_mode" },
    { "4k_24",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CINEMA4K_24 }, 0,         0,           VE, "cinema_mode" },
    { "prog_order",    "Progression Order", OFFSET(prog_order),    AV_OPT_TYPE_INT,   { .i64 = LRCP        }, LRCP,      CPRL,        VE, "prog_order"  },
    { "lrcp",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = LRCP        }, 0,         0,           VE, "prog_order"  },
    { "rlcp",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = RLCP        }, 0,         0,           VE, "prog_order"  },
    { "rpcl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = RPCL        }, 0,         0,           VE, "prog_order"  },
    { "pcrl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = PCRL        }, 0,         0,           VE, "prog_order"  },
    { "cprl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = CPRL        }, 0,         0,           VE, "prog_order"  },
    { "numresolution", NULL,                OFFSET(numresolution), AV_OPT_TYPE_INT,   { .i64 = 6           }, 1,         INT_MAX,     VE                },
    { "numlayers",     NULL,                OFFSET(numlayers),     AV_OPT_TYPE_INT,   { .i64 = 1           }, 1,         10,          VE                },
    { "disto_alloc",   NULL,                OFFSET(disto_alloc),   AV_OPT_TYPE_INT,   { .i64 = 1           }, 0,         1,           VE                },
    { "fixed_alloc",   NULL,                OFFSET(fixed_alloc),   AV_OPT_TYPE_INT,   { .i64 = 0           }, 0,         1,           VE                },
    { "fixed_quality", NULL,                OFFSET(fixed_quality), AV_OPT_TYPE_INT,   { .i64 = 0           }, 0,         1,           VE                },
    { NULL },
};
static const AVClass openjpeg_class = {
    .class_name = "libopenjpeg",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_libopenjpeg_encoder = {
    .name           = "libopenjpeg",
    .long_name      = NULL_IF_CONFIG_SMALL("OpenJPEG JPEG 2000"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_JPEG2000,
    .priv_data_size = sizeof(LibOpenJPEGContext),
    .init           = libopenjpeg_encode_init,
    .encode2        = libopenjpeg_encode_frame,
    .close          = libopenjpeg_encode_close,
    .capabilities   = CODEC_CAP_FRAME_THREADS | CODEC_CAP_INTRA_ONLY,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA, AV_PIX_FMT_RGB48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_GBR24P,
        AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
        AV_PIX_FMT_GRAY8, AV_PIX_FMT_YA8, AV_PIX_FMT_GRAY16,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVA422P,
        AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUVA444P,
        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
        AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
        AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
        AV_PIX_FMT_XYZ12,
        AV_PIX_FMT_NONE
    },
    .priv_class     = &openjpeg_class,
};