/*
 * JPEG 2000 encoding support via OpenJPEG
 * Copyright (c) 2011 Michael Bradshaw <mjbshaw gmail com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * JPEG 2000 encoder using libopenjpeg
 */

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "internal.h"

#include <openjpeg.h>

typedef struct LibOpenJPEGContext {
    AVClass *avclass;
    opj_cparameters_t enc_params;
    int format;
    int profile;
    int prog_order;
    int cinema_mode;
    int numresolution;
    int irreversible;
    int disto_alloc;
    int fixed_quality;
} LibOpenJPEGContext;

static void error_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_ERROR, "%s\n", msg);
}

static void warning_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_WARNING, "%s\n", msg);
}

static void info_callback(const char *msg, void *data)
{
    av_log(data, AV_LOG_DEBUG, "%s\n", msg);
}
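
/*
 * OpenJPEG emits its output through a user-supplied stream. PacketWriter
 * tracks the current write position inside the destination AVPacket so that
 * the write/skip/seek callbacks below can grow the packet on demand and let
 * the encoder write the codestream directly into it.
 */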

typedef struct PacketWriter {
    int pos;
    AVPacket *packet;
} PacketWriter;

static OPJ_SIZE_T stream_write(void *out_buffer, OPJ_SIZE_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    int remaining = packet->size - writer->pos;
    if (nb_bytes > remaining) {
        OPJ_SIZE_T needed = nb_bytes - remaining;
        int max_growth = INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE - packet->size;
        if (needed > max_growth) {
            return (OPJ_SIZE_T)-1;
        }
        if (av_grow_packet(packet, (int)needed)) {
            return (OPJ_SIZE_T)-1;
        }
    }
    memcpy(packet->data + writer->pos, out_buffer, nb_bytes);
    writer->pos += (int)nb_bytes;
    return nb_bytes;
}

static OPJ_OFF_T stream_skip(OPJ_OFF_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    if (nb_bytes < 0) {
        if (writer->pos == 0) {
            return (OPJ_SIZE_T)-1;
        }
        if (nb_bytes + writer->pos < 0) {
            nb_bytes = -writer->pos;
        }
    } else {
        int remaining = packet->size - writer->pos;
        if (nb_bytes > remaining) {
            OPJ_SIZE_T needed = nb_bytes - remaining;
            int max_growth = INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE - packet->size;
            if (needed > max_growth) {
                return (OPJ_SIZE_T)-1;
            }
            if (av_grow_packet(packet, (int)needed)) {
                return (OPJ_SIZE_T)-1;
            }
        }
    }
    writer->pos += (int)nb_bytes;
    return nb_bytes;
}

static OPJ_BOOL stream_seek(OPJ_OFF_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    if (nb_bytes < 0) {
        return OPJ_FALSE;
    }
    if (nb_bytes > packet->size) {
        if (nb_bytes > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE ||
            av_grow_packet(packet, (int)nb_bytes - packet->size)) {
            return OPJ_FALSE;
        }
    }
    writer->pos = (int)nb_bytes;
    return OPJ_TRUE;
}
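
/*
 * Force the encoder settings required for Digital Cinema (DCI) streams:
 * single tile at (0, 0), 32x32 code blocks, CPRL progression, no ROI,
 * no subsampling, and the irreversible 9/7 transform with MCT.
 */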

static void cinema_parameters(opj_cparameters_t *p)
{
    p->tile_size_on = 0;
    p->cp_tdx = 1;
    p->cp_tdy = 1;

    /* Tile part */
    p->tp_flag = 'C';
    p->tp_on = 1;

    /* Tile and Image shall be at (0, 0) */
    p->cp_tx0 = 0;
    p->cp_ty0 = 0;
    p->image_offset_x0 = 0;
    p->image_offset_y0 = 0;

    /* Codeblock size = 32 * 32 */
    p->cblockw_init = 32;
    p->cblockh_init = 32;
    p->csty |= 0x01;

    /* The progression order shall be CPRL */
    p->prog_order = OPJ_CPRL;

    /* No ROI */
    p->roi_compno = -1;

    /* No subsampling */
    p->subsampling_dx = 1;
    p->subsampling_dy = 1;

    /* 9-7 transform */
    p->irreversible = 1;

    p->tcp_mct = 1;
}
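
/*
 * Allocate an opj_image_t matching the codec context: pick the OpenJPEG
 * color space from the pixel format and size each component according to
 * its bit depth and chroma subsampling factors.
 */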

static opj_image_t *mj2_create_image(AVCodecContext *avctx, opj_cparameters_t *parameters)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    opj_image_cmptparm_t cmptparm[4] = {{0}};
    opj_image_t *img;
    int i;
    int sub_dx[4];
    int sub_dy[4];
    int numcomps;
    OPJ_COLOR_SPACE color_space = OPJ_CLRSPC_UNKNOWN;

    sub_dx[0] = sub_dx[3] = 1;
    sub_dy[0] = sub_dy[3] = 1;
    sub_dx[1] = sub_dx[2] = 1 << desc->log2_chroma_w;
    sub_dy[1] = sub_dy[2] = 1 << desc->log2_chroma_h;

    numcomps = desc->nb_components;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YA8:
    case AV_PIX_FMT_GRAY10:
    case AV_PIX_FMT_GRAY12:
    case AV_PIX_FMT_GRAY14:
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YA16:
        color_space = OPJ_CLRSPC_GRAY;
        break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
    case AV_PIX_FMT_XYZ12:
        color_space = OPJ_CLRSPC_SRGB;
        break;
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUVA420P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA444P16:
        color_space = OPJ_CLRSPC_SYCC;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The requested pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return NULL;
    }

    for (i = 0; i < numcomps; i++) {
        cmptparm[i].prec = desc->comp[i].depth;
        cmptparm[i].bpp  = desc->comp[i].depth;
        cmptparm[i].sgnd = 0;
        cmptparm[i].dx = sub_dx[i];
        cmptparm[i].dy = sub_dy[i];
        cmptparm[i].w = (avctx->width + sub_dx[i] - 1) / sub_dx[i];
        cmptparm[i].h = (avctx->height + sub_dy[i] - 1) / sub_dy[i];
    }

    img = opj_image_create(numcomps, cmptparm, color_space);
    if (!img)
        return NULL;

    // x0, y0 is the top left corner of the image
    // x1, y1 is the width, height of the reference grid
    img->x0 = 0;
    img->y0 = 0;
    img->x1 = (avctx->width  - 1) * parameters->subsampling_dx + 1;
    img->y1 = (avctx->height - 1) * parameters->subsampling_dy + 1;
    return img;
}
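
/*
 * Translate the AVOptions into OpenJPEG encoder parameters, reject
 * conflicting cinema_mode/profile combinations, and derive a resolution
 * count that fits the frame dimensions.
 */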

static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int err = 0;

    opj_set_default_encoder_parameters(&ctx->enc_params);

    switch (ctx->cinema_mode) {
    case OPJ_CINEMA2K_24:
        ctx->enc_params.rsiz = OPJ_PROFILE_CINEMA_2K;
        ctx->enc_params.max_cs_size = OPJ_CINEMA_24_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_24_COMP;
        break;
    case OPJ_CINEMA2K_48:
        ctx->enc_params.rsiz = OPJ_PROFILE_CINEMA_2K;
        ctx->enc_params.max_cs_size = OPJ_CINEMA_48_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_48_COMP;
        break;
    case OPJ_CINEMA4K_24:
        ctx->enc_params.rsiz = OPJ_PROFILE_CINEMA_4K;
        ctx->enc_params.max_cs_size = OPJ_CINEMA_24_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_24_COMP;
        break;
    }

    switch (ctx->profile) {
    case OPJ_CINEMA2K:
        if (ctx->enc_params.rsiz == OPJ_PROFILE_CINEMA_4K) {
            err = AVERROR(EINVAL);
            break;
        }
        ctx->enc_params.rsiz = OPJ_PROFILE_CINEMA_2K;
        break;
    case OPJ_CINEMA4K:
        if (ctx->enc_params.rsiz == OPJ_PROFILE_CINEMA_2K) {
            err = AVERROR(EINVAL);
            break;
        }
        ctx->enc_params.rsiz = OPJ_PROFILE_CINEMA_4K;
        break;
    }

    if (err) {
        av_log(avctx, AV_LOG_ERROR,
               "Invalid parameter pairing: cinema_mode and profile conflict.\n");
        return err;
    }

    if (!ctx->numresolution) {
        ctx->numresolution = 6;
        while (FFMIN(avctx->width, avctx->height) >> ctx->numresolution < 1)
            ctx->numresolution--;
    }

    ctx->enc_params.prog_order = ctx->prog_order;
    ctx->enc_params.numresolution = ctx->numresolution;
    ctx->enc_params.irreversible = ctx->irreversible;
    ctx->enc_params.cp_disto_alloc = ctx->disto_alloc;
    ctx->enc_params.cp_fixed_quality = ctx->fixed_quality;
    ctx->enc_params.tcp_numlayers = 1;
    ctx->enc_params.tcp_rates[0] = FFMAX(avctx->compression_level, 0) * 2;

    if (ctx->cinema_mode > 0) {
        cinema_parameters(&ctx->enc_params);
    }

    return 0;
}
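
/*
 * The copy helpers below expand frame data into OpenJPEG's planar int
 * buffers: the packed variants de-interleave RGB/XYZ-style layouts, the
 * unpacked variants handle planar YUV/GBR. The trailing loops replicate the
 * last column and row so every component fills its full (possibly padded)
 * grid.
 */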

static int libopenjpeg_copy_packed8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[0] + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame->data[0][frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}

// for XYZ 12 bit
static int libopenjpeg_copy_packed12(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index] >> 4;
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}

static int libopenjpeg_copy_packed16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}

static int libopenjpeg_copy_unpacked8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int width;
    int height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width  = (avctx->width + image->comps[compno].dx - 1) / image->comps[compno].dx;
        height = (avctx->height + image->comps[compno].dy - 1) / image->comps[compno].dy;
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[compno];
            for (x = 0; x < width; ++x)
                image_line[x] = frame->data[compno][frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}

static int libopenjpeg_copy_unpacked16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x;
    int y;
    int width;
    int height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            av_log(avctx, AV_LOG_ERROR, "Error: frame's linesize is too small for the image\n");
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width     = (avctx->width + image->comps[compno].dx - 1) / image->comps[compno].dx;
        height    = (avctx->height + image->comps[compno].dy - 1) / image->comps[compno].dy;
        frame_ptr = (uint16_t *)frame->data[compno];
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[compno] / 2);
            for (x = 0; x < width; ++x)
                image_line[x] = frame_ptr[frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
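
/*
 * Encode one frame: build the opj_image_t, copy the frame data into it,
 * then run the OpenJPEG compressor over a custom stream that writes
 * straight into the output packet.
 */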

static int libopenjpeg_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                    const AVFrame *frame, int *got_packet)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int ret;
    AVFrame *gbrframe;
    int cpyresult = 0;
    PacketWriter writer   = { 0 };
    opj_codec_t *compress = NULL;
    opj_stream_t *stream  = NULL;
    opj_image_t *image    = mj2_create_image(avctx, &ctx->enc_params);
    if (!image) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the mj2 image\n");
        ret = AVERROR(EINVAL);
        goto done;
    }

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_YA8:
        cpyresult = libopenjpeg_copy_packed8(avctx, frame, image);
        break;
    case AV_PIX_FMT_XYZ12:
        cpyresult = libopenjpeg_copy_packed12(avctx, frame, image);
        break;
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_YA16:
        cpyresult = libopenjpeg_copy_packed16(avctx, frame, image);
        break;
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        gbrframe = av_frame_clone(frame);
        if (!gbrframe) {
            ret = AVERROR(ENOMEM);
            goto done;
        }
        gbrframe->data[0] = frame->data[2]; // swap to be rgb
        gbrframe->data[1] = frame->data[0];
        gbrframe->data[2] = frame->data[1];
        gbrframe->linesize[0] = frame->linesize[2];
        gbrframe->linesize[1] = frame->linesize[0];
        gbrframe->linesize[2] = frame->linesize[1];
        if (avctx->pix_fmt == AV_PIX_FMT_GBR24P) {
            cpyresult = libopenjpeg_copy_unpacked8(avctx, gbrframe, image);
        } else {
            cpyresult = libopenjpeg_copy_unpacked16(avctx, gbrframe, image);
        }
        av_frame_free(&gbrframe);
        break;
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
        cpyresult = libopenjpeg_copy_unpacked8(avctx, frame, image);
        break;
    case AV_PIX_FMT_GRAY10:
    case AV_PIX_FMT_GRAY12:
    case AV_PIX_FMT_GRAY14:
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUVA444P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA420P16:
        cpyresult = libopenjpeg_copy_unpacked16(avctx, frame, image);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The frame's pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        ret = AVERROR(EINVAL);
        goto done;
        break;
    }

    if (!cpyresult) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not copy the frame data to the internal image buffer\n");
        ret = -1;
        goto done;
    }

    if ((ret = ff_alloc_packet2(avctx, pkt, 1024, 0)) < 0) {
        goto done;
    }

    compress = opj_create_compress(ctx->format);
    if (!compress) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the compressor\n");
        ret = AVERROR(ENOMEM);
        goto done;
    }

    if (!opj_set_error_handler(compress, error_callback, avctx) ||
        !opj_set_warning_handler(compress, warning_callback, avctx) ||
        !opj_set_info_handler(compress, info_callback, avctx)) {
        av_log(avctx, AV_LOG_ERROR, "Error setting the compressor handlers\n");
        ret = AVERROR_EXTERNAL;
        goto done;
    }

    if (!opj_setup_encoder(compress, &ctx->enc_params, image)) {
        av_log(avctx, AV_LOG_ERROR, "Error setting up the compressor\n");
        ret = AVERROR_EXTERNAL;
        goto done;
    }

    stream = opj_stream_default_create(OPJ_STREAM_WRITE);
    if (!stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        ret = AVERROR(ENOMEM);
        goto done;
    }

    writer.packet = pkt;
    opj_stream_set_write_function(stream, stream_write);
    opj_stream_set_skip_function(stream, stream_skip);
    opj_stream_set_seek_function(stream, stream_seek);
    opj_stream_set_user_data(stream, &writer, NULL);

    if (!opj_start_compress(compress, image, stream) ||
        !opj_encode(compress, stream) ||
        !opj_end_compress(compress, stream)) {
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        ret = AVERROR_EXTERNAL;
        goto done;
    }

    av_shrink_packet(pkt, writer.pos);

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    ret = 0;

done:
    opj_stream_destroy(stream);
    opj_destroy_codec(compress);
    opj_image_destroy(image);
    return ret;
}

#define OFFSET(x) offsetof(LibOpenJPEGContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    { "format",        "Codec Format",      OFFSET(format),        AV_OPT_TYPE_INT,   { .i64 = OPJ_CODEC_JP2   }, OPJ_CODEC_J2K, OPJ_CODEC_JP2,   VE, "format"      },
    { "j2k",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CODEC_J2K   }, 0,             0,               VE, "format"      },
    { "jp2",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CODEC_JP2   }, 0,             0,               VE, "format"      },
    { "profile",       NULL,                OFFSET(profile),       AV_OPT_TYPE_INT,   { .i64 = OPJ_STD_RSIZ    }, OPJ_STD_RSIZ,  OPJ_CINEMA4K,    VE, "profile"     },
    { "jpeg2000",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_STD_RSIZ    }, 0,             0,               VE, "profile"     },
    { "cinema2k",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CINEMA2K    }, 0,             0,               VE, "profile"     },
    { "cinema4k",      NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CINEMA4K    }, 0,             0,               VE, "profile"     },
    { "cinema_mode",   "Digital Cinema",    OFFSET(cinema_mode),   AV_OPT_TYPE_INT,   { .i64 = OPJ_OFF         }, OPJ_OFF,       OPJ_CINEMA4K_24, VE, "cinema_mode" },
    { "off",           NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_OFF         }, 0,             0,               VE, "cinema_mode" },
    { "2k_24",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CINEMA2K_24 }, 0,             0,               VE, "cinema_mode" },
    { "2k_48",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CINEMA2K_48 }, 0,             0,               VE, "cinema_mode" },
    { "4k_24",         NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CINEMA4K_24 }, 0,             0,               VE, "cinema_mode" },
    { "prog_order",    "Progression Order", OFFSET(prog_order),    AV_OPT_TYPE_INT,   { .i64 = OPJ_LRCP        }, OPJ_LRCP,      OPJ_CPRL,        VE, "prog_order"  },
    { "lrcp",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_LRCP        }, 0,             0,               VE, "prog_order"  },
    { "rlcp",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_RLCP        }, 0,             0,               VE, "prog_order"  },
    { "rpcl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_RPCL        }, 0,             0,               VE, "prog_order"  },
    { "pcrl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_PCRL        }, 0,             0,               VE, "prog_order"  },
    { "cprl",          NULL,                0,                     AV_OPT_TYPE_CONST, { .i64 = OPJ_CPRL        }, 0,             0,               VE, "prog_order"  },
    { "numresolution", NULL,                OFFSET(numresolution), AV_OPT_TYPE_INT,   { .i64 = 6               }, 0,             33,              VE },
    { "irreversible",  NULL,                OFFSET(irreversible),  AV_OPT_TYPE_INT,   { .i64 = 0               }, 0,             1,               VE },
    { "disto_alloc",   NULL,                OFFSET(disto_alloc),   AV_OPT_TYPE_INT,   { .i64 = 1               }, 0,             1,               VE },
    { "fixed_quality", NULL,                OFFSET(fixed_quality), AV_OPT_TYPE_INT,   { .i64 = 0               }, 0,             1,               VE },
    { NULL },
};
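
/*
 * Illustrative command line for these options (file names are placeholders,
 * assuming FFmpeg was configured with --enable-libopenjpeg):
 *   ffmpeg -i input.png -c:v libopenjpeg -format jp2 -prog_order cprl output.jp2
 */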

static const AVClass openjpeg_class = {
    .class_name = "libopenjpeg",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_libopenjpeg_encoder = {
    .name           = "libopenjpeg",
    .long_name      = NULL_IF_CONFIG_SMALL("OpenJPEG JPEG 2000"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_JPEG2000,
    .priv_data_size = sizeof(LibOpenJPEGContext),
    .init           = libopenjpeg_encode_init,
    .encode2        = libopenjpeg_encode_frame,
    .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA, AV_PIX_FMT_RGB48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_GBR24P,
        AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
        AV_PIX_FMT_GRAY8, AV_PIX_FMT_YA8, AV_PIX_FMT_GRAY16, AV_PIX_FMT_YA16,
        AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVA422P,
        AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUVA444P,
        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
        AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
        AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
        AV_PIX_FMT_XYZ12,
        AV_PIX_FMT_NONE
    },
    .priv_class     = &openjpeg_class,
    .wrapper_name   = "libopenjpeg",
};