You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

697 lines
21KB

  1. /*
  2. * DPX (.dpx) image decoder
  3. * Copyright (c) 2009 Jimmy Christensen
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #include "libavutil/avstring.h"
  22. #include "libavutil/intreadwrite.h"
  23. #include "libavutil/intfloat.h"
  24. #include "libavutil/imgutils.h"
  25. #include "libavutil/timecode.h"
  26. #include "bytestream.h"
  27. #include "avcodec.h"
  28. #include "internal.h"
/* Transfer-characteristic codes from the DPX image-element header
 * ("transfer" byte; DPX is specified by SMPTE 268M — TODO confirm exact
 * table reference). Mapped onto avctx->color_trc in decode_frame(). */
enum DPX_TRC {
    DPX_TRC_USER_DEFINED       = 0,
    DPX_TRC_PRINTING_DENSITY   = 1,
    DPX_TRC_LINEAR             = 2,
    DPX_TRC_LOGARITHMIC        = 3,
    DPX_TRC_UNSPECIFIED_VIDEO  = 4,
    DPX_TRC_SMPTE_274          = 5,
    DPX_TRC_ITU_R_709_4        = 6,
    DPX_TRC_ITU_R_601_625      = 7,
    DPX_TRC_ITU_R_601_525      = 8,
    DPX_TRC_SMPTE_170          = 9,
    DPX_TRC_ITU_R_624_4_PAL    = 10,
    DPX_TRC_Z_LINEAR           = 11, /* depth (Z) channel, linear */
    DPX_TRC_Z_HOMOGENEOUS      = 12, /* depth (Z) channel, homogeneous */
};
/* Colorimetric-specification codes from the DPX image-element header
 * ("colorimetric" byte). Mapped onto avctx->color_primaries (and, for
 * YUV descriptors, avctx->colorspace) in decode_frame(). Values 2, 3,
 * 11 and 12 are not applicable to colorimetry. */
enum DPX_COL_SPEC {
    DPX_COL_SPEC_USER_DEFINED      = 0,
    DPX_COL_SPEC_PRINTING_DENSITY  = 1,
    /* 2 = N/A */
    /* 3 = N/A */
    DPX_COL_SPEC_UNSPECIFIED_VIDEO = 4,
    DPX_COL_SPEC_SMPTE_274         = 5,
    DPX_COL_SPEC_ITU_R_709_4       = 6,
    DPX_COL_SPEC_ITU_R_601_625     = 7,
    DPX_COL_SPEC_ITU_R_601_525     = 8,
    DPX_COL_SPEC_SMPTE_170         = 9,
    DPX_COL_SPEC_ITU_R_624_4_PAL   = 10,
    /* 11 = N/A */
    /* 12 = N/A */
};
  59. static unsigned int read16(const uint8_t **ptr, int is_big)
  60. {
  61. unsigned int temp;
  62. if (is_big) {
  63. temp = AV_RB16(*ptr);
  64. } else {
  65. temp = AV_RL16(*ptr);
  66. }
  67. *ptr += 2;
  68. return temp;
  69. }
  70. static unsigned int read32(const uint8_t **ptr, int is_big)
  71. {
  72. unsigned int temp;
  73. if (is_big) {
  74. temp = AV_RB32(*ptr);
  75. } else {
  76. temp = AV_RL32(*ptr);
  77. }
  78. *ptr += 4;
  79. return temp;
  80. }
  81. static uint16_t read10in32_gray(const uint8_t **ptr, uint32_t *lbuf,
  82. int *n_datum, int is_big, int shift)
  83. {
  84. uint16_t temp;
  85. if (*n_datum)
  86. (*n_datum)--;
  87. else {
  88. *lbuf = read32(ptr, is_big);
  89. *n_datum = 2;
  90. }
  91. temp = *lbuf >> shift & 0x3FF;
  92. *lbuf = *lbuf >> 10;
  93. return temp;
  94. }
  95. static uint16_t read10in32(const uint8_t **ptr, uint32_t *lbuf,
  96. int *n_datum, int is_big, int shift)
  97. {
  98. if (*n_datum)
  99. (*n_datum)--;
  100. else {
  101. *lbuf = read32(ptr, is_big);
  102. *n_datum = 2;
  103. }
  104. *lbuf = *lbuf << 10 | *lbuf >> shift & 0x3FFFFF;
  105. return *lbuf & 0x3FF;
  106. }
/* Return the next 12-bit sample from densely packed (packing == 0)
 * 12-bit data. Eight 12-bit samples span exactly three 32-bit words
 * (96 bits), so *n_datum counts down through an 8-state cycle; the
 * switch extracts each sample's bit range, fetching a fresh word via
 * read32() whenever a sample straddles a word boundary (states 5 and
 * 2) or at the start of a cycle. *lbuf caches the current word. */
static uint16_t read12in32(const uint8_t **ptr, uint32_t *lbuf,
                           int *n_datum, int is_big)
{
    if (*n_datum)
        (*n_datum)--;
    else {
        /* start of an 8-sample cycle: fetch the first of three words */
        *lbuf = read32(ptr, is_big);
        *n_datum = 7;
    }
    switch (*n_datum){
    case 7: return *lbuf & 0xFFF;          /* bits  0-11 of word 0 */
    case 6: return (*lbuf >> 12) & 0xFFF;  /* bits 12-23 of word 0 */
    case 5: {
        /* sample straddles words 0 and 1: 8 low bits from word 0,
         * 4 high bits from the freshly fetched word 1 */
        uint32_t c = *lbuf >> 24;
        *lbuf = read32(ptr, is_big);
        c |= *lbuf << 8;
        return c & 0xFFF;
    }
    case 4: return (*lbuf >> 4) & 0xFFF;   /* bits  4-15 of word 1 */
    case 3: return (*lbuf >> 16) & 0xFFF;  /* bits 16-27 of word 1 */
    case 2: {
        /* sample straddles words 1 and 2: 4 low bits from word 1,
         * 8 high bits from the freshly fetched word 2 */
        uint32_t c = *lbuf >> 28;
        *lbuf = read32(ptr, is_big);
        c |= *lbuf << 4;
        return c & 0xFFF;
    }
    case 1: return (*lbuf >> 8) & 0xFFF;   /* bits  8-19 of word 2 */
    default: return *lbuf >> 20;           /* bits 20-31 of word 2 */
    }
}
  137. static int decode_frame(AVCodecContext *avctx,
  138. void *data,
  139. int *got_frame,
  140. AVPacket *avpkt)
  141. {
  142. const uint8_t *buf = avpkt->data;
  143. int buf_size = avpkt->size;
  144. AVFrame *const p = data;
  145. uint8_t *ptr[AV_NUM_DATA_POINTERS];
  146. uint32_t header_version, version = 0;
  147. char creator[101];
  148. char input_device[33];
  149. unsigned int offset;
  150. int magic_num, endian;
  151. int x, y, stride, i, j, ret;
  152. int w, h, bits_per_color, descriptor, elements, packing;
  153. int yuv, color_trc, color_spec;
  154. int encoding, need_align = 0, unpadded_10bit = 0;
  155. unsigned int rgbBuffer = 0;
  156. int n_datum = 0;
  157. if (avpkt->size <= 1634) {
  158. av_log(avctx, AV_LOG_ERROR, "Packet too small for DPX header\n");
  159. return AVERROR_INVALIDDATA;
  160. }
  161. magic_num = AV_RB32(buf);
  162. buf += 4;
  163. /* Check if the files "magic number" is "SDPX" which means it uses
  164. * big-endian or XPDS which is for little-endian files */
  165. if (magic_num == AV_RL32("SDPX")) {
  166. endian = 0;
  167. } else if (magic_num == AV_RB32("SDPX")) {
  168. endian = 1;
  169. } else {
  170. av_log(avctx, AV_LOG_ERROR, "DPX marker not found\n");
  171. return AVERROR_INVALIDDATA;
  172. }
  173. offset = read32(&buf, endian);
  174. if (avpkt->size <= offset) {
  175. av_log(avctx, AV_LOG_ERROR, "Invalid data start offset\n");
  176. return AVERROR_INVALIDDATA;
  177. }
  178. header_version = read32(&buf, 0);
  179. if (header_version == MKTAG('V','1','.','0'))
  180. version = 1;
  181. if (header_version == MKTAG('V','2','.','0'))
  182. version = 2;
  183. if (!version)
  184. av_log(avctx, AV_LOG_WARNING, "Unknown header format version %s.\n",
  185. av_fourcc2str(header_version));
  186. // Check encryption
  187. buf = avpkt->data + 660;
  188. ret = read32(&buf, endian);
  189. if (ret != 0xFFFFFFFF) {
  190. avpriv_report_missing_feature(avctx, "Encryption");
  191. av_log(avctx, AV_LOG_WARNING, "The image is encrypted and may "
  192. "not properly decode.\n");
  193. }
  194. // Need to end in 0x304 offset from start of file
  195. buf = avpkt->data + 0x304;
  196. w = read32(&buf, endian);
  197. h = read32(&buf, endian);
  198. if ((ret = ff_set_dimensions(avctx, w, h)) < 0)
  199. return ret;
  200. // Need to end in 0x320 to read the descriptor
  201. buf += 20;
  202. descriptor = buf[0];
  203. color_trc = buf[1];
  204. color_spec = buf[2];
  205. // Need to end in 0x323 to read the bits per color
  206. buf += 3;
  207. avctx->bits_per_raw_sample =
  208. bits_per_color = buf[0];
  209. buf++;
  210. packing = read16(&buf, endian);
  211. encoding = read16(&buf, endian);
  212. if (encoding) {
  213. avpriv_report_missing_feature(avctx, "Encoding %d", encoding);
  214. return AVERROR_PATCHWELCOME;
  215. }
  216. buf += 820;
  217. avctx->sample_aspect_ratio.num = read32(&buf, endian);
  218. avctx->sample_aspect_ratio.den = read32(&buf, endian);
  219. if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0)
  220. av_reduce(&avctx->sample_aspect_ratio.num, &avctx->sample_aspect_ratio.den,
  221. avctx->sample_aspect_ratio.num, avctx->sample_aspect_ratio.den,
  222. 0x10000);
  223. else
  224. avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
  225. /* preferred frame rate from Motion-picture film header */
  226. if (offset >= 1724 + 4) {
  227. buf = avpkt->data + 1724;
  228. i = read32(&buf, endian);
  229. if(i && i != 0xFFFFFFFF) {
  230. AVRational q = av_d2q(av_int2float(i), 4096);
  231. if (q.num > 0 && q.den > 0)
  232. avctx->framerate = q;
  233. }
  234. }
  235. /* alternative frame rate from television header */
  236. if (offset >= 1940 + 4 &&
  237. !(avctx->framerate.num && avctx->framerate.den)) {
  238. buf = avpkt->data + 1940;
  239. i = read32(&buf, endian);
  240. if(i && i != 0xFFFFFFFF) {
  241. AVRational q = av_d2q(av_int2float(i), 4096);
  242. if (q.num > 0 && q.den > 0)
  243. avctx->framerate = q;
  244. }
  245. }
  246. /* SMPTE TC from television header */
  247. if (offset >= 1920 + 4) {
  248. uint32_t tc;
  249. uint32_t *tc_sd;
  250. char tcbuf[AV_TIMECODE_STR_SIZE];
  251. buf = avpkt->data + 1920;
  252. // read32 to native endian, av_bswap32 to opposite of native for
  253. // compatibility with av_timecode_make_smpte_tc_string2 etc
  254. tc = av_bswap32(read32(&buf, endian));
  255. if (i != 0xFFFFFFFF) {
  256. AVFrameSideData *tcside =
  257. av_frame_new_side_data(p, AV_FRAME_DATA_S12M_TIMECODE,
  258. sizeof(uint32_t) * 4);
  259. if (!tcside)
  260. return AVERROR(ENOMEM);
  261. tc_sd = (uint32_t*)tcside->data;
  262. tc_sd[0] = 1;
  263. tc_sd[1] = tc;
  264. av_timecode_make_smpte_tc_string2(tcbuf, avctx->framerate,
  265. tc_sd[1], 0, 0);
  266. av_dict_set(&p->metadata, "timecode", tcbuf, 0);
  267. }
  268. }
  269. /* color range from television header */
  270. if (offset >= 1964 + 4) {
  271. buf = avpkt->data + 1952;
  272. i = read32(&buf, endian);
  273. buf = avpkt->data + 1964;
  274. j = read32(&buf, endian);
  275. if (i != 0xFFFFFFFF && j != 0xFFFFFFFF) {
  276. float minCV, maxCV;
  277. minCV = av_int2float(i);
  278. maxCV = av_int2float(j);
  279. if (bits_per_color >= 1 &&
  280. minCV == 0.0f && maxCV == ((1<<bits_per_color) - 1)) {
  281. avctx->color_range = AVCOL_RANGE_JPEG;
  282. } else if (bits_per_color >= 8 &&
  283. minCV == (1 <<(bits_per_color - 4)) &&
  284. maxCV == (235<<(bits_per_color - 8))) {
  285. avctx->color_range = AVCOL_RANGE_MPEG;
  286. }
  287. }
  288. }
  289. switch (descriptor) {
  290. case 6: // Y
  291. elements = 1;
  292. yuv = 1;
  293. break;
  294. case 50: // RGB
  295. elements = 3;
  296. break;
  297. case 52: // ABGR
  298. case 51: // RGBA
  299. elements = 4;
  300. break;
  301. case 100: // UYVY422
  302. elements = 2;
  303. yuv = 1;
  304. break;
  305. case 102: // UYV444
  306. elements = 3;
  307. yuv = 1;
  308. break;
  309. case 103: // UYVA4444
  310. elements = 4;
  311. yuv = 1;
  312. break;
  313. default:
  314. avpriv_report_missing_feature(avctx, "Descriptor %d", descriptor);
  315. return AVERROR_PATCHWELCOME;
  316. }
  317. switch (bits_per_color) {
  318. case 8:
  319. stride = avctx->width * elements;
  320. break;
  321. case 10:
  322. if (!packing) {
  323. av_log(avctx, AV_LOG_ERROR, "Packing to 32bit required\n");
  324. return -1;
  325. }
  326. stride = (avctx->width * elements + 2) / 3 * 4;
  327. break;
  328. case 12:
  329. stride = avctx->width * elements;
  330. if (packing) {
  331. stride *= 2;
  332. } else {
  333. stride *= 3;
  334. if (stride % 8) {
  335. stride /= 8;
  336. stride++;
  337. stride *= 8;
  338. }
  339. stride /= 2;
  340. }
  341. break;
  342. case 16:
  343. stride = 2 * avctx->width * elements;
  344. break;
  345. case 1:
  346. case 32:
  347. case 64:
  348. avpriv_report_missing_feature(avctx, "Depth %d", bits_per_color);
  349. return AVERROR_PATCHWELCOME;
  350. default:
  351. return AVERROR_INVALIDDATA;
  352. }
  353. switch (color_trc) {
  354. case DPX_TRC_LINEAR:
  355. avctx->color_trc = AVCOL_TRC_LINEAR;
  356. break;
  357. case DPX_TRC_SMPTE_274:
  358. case DPX_TRC_ITU_R_709_4:
  359. avctx->color_trc = AVCOL_TRC_BT709;
  360. break;
  361. case DPX_TRC_ITU_R_601_625:
  362. case DPX_TRC_ITU_R_601_525:
  363. case DPX_TRC_SMPTE_170:
  364. avctx->color_trc = AVCOL_TRC_SMPTE170M;
  365. break;
  366. case DPX_TRC_ITU_R_624_4_PAL:
  367. avctx->color_trc = AVCOL_TRC_GAMMA28;
  368. break;
  369. case DPX_TRC_USER_DEFINED:
  370. case DPX_TRC_UNSPECIFIED_VIDEO:
  371. /* Nothing to do */
  372. break;
  373. default:
  374. av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX transfer characteristic "
  375. "%d to color_trc.\n", color_trc);
  376. break;
  377. }
  378. switch (color_spec) {
  379. case DPX_COL_SPEC_SMPTE_274:
  380. case DPX_COL_SPEC_ITU_R_709_4:
  381. avctx->color_primaries = AVCOL_PRI_BT709;
  382. break;
  383. case DPX_COL_SPEC_ITU_R_601_625:
  384. case DPX_COL_SPEC_ITU_R_624_4_PAL:
  385. avctx->color_primaries = AVCOL_PRI_BT470BG;
  386. break;
  387. case DPX_COL_SPEC_ITU_R_601_525:
  388. case DPX_COL_SPEC_SMPTE_170:
  389. avctx->color_primaries = AVCOL_PRI_SMPTE170M;
  390. break;
  391. case DPX_COL_SPEC_USER_DEFINED:
  392. case DPX_COL_SPEC_UNSPECIFIED_VIDEO:
  393. /* Nothing to do */
  394. break;
  395. default:
  396. av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX color specification "
  397. "%d to color_primaries.\n", color_spec);
  398. break;
  399. }
  400. if (yuv) {
  401. switch (color_spec) {
  402. case DPX_COL_SPEC_SMPTE_274:
  403. case DPX_COL_SPEC_ITU_R_709_4:
  404. avctx->colorspace = AVCOL_SPC_BT709;
  405. break;
  406. case DPX_COL_SPEC_ITU_R_601_625:
  407. case DPX_COL_SPEC_ITU_R_624_4_PAL:
  408. avctx->colorspace = AVCOL_SPC_BT470BG;
  409. break;
  410. case DPX_COL_SPEC_ITU_R_601_525:
  411. case DPX_COL_SPEC_SMPTE_170:
  412. avctx->colorspace = AVCOL_SPC_SMPTE170M;
  413. break;
  414. case DPX_COL_SPEC_USER_DEFINED:
  415. case DPX_COL_SPEC_UNSPECIFIED_VIDEO:
  416. /* Nothing to do */
  417. break;
  418. default:
  419. av_log(avctx, AV_LOG_INFO, "Cannot map DPX color specification "
  420. "%d to colorspace.\n", color_spec);
  421. break;
  422. }
  423. } else {
  424. avctx->colorspace = AVCOL_SPC_RGB;
  425. }
  426. // Table 3c: Runs will always break at scan line boundaries. Packing
  427. // will always break to the next 32-bit word at scan-line boundaries.
  428. // Unfortunately, the encoder produced invalid files, so attempt
  429. // to detect it
  430. need_align = FFALIGN(stride, 4);
  431. if (need_align*avctx->height + (int64_t)offset > avpkt->size) {
  432. // Alignment seems unappliable, try without
  433. if (stride*avctx->height + (int64_t)offset > avpkt->size) {
  434. av_log(avctx, AV_LOG_ERROR, "Overread buffer. Invalid header?\n");
  435. return AVERROR_INVALIDDATA;
  436. } else {
  437. av_log(avctx, AV_LOG_INFO, "Decoding DPX without scanline "
  438. "alignment.\n");
  439. need_align = 0;
  440. }
  441. } else {
  442. need_align -= stride;
  443. stride = FFALIGN(stride, 4);
  444. }
  445. switch (1000 * descriptor + 10 * bits_per_color + endian) {
  446. case 6081:
  447. case 6080:
  448. avctx->pix_fmt = AV_PIX_FMT_GRAY8;
  449. break;
  450. case 6121:
  451. case 6120:
  452. avctx->pix_fmt = AV_PIX_FMT_GRAY12;
  453. break;
  454. case 50081:
  455. case 50080:
  456. avctx->pix_fmt = AV_PIX_FMT_RGB24;
  457. break;
  458. case 52081:
  459. case 52080:
  460. avctx->pix_fmt = AV_PIX_FMT_ABGR;
  461. break;
  462. case 51081:
  463. case 51080:
  464. avctx->pix_fmt = AV_PIX_FMT_RGBA;
  465. break;
  466. case 50100:
  467. case 50101:
  468. avctx->pix_fmt = AV_PIX_FMT_GBRP10;
  469. break;
  470. case 51100:
  471. case 51101:
  472. avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
  473. break;
  474. case 50120:
  475. case 50121:
  476. avctx->pix_fmt = AV_PIX_FMT_GBRP12;
  477. break;
  478. case 51120:
  479. case 51121:
  480. avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
  481. break;
  482. case 6100:
  483. case 6101:
  484. avctx->pix_fmt = AV_PIX_FMT_GRAY10;
  485. break;
  486. case 6161:
  487. avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
  488. break;
  489. case 6160:
  490. avctx->pix_fmt = AV_PIX_FMT_GRAY16LE;
  491. break;
  492. case 50161:
  493. avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
  494. break;
  495. case 50160:
  496. avctx->pix_fmt = AV_PIX_FMT_RGB48LE;
  497. break;
  498. case 51161:
  499. avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
  500. break;
  501. case 51160:
  502. avctx->pix_fmt = AV_PIX_FMT_RGBA64LE;
  503. break;
  504. case 100081:
  505. avctx->pix_fmt = AV_PIX_FMT_UYVY422;
  506. break;
  507. case 102081:
  508. avctx->pix_fmt = AV_PIX_FMT_YUV444P;
  509. break;
  510. case 103081:
  511. avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
  512. break;
  513. default:
  514. av_log(avctx, AV_LOG_ERROR, "Unsupported format\n");
  515. return AVERROR_PATCHWELCOME;
  516. }
  517. ff_set_sar(avctx, avctx->sample_aspect_ratio);
  518. if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
  519. return ret;
  520. av_strlcpy(creator, avpkt->data + 160, 100);
  521. creator[100] = '\0';
  522. av_dict_set(&p->metadata, "Creator", creator, 0);
  523. av_strlcpy(input_device, avpkt->data + 1556, 32);
  524. input_device[32] = '\0';
  525. av_dict_set(&p->metadata, "Input Device", input_device, 0);
  526. // Some devices do not pad 10bit samples to whole 32bit words per row
  527. if (!memcmp(input_device, "Scanity", 7) ||
  528. !memcmp(creator, "Lasergraphics Inc.", 18)) {
  529. unpadded_10bit = 1;
  530. }
  531. // Move pointer to offset from start of file
  532. buf = avpkt->data + offset;
  533. for (i=0; i<AV_NUM_DATA_POINTERS; i++)
  534. ptr[i] = p->data[i];
  535. switch (bits_per_color) {
  536. case 10:
  537. for (x = 0; x < avctx->height; x++) {
  538. uint16_t *dst[4] = {(uint16_t*)ptr[0],
  539. (uint16_t*)ptr[1],
  540. (uint16_t*)ptr[2],
  541. (uint16_t*)ptr[3]};
  542. int shift = elements > 1 ? packing == 1 ? 22 : 20 : packing == 1 ? 2 : 0;
  543. for (y = 0; y < avctx->width; y++) {
  544. if (elements >= 3)
  545. *dst[2]++ = read10in32(&buf, &rgbBuffer,
  546. &n_datum, endian, shift);
  547. if (elements == 1)
  548. *dst[0]++ = read10in32_gray(&buf, &rgbBuffer,
  549. &n_datum, endian, shift);
  550. else
  551. *dst[0]++ = read10in32(&buf, &rgbBuffer,
  552. &n_datum, endian, shift);
  553. if (elements >= 2)
  554. *dst[1]++ = read10in32(&buf, &rgbBuffer,
  555. &n_datum, endian, shift);
  556. if (elements == 4)
  557. *dst[3]++ =
  558. read10in32(&buf, &rgbBuffer,
  559. &n_datum, endian, shift);
  560. }
  561. if (!unpadded_10bit)
  562. n_datum = 0;
  563. for (i = 0; i < elements; i++)
  564. ptr[i] += p->linesize[i];
  565. }
  566. break;
  567. case 12:
  568. for (x = 0; x < avctx->height; x++) {
  569. uint16_t *dst[4] = {(uint16_t*)ptr[0],
  570. (uint16_t*)ptr[1],
  571. (uint16_t*)ptr[2],
  572. (uint16_t*)ptr[3]};
  573. int shift = packing == 1 ? 4 : 0;
  574. for (y = 0; y < avctx->width; y++) {
  575. if (packing) {
  576. if (elements >= 3)
  577. *dst[2]++ = read16(&buf, endian) >> shift & 0xFFF;
  578. *dst[0]++ = read16(&buf, endian) >> shift & 0xFFF;
  579. if (elements >= 2)
  580. *dst[1]++ = read16(&buf, endian) >> shift & 0xFFF;
  581. if (elements == 4)
  582. *dst[3]++ = read16(&buf, endian) >> shift & 0xFFF;
  583. } else {
  584. if (elements >= 3)
  585. *dst[2]++ = read12in32(&buf, &rgbBuffer,
  586. &n_datum, endian);
  587. *dst[0]++ = read12in32(&buf, &rgbBuffer,
  588. &n_datum, endian);
  589. if (elements >= 2)
  590. *dst[1]++ = read12in32(&buf, &rgbBuffer,
  591. &n_datum, endian);
  592. if (elements == 4)
  593. *dst[3]++ = read12in32(&buf, &rgbBuffer,
  594. &n_datum, endian);
  595. }
  596. }
  597. n_datum = 0;
  598. for (i = 0; i < elements; i++)
  599. ptr[i] += p->linesize[i];
  600. // Jump to next aligned position
  601. buf += need_align;
  602. }
  603. break;
  604. case 16:
  605. elements *= 2;
  606. case 8:
  607. if ( avctx->pix_fmt == AV_PIX_FMT_YUVA444P
  608. || avctx->pix_fmt == AV_PIX_FMT_YUV444P) {
  609. for (x = 0; x < avctx->height; x++) {
  610. ptr[0] = p->data[0] + x * p->linesize[0];
  611. ptr[1] = p->data[1] + x * p->linesize[1];
  612. ptr[2] = p->data[2] + x * p->linesize[2];
  613. ptr[3] = p->data[3] + x * p->linesize[3];
  614. for (y = 0; y < avctx->width; y++) {
  615. *ptr[1]++ = *buf++;
  616. *ptr[0]++ = *buf++;
  617. *ptr[2]++ = *buf++;
  618. if (avctx->pix_fmt == AV_PIX_FMT_YUVA444P)
  619. *ptr[3]++ = *buf++;
  620. }
  621. }
  622. } else {
  623. av_image_copy_plane(ptr[0], p->linesize[0],
  624. buf, stride,
  625. elements * avctx->width, avctx->height);
  626. }
  627. break;
  628. }
  629. *got_frame = 1;
  630. return buf_size;
  631. }
/* Decoder registration: intra-only still-image codec, supports direct
 * rendering into caller-supplied buffers (AV_CODEC_CAP_DR1). */
AVCodec ff_dpx_decoder = {
    .name           = "dpx",
    .long_name      = NULL_IF_CONFIG_SMALL("DPX (Digital Picture Exchange) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_DPX,
    .decode         = decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};