You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

762 lines
23KB

  1. /*
  2. * DPX (.dpx) image decoder
  3. * Copyright (c) 2009 Jimmy Christensen
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #include "libavutil/avstring.h"
  22. #include "libavutil/intreadwrite.h"
  23. #include "libavutil/intfloat.h"
  24. #include "libavutil/imgutils.h"
  25. #include "libavutil/timecode.h"
  26. #include "bytestream.h"
  27. #include "avcodec.h"
  28. #include "internal.h"
/* Transfer characteristic field of the DPX image element header.
 * Values mirror the SMPTE 268M "transfer" byte; only a subset is
 * mapped onto AVColorTransferCharacteristic in decode_frame(). */
enum DPX_TRC {
    DPX_TRC_USER_DEFINED       = 0,
    DPX_TRC_PRINTING_DENSITY   = 1,
    DPX_TRC_LINEAR             = 2,
    DPX_TRC_LOGARITHMIC        = 3,
    DPX_TRC_UNSPECIFIED_VIDEO  = 4,
    DPX_TRC_SMPTE_274          = 5,
    DPX_TRC_ITU_R_709_4        = 6,
    DPX_TRC_ITU_R_601_625      = 7,
    DPX_TRC_ITU_R_601_525      = 8,
    DPX_TRC_SMPTE_170          = 9,
    DPX_TRC_ITU_R_624_4_PAL    = 10,
    DPX_TRC_Z_LINEAR           = 11, /* depth (Z) channel, linear */
    DPX_TRC_Z_HOMOGENEOUS      = 12, /* depth (Z) channel, homogeneous */
};
/* Colorimetric specification field of the DPX image element header.
 * Values mirror the SMPTE 268M "colorimetric" byte; mapped onto
 * AVColorPrimaries / AVColorSpace in decode_frame(). */
enum DPX_COL_SPEC {
    DPX_COL_SPEC_USER_DEFINED      = 0,
    DPX_COL_SPEC_PRINTING_DENSITY  = 1,
    /* 2 = N/A */
    /* 3 = N/A */
    DPX_COL_SPEC_UNSPECIFIED_VIDEO = 4,
    DPX_COL_SPEC_SMPTE_274         = 5,
    DPX_COL_SPEC_ITU_R_709_4       = 6,
    DPX_COL_SPEC_ITU_R_601_625     = 7,
    DPX_COL_SPEC_ITU_R_601_525     = 8,
    DPX_COL_SPEC_SMPTE_170         = 9,
    DPX_COL_SPEC_ITU_R_624_4_PAL   = 10,
    /* 11 = N/A */
    /* 12 = N/A */
};
  59. static unsigned int read16(const uint8_t **ptr, int is_big)
  60. {
  61. unsigned int temp;
  62. if (is_big) {
  63. temp = AV_RB16(*ptr);
  64. } else {
  65. temp = AV_RL16(*ptr);
  66. }
  67. *ptr += 2;
  68. return temp;
  69. }
  70. static unsigned int read32(const uint8_t **ptr, int is_big)
  71. {
  72. unsigned int temp;
  73. if (is_big) {
  74. temp = AV_RB32(*ptr);
  75. } else {
  76. temp = AV_RL32(*ptr);
  77. }
  78. *ptr += 4;
  79. return temp;
  80. }
  81. static uint16_t read10in32_gray(const uint8_t **ptr, uint32_t *lbuf,
  82. int *n_datum, int is_big, int shift)
  83. {
  84. uint16_t temp;
  85. if (*n_datum)
  86. (*n_datum)--;
  87. else {
  88. *lbuf = read32(ptr, is_big);
  89. *n_datum = 2;
  90. }
  91. temp = *lbuf >> shift & 0x3FF;
  92. *lbuf = *lbuf >> 10;
  93. return temp;
  94. }
  95. static uint16_t read10in32(const uint8_t **ptr, uint32_t *lbuf,
  96. int *n_datum, int is_big, int shift)
  97. {
  98. if (*n_datum)
  99. (*n_datum)--;
  100. else {
  101. *lbuf = read32(ptr, is_big);
  102. *n_datum = 2;
  103. }
  104. *lbuf = *lbuf << 10 | *lbuf >> shift & 0x3FFFFF;
  105. return *lbuf & 0x3FF;
  106. }
/* Unpack one 12-bit datum from tightly packed 32-bit words (packing
 * method 0).  Eight 12-bit samples span exactly three 32-bit words
 * (96 bits), so *n_datum counts down 7..0 over an 8-sample cycle;
 * the cases at *n_datum == 5 and 2 straddle a word boundary and
 * fetch the next word mid-sample.
 * State: *lbuf caches the current 32-bit word, *n_datum the position
 * within the 8-sample cycle. */
static uint16_t read12in32(const uint8_t **ptr, uint32_t *lbuf,
                           int *n_datum, int is_big)
{
    if (*n_datum)
        (*n_datum)--;
    else {
        /* start of a new 8-sample / 3-word cycle */
        *lbuf = read32(ptr, is_big);
        *n_datum = 7;
    }
    switch (*n_datum){
    case 7: return *lbuf & 0xFFF;
    case 6: return (*lbuf >> 12) & 0xFFF;
    case 5: {
        /* 8 low bits from the old word, 4 high bits from the next */
        uint32_t c = *lbuf >> 24;
        *lbuf = read32(ptr, is_big);
        c |= *lbuf << 8;
        return c & 0xFFF;
    }
    case 4: return (*lbuf >> 4) & 0xFFF;
    case 3: return (*lbuf >> 16) & 0xFFF;
    case 2: {
        /* 4 low bits from the old word, 8 high bits from the next */
        uint32_t c = *lbuf >> 28;
        *lbuf = read32(ptr, is_big);
        c |= *lbuf << 4;
        return c & 0xFFF;
    }
    case 1: return (*lbuf >> 8) & 0xFFF;
    default: return *lbuf >> 20;
    }
}
  137. static int decode_frame(AVCodecContext *avctx,
  138. void *data,
  139. int *got_frame,
  140. AVPacket *avpkt)
  141. {
  142. const uint8_t *buf = avpkt->data;
  143. int buf_size = avpkt->size;
  144. AVFrame *const p = data;
  145. uint8_t *ptr[AV_NUM_DATA_POINTERS];
  146. uint32_t header_version, version = 0;
  147. char creator[101] = { 0 };
  148. char input_device[33] = { 0 };
  149. unsigned int offset;
  150. int magic_num, endian;
  151. int x, y, stride, i, j, ret;
  152. int w, h, bits_per_color, descriptor, elements, packing;
  153. int yuv, color_trc, color_spec;
  154. int encoding, need_align = 0, unpadded_10bit = 0;
  155. unsigned int rgbBuffer = 0;
  156. int n_datum = 0;
  157. if (avpkt->size <= 1634) {
  158. av_log(avctx, AV_LOG_ERROR, "Packet too small for DPX header\n");
  159. return AVERROR_INVALIDDATA;
  160. }
  161. magic_num = AV_RB32(buf);
  162. buf += 4;
  163. /* Check if the files "magic number" is "SDPX" which means it uses
  164. * big-endian or XPDS which is for little-endian files */
  165. if (magic_num == AV_RL32("SDPX")) {
  166. endian = 0;
  167. } else if (magic_num == AV_RB32("SDPX")) {
  168. endian = 1;
  169. } else {
  170. av_log(avctx, AV_LOG_ERROR, "DPX marker not found\n");
  171. return AVERROR_INVALIDDATA;
  172. }
  173. offset = read32(&buf, endian);
  174. if (avpkt->size <= offset) {
  175. av_log(avctx, AV_LOG_ERROR, "Invalid data start offset\n");
  176. return AVERROR_INVALIDDATA;
  177. }
  178. header_version = read32(&buf, 0);
  179. if (header_version == MKTAG('V','1','.','0'))
  180. version = 1;
  181. if (header_version == MKTAG('V','2','.','0'))
  182. version = 2;
  183. if (!version)
  184. av_log(avctx, AV_LOG_WARNING, "Unknown header format version %s.\n",
  185. av_fourcc2str(header_version));
  186. // Check encryption
  187. buf = avpkt->data + 660;
  188. ret = read32(&buf, endian);
  189. if (ret != 0xFFFFFFFF) {
  190. avpriv_report_missing_feature(avctx, "Encryption");
  191. av_log(avctx, AV_LOG_WARNING, "The image is encrypted and may "
  192. "not properly decode.\n");
  193. }
  194. // Need to end in 0x304 offset from start of file
  195. buf = avpkt->data + 0x304;
  196. w = read32(&buf, endian);
  197. h = read32(&buf, endian);
  198. if ((ret = ff_set_dimensions(avctx, w, h)) < 0)
  199. return ret;
  200. // Need to end in 0x320 to read the descriptor
  201. buf += 20;
  202. descriptor = buf[0];
  203. color_trc = buf[1];
  204. color_spec = buf[2];
  205. // Need to end in 0x323 to read the bits per color
  206. buf += 3;
  207. avctx->bits_per_raw_sample =
  208. bits_per_color = buf[0];
  209. buf++;
  210. packing = read16(&buf, endian);
  211. encoding = read16(&buf, endian);
  212. if (encoding) {
  213. avpriv_report_missing_feature(avctx, "Encoding %d", encoding);
  214. return AVERROR_PATCHWELCOME;
  215. }
  216. buf += 820;
  217. avctx->sample_aspect_ratio.num = read32(&buf, endian);
  218. avctx->sample_aspect_ratio.den = read32(&buf, endian);
  219. if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0)
  220. av_reduce(&avctx->sample_aspect_ratio.num, &avctx->sample_aspect_ratio.den,
  221. avctx->sample_aspect_ratio.num, avctx->sample_aspect_ratio.den,
  222. 0x10000);
  223. else
  224. avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
  225. /* preferred frame rate from Motion-picture film header */
  226. if (offset >= 1724 + 4) {
  227. buf = avpkt->data + 1724;
  228. i = read32(&buf, endian);
  229. if(i && i != 0xFFFFFFFF) {
  230. AVRational q = av_d2q(av_int2float(i), 4096);
  231. if (q.num > 0 && q.den > 0)
  232. avctx->framerate = q;
  233. }
  234. }
  235. /* alternative frame rate from television header */
  236. if (offset >= 1940 + 4 &&
  237. !(avctx->framerate.num && avctx->framerate.den)) {
  238. buf = avpkt->data + 1940;
  239. i = read32(&buf, endian);
  240. if(i && i != 0xFFFFFFFF) {
  241. AVRational q = av_d2q(av_int2float(i), 4096);
  242. if (q.num > 0 && q.den > 0)
  243. avctx->framerate = q;
  244. }
  245. }
  246. /* SMPTE TC from television header */
  247. if (offset >= 1920 + 4) {
  248. uint32_t tc;
  249. uint32_t *tc_sd;
  250. char tcbuf[AV_TIMECODE_STR_SIZE];
  251. buf = avpkt->data + 1920;
  252. // read32 to native endian, av_bswap32 to opposite of native for
  253. // compatibility with av_timecode_make_smpte_tc_string2 etc
  254. tc = av_bswap32(read32(&buf, endian));
  255. if (i != 0xFFFFFFFF) {
  256. AVFrameSideData *tcside =
  257. av_frame_new_side_data(p, AV_FRAME_DATA_S12M_TIMECODE,
  258. sizeof(uint32_t) * 4);
  259. if (!tcside)
  260. return AVERROR(ENOMEM);
  261. tc_sd = (uint32_t*)tcside->data;
  262. tc_sd[0] = 1;
  263. tc_sd[1] = tc;
  264. av_timecode_make_smpte_tc_string2(tcbuf, avctx->framerate,
  265. tc_sd[1], 0, 0);
  266. av_dict_set(&p->metadata, "timecode", tcbuf, 0);
  267. }
  268. }
  269. /* color range from television header */
  270. if (offset >= 1964 + 4) {
  271. buf = avpkt->data + 1952;
  272. i = read32(&buf, endian);
  273. buf = avpkt->data + 1964;
  274. j = read32(&buf, endian);
  275. if (i != 0xFFFFFFFF && j != 0xFFFFFFFF) {
  276. float minCV, maxCV;
  277. minCV = av_int2float(i);
  278. maxCV = av_int2float(j);
  279. if (bits_per_color >= 1 &&
  280. minCV == 0.0f && maxCV == ((1<<bits_per_color) - 1)) {
  281. avctx->color_range = AVCOL_RANGE_JPEG;
  282. } else if (bits_per_color >= 8 &&
  283. minCV == (1 <<(bits_per_color - 4)) &&
  284. maxCV == (235<<(bits_per_color - 8))) {
  285. avctx->color_range = AVCOL_RANGE_MPEG;
  286. }
  287. }
  288. }
  289. switch (descriptor) {
  290. case 1: // R
  291. case 2: // G
  292. case 3: // B
  293. case 4: // A
  294. case 6: // Y
  295. elements = 1;
  296. yuv = 1;
  297. break;
  298. case 50: // RGB
  299. elements = 3;
  300. yuv = 0;
  301. break;
  302. case 52: // ABGR
  303. case 51: // RGBA
  304. elements = 4;
  305. yuv = 0;
  306. break;
  307. case 100: // UYVY422
  308. elements = 2;
  309. yuv = 1;
  310. break;
  311. case 102: // UYV444
  312. elements = 3;
  313. yuv = 1;
  314. break;
  315. case 103: // UYVA4444
  316. elements = 4;
  317. yuv = 1;
  318. break;
  319. default:
  320. avpriv_report_missing_feature(avctx, "Descriptor %d", descriptor);
  321. return AVERROR_PATCHWELCOME;
  322. }
  323. switch (bits_per_color) {
  324. case 8:
  325. stride = avctx->width * elements;
  326. break;
  327. case 10:
  328. if (!packing) {
  329. av_log(avctx, AV_LOG_ERROR, "Packing to 32bit required\n");
  330. return -1;
  331. }
  332. stride = (avctx->width * elements + 2) / 3 * 4;
  333. break;
  334. case 12:
  335. stride = avctx->width * elements;
  336. if (packing) {
  337. stride *= 2;
  338. } else {
  339. stride *= 3;
  340. if (stride % 8) {
  341. stride /= 8;
  342. stride++;
  343. stride *= 8;
  344. }
  345. stride /= 2;
  346. }
  347. break;
  348. case 16:
  349. stride = 2 * avctx->width * elements;
  350. break;
  351. case 32:
  352. stride = 4 * avctx->width * elements;
  353. break;
  354. case 1:
  355. case 64:
  356. avpriv_report_missing_feature(avctx, "Depth %d", bits_per_color);
  357. return AVERROR_PATCHWELCOME;
  358. default:
  359. return AVERROR_INVALIDDATA;
  360. }
  361. switch (color_trc) {
  362. case DPX_TRC_LINEAR:
  363. avctx->color_trc = AVCOL_TRC_LINEAR;
  364. break;
  365. case DPX_TRC_SMPTE_274:
  366. case DPX_TRC_ITU_R_709_4:
  367. avctx->color_trc = AVCOL_TRC_BT709;
  368. break;
  369. case DPX_TRC_ITU_R_601_625:
  370. case DPX_TRC_ITU_R_601_525:
  371. case DPX_TRC_SMPTE_170:
  372. avctx->color_trc = AVCOL_TRC_SMPTE170M;
  373. break;
  374. case DPX_TRC_ITU_R_624_4_PAL:
  375. avctx->color_trc = AVCOL_TRC_GAMMA28;
  376. break;
  377. case DPX_TRC_USER_DEFINED:
  378. case DPX_TRC_UNSPECIFIED_VIDEO:
  379. /* Nothing to do */
  380. break;
  381. default:
  382. av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX transfer characteristic "
  383. "%d to color_trc.\n", color_trc);
  384. break;
  385. }
  386. switch (color_spec) {
  387. case DPX_COL_SPEC_SMPTE_274:
  388. case DPX_COL_SPEC_ITU_R_709_4:
  389. avctx->color_primaries = AVCOL_PRI_BT709;
  390. break;
  391. case DPX_COL_SPEC_ITU_R_601_625:
  392. case DPX_COL_SPEC_ITU_R_624_4_PAL:
  393. avctx->color_primaries = AVCOL_PRI_BT470BG;
  394. break;
  395. case DPX_COL_SPEC_ITU_R_601_525:
  396. case DPX_COL_SPEC_SMPTE_170:
  397. avctx->color_primaries = AVCOL_PRI_SMPTE170M;
  398. break;
  399. case DPX_COL_SPEC_USER_DEFINED:
  400. case DPX_COL_SPEC_UNSPECIFIED_VIDEO:
  401. /* Nothing to do */
  402. break;
  403. default:
  404. av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX color specification "
  405. "%d to color_primaries.\n", color_spec);
  406. break;
  407. }
  408. if (yuv) {
  409. switch (color_spec) {
  410. case DPX_COL_SPEC_SMPTE_274:
  411. case DPX_COL_SPEC_ITU_R_709_4:
  412. avctx->colorspace = AVCOL_SPC_BT709;
  413. break;
  414. case DPX_COL_SPEC_ITU_R_601_625:
  415. case DPX_COL_SPEC_ITU_R_624_4_PAL:
  416. avctx->colorspace = AVCOL_SPC_BT470BG;
  417. break;
  418. case DPX_COL_SPEC_ITU_R_601_525:
  419. case DPX_COL_SPEC_SMPTE_170:
  420. avctx->colorspace = AVCOL_SPC_SMPTE170M;
  421. break;
  422. case DPX_COL_SPEC_USER_DEFINED:
  423. case DPX_COL_SPEC_UNSPECIFIED_VIDEO:
  424. /* Nothing to do */
  425. break;
  426. default:
  427. av_log(avctx, AV_LOG_INFO, "Cannot map DPX color specification "
  428. "%d to colorspace.\n", color_spec);
  429. break;
  430. }
  431. } else {
  432. avctx->colorspace = AVCOL_SPC_RGB;
  433. }
  434. // Table 3c: Runs will always break at scan line boundaries. Packing
  435. // will always break to the next 32-bit word at scan-line boundaries.
  436. // Unfortunately, the encoder produced invalid files, so attempt
  437. // to detect it
  438. need_align = FFALIGN(stride, 4);
  439. if (need_align*avctx->height + (int64_t)offset > avpkt->size) {
  440. // Alignment seems unappliable, try without
  441. if (stride*avctx->height + (int64_t)offset > avpkt->size) {
  442. av_log(avctx, AV_LOG_ERROR, "Overread buffer. Invalid header?\n");
  443. return AVERROR_INVALIDDATA;
  444. } else {
  445. av_log(avctx, AV_LOG_INFO, "Decoding DPX without scanline "
  446. "alignment.\n");
  447. need_align = 0;
  448. }
  449. } else {
  450. need_align -= stride;
  451. stride = FFALIGN(stride, 4);
  452. }
  453. switch (1000 * descriptor + 10 * bits_per_color + endian) {
  454. case 6081:
  455. case 6080:
  456. avctx->pix_fmt = AV_PIX_FMT_GRAY8;
  457. break;
  458. case 6121:
  459. case 6120:
  460. avctx->pix_fmt = AV_PIX_FMT_GRAY12;
  461. break;
  462. case 1320:
  463. case 2320:
  464. case 3320:
  465. case 4320:
  466. case 6320:
  467. avctx->pix_fmt = AV_PIX_FMT_GRAYF32LE;
  468. break;
  469. case 1321:
  470. case 2321:
  471. case 3321:
  472. case 4321:
  473. case 6321:
  474. avctx->pix_fmt = AV_PIX_FMT_GRAYF32BE;
  475. break;
  476. case 50081:
  477. case 50080:
  478. avctx->pix_fmt = AV_PIX_FMT_RGB24;
  479. break;
  480. case 52081:
  481. case 52080:
  482. avctx->pix_fmt = AV_PIX_FMT_ABGR;
  483. break;
  484. case 51081:
  485. case 51080:
  486. avctx->pix_fmt = AV_PIX_FMT_RGBA;
  487. break;
  488. case 50100:
  489. case 50101:
  490. avctx->pix_fmt = AV_PIX_FMT_GBRP10;
  491. break;
  492. case 51100:
  493. case 51101:
  494. avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
  495. break;
  496. case 50120:
  497. case 50121:
  498. avctx->pix_fmt = AV_PIX_FMT_GBRP12;
  499. break;
  500. case 51120:
  501. case 51121:
  502. avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
  503. break;
  504. case 6100:
  505. case 6101:
  506. avctx->pix_fmt = AV_PIX_FMT_GRAY10;
  507. break;
  508. case 6161:
  509. avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
  510. break;
  511. case 6160:
  512. avctx->pix_fmt = AV_PIX_FMT_GRAY16LE;
  513. break;
  514. case 50161:
  515. avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
  516. break;
  517. case 50160:
  518. avctx->pix_fmt = AV_PIX_FMT_RGB48LE;
  519. break;
  520. case 51161:
  521. avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
  522. break;
  523. case 51160:
  524. avctx->pix_fmt = AV_PIX_FMT_RGBA64LE;
  525. break;
  526. case 50320:
  527. avctx->pix_fmt = AV_PIX_FMT_GBRPF32LE;
  528. break;
  529. case 50321:
  530. avctx->pix_fmt = AV_PIX_FMT_GBRPF32BE;
  531. break;
  532. case 51320:
  533. avctx->pix_fmt = AV_PIX_FMT_GBRAPF32LE;
  534. break;
  535. case 51321:
  536. avctx->pix_fmt = AV_PIX_FMT_GBRAPF32BE;
  537. break;
  538. case 100081:
  539. avctx->pix_fmt = AV_PIX_FMT_UYVY422;
  540. break;
  541. case 102081:
  542. avctx->pix_fmt = AV_PIX_FMT_YUV444P;
  543. break;
  544. case 103081:
  545. avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
  546. break;
  547. default:
  548. av_log(avctx, AV_LOG_ERROR, "Unsupported format %d\n",
  549. 1000 * descriptor + 10 * bits_per_color + endian);
  550. return AVERROR_PATCHWELCOME;
  551. }
  552. ff_set_sar(avctx, avctx->sample_aspect_ratio);
  553. if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
  554. return ret;
  555. av_strlcpy(creator, avpkt->data + 160, 100);
  556. creator[100] = '\0';
  557. av_dict_set(&p->metadata, "Creator", creator, 0);
  558. av_strlcpy(input_device, avpkt->data + 1556, 32);
  559. input_device[32] = '\0';
  560. av_dict_set(&p->metadata, "Input Device", input_device, 0);
  561. // Some devices do not pad 10bit samples to whole 32bit words per row
  562. if (!memcmp(input_device, "Scanity", 7) ||
  563. !memcmp(creator, "Lasergraphics Inc.", 18)) {
  564. unpadded_10bit = 1;
  565. }
  566. // Move pointer to offset from start of file
  567. buf = avpkt->data + offset;
  568. for (i=0; i<AV_NUM_DATA_POINTERS; i++)
  569. ptr[i] = p->data[i];
  570. switch (bits_per_color) {
  571. case 10:
  572. for (x = 0; x < avctx->height; x++) {
  573. uint16_t *dst[4] = {(uint16_t*)ptr[0],
  574. (uint16_t*)ptr[1],
  575. (uint16_t*)ptr[2],
  576. (uint16_t*)ptr[3]};
  577. int shift = elements > 1 ? packing == 1 ? 22 : 20 : packing == 1 ? 2 : 0;
  578. for (y = 0; y < avctx->width; y++) {
  579. if (elements >= 3)
  580. *dst[2]++ = read10in32(&buf, &rgbBuffer,
  581. &n_datum, endian, shift);
  582. if (elements == 1)
  583. *dst[0]++ = read10in32_gray(&buf, &rgbBuffer,
  584. &n_datum, endian, shift);
  585. else
  586. *dst[0]++ = read10in32(&buf, &rgbBuffer,
  587. &n_datum, endian, shift);
  588. if (elements >= 2)
  589. *dst[1]++ = read10in32(&buf, &rgbBuffer,
  590. &n_datum, endian, shift);
  591. if (elements == 4)
  592. *dst[3]++ =
  593. read10in32(&buf, &rgbBuffer,
  594. &n_datum, endian, shift);
  595. }
  596. if (!unpadded_10bit)
  597. n_datum = 0;
  598. for (i = 0; i < elements; i++)
  599. ptr[i] += p->linesize[i];
  600. }
  601. break;
  602. case 12:
  603. for (x = 0; x < avctx->height; x++) {
  604. uint16_t *dst[4] = {(uint16_t*)ptr[0],
  605. (uint16_t*)ptr[1],
  606. (uint16_t*)ptr[2],
  607. (uint16_t*)ptr[3]};
  608. int shift = packing == 1 ? 4 : 0;
  609. for (y = 0; y < avctx->width; y++) {
  610. if (packing) {
  611. if (elements >= 3)
  612. *dst[2]++ = read16(&buf, endian) >> shift & 0xFFF;
  613. *dst[0]++ = read16(&buf, endian) >> shift & 0xFFF;
  614. if (elements >= 2)
  615. *dst[1]++ = read16(&buf, endian) >> shift & 0xFFF;
  616. if (elements == 4)
  617. *dst[3]++ = read16(&buf, endian) >> shift & 0xFFF;
  618. } else {
  619. if (elements >= 3)
  620. *dst[2]++ = read12in32(&buf, &rgbBuffer,
  621. &n_datum, endian);
  622. *dst[0]++ = read12in32(&buf, &rgbBuffer,
  623. &n_datum, endian);
  624. if (elements >= 2)
  625. *dst[1]++ = read12in32(&buf, &rgbBuffer,
  626. &n_datum, endian);
  627. if (elements == 4)
  628. *dst[3]++ = read12in32(&buf, &rgbBuffer,
  629. &n_datum, endian);
  630. }
  631. }
  632. n_datum = 0;
  633. for (i = 0; i < elements; i++)
  634. ptr[i] += p->linesize[i];
  635. // Jump to next aligned position
  636. buf += need_align;
  637. }
  638. break;
  639. case 32:
  640. if (elements == 1) {
  641. av_image_copy_plane(ptr[0], p->linesize[0],
  642. buf, stride,
  643. elements * avctx->width * 4, avctx->height);
  644. } else {
  645. for (y = 0; y < avctx->height; y++) {
  646. ptr[0] = p->data[0] + y * p->linesize[0];
  647. ptr[1] = p->data[1] + y * p->linesize[1];
  648. ptr[2] = p->data[2] + y * p->linesize[2];
  649. ptr[3] = p->data[3] + y * p->linesize[3];
  650. for (x = 0; x < avctx->width; x++) {
  651. AV_WN32(ptr[2], AV_RN32(buf));
  652. AV_WN32(ptr[0], AV_RN32(buf + 4));
  653. AV_WN32(ptr[1], AV_RN32(buf + 8));
  654. if (avctx->pix_fmt == AV_PIX_FMT_GBRAPF32BE ||
  655. avctx->pix_fmt == AV_PIX_FMT_GBRAPF32LE) {
  656. AV_WN32(ptr[3], AV_RN32(buf + 12));
  657. buf += 4;
  658. ptr[3] += 4;
  659. }
  660. buf += 12;
  661. ptr[2] += 4;
  662. ptr[0] += 4;
  663. ptr[1] += 4;
  664. }
  665. }
  666. }
  667. break;
  668. case 16:
  669. elements *= 2;
  670. case 8:
  671. if ( avctx->pix_fmt == AV_PIX_FMT_YUVA444P
  672. || avctx->pix_fmt == AV_PIX_FMT_YUV444P) {
  673. for (x = 0; x < avctx->height; x++) {
  674. ptr[0] = p->data[0] + x * p->linesize[0];
  675. ptr[1] = p->data[1] + x * p->linesize[1];
  676. ptr[2] = p->data[2] + x * p->linesize[2];
  677. ptr[3] = p->data[3] + x * p->linesize[3];
  678. for (y = 0; y < avctx->width; y++) {
  679. *ptr[1]++ = *buf++;
  680. *ptr[0]++ = *buf++;
  681. *ptr[2]++ = *buf++;
  682. if (avctx->pix_fmt == AV_PIX_FMT_YUVA444P)
  683. *ptr[3]++ = *buf++;
  684. }
  685. }
  686. } else {
  687. av_image_copy_plane(ptr[0], p->linesize[0],
  688. buf, stride,
  689. elements * avctx->width, avctx->height);
  690. }
  691. break;
  692. }
  693. *got_frame = 1;
  694. return buf_size;
  695. }
/* Decoder registration: intra-only image codec, supports direct
 * rendering (AV_CODEC_CAP_DR1). */
AVCodec ff_dpx_decoder = {
    .name           = "dpx",
    .long_name      = NULL_IF_CONFIG_SMALL("DPX (Digital Picture Exchange) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_DPX,
    .decode         = decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};