  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 the ffmpeg project
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  24. * For more information about the Interplay MVE format, visit:
  25. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  26. * This code is written in such a way that the identifiers match up
  27. * with the encoding descriptions in the document.
  28. *
  29. * This decoder presently only supports a PAL8 output colorspace.
  30. *
  31. * An Interplay video frame consists of 2 parts: The decoding map and
  32. * the video data. A demuxer must load these 2 parts together in a single
  33. * buffer before sending it through the stream to this decoder.
  34. */
  35. #include <stdio.h>
  36. #include <stdlib.h>
  37. #include <string.h>
  38. #include "avcodec.h"
  39. #include "bytestream.h"
  40. #include "dsputil.h"
  41. #define BITSTREAM_READER_LE
  42. #include "get_bits.h"
  43. #define PALETTE_COUNT 256
/* Decoder private context.  Holds the three-frame window (current, last,
 * second-last) required by the motion-copy opcodes, plus per-frame stream
 * state that is (re)initialized in ipvideo_decode_opcodes(). */
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;          /* reference frame from two frames ago */
    AVFrame last_frame;                 /* previous reference frame */
    AVFrame current_frame;              /* frame currently being decoded into */
    const unsigned char *decoding_map;  /* 4 bits of opcode per 8x8 block */
    int decoding_map_size;

    int is_16bpp;                       /* nonzero for RGB555 output, 0 for PAL8 */
    GetByteContext stream_ptr, mv_ptr;  /* pixel-data / motion-vector byte streams */
    unsigned char *pixel_ptr;           /* destination of the block being decoded */
    int line_inc;                       /* stride minus 8, i.e. advance after one block row */
    int stride;                         /* row stride in pixels (halved for 16bpp) */
    int upper_motion_limit_offset;      /* largest valid byte offset for motion copies */

    uint32_t pal[256];                  /* latest palette taken from packet side data */
} IpvideoContext;
  60. static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
  61. {
  62. int current_offset = s->pixel_ptr - s->current_frame.data[0];
  63. int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
  64. + delta_x * (1 + s->is_16bpp);
  65. if (motion_offset < 0) {
  66. av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
  67. return -1;
  68. } else if (motion_offset > s->upper_motion_limit_offset) {
  69. av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
  70. motion_offset, s->upper_motion_limit_offset);
  71. return -1;
  72. }
  73. if (src->data[0] == NULL) {
  74. av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
  75. return AVERROR(EINVAL);
  76. }
  77. s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
  78. s->current_frame.linesize[0], 8);
  79. return 0;
  80. }
  81. static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
  82. {
  83. return copy_from(s, &s->last_frame, 0, 0);
  84. }
  85. static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
  86. {
  87. return copy_from(s, &s->second_last_frame, 0, 0);
  88. }
  89. static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
  90. {
  91. unsigned char B;
  92. int x, y;
  93. /* copy block from 2 frames ago using a motion vector; need 1 more byte */
  94. if (!s->is_16bpp) {
  95. B = bytestream2_get_byte(&s->stream_ptr);
  96. } else {
  97. B = bytestream2_get_byte(&s->mv_ptr);
  98. }
  99. if (B < 56) {
  100. x = 8 + (B % 7);
  101. y = B / 7;
  102. } else {
  103. x = -14 + ((B - 56) % 29);
  104. y = 8 + ((B - 56) / 29);
  105. }
  106. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  107. return copy_from(s, &s->second_last_frame, x, y);
  108. }
  109. static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
  110. {
  111. unsigned char B;
  112. int x, y;
  113. /* copy 8x8 block from current frame from an up/left block */
  114. /* need 1 more byte for motion */
  115. if (!s->is_16bpp) {
  116. B = bytestream2_get_byte(&s->stream_ptr);
  117. } else {
  118. B = bytestream2_get_byte(&s->mv_ptr);
  119. }
  120. if (B < 56) {
  121. x = -(8 + (B % 7));
  122. y = -(B / 7);
  123. } else {
  124. x = -(-14 + ((B - 56) % 29));
  125. y = -( 8 + ((B - 56) / 29));
  126. }
  127. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  128. return copy_from(s, &s->current_frame, x, y);
  129. }
  130. static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
  131. {
  132. int x, y;
  133. unsigned char B, BL, BH;
  134. /* copy a block from the previous frame; need 1 more byte */
  135. if (!s->is_16bpp) {
  136. B = bytestream2_get_byte(&s->stream_ptr);
  137. } else {
  138. B = bytestream2_get_byte(&s->mv_ptr);
  139. }
  140. BL = B & 0x0F;
  141. BH = (B >> 4) & 0x0F;
  142. x = -8 + BL;
  143. y = -8 + BH;
  144. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  145. return copy_from(s, &s->last_frame, x, y);
  146. }
  147. static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
  148. {
  149. signed char x, y;
  150. /* copy a block from the previous frame using an expanded range;
  151. * need 2 more bytes */
  152. x = bytestream2_get_byte(&s->stream_ptr);
  153. y = bytestream2_get_byte(&s->stream_ptr);
  154. av_dlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  155. return copy_from(s, &s->last_frame, x, y);
  156. }
  157. static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
  158. {
  159. /* mystery opcode? skip multiple blocks? */
  160. av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
  161. /* report success */
  162. return 0;
  163. }
/**
 * Opcode 0x7: 2-color encoding (8bpp).  Two palette indices are read;
 * their ordering selects between per-pixel flags and per-2x2-block flags.
 */
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* need 8 more bytes from the stream: one flag byte per row,
         * each bit selecting P[0] or P[1] for one pixel */
        for (y = 0; y < 8; y++) {
            /* OR-ing 0x100 plants a sentinel bit so the inner loop
             * terminates after exactly 8 shifts */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }
    } else {
        /* need 2 more bytes from the stream: 16 bits, one per 2x2 block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x8: 2-color encoding per 4x4 quadrant, or 2-color encoding of
 * left/right or top/bottom halves (8bpp).  The orderings P[0]<=P[1] and
 * P[2]<=P[3] select between the three layouts.
 */
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* per-quadrant: each 4x4 quadrant has its own color pair and
         * 16 flag bits; y counts 16 half-rows (left column then right) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2]  = bytestream2_get_byte(&s->stream_ptr);
        P[3]  = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x9: 4-color encoding (8bpp).  The orderings of P[0]/P[1] and
 * P[2]/P[3] select among per-pixel, 2x2-block, 2x1-block, and 1x2-block
 * flag granularities.
 */
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {
            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            /* 2x1 blocks: each flag pair covers two horizontal pixels */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 blocks: each flag pair covers two vertical pixels */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0xA: 4-color encoding per 4x4 quadrant, or 4-color encoding of
 * left/right or top/bottom halves (8bpp).  Layout is selected by the
 * ordering of the color pairs, as in opcode 0x8.
 */
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {
        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  359. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
  360. {
  361. int y;
  362. /* 64-color encoding (each pixel in block is a different color) */
  363. for (y = 0; y < 8; y++) {
  364. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  365. s->pixel_ptr += s->stride;
  366. }
  367. /* report success */
  368. return 0;
  369. }
  370. static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
  371. {
  372. int x, y;
  373. /* 16-color block encoding: each 2x2 block is a different color */
  374. for (y = 0; y < 8; y += 2) {
  375. for (x = 0; x < 8; x += 2) {
  376. s->pixel_ptr[x ] =
  377. s->pixel_ptr[x + 1 ] =
  378. s->pixel_ptr[x + s->stride] =
  379. s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
  380. }
  381. s->pixel_ptr += s->stride * 2;
  382. }
  383. /* report success */
  384. return 0;
  385. }
  386. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
  387. {
  388. int y;
  389. unsigned char P[2];
  390. /* 4-color block encoding: each 4x4 block is a different color */
  391. for (y = 0; y < 8; y++) {
  392. if (!(y & 3)) {
  393. P[0] = bytestream2_get_byte(&s->stream_ptr);
  394. P[1] = bytestream2_get_byte(&s->stream_ptr);
  395. }
  396. memset(s->pixel_ptr, P[0], 4);
  397. memset(s->pixel_ptr + 4, P[1], 4);
  398. s->pixel_ptr += s->stride;
  399. }
  400. /* report success */
  401. return 0;
  402. }
  403. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
  404. {
  405. int y;
  406. unsigned char pix;
  407. /* 1-color encoding: the whole block is 1 solid color */
  408. pix = bytestream2_get_byte(&s->stream_ptr);
  409. for (y = 0; y < 8; y++) {
  410. memset(s->pixel_ptr, pix, 8);
  411. s->pixel_ptr += s->stride;
  412. }
  413. /* report success */
  414. return 0;
  415. }
  416. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
  417. {
  418. int x, y;
  419. unsigned char sample[2];
  420. /* dithered encoding */
  421. sample[0] = bytestream2_get_byte(&s->stream_ptr);
  422. sample[1] = bytestream2_get_byte(&s->stream_ptr);
  423. for (y = 0; y < 8; y++) {
  424. for (x = 0; x < 8; x += 2) {
  425. *s->pixel_ptr++ = sample[ y & 1 ];
  426. *s->pixel_ptr++ = sample[!(y & 1)];
  427. }
  428. s->pixel_ptr += s->line_inc;
  429. }
  430. /* report success */
  431. return 0;
  432. }
  433. static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
  434. {
  435. signed char x, y;
  436. /* copy a block from the second last frame using an expanded range */
  437. x = bytestream2_get_byte(&s->stream_ptr);
  438. y = bytestream2_get_byte(&s->stream_ptr);
  439. av_dlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  440. return copy_from(s, &s->second_last_frame, x, y);
  441. }
/**
 * Opcode 0x7 (16bpp): 2-color encoding.  The high bit of P[0] (unused by
 * RGB555) selects between per-pixel and per-2x2-block flags.
 */
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* one flag byte per row, one bit per pixel */
        for (y = 0; y < 8; y++) {
            /* 0x100 is a sentinel bit: the loop ends after 8 shifts */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }
    } else {
        /* 16 flag bits, one per 2x2 block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
/**
 * Opcode 0x8 (16bpp): 2-color encoding per 4x4 quadrant, or 2-color
 * encoding of left/right or top/bottom halves.  The layout is selected by
 * the high bits of P[0] and P[2] (the 16bpp analogue of the palette-index
 * ordering used in the 8bpp variant).
 */
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* per-quadrant: y counts 16 half-rows (left column, then right) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2]  = bytestream2_get_le16(&s->stream_ptr);
        P[3]  = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x9 (16bpp): 4-color encoding.  The high bits of P[0] and P[2]
 * select among per-pixel, 2x2-block, 2x1-block, and 1x2-block flag
 * granularities.
 */
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {
            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            /* 2x1 blocks: one flag pair per two horizontal pixels */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 blocks: one flag pair per two vertical pixels */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0xA (16bpp): 4-color encoding per 4x4 quadrant, or 4-color
 * encoding of left/right or top/bottom halves.  Layout selection mirrors
 * the 8bpp variant but uses the high bits of P[0]/P[4].
 */
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {
        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                /* shift the second color set down (8 bytes = 4 uint16s) */
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  643. static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
  644. {
  645. int x, y;
  646. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  647. /* 64-color encoding (each pixel in block is a different color) */
  648. for (y = 0; y < 8; y++) {
  649. for (x = 0; x < 8; x++)
  650. pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
  651. pixel_ptr += s->stride;
  652. }
  653. /* report success */
  654. return 0;
  655. }
  656. static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
  657. {
  658. int x, y;
  659. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  660. /* 16-color block encoding: each 2x2 block is a different color */
  661. for (y = 0; y < 8; y += 2) {
  662. for (x = 0; x < 8; x += 2) {
  663. pixel_ptr[x ] =
  664. pixel_ptr[x + 1 ] =
  665. pixel_ptr[x + s->stride] =
  666. pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
  667. }
  668. pixel_ptr += s->stride * 2;
  669. }
  670. /* report success */
  671. return 0;
  672. }
  673. static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
  674. {
  675. int x, y;
  676. uint16_t P[2];
  677. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  678. /* 4-color block encoding: each 4x4 block is a different color */
  679. for (y = 0; y < 8; y++) {
  680. if (!(y & 3)) {
  681. P[0] = bytestream2_get_le16(&s->stream_ptr);
  682. P[1] = bytestream2_get_le16(&s->stream_ptr);
  683. }
  684. for (x = 0; x < 8; x++)
  685. pixel_ptr[x] = P[x >> 2];
  686. pixel_ptr += s->stride;
  687. }
  688. /* report success */
  689. return 0;
  690. }
  691. static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
  692. {
  693. int x, y;
  694. uint16_t pix;
  695. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  696. /* 1-color encoding: the whole block is 1 solid color */
  697. pix = bytestream2_get_le16(&s->stream_ptr);
  698. for (y = 0; y < 8; y++) {
  699. for (x = 0; x < 8; x++)
  700. pixel_ptr[x] = pix;
  701. pixel_ptr += s->stride;
  702. }
  703. /* report success */
  704. return 0;
  705. }
/* 8bpp opcode dispatch table, indexed by the 4-bit opcode from the
 * decoding map. */
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
/* 16bpp opcode dispatch table.  Opcodes 0x0-0x5 share the 8bpp motion-copy
 * handlers; 0x6-0xE have 16bpp-specific variants.  Slot 0xF reuses the
 * opcode 0x1 handler (copy from two frames back) rather than a dither. */
static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
/**
 * Decode all 8x8 blocks of one frame.  Reads one 4-bit opcode per block
 * from the decoding map and dispatches to the matching handler; pixel data
 * (and, for 16bpp, a separate motion-vector stream) is consumed from
 * s->stream_ptr / s->mv_ptr.
 */
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    /* NOTE(review): function-static debug frame counter — shared across all
     * decoder instances and unsynchronized; harmless for logging only. */
    static int frame = 0;
    GetBitContext gb;

    av_dlog(s->avctx, "frame %d\n", frame);
    frame++;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->pal, AVPALETTE_SIZE);

        s->stride = s->current_frame.linesize[0];
    } else {
        /* stride is in 16-bit pixels, so halve the byte linesize */
        s->stride = s->current_frame.linesize[0] >> 1;
        /* the motion-vector stream starts after a 16-bit length prefix */
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    /* largest byte offset a motion copy may start from: the top-left of
     * the bottom-right-most full 8x8 block */
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            av_dlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x*2
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    /* a single trailing byte is tolerated (alignment padding); more than
     * that suggests a desync between opcodes and pixel data */
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_ERROR,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
  777. static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
  778. {
  779. IpvideoContext *s = avctx->priv_data;
  780. s->avctx = avctx;
  781. s->is_16bpp = avctx->bits_per_coded_sample == 16;
  782. avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
  783. ff_dsputil_init(&s->dsp, avctx);
  784. avcodec_get_frame_defaults(&s->second_last_frame);
  785. avcodec_get_frame_defaults(&s->last_frame);
  786. avcodec_get_frame_defaults(&s->current_frame);
  787. s->current_frame.data[0] = s->last_frame.data[0] =
  788. s->second_last_frame.data[0] = NULL;
  789. return 0;
  790. }
/**
 * Decode one packet into one output frame.  The packet holds the decoding
 * map followed by the pixel/motion data; a palette may arrive as packet
 * side data.  Returns the number of bytes consumed (always buf_size).
 */
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    /* NOTE(review): a too-short packet is silently consumed (no frame
     * output, no error) — *data_size is left untouched by design here */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    /* reference = 3: the frame stays referenced as both "last" and
     * "second last" across the next two calls */
    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, "  get_buffer() failed\n");
        return -1;
    }

    if (!s->is_16bpp) {
        /* pick up a palette change delivered as packet side data */
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            s->current_frame.palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s);

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames: second-last is dropped, last becomes second-last,
     * current becomes last */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL; /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
  831. static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
  832. {
  833. IpvideoContext *s = avctx->priv_data;
  834. /* release the last frame */
  835. if (s->last_frame.data[0])
  836. avctx->release_buffer(avctx, &s->last_frame);
  837. if (s->second_last_frame.data[0])
  838. avctx->release_buffer(avctx, &s->second_last_frame);
  839. return 0;
  840. }
/* Codec registration entry for the Interplay MVE video decoder. */
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_PARAM_CHANGE,
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};