/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    int is_16bpp;
    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    const uint8_t *mv_ptr;
    const uint8_t *mv_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;
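
/* Validate that at least n more bytes remain before the given end pointer;
 * on failure, log an error and abort the calling opcode handler. Note that
 * the macro expands inside functions where 's' is in scope. */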
#define CHECK_STREAM_PTR(stream_ptr, stream_end, n) \
    if (stream_end - stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               stream_ptr + n, stream_end); \
        return -1; \
    }
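
/* Copy one 8x8 block into the current frame from 'src' at the given motion
 * vector, after checking that the source block lies within the frame. */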
static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
                       + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    if (src->data[0] == NULL) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                           s->current_frame.linesize[0], 8);
    return 0;
}
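
/* opcode 0x0: copy this block unchanged from the same position in the
 * previous frame */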
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}
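
/* opcode 0x1: copy this block unchanged from the same position in the
 * frame before the previous frame */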
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 14);
        s->stream_ptr -= 2;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];

                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr  += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);
        s->stream_ptr -= 4;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0]  = bytestream_get_le16(&s->stream_ptr);
                P[1]  = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 12);

        if (!(AV_RL16(s->stream_ptr + 4) & 0x8000)) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];

                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = bytestream_get_le16(&s->stream_ptr);
                    P[1]  = bytestream_get_le16(&s->stream_ptr);
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = bytestream_get_le16(&s->stream_ptr);
                    P[1] = bytestream_get_le16(&s->stream_ptr);
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (x = 0; x < 4; x++)
        P[x] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (!(AV_RL16(s->stream_ptr) & 0x8000)) {

        /* 4-color encoding for each quadrant */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 48);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = !(AV_RL16(s->stream_ptr + 16) & 0x8000);
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 128);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream_get_le16(&s->stream_ptr);
            P[1] = bytestream_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    pix = bytestream_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
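
/* Opcode dispatch tables: the 4-bit opcode read from the decoding map for
 * each 8x8 block selects one of these handlers. The first table serves the
 * PAL8 path, the second the 16bpp path; opcode 0xF has no 16bpp handler, so
 * that slot falls back to the opcode 0x1 copy handler. */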
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6,    ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8,    ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA,    ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC,    ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE,    ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
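
/* Walk the frame in 8x8 blocks, reading one 4-bit opcode per block from the
 * decoding map and dispatching to the matching handler above. */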
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    av_dlog(NULL, "------------------ frame %d\n", frame);
    frame++;

    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->pal, AVPALETTE_SIZE);

        s->stride = s->current_frame.linesize[0];
        s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
        s->stream_end = s->buf + s->size;
    } else {
        s->stride = s->current_frame.linesize[0] >> 1;
        s->stream_ptr = s->buf + 16;
        s->stream_end =
        s->mv_ptr = s->buf + 14 + AV_RL16(s->buf+14);
        s->mv_end = s->buf + s->size;
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            av_dlog(NULL, " block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                    x, y, opcode, s->stream_ptr);

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x*2
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;

    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    avcodec_get_frame_defaults(&s->second_last_frame);
    avcodec_get_frame_defaults(&s->last_frame);
    avcodec_get_frame_defaults(&s->current_frame);

    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}
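
/* Decode one packet: the buffer holds the decoding map immediately followed
 * by the video data, as described in the file comment above. */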
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    if (!s->is_16bpp) {
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            s->current_frame.palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s);

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}

AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};