/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 */

/**
 * @file interplayvideo.c
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
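
/*
 * Illustrative buffer layout, derived from the sizes used later in this file
 * (not part of the original header comment):
 *
 *     [ decoding map: width * height / (8 * 8 * 2) bytes ]
 *     [ video data:   remaining bytes of the frame buffer ]
 *
 * The decoding map carries one 4-bit opcode per 8x8 block of the frame.
 */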

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "common.h"
#include "avcodec.h"
#include "dsputil.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay printf
#else
static inline void debug_interplay(const char *format, ...) { }
#endif

typedef struct IpvideoContext {
    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame last_frame;
    AVFrame current_frame;
    int first_frame;

    unsigned char *decoding_map;
    int decoding_map_size;

    unsigned char *buf;
    int size;

    unsigned char palette[PALETTE_COUNT * 4];
} IpvideoContext;

#define CHECK_STREAM_PTR(n) \
    if ((sg_stream_ptr + n) > sg_stream_end) { \
        printf ("Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
            sg_stream_ptr + n, sg_stream_end); \
        return -1; \
    }
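
/* CHECK_STREAM_PTR is invoked by each opcode handler before it reads n more
 * bytes from the stream; on underflow it prints a warning and makes the
 * handler return -1. */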

static void ipvideo_new_palette(IpvideoContext *s, unsigned char *palette) {

    int i;
    unsigned char r, g, b;
    unsigned int *palette32;

    switch (s->avctx->pix_fmt) {

    case PIX_FMT_PAL8:
        palette32 = (unsigned int *)s->palette;
        for (i = 0; i < PALETTE_COUNT; i++) {
            r = *palette++;
            g = *palette++;
            b = *palette++;
            palette32[i] = (r << 16) | (g << 8) | (b);
        }
        break;

    default:
        printf ("Interplay video: Unhandled video format\n");
        break;
    }
}

static unsigned char *sg_stream_ptr;
static unsigned char *sg_stream_end;
static unsigned char *sg_current_plane;
static unsigned char *sg_output_plane;
static unsigned char *sg_last_plane;
static int sg_line_inc;
static int sg_stride;
static int sg_upper_motion_limit_offset;
static DSPContext sg_dsp;
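
/* These file-scope variables describe the frame currently being decoded; they
 * are initialized in ipvideo_decode_opcodes() before the per-block opcode
 * handlers below are called. */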

static int ipvideo_decode_block_opcode_0x0_0x1(void)
{
    int x, y;
    unsigned char *src_block;

    /* skip block, which actually means to copy from previous frame */
    src_block = sg_last_plane + (sg_output_plane - sg_current_plane);
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++) {
            *sg_output_plane++ = *src_block++;
        }
        sg_output_plane += sg_line_inc;
        src_block += sg_line_inc;
    }

    /* report success */
    return 0;
}

#define COPY_FROM_CURRENT() \
    motion_offset = current_offset; \
    motion_offset += y * sg_stride; \
    motion_offset += x; \
    if (motion_offset < 0) { \
        printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
        return -1; \
    } else if (motion_offset > sg_upper_motion_limit_offset) { \
        printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
            motion_offset, sg_upper_motion_limit_offset); \
        return -1; \
    } \
    sg_dsp.put_pixels_tab[0][0](sg_output_plane, \
        sg_current_plane + motion_offset, sg_stride, 8);

#define COPY_FROM_PREVIOUS() \
    motion_offset = current_offset; \
    motion_offset += y * sg_stride; \
    motion_offset += x; \
    if (motion_offset < 0) { \
        printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
        return -1; \
    } else if (motion_offset > sg_upper_motion_limit_offset) { \
        printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
            motion_offset, sg_upper_motion_limit_offset); \
        return -1; \
    } \
    sg_dsp.put_pixels_tab[0][0](sg_output_plane, \
        sg_last_plane + motion_offset, sg_stride, 8);
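
/* Both copy macros turn the decoded (x, y) motion vector into a byte offset
 * from the start of the plane, bounds-check it against
 * sg_upper_motion_limit_offset, and copy an 8x8 block with dsputil's
 * put_pixels routine -- from the current frame (opcode 0x3) or from the
 * previous frame (opcodes 0x2, 0x4, 0x5). */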

static int ipvideo_decode_block_opcode_0x2(void)
{
    unsigned char B;
    int x, y;
    int motion_offset;
    int current_offset = sg_output_plane - sg_current_plane;

    /* This is the opcode which claims to copy data from within the same
     * frame at a coordinate which has not been rendered yet. Assume that
     * it is supposed to be copied from the previous frame. */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(1);
    B = *sg_stream_ptr++;

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }
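    /* illustrative examples of the mapping above (not in the original source):
     * B = 0 decodes to (x, y) = (8, 0); B = 56 decodes to (-14, 8) */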

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);

    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x3(void)
{
    unsigned char B;
    int x, y;
    int motion_offset;
    int current_offset = sg_output_plane - sg_current_plane;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(1);
    B = *sg_stream_ptr++;

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);

    COPY_FROM_CURRENT();

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x4(void)
{
    int x, y;
    unsigned char B, BL, BH;
    int motion_offset;
    int current_offset = sg_output_plane - sg_current_plane;

    /* copy a block from the previous frame; need 1 more byte */
    CHECK_STREAM_PTR(1);

    B = *sg_stream_ptr++;
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);

    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x5(void)
{
    signed char x, y;
    int motion_offset;
    int current_offset = sg_output_plane - sg_current_plane;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(2);

    x = *sg_stream_ptr++;
    y = *sg_stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);

    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x6(void)
{
    /* mystery opcode? skip multiple blocks? */
    printf (" Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(void)
{
    int x, y;
    unsigned char P0, P1;
    unsigned char B[8];
    unsigned int flags;
    int bitmask;

    /* 2-color encoding */
    CHECK_STREAM_PTR(2);

    P0 = *sg_stream_ptr++;
    P1 = *sg_stream_ptr++;

    if (P0 <= P1) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(8);
        for (y = 0; y < 8; y++)
            B[y] = *sg_stream_ptr++;

        for (y = 0; y < 8; y++) {
            flags = B[y];
            for (x = 0x80; x != 0; x >>= 1) {
                if (flags & x)
                    *sg_output_plane++ = P1;
                else
                    *sg_output_plane++ = P0;
            }
            sg_output_plane += sg_line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(2);
        B[0] = *sg_stream_ptr++;
        B[1] = *sg_stream_ptr++;

        flags = (B[0] << 8) | B[1];
        bitmask = 0x8000;
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, bitmask >>= 1) {
                if (flags & bitmask) {
                    *(sg_output_plane + x) = P0;
                    *(sg_output_plane + x + 1) = P0;
                    *(sg_output_plane + sg_stride + x) = P0;
                    *(sg_output_plane + sg_stride + x + 1) = P0;
                } else {
                    *(sg_output_plane + x) = P1;
                    *(sg_output_plane + x + 1) = P1;
                    *(sg_output_plane + sg_stride + x) = P1;
                    *(sg_output_plane + sg_stride + x + 1) = P1;
                }
            }
            sg_output_plane += sg_stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(void)
{
    int x, y;
    unsigned char P[8];
    unsigned char B[8];
    unsigned int flags = 0;
    unsigned int bitmask = 0;
    unsigned char P0 = 0, P1 = 0;
    int lower_half = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(2);

    P[0] = *sg_stream_ptr++;
    P[1] = *sg_stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 12 more bytes */
        CHECK_STREAM_PTR(12);
        B[0] = *sg_stream_ptr++;  B[1] = *sg_stream_ptr++;
        P[2] = *sg_stream_ptr++;  P[3] = *sg_stream_ptr++;
        B[2] = *sg_stream_ptr++;  B[3] = *sg_stream_ptr++;
        P[4] = *sg_stream_ptr++;  P[5] = *sg_stream_ptr++;
        B[4] = *sg_stream_ptr++;  B[5] = *sg_stream_ptr++;
        P[6] = *sg_stream_ptr++;  P[7] = *sg_stream_ptr++;
        B[6] = *sg_stream_ptr++;  B[7] = *sg_stream_ptr++;

        for (y = 0; y < 8; y++) {

            /* time to reload flags? */
            if (y == 0) {
                flags =
                    ((B[0] & 0xF0) << 24) | ((B[4] & 0xF0) << 20) |
                    ((B[0] & 0x0F) << 20) | ((B[4] & 0x0F) << 16) |
                    ((B[1] & 0xF0) <<  8) | ((B[5] & 0xF0) <<  4) |
                    ((B[1] & 0x0F) <<  4) | ((B[5] & 0x0F) <<  0);
                bitmask = 0x80000000;
                lower_half = 0;  /* still on top half */
            } else if (y == 4) {
                flags =
                    ((B[2] & 0xF0) << 24) | ((B[6] & 0xF0) << 20) |
                    ((B[2] & 0x0F) << 20) | ((B[6] & 0x0F) << 16) |
                    ((B[3] & 0xF0) <<  8) | ((B[7] & 0xF0) <<  4) |
                    ((B[3] & 0x0F) <<  4) | ((B[7] & 0x0F) <<  0);
                bitmask = 0x80000000;
                lower_half = 4;
            }

            for (x = 0; x < 8; x++, bitmask >>= 1) {
                /* get the pixel values ready for this quadrant */
                if (x == 0) {
                    P0 = P[lower_half + 0];
                    P1 = P[lower_half + 1];
                } else if (x == 4) {
                    P0 = P[lower_half + 2];
                    P1 = P[lower_half + 3];
                }

                if (flags & bitmask)
                    *sg_output_plane++ = P1;
                else
                    *sg_output_plane++ = P0;
            }
            sg_output_plane += sg_line_inc;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(10);
        B[0] = *sg_stream_ptr++;  B[1] = *sg_stream_ptr++;
        B[2] = *sg_stream_ptr++;  B[3] = *sg_stream_ptr++;
        P[2] = *sg_stream_ptr++;  P[3] = *sg_stream_ptr++;
        B[4] = *sg_stream_ptr++;  B[5] = *sg_stream_ptr++;
        B[6] = *sg_stream_ptr++;  B[7] = *sg_stream_ptr++;

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                /* time to reload flags? */
                if (y == 0) {
                    flags =
                        ((B[0] & 0xF0) << 24) | ((B[4] & 0xF0) << 20) |
                        ((B[0] & 0x0F) << 20) | ((B[4] & 0x0F) << 16) |
                        ((B[1] & 0xF0) <<  8) | ((B[5] & 0xF0) <<  4) |
                        ((B[1] & 0x0F) <<  4) | ((B[5] & 0x0F) <<  0);
                    bitmask = 0x80000000;
                } else if (y == 4) {
                    flags =
                        ((B[2] & 0xF0) << 24) | ((B[6] & 0xF0) << 20) |
                        ((B[2] & 0x0F) << 20) | ((B[6] & 0x0F) << 16) |
                        ((B[3] & 0xF0) <<  8) | ((B[7] & 0xF0) <<  4) |
                        ((B[3] & 0x0F) <<  4) | ((B[7] & 0x0F) <<  0);
                    bitmask = 0x80000000;
                }

                for (x = 0; x < 8; x++, bitmask >>= 1) {
                    /* get the pixel values ready for this half */
                    if (x == 0) {
                        P0 = P[0];
                        P1 = P[1];
                    } else if (x == 4) {
                        P0 = P[2];
                        P1 = P[3];
                    }

                    if (flags & bitmask)
                        *sg_output_plane++ = P0;
                    else
                        *sg_output_plane++ = P1;
                }
                sg_output_plane += sg_line_inc;
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                flags = B[y];
                if (y == 0) {
                    P0 = P[0];
                    P1 = P[1];
                } else if (y == 4) {
                    P0 = P[2];
                    P1 = P[3];
                }

                for (bitmask = 0x80; bitmask != 0; bitmask >>= 1) {
                    if (flags & bitmask)
                        *sg_output_plane++ = P0;
                    else
                        *sg_output_plane++ = P1;
                }
                sg_output_plane += sg_line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(void)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;
    int shifter = 0;
    unsigned char pix;

    /* 4-color encoding */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 4; y++)
        P[y] = *sg_stream_ptr++;

    if ((P[0] <= P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each pixel, need 16 more bytes */
        CHECK_STREAM_PTR(16);

        for (y = 0; y < 8; y++) {
            /* get the next set of 8 2-bit flags */
            flags = (sg_stream_ptr[0] << 8) | sg_stream_ptr[1];
            sg_stream_ptr += 2;
            for (x = 0, shifter = 14; x < 8; x++, shifter -= 2) {
                *sg_output_plane++ = P[(flags >> shifter) & 0x03];
            }
            sg_output_plane += sg_line_inc;
        }

    } else if ((P[0] <= P[1]) && (P[2] > P[3])) {

        /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
        CHECK_STREAM_PTR(4);

        flags = 0;
        flags = (flags << 8) | *sg_stream_ptr++;
        flags = (flags << 8) | *sg_stream_ptr++;
        flags = (flags << 8) | *sg_stream_ptr++;
        flags = (flags << 8) | *sg_stream_ptr++;
        shifter = 30;

        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, shifter -= 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + x + 1) = pix;
                *(sg_output_plane + sg_stride + x) = pix;
                *(sg_output_plane + sg_stride + x + 1) = pix;
            }
            sg_output_plane += sg_stride * 2;
        }

    } else if ((P[0] > P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each 2x1 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y++) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                flags = 0;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                shifter = 30;
            }
            for (x = 0; x < 8; x += 2, shifter -= 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + x + 1) = pix;
            }
            sg_output_plane += sg_stride;
        }

    } else {

        /* 1 of 4 colors for each 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y += 2) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                flags = 0;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                flags = (flags << 8) | *sg_stream_ptr++;
                shifter = 30;
            }
            for (x = 0; x < 8; x++, shifter -= 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + sg_stride + x) = pix;
            }
            sg_output_plane += sg_stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(void)
{
    int x, y;
    unsigned char P[16];
    unsigned char B[16];
    int flags = 0;
    int shifter = 0;
    int index;
    int split;
    int lower_half;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 4; y++)
        P[y] = *sg_stream_ptr++;

    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 28 more bytes */
        CHECK_STREAM_PTR(28);

        for (y = 0; y < 4; y++)
            B[y] = *sg_stream_ptr++;
        for (y = 4; y < 16; y += 4) {
            for (x = y; x < y + 4; x++)
                P[x] = *sg_stream_ptr++;
            for (x = y; x < y + 4; x++)
                B[x] = *sg_stream_ptr++;
        }

        for (y = 0; y < 8; y++) {

            lower_half = (y >= 4) ? 4 : 0;
            flags = (B[y] << 8) | B[y + 8];

            for (x = 0, shifter = 14; x < 8; x++, shifter -= 2) {
                split = (x >= 4) ? 8 : 0;
                index = split + lower_half + ((flags >> shifter) & 0x03);
                *sg_output_plane++ = P[index];
            }

            sg_output_plane += sg_line_inc;
        }

    } else {

        /* 4-color encoding for either left and right or top and bottom
         * halves; need 20 more bytes */
        CHECK_STREAM_PTR(20);

        for (y = 0; y < 8; y++)
            B[y] = *sg_stream_ptr++;
        for (y = 4; y < 8; y++)
            P[y] = *sg_stream_ptr++;
        for (y = 8; y < 16; y++)
            B[y] = *sg_stream_ptr++;

        if (P[4] <= P[5]) {

            /* block is divided into left and right halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y] << 8) | B[y + 8];
                split = 0;

                for (x = 0, shifter = 14; x < 8; x++, shifter -= 2) {
                    if (x == 4)
                        split = 4;
                    *sg_output_plane++ = P[split + ((flags >> shifter) & 0x03)];
                }

                sg_output_plane += sg_line_inc;
            }

        } else {

            /* block is divided into top and bottom halves */
            split = 0;

            for (y = 0; y < 8; y++) {

                flags = (B[y * 2] << 8) | B[y * 2 + 1];
                if (y == 4)
                    split = 4;

                for (x = 0, shifter = 14; x < 8; x++, shifter -= 2)
                    *sg_output_plane++ = P[split + ((flags >> shifter) & 0x03)];

                sg_output_plane += sg_line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(void)
{
    int x, y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(64);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++) {
            *sg_output_plane++ = *sg_stream_ptr++;
        }
        sg_output_plane += sg_line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(void)
{
    int x, y;
    unsigned char pix;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pix = *sg_stream_ptr++;
            *(sg_output_plane + x) = pix;
            *(sg_output_plane + x + 1) = pix;
            *(sg_output_plane + sg_stride + x) = pix;
            *(sg_output_plane + sg_stride + x + 1) = pix;
        }
        sg_output_plane += sg_stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(void)
{
    int x, y;
    unsigned char P[4];
    unsigned char index = 0;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 4; y++)
        P[y] = *sg_stream_ptr++;

    for (y = 0; y < 8; y++) {
        if (y < 4)
            index = 0;
        else
            index = 2;

        for (x = 0; x < 8; x++) {
            if (x == 4)
                index++;
            *sg_output_plane++ = P[index];
        }
        sg_output_plane += sg_line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(void)
{
    int x, y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(1);
    pix = *sg_stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++) {
            *sg_output_plane++ = pix;
        }
        sg_output_plane += sg_line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(void)
{
    int x, y;
    unsigned char sample0, sample1;

    /* dithered encoding */
    CHECK_STREAM_PTR(2);
    sample0 = *sg_stream_ptr++;
    sample1 = *sg_stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            if (y & 1) {
                *sg_output_plane++ = sample1;
                *sg_output_plane++ = sample0;
            } else {
                *sg_output_plane++ = sample0;
                *sg_output_plane++ = sample1;
            }
        }
        sg_output_plane += sg_line_inc;
    }

    /* report success */
    return 0;
}

static int (*ipvideo_decode_block[16])(void);

static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    int index = 0;
    unsigned char opcode;
    int ret;
    int code_counts[16];
    static int frame = 0;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;

    for (x = 0; x < 16; x++)
        code_counts[x] = 0;

    /* this is PAL8, so make the palette available */
    if (s->avctx->pix_fmt == PIX_FMT_PAL8)
        memcpy(s->current_frame.data[1], s->palette, PALETTE_COUNT * 4);

    switch (s->avctx->pix_fmt) {

    case PIX_FMT_PAL8:
        sg_stride = s->current_frame.linesize[0];
        sg_stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
        sg_stream_end = s->buf + s->size;
        sg_line_inc = sg_stride - 8;
        sg_current_plane = s->current_frame.data[0];
        sg_last_plane = s->last_frame.data[0];
        sg_upper_motion_limit_offset = (s->avctx->height - 8) * sg_stride
            + s->avctx->width - 8;
        sg_dsp = s->dsp;

        for (y = 0; y < (sg_stride * s->avctx->height); y += sg_stride * 8) {
            for (x = y; x < y + s->avctx->width; x += 8) {
                /* bottom nibble first, then top nibble (which makes it
                 * hard to use a GetBitcontext) */
                if (index & 1)
                    opcode = s->decoding_map[index >> 1] >> 4;
                else
                    opcode = s->decoding_map[index >> 1] & 0xF;
                index++;

                debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                    x - y, y / sg_stride, opcode, sg_stream_ptr);
                code_counts[opcode]++;

                sg_output_plane = sg_current_plane + x;
                ret = ipvideo_decode_block[opcode]();
                if (ret != 0) {
                    printf(" Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                        frame, x - y, y / sg_stride);
                    return;
                }
            }
        }
        if ((sg_stream_ptr != sg_stream_end) &&
            (sg_stream_ptr + 1 != sg_stream_end)) {
            printf (" Interplay video: decode finished with %d bytes left over\n",
                sg_stream_end - sg_stream_ptr);
        }
        break;

    default:
        printf ("Interplay video: Unhandled video format\n");
        break;
    }
}

static int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    if (s->avctx->extradata_size != sizeof(AVPaletteControl)) {
        printf (" Interplay video: expected extradata_size of %d\n",
            sizeof(AVPaletteControl));
        return -1;
    }

    avctx->pix_fmt = PIX_FMT_PAL8;
    avctx->has_b_frames = 0;
    dsputil_init(&s->dsp, avctx);

    s->first_frame = 1;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
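    /* illustrative example (not in the original source): a 320x200 frame
     * yields 320 * 200 / 128 = 500 bytes of decoding map */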

    /* assign block decode functions */
    ipvideo_decode_block[0x0] = ipvideo_decode_block_opcode_0x0_0x1;
    ipvideo_decode_block[0x1] = ipvideo_decode_block_opcode_0x0_0x1;
    ipvideo_decode_block[0x2] = ipvideo_decode_block_opcode_0x2;
    ipvideo_decode_block[0x3] = ipvideo_decode_block_opcode_0x3;
    ipvideo_decode_block[0x4] = ipvideo_decode_block_opcode_0x4;
    ipvideo_decode_block[0x5] = ipvideo_decode_block_opcode_0x5;
    ipvideo_decode_block[0x6] = ipvideo_decode_block_opcode_0x6;
    ipvideo_decode_block[0x7] = ipvideo_decode_block_opcode_0x7;
    ipvideo_decode_block[0x8] = ipvideo_decode_block_opcode_0x8;
    ipvideo_decode_block[0x9] = ipvideo_decode_block_opcode_0x9;
    ipvideo_decode_block[0xA] = ipvideo_decode_block_opcode_0xA;
    ipvideo_decode_block[0xB] = ipvideo_decode_block_opcode_0xB;
    ipvideo_decode_block[0xC] = ipvideo_decode_block_opcode_0xC;
    ipvideo_decode_block[0xD] = ipvideo_decode_block_opcode_0xD;
    ipvideo_decode_block[0xE] = ipvideo_decode_block_opcode_0xE;
    ipvideo_decode_block[0xF] = ipvideo_decode_block_opcode_0xF;

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                uint8_t *buf, int buf_size)
{
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = (AVPaletteControl *)avctx->extradata;

    if (palette_control->palette_changed) {
        /* load the new palette and reset the palette control */
        ipvideo_new_palette(s, palette_control->palette);
        palette_control->palette_changed = 0;
    }

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    if (avctx->get_buffer(avctx, &s->current_frame)) {
        printf (" Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    /* release the last frame if it is allocated */
    if (s->first_frame)
        s->first_frame = 0;
    else
        avctx->release_buffer(avctx, &s->last_frame);

    /* shuffle frames */
    s->last_frame = s->current_frame;

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    avctx->release_buffer(avctx, &s->last_frame);

    return 0;
}

AVCodec interplay_video_decoder = {
    "interplayvideo",
    CODEC_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
};