You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1014 lines
29KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 the ffmpeg project
  4. *
  5. * This library is free software; you can redistribute it and/or
  6. * modify it under the terms of the GNU Lesser General Public
  7. * License as published by the Free Software Foundation; either
  8. * version 2 of the License, or (at your option) any later version.
  9. *
  10. * This library is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  13. * Lesser General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU Lesser General Public
  16. * License along with this library; if not, write to the Free Software
  17. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  18. *
  19. */
  20. /**
  21. * @file interplayvideo.c
  22. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  23. * For more information about the Interplay MVE format, visit:
  24. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  25. * This code is written in such a way that the identifiers match up
  26. * with the encoding descriptions in the document.
  27. *
  28. * This decoder presently only supports a PAL8 output colorspace.
  29. *
  30. * An Interplay video frame consists of 2 parts: The decoding map and
  31. * the video data. A demuxer must load these 2 parts together in a single
  32. * buffer before sending it through the stream to this decoder.
  33. */
  34. #include <stdio.h>
  35. #include <stdlib.h>
  36. #include <string.h>
  37. #include <unistd.h>
  38. #include "common.h"
  39. #include "avcodec.h"
  40. #include "dsputil.h"
/* number of entries in a PAL8 palette */
#define PALETTE_COUNT 256

/* debugging support: when DEBUG_INTERPLAY is non-zero, debug_interplay
 * maps directly onto printf; otherwise it becomes an empty inline
 * function so the call sites compile away with no runtime cost */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay printf
#else
static inline void debug_interplay(const char *format, ...) { }
#endif
/* Persistent decoder state, allocated by lavc in AVCodecContext.priv_data. */
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;   /* reference frame from 2 frames ago */
    AVFrame last_frame;          /* reference frame from 1 frame ago */
    AVFrame current_frame;       /* frame currently being decoded */
    unsigned char *decoding_map; /* 4 bits of opcode per 8x8 block */
    int decoding_map_size;
    unsigned char *buf;          /* opcode/pixel data following the map */
    int size;                    /* byte count of buf */
    unsigned char palette[PALETTE_COUNT * 4]; /* native 32-bit 0RGB palette */

} IpvideoContext;
  61. #define CHECK_STREAM_PTR(n) \
  62. if ((sg_stream_ptr + n) > sg_stream_end) { \
  63. printf ("Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
  64. sg_stream_ptr + n, sg_stream_end); \
  65. return -1; \
  66. }
  67. static void ipvideo_new_palette(IpvideoContext *s, unsigned char *palette) {
  68. int i;
  69. unsigned char r, g, b;
  70. unsigned int *palette32;
  71. switch (s->avctx->pix_fmt) {
  72. case PIX_FMT_PAL8:
  73. palette32 = (unsigned int *)s->palette;
  74. for (i = 0; i < PALETTE_COUNT; i++) {
  75. r = *palette++;
  76. g = *palette++;
  77. b = *palette++;
  78. palette32[i] = (r << 16) | (g << 8) | (b);
  79. }
  80. break;
  81. default:
  82. printf ("Interplay video: Unhandled video format\n");
  83. break;
  84. }
  85. }
/* NOTE(review): per-frame decoding state lives in these file-scope
 * ("sg_") globals shared by all block decoders below, which makes the
 * decoder non-reentrant across simultaneous streams. */
static unsigned char *sg_stream_ptr;        /* next byte of opcode/pixel data */
static unsigned char *sg_stream_end;        /* one past the last stream byte */
static unsigned char *sg_current_plane;     /* base of the frame being decoded */
static unsigned char *sg_output_plane;      /* write cursor for the current block */
static unsigned char *sg_last_plane;        /* base of the previous frame */
static unsigned char *sg_second_last_plane; /* base of the frame before that */
static int sg_line_inc;                     /* stride minus block width (8) */
static int sg_stride;                       /* bytes per output line */
static int sg_upper_motion_limit_offset;    /* highest legal motion-copy offset */
static DSPContext sg_dsp;                   /* copy of the context's dsputil fns */
  96. static int ipvideo_decode_block_opcode_0x0(void)
  97. {
  98. int x, y;
  99. unsigned char *src_block;
  100. /* skip block, which actually means to copy from previous frame */
  101. src_block = sg_last_plane + (sg_output_plane - sg_current_plane);
  102. for (y = 0; y < 8; y++) {
  103. for (x = 0; x < 8; x++) {
  104. *sg_output_plane++ = *src_block++;
  105. }
  106. sg_output_plane += sg_line_inc;
  107. src_block += sg_line_inc;
  108. }
  109. /* report success */
  110. return 0;
  111. }
  112. static int ipvideo_decode_block_opcode_0x1(void)
  113. {
  114. int x, y;
  115. unsigned char *src_block;
  116. /* copy block from two frames behind */
  117. src_block = sg_second_last_plane + (sg_output_plane - sg_current_plane);
  118. for (y = 0; y < 8; y++) {
  119. for (x = 0; x < 8; x++) {
  120. *sg_output_plane++ = *src_block++;
  121. }
  122. sg_output_plane += sg_line_inc;
  123. src_block += sg_line_inc;
  124. }
  125. /* report success */
  126. return 0;
  127. }
/*
 * The three COPY_FROM_* macros share the motion-compensation path.
 * They are deliberately non-hygienic: the caller must declare locals
 * `x`, `y` (motion vector), `motion_offset` (scratch) and
 * `current_offset` (offset of this block in the plane).  The computed
 * source offset is bounds-checked against the frame; on violation the
 * macro returns -1 from the CALLING function.  The actual 8x8 copy is
 * done with dsputil's put_pixels routine.
 */

/* copy an 8x8 block from elsewhere in the frame currently being decoded */
#define COPY_FROM_CURRENT() \
    motion_offset = current_offset; \
    motion_offset += y * sg_stride; \
    motion_offset += x; \
    if (motion_offset < 0) { \
        printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
        return -1; \
    } else if (motion_offset > sg_upper_motion_limit_offset) { \
        printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
            motion_offset, sg_upper_motion_limit_offset); \
        return -1; \
    } \
    sg_dsp.put_pixels_tab[0][0](sg_output_plane, \
        sg_current_plane + motion_offset, sg_stride, 8);

/* copy an 8x8 block from the previous frame */
#define COPY_FROM_PREVIOUS() \
    motion_offset = current_offset; \
    motion_offset += y * sg_stride; \
    motion_offset += x; \
    if (motion_offset < 0) { \
        printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
        return -1; \
    } else if (motion_offset > sg_upper_motion_limit_offset) { \
        printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
            motion_offset, sg_upper_motion_limit_offset); \
        return -1; \
    } \
    sg_dsp.put_pixels_tab[0][0](sg_output_plane, \
        sg_last_plane + motion_offset, sg_stride, 8);

/* copy an 8x8 block from the frame two frames back */
#define COPY_FROM_SECOND_LAST() \
    motion_offset = current_offset; \
    motion_offset += y * sg_stride; \
    motion_offset += x; \
    if (motion_offset < 0) { \
        printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
        return -1; \
    } else if (motion_offset > sg_upper_motion_limit_offset) { \
        printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
            motion_offset, sg_upper_motion_limit_offset); \
        return -1; \
    } \
    sg_dsp.put_pixels_tab[0][0](sg_output_plane, \
        sg_second_last_plane + motion_offset, sg_stride, 8);
  170. static int ipvideo_decode_block_opcode_0x2(void)
  171. {
  172. unsigned char B;
  173. int x, y;
  174. int motion_offset;
  175. int current_offset = sg_output_plane - sg_current_plane;
  176. /* need 1 more byte for motion */
  177. CHECK_STREAM_PTR(1);
  178. B = *sg_stream_ptr++;
  179. if (B < 56) {
  180. x = 8 + (B % 7);
  181. y = B / 7;
  182. } else {
  183. x = -14 + ((B - 56) % 29);
  184. y = 8 + ((B - 56) / 29);
  185. }
  186. debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  187. COPY_FROM_SECOND_LAST();
  188. /* report success */
  189. return 0;
  190. }
  191. static int ipvideo_decode_block_opcode_0x3(void)
  192. {
  193. unsigned char B;
  194. int x, y;
  195. int motion_offset;
  196. int current_offset = sg_output_plane - sg_current_plane;
  197. /* copy 8x8 block from current frame from an up/left block */
  198. /* need 1 more byte for motion */
  199. CHECK_STREAM_PTR(1);
  200. B = *sg_stream_ptr++;
  201. if (B < 56) {
  202. x = -(8 + (B % 7));
  203. y = -(B / 7);
  204. } else {
  205. x = -(-14 + ((B - 56) % 29));
  206. y = -( 8 + ((B - 56) / 29));
  207. }
  208. debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  209. COPY_FROM_CURRENT();
  210. /* report success */
  211. return 0;
  212. }
  213. static int ipvideo_decode_block_opcode_0x4(void)
  214. {
  215. int x, y;
  216. unsigned char B, BL, BH;
  217. int motion_offset;
  218. int current_offset = sg_output_plane - sg_current_plane;
  219. /* copy a block from the previous frame; need 1 more byte */
  220. CHECK_STREAM_PTR(1);
  221. B = *sg_stream_ptr++;
  222. BL = B & 0x0F;
  223. BH = (B >> 4) & 0x0F;
  224. x = -8 + BL;
  225. y = -8 + BH;
  226. debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  227. COPY_FROM_PREVIOUS();
  228. /* report success */
  229. return 0;
  230. }
  231. static int ipvideo_decode_block_opcode_0x5(void)
  232. {
  233. signed char x, y;
  234. int motion_offset;
  235. int current_offset = sg_output_plane - sg_current_plane;
  236. /* copy a block from the previous frame using an expanded range;
  237. * need 2 more bytes */
  238. CHECK_STREAM_PTR(2);
  239. x = *sg_stream_ptr++;
  240. y = *sg_stream_ptr++;
  241. debug_interplay (" motion bytes = %d, %d\n", x, y);
  242. COPY_FROM_PREVIOUS();
  243. /* report success */
  244. return 0;
  245. }
/*
 * Opcode 0x6: meaning unknown (skip multiple blocks?).  Warn loudly
 * but consume no stream bytes and report success so decoding of the
 * rest of the frame can continue.
 */
static int ipvideo_decode_block_opcode_0x6(void)
{
    /* mystery opcode? skip multiple blocks? */
    printf (" Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
/*
 * Opcode 0x7: 2-color encoding.  P0/P1 are the two colors; their
 * ordering selects the sub-mode (per-pixel flags vs. per-2x2-cell
 * flags).  Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0x7(void)
{
    int x, y;
    unsigned char P0, P1;
    unsigned char B[8];
    unsigned int flags;
    int bitmask;

    /* 2-color encoding */
    CHECK_STREAM_PTR(2);
    P0 = *sg_stream_ptr++;
    P1 = *sg_stream_ptr++;

    if (P0 <= P1) {

        /* per-pixel mode: one flag byte per row, one bit per pixel;
         * need 8 more bytes from the stream */
        CHECK_STREAM_PTR(8);
        for (y = 0; y < 8; y++)
            B[y] = *sg_stream_ptr++;

        for (y = 0; y < 8; y++) {
            flags = B[y];
            /* the LSB selects the leftmost pixel of the row */
            for (x = 0x01; x <= 0x80; x <<= 1) {
                if (flags & x)
                    *sg_output_plane++ = P1;
                else
                    *sg_output_plane++ = P0;
            }
            sg_output_plane += sg_line_inc;
        }

    } else {

        /* 2x2 mode: 16 flag bits, one per 2x2 cell;
         * need 2 more bytes from the stream */
        CHECK_STREAM_PTR(2);
        B[0] = *sg_stream_ptr++;
        B[1] = *sg_stream_ptr++;
        flags = (B[1] << 8) | B[0];
        bitmask = 0x0001;
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, bitmask <<= 1) {
                /* paint the whole 2x2 cell with the selected color */
                if (flags & bitmask) {
                    *(sg_output_plane + x) = P1;
                    *(sg_output_plane + x + 1) = P1;
                    *(sg_output_plane + sg_stride + x) = P1;
                    *(sg_output_plane + sg_stride + x + 1) = P1;
                } else {
                    *(sg_output_plane + x) = P0;
                    *(sg_output_plane + x + 1) = P0;
                    *(sg_output_plane + sg_stride + x) = P0;
                    *(sg_output_plane + sg_stride + x + 1) = P0;
                }
            }
            sg_output_plane += sg_stride * 2;
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0x8: 2-color encoding for each 4x4 quadrant, or 2-color
 * encoding on either top/bottom or left/right halves.  The ordering
 * of each color pair selects the layout.  Returns 0 on success, -1 on
 * stream underrun.
 */
static int ipvideo_decode_block_opcode_0x8(void)
{
    int x, y;
    unsigned char P[8];
    unsigned char B[8];
    unsigned int flags = 0;
    unsigned int bitmask = 0;
    unsigned char P0 = 0, P1 = 0;
    int lower_half = 0;  /* index offset into P[] once on the bottom half */

    CHECK_STREAM_PTR(2);
    P[0] = *sg_stream_ptr++;
    P[1] = *sg_stream_ptr++;

    if (P[0] <= P[1]) {

        /* quadrant mode: each 4x4 quadrant has its own color pair and
         * 2 flag bytes; need 12 more bytes */
        CHECK_STREAM_PTR(12);
        B[0] = *sg_stream_ptr++;  B[1] = *sg_stream_ptr++;
        P[2] = *sg_stream_ptr++;  P[3] = *sg_stream_ptr++;
        B[2] = *sg_stream_ptr++;  B[3] = *sg_stream_ptr++;
        P[4] = *sg_stream_ptr++;  P[5] = *sg_stream_ptr++;
        B[4] = *sg_stream_ptr++;  B[5] = *sg_stream_ptr++;
        P[6] = *sg_stream_ptr++;  P[7] = *sg_stream_ptr++;
        B[6] = *sg_stream_ptr++;  B[7] = *sg_stream_ptr++;

        for (y = 0; y < 8; y++) {

            /* time to reload flags?  The nibbles of the left and right
             * quadrants' flag bytes are interleaved so that a single
             * 32-bit word spans 4 full-width rows, LSB first */
            if (y == 0) {
                flags =
                    ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                    ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                    ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                    ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                bitmask = 0x00000001;
                lower_half = 0;  /* still on top half */
            } else if (y == 4) {
                flags =
                    ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                    ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                    ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                    ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                bitmask = 0x00000001;
                lower_half = 2;
            }

            for (x = 0; x < 8; x++, bitmask <<= 1) {
                /* get the pixel values ready for this quadrant */
                if (x == 0) {
                    P0 = P[lower_half + 0];
                    P1 = P[lower_half + 1];
                } else if (x == 4) {
                    P0 = P[lower_half + 4];
                    P1 = P[lower_half + 5];
                }

                if (flags & bitmask)
                    *sg_output_plane++ = P1;
                else
                    *sg_output_plane++ = P0;
            }
            sg_output_plane += sg_line_inc;
        }

    } else {

        /* halved mode: need 10 more bytes */
        CHECK_STREAM_PTR(10);
        B[0] = *sg_stream_ptr++;  B[1] = *sg_stream_ptr++;
        B[2] = *sg_stream_ptr++;  B[3] = *sg_stream_ptr++;
        P[2] = *sg_stream_ptr++;  P[3] = *sg_stream_ptr++;
        B[4] = *sg_stream_ptr++;  B[5] = *sg_stream_ptr++;
        B[6] = *sg_stream_ptr++;  B[7] = *sg_stream_ptr++;

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                /* time to reload flags? (same nibble interleave as above) */
                if (y == 0) {
                    flags =
                        ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                        ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                        ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                        ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                    bitmask = 0x00000001;
                } else if (y == 4) {
                    flags =
                        ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                        ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                        ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                        ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                    bitmask = 0x00000001;
                }

                for (x = 0; x < 8; x++, bitmask <<= 1) {
                    /* get the pixel values ready for this half */
                    if (x == 0) {
                        P0 = P[0];
                        P1 = P[1];
                    } else if (x == 4) {
                        P0 = P[2];
                        P1 = P[3];
                    }

                    if (flags & bitmask)
                        *sg_output_plane++ = P1;
                    else
                        *sg_output_plane++ = P0;
                }
                sg_output_plane += sg_line_inc;
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                flags = B[y];  /* one flag byte per row */
                if (y == 0) {
                    P0 = P[0];
                    P1 = P[1];
                } else if (y == 4) {
                    P0 = P[2];
                    P1 = P[3];
                }

                for (bitmask = 0x01; bitmask <= 0x80; bitmask <<= 1) {
                    if (flags & bitmask)
                        *sg_output_plane++ = P1;
                    else
                        *sg_output_plane++ = P0;
                }
                sg_output_plane += sg_line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0x9: 4-color encoding.  P[0..3] are the colors; the relative
 * orderings of the two pairs select one of four sub-block
 * granularities (1x1, 2x2, 2x1 or 1x2 cells).  Returns 0 on success,
 * -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0x9(void)
{
    int x, y;
    unsigned char P[4];
    unsigned char B[4];
    unsigned int flags = 0;
    int shifter = 0;
    unsigned char pix;

    CHECK_STREAM_PTR(4);
    for (y = 0; y < 4; y++)
        P[y] = *sg_stream_ptr++;

    if ((P[0] <= P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each pixel, need 16 more bytes */
        CHECK_STREAM_PTR(16);
        for (y = 0; y < 8; y++) {
            /* get the next set of 8 2-bit flags (little-endian 16 bits) */
            flags = (sg_stream_ptr[1] << 8) | sg_stream_ptr[0];
            sg_stream_ptr += 2;
            for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                *sg_output_plane++ = P[(flags >> shifter) & 0x03];
            }
            sg_output_plane += sg_line_inc;
        }

    } else if ((P[0] <= P[1]) && (P[2] > P[3])) {

        /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
        CHECK_STREAM_PTR(4);
        B[0] = *sg_stream_ptr++;
        B[1] = *sg_stream_ptr++;
        B[2] = *sg_stream_ptr++;
        B[3] = *sg_stream_ptr++;
        flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
        shifter = 0;

        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                /* fill the 2x2 cell */
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + x + 1) = pix;
                *(sg_output_plane + sg_stride + x) = pix;
                *(sg_output_plane + sg_stride + x + 1) = pix;
            }
            sg_output_plane += sg_stride * 2;
        }

    } else if ((P[0] > P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each 2x1 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);
        for (y = 0; y < 8; y++) {
            /* time to reload flags? (every 4 rows consumes 32 bits) */
            if ((y == 0) || (y == 4)) {
                B[0] = *sg_stream_ptr++;
                B[1] = *sg_stream_ptr++;
                B[2] = *sg_stream_ptr++;
                B[3] = *sg_stream_ptr++;
                flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
                shifter = 0;
            }
            for (x = 0; x < 8; x += 2, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + x + 1) = pix;
            }
            sg_output_plane += sg_stride;
        }

    } else {

        /* 1 of 4 colors for each 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);
        for (y = 0; y < 8; y += 2) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                B[0] = *sg_stream_ptr++;
                B[1] = *sg_stream_ptr++;
                B[2] = *sg_stream_ptr++;
                B[3] = *sg_stream_ptr++;
                flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
                shifter = 0;
            }
            for (x = 0; x < 8; x++, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(sg_output_plane + x) = pix;
                *(sg_output_plane + sg_stride + x) = pix;
            }
            sg_output_plane += sg_stride * 2;
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0xA: 4-color encoding for each 4x4 quadrant, or 4-color
 * encoding on either top/bottom or left/right halves.  P[] holds up
 * to four 4-color sets; B[] holds the 2-bit-per-pixel flag bytes.
 * Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0xA(void)
{
    int x, y;
    unsigned char P[16];
    unsigned char B[16];
    int flags = 0;
    int shifter = 0;
    int index;
    int split;       /* P[] base offset for the right/bottom region */
    int lower_half;

    CHECK_STREAM_PTR(4);
    for (y = 0; y < 4; y++)
        P[y] = *sg_stream_ptr++;

    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 28 more bytes */
        CHECK_STREAM_PTR(28);
        for (y = 0; y < 4; y++)
            B[y] = *sg_stream_ptr++;
        /* remaining three quadrants: 4 colors then 4 flag bytes each */
        for (y = 4; y < 16; y += 4) {
            for (x = y; x < y + 4; x++)
                P[x] = *sg_stream_ptr++;
            for (x = y; x < y + 4; x++)
                B[x] = *sg_stream_ptr++;
        }

        for (y = 0; y < 8; y++) {

            lower_half = (y >= 4) ? 4 : 0;
            flags = (B[y + 8] << 8) | B[y];

            for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                /* quadrant base: +8 for the right half, +lower_half for
                 * the bottom half, plus the 2-bit color index */
                split = (x >= 4) ? 8 : 0;
                index = split + lower_half + ((flags >> shifter) & 0x03);
                *sg_output_plane++ = P[index];
            }

            sg_output_plane += sg_line_inc;
        }

    } else {

        /* 4-color encoding for either left and right or top and bottom
         * halves; need 20 more bytes */
        CHECK_STREAM_PTR(20);
        for (y = 0; y < 8; y++)
            B[y] = *sg_stream_ptr++;
        for (y = 4; y < 8; y++)
            P[y] = *sg_stream_ptr++;
        for (y = 8; y < 16; y++)
            B[y] = *sg_stream_ptr++;

        if (P[4] <= P[5]) {

            /* block is divided into left and right halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y + 8] << 8) | B[y];
                split = 0;

                for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                    if (x == 4)
                        split = 4;  /* switch to the right half's colors */
                    *sg_output_plane++ = P[split + ((flags >> shifter) & 0x03)];
                }

                sg_output_plane += sg_line_inc;
            }

        } else {

            /* block is divided into top and bottom halves */
            split = 0;
            for (y = 0; y < 8; y++) {

                flags = (B[y * 2 + 1] << 8) | B[y * 2];
                if (y == 4)
                    split = 4;  /* switch to the bottom half's colors */

                for (x = 0, shifter = 0; x < 8; x++, shifter += 2)
                    *sg_output_plane++ = P[split + ((flags >> shifter) & 0x03)];

                sg_output_plane += sg_line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
  593. static int ipvideo_decode_block_opcode_0xB(void)
  594. {
  595. int x, y;
  596. /* 64-color encoding (each pixel in block is a different color) */
  597. CHECK_STREAM_PTR(64);
  598. for (y = 0; y < 8; y++) {
  599. for (x = 0; x < 8; x++) {
  600. *sg_output_plane++ = *sg_stream_ptr++;
  601. }
  602. sg_output_plane += sg_line_inc;
  603. }
  604. /* report success */
  605. return 0;
  606. }
  607. static int ipvideo_decode_block_opcode_0xC(void)
  608. {
  609. int x, y;
  610. unsigned char pix;
  611. /* 16-color block encoding: each 2x2 block is a different color */
  612. CHECK_STREAM_PTR(16);
  613. for (y = 0; y < 8; y += 2) {
  614. for (x = 0; x < 8; x += 2) {
  615. pix = *sg_stream_ptr++;
  616. *(sg_output_plane + x) = pix;
  617. *(sg_output_plane + x + 1) = pix;
  618. *(sg_output_plane + sg_stride + x) = pix;
  619. *(sg_output_plane + sg_stride + x + 1) = pix;
  620. }
  621. sg_output_plane += sg_stride * 2;
  622. }
  623. /* report success */
  624. return 0;
  625. }
  626. static int ipvideo_decode_block_opcode_0xD(void)
  627. {
  628. int x, y;
  629. unsigned char P[4];
  630. unsigned char index = 0;
  631. /* 4-color block encoding: each 4x4 block is a different color */
  632. CHECK_STREAM_PTR(4);
  633. for (y = 0; y < 4; y++)
  634. P[y] = *sg_stream_ptr++;
  635. for (y = 0; y < 8; y++) {
  636. if (y < 4)
  637. index = 0;
  638. else
  639. index = 2;
  640. for (x = 0; x < 8; x++) {
  641. if (x == 4)
  642. index++;
  643. *sg_output_plane++ = P[index];
  644. }
  645. sg_output_plane += sg_line_inc;
  646. }
  647. /* report success */
  648. return 0;
  649. }
  650. static int ipvideo_decode_block_opcode_0xE(void)
  651. {
  652. int x, y;
  653. unsigned char pix;
  654. /* 1-color encoding: the whole block is 1 solid color */
  655. CHECK_STREAM_PTR(1);
  656. pix = *sg_stream_ptr++;
  657. for (y = 0; y < 8; y++) {
  658. for (x = 0; x < 8; x++) {
  659. *sg_output_plane++ = pix;
  660. }
  661. sg_output_plane += sg_line_inc;
  662. }
  663. /* report success */
  664. return 0;
  665. }
  666. static int ipvideo_decode_block_opcode_0xF(void)
  667. {
  668. int x, y;
  669. unsigned char sample0, sample1;
  670. /* dithered encoding */
  671. CHECK_STREAM_PTR(2);
  672. sample0 = *sg_stream_ptr++;
  673. sample1 = *sg_stream_ptr++;
  674. for (y = 0; y < 8; y++) {
  675. for (x = 0; x < 8; x += 2) {
  676. if (y & 1) {
  677. *sg_output_plane++ = sample1;
  678. *sg_output_plane++ = sample0;
  679. } else {
  680. *sg_output_plane++ = sample0;
  681. *sg_output_plane++ = sample1;
  682. }
  683. }
  684. sg_output_plane += sg_line_inc;
  685. }
  686. /* report success */
  687. return 0;
  688. }
/* dispatch table: one decoder per 4-bit block opcode, populated by
 * ipvideo_decode_init() */
static int (*ipvideo_decode_block[16])(void);
  690. static void ipvideo_decode_opcodes(IpvideoContext *s)
  691. {
  692. int x, y;
  693. int index = 0;
  694. unsigned char opcode;
  695. int ret;
  696. int code_counts[16];
  697. static int frame = 0;
  698. debug_interplay("------------------ frame %d\n", frame);
  699. frame++;
  700. for (x = 0; x < 16; x++)
  701. code_counts[x] = 0;
  702. /* this is PAL8, so make the palette available */
  703. if (s->avctx->pix_fmt == PIX_FMT_PAL8)
  704. memcpy(s->current_frame.data[1], s->palette, PALETTE_COUNT * 4);
  705. switch (s->avctx->pix_fmt) {
  706. case PIX_FMT_PAL8:
  707. sg_stride = s->current_frame.linesize[0];
  708. sg_stream_ptr = s->buf + 14; /* data starts 14 bytes in */
  709. sg_stream_end = s->buf + s->size;
  710. sg_line_inc = sg_stride - 8;
  711. sg_current_plane = s->current_frame.data[0];
  712. sg_last_plane = s->last_frame.data[0];
  713. sg_second_last_plane = s->second_last_frame.data[0];
  714. sg_upper_motion_limit_offset = (s->avctx->height - 8) * sg_stride
  715. + s->avctx->width - 8;
  716. sg_dsp = s->dsp;
  717. for (y = 0; y < (sg_stride * s->avctx->height); y += sg_stride * 8) {
  718. for (x = y; x < y + s->avctx->width; x += 8) {
  719. /* bottom nibble first, then top nibble (which makes it
  720. * hard to use a GetBitcontext) */
  721. if (index & 1)
  722. opcode = s->decoding_map[index >> 1] >> 4;
  723. else
  724. opcode = s->decoding_map[index >> 1] & 0xF;
  725. index++;
  726. debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
  727. x - y, y / sg_stride, opcode, sg_stream_ptr);
  728. code_counts[opcode]++;
  729. sg_output_plane = sg_current_plane + x;
  730. ret = ipvideo_decode_block[opcode]();
  731. if (ret != 0) {
  732. printf(" Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
  733. frame, x - y, y / sg_stride);
  734. return;
  735. }
  736. }
  737. }
  738. if ((sg_stream_ptr != sg_stream_end) &&
  739. (sg_stream_ptr + 1 != sg_stream_end)) {
  740. printf (" Interplay video: decode finished with %d bytes left over\n",
  741. sg_stream_end - sg_stream_ptr);
  742. }
  743. break;
  744. default:
  745. printf ("Interplay video: Unhandled video format\n");
  746. break;
  747. }
  748. }
  749. static int ipvideo_decode_init(AVCodecContext *avctx)
  750. {
  751. IpvideoContext *s = avctx->priv_data;
  752. s->avctx = avctx;
  753. if (s->avctx->extradata_size != sizeof(AVPaletteControl)) {
  754. printf (" Interplay video: expected extradata_size of %d\n",
  755. sizeof(AVPaletteControl));
  756. return -1;
  757. }
  758. avctx->pix_fmt = PIX_FMT_PAL8;
  759. avctx->has_b_frames = 0;
  760. dsputil_init(&s->dsp, avctx);
  761. /* decoding map contains 4 bits of information per 8x8 block */
  762. s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
  763. /* assign block decode functions */
  764. ipvideo_decode_block[0x0] = ipvideo_decode_block_opcode_0x0;
  765. ipvideo_decode_block[0x1] = ipvideo_decode_block_opcode_0x1;
  766. ipvideo_decode_block[0x2] = ipvideo_decode_block_opcode_0x2;
  767. ipvideo_decode_block[0x3] = ipvideo_decode_block_opcode_0x3;
  768. ipvideo_decode_block[0x4] = ipvideo_decode_block_opcode_0x4;
  769. ipvideo_decode_block[0x5] = ipvideo_decode_block_opcode_0x5;
  770. ipvideo_decode_block[0x6] = ipvideo_decode_block_opcode_0x6;
  771. ipvideo_decode_block[0x7] = ipvideo_decode_block_opcode_0x7;
  772. ipvideo_decode_block[0x8] = ipvideo_decode_block_opcode_0x8;
  773. ipvideo_decode_block[0x9] = ipvideo_decode_block_opcode_0x9;
  774. ipvideo_decode_block[0xA] = ipvideo_decode_block_opcode_0xA;
  775. ipvideo_decode_block[0xB] = ipvideo_decode_block_opcode_0xB;
  776. ipvideo_decode_block[0xC] = ipvideo_decode_block_opcode_0xC;
  777. ipvideo_decode_block[0xD] = ipvideo_decode_block_opcode_0xD;
  778. ipvideo_decode_block[0xE] = ipvideo_decode_block_opcode_0xE;
  779. ipvideo_decode_block[0xF] = ipvideo_decode_block_opcode_0xF;
  780. s->current_frame.data[0] = s->last_frame.data[0] =
  781. s->second_last_frame.data[0] = NULL;
  782. return 0;
  783. }
/*
 * Main decode entry point: consumes one chunk (decoding map followed
 * by video data), renders into a freshly acquired buffer, and rotates
 * the reference frames.  Returns the number of bytes consumed (always
 * buf_size) or -1 when no output buffer could be obtained.
 */
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                uint8_t *buf, int buf_size)
{
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = (AVPaletteControl *)avctx->extradata;

    if (palette_control->palette_changed) {
        /* load the new palette and reset the palette control */
        ipvideo_new_palette(s, palette_control->palette);
        palette_control->palette_changed = 0;
    }

    /* the chunk begins with the decoding map; pixel data follows it */
    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        printf (" Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames: current becomes last, last becomes second-last;
     * the oldest reference is handed back to the allocator */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL; /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
/*
 * Codec teardown: hand back any reference frames still held by the
 * decoder.  Always returns 0.
 */
static int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);

    /* and the one before it, if still around */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}
/* public codec descriptor (positional initializers, pre-C99 style) */
AVCodec interplay_video_decoder = {
    "interplayvideo",            /* name */
    CODEC_TYPE_VIDEO,            /* type */
    CODEC_ID_INTERPLAY_VIDEO,    /* id */
    sizeof(IpvideoContext),      /* priv_data_size */
    ipvideo_decode_init,         /* init */
    NULL,                        /* encode (decoder only) */
    ipvideo_decode_end,          /* close */
    ipvideo_decode_frame,        /* decode */
    CODEC_CAP_DR1,               /* capabilities: direct rendering */
};