You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

977 lines
28KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 the ffmpeg project
  4. *
  5. * This library is free software; you can redistribute it and/or
  6. * modify it under the terms of the GNU Lesser General Public
  7. * License as published by the Free Software Foundation; either
  8. * version 2 of the License, or (at your option) any later version.
  9. *
  10. * This library is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  13. * Lesser General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU Lesser General Public
  16. * License along with this library; if not, write to the Free Software
  17. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  18. *
  19. */
  20. /**
  21. * @file interplayvideo.c
  22. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  23. * For more information about the Interplay MVE format, visit:
  24. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  25. * This code is written in such a way that the identifiers match up
  26. * with the encoding descriptions in the document.
  27. *
  28. * This decoder presently only supports a PAL8 output colorspace.
  29. *
  30. * An Interplay video frame consists of 2 parts: The decoding map and
  31. * the video data. A demuxer must load these 2 parts together in a single
  32. * buffer before sending it through the stream to this decoder.
  33. */
  34. #include <stdio.h>
  35. #include <stdlib.h>
  36. #include <string.h>
  37. #include <unistd.h>
  38. #include "common.h"
  39. #include "avcodec.h"
  40. #include "dsputil.h"
  41. #define PALETTE_COUNT 256
/* debugging support: compile-time switch for verbose block tracing */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay printf
#else
/* no-op variadic stub: silently swallows all trace output when
 * DEBUG_INTERPLAY is 0, so call sites need no #ifdef guards */
static inline void debug_interplay(const char *format, ...) { }
#endif
/* Per-stream decoder state. */
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;

    /* 3-frame history: opcodes may copy blocks from either of the two
     * previously decoded frames as well as from the frame in progress */
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;

    /* 4 bits per 8x8 block selecting the decode opcode (low nibble
     * first); size is set once in ipvideo_decode_init() */
    unsigned char *decoding_map;
    int decoding_map_size;

    /* video data portion of the frame buffer (follows the decoding map) */
    unsigned char *buf;
    int size;

    /* 256-entry PAL8 palette stored as 32-bit 0RGB words
     * (see ipvideo_new_palette) */
    unsigned char palette[PALETTE_COUNT * 4];

    unsigned char *stream_ptr;  /* current read position within buf */
    unsigned char *stream_end;  /* one past the last valid input byte */
    unsigned char *pixel_ptr;   /* write position for the current 8x8 block */
    int line_inc;               /* stride - 8: advance from end of one block
                                 * row to the start of the next */
    int stride;
    int upper_motion_limit_offset;  /* highest legal source offset for a
                                     * motion-compensated block copy */
} IpvideoContext;
  67. #define CHECK_STREAM_PTR(n) \
  68. if ((s->stream_ptr + n) > s->stream_end) { \
  69. printf ("Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
  70. s->stream_ptr + n, s->stream_end); \
  71. return -1; \
  72. }
  73. static void ipvideo_new_palette(IpvideoContext *s, unsigned char *palette) {
  74. int i;
  75. unsigned char r, g, b;
  76. unsigned int *palette32;
  77. palette32 = (unsigned int *)s->palette;
  78. for (i = 0; i < PALETTE_COUNT; i++) {
  79. r = *palette++;
  80. g = *palette++;
  81. b = *palette++;
  82. palette32[i] = (r << 16) | (g << 8) | (b);
  83. }
  84. }
  85. #define COPY_FROM_CURRENT() \
  86. motion_offset = current_offset; \
  87. motion_offset += y * s->stride; \
  88. motion_offset += x; \
  89. if (motion_offset < 0) { \
  90. printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
  91. return -1; \
  92. } else if (motion_offset > s->upper_motion_limit_offset) { \
  93. printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
  94. motion_offset, s->upper_motion_limit_offset); \
  95. return -1; \
  96. } \
  97. s->dsp.put_pixels_tab[0][0](s->pixel_ptr, \
  98. s->current_frame.data[0] + motion_offset, s->stride, 8);
  99. #define COPY_FROM_PREVIOUS() \
  100. motion_offset = current_offset; \
  101. motion_offset += y * s->stride; \
  102. motion_offset += x; \
  103. if (motion_offset < 0) { \
  104. printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
  105. return -1; \
  106. } else if (motion_offset > s->upper_motion_limit_offset) { \
  107. printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
  108. motion_offset, s->upper_motion_limit_offset); \
  109. return -1; \
  110. } \
  111. s->dsp.put_pixels_tab[0][0](s->pixel_ptr, \
  112. s->last_frame.data[0] + motion_offset, s->stride, 8);
  113. #define COPY_FROM_SECOND_LAST() \
  114. motion_offset = current_offset; \
  115. motion_offset += y * s->stride; \
  116. motion_offset += x; \
  117. if (motion_offset < 0) { \
  118. printf (" Interplay video: motion offset < 0 (%d)\n", motion_offset); \
  119. return -1; \
  120. } else if (motion_offset > s->upper_motion_limit_offset) { \
  121. printf (" Interplay video: motion offset above limit (%d >= %d)\n", \
  122. motion_offset, s->upper_motion_limit_offset); \
  123. return -1; \
  124. } \
  125. s->dsp.put_pixels_tab[0][0](s->pixel_ptr, \
  126. s->second_last_frame.data[0] + motion_offset, s->stride, 8);
/*
 * Opcode 0x0: copy the 8x8 block from the same position in the
 * previous frame (zero motion vector).  Returns 0 on success, -1 if
 * the copy macro rejects the source offset.
 */
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    /* x, y, motion_offset and current_offset are consumed by
     * COPY_FROM_PREVIOUS() and must keep these exact names */
    int x, y;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy a block from the previous frame */
    x = y = 0;
    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}
/*
 * Opcode 0x1: copy the 8x8 block from the same position in the frame
 * decoded 2 frames ago (zero motion vector).  Returns 0 on success,
 * -1 if the copy macro rejects the source offset.
 */
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    /* x, y, motion_offset and current_offset are consumed by
     * COPY_FROM_SECOND_LAST() and must keep these exact names */
    int x, y;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy block from 2 frames ago */
    x = y = 0;
    COPY_FROM_SECOND_LAST();

    /* report success */
    return 0;
}
/*
 * Opcode 0x2: copy the 8x8 block from the frame decoded 2 frames ago,
 * displaced by a motion vector packed into a single byte.
 * Returns 0 on success, -1 on stream underrun or bad motion offset.
 */
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        /* B in [0,55]: x in [8,14], y in [0,7] */
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        /* B in [56,255]: x in [-14,14], y in [8,14] */
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    COPY_FROM_SECOND_LAST();

    /* report success */
    return 0;
}
/*
 * Opcode 0x3: copy an 8x8 block from earlier in the *current* frame,
 * using the same packed motion byte as opcode 0x2 but with both
 * components negated (source is up/left of the destination).
 * Returns 0 on success, -1 on stream underrun or bad motion offset.
 */
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    COPY_FROM_CURRENT();

    /* report success */
    return 0;
}
/*
 * Opcode 0x4: copy the 8x8 block from the previous frame with a small
 * motion vector: one byte holds two 4-bit components, each biased by
 * -8 to give a [-8,7] range.  Returns 0 on success, -1 on stream
 * underrun or bad motion offset.
 */
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy a block from the previous frame; need 1 more byte */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    BL = B & 0x0F;          /* low nibble -> x */
    BH = (B >> 4) & 0x0F;   /* high nibble -> y */
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}
/*
 * Opcode 0x5: copy the 8x8 block from the previous frame using a full
 * signed-byte motion vector per axis ([-128,127] range).
 * NOTE(review): storing the stream bytes into signed char relies on
 * implementation-defined conversion for values > 127 (intended
 * two's-complement wraparound) — confirm on the target platform.
 * Returns 0 on success, -1 on stream underrun or bad motion offset.
 */
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;
    int motion_offset;
    int current_offset = s->pixel_ptr - s->current_frame.data[0];

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay ("  motion bytes = %d, %d\n", x, y);
    COPY_FROM_PREVIOUS();

    /* report success */
    return 0;
}
/*
 * Opcode 0x6: semantics unknown; only logs a warning and decodes
 * nothing.  NOTE(review): it is unclear from this code whether any
 * stream bytes should be consumed here — if the real opcode carries
 * payload, the stream pointer will desynchronize for the rest of the
 * frame.  Always returns 0.
 */
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    printf ("  Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
/*
 * Opcode 0x7: 2-color encoding.  Two palette indices P0/P1 are read;
 * their ordering selects the sub-mode:
 *   P0 <= P1: 8 more bytes, one bitmask byte per row, LSB = leftmost
 *             pixel; a set bit paints P1, clear paints P0.
 *   P0 >  P1: 2 more bytes forming 16 flags, one per 2x2 sub-block,
 *             scanned left-to-right, top-to-bottom.
 * Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P0, P1;
    unsigned char B[8];
    unsigned int flags;
    int bitmask;

    /* 2-color encoding */
    CHECK_STREAM_PTR(2);

    P0 = *s->stream_ptr++;
    P1 = *s->stream_ptr++;

    if (P0 <= P1) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(8);
        for (y = 0; y < 8; y++)
            B[y] = *s->stream_ptr++;

        for (y = 0; y < 8; y++) {
            flags = B[y];
            /* walk the row bitmask LSB-first */
            for (x = 0x01; x <= 0x80; x <<= 1) {
                if (flags & x)
                    *s->pixel_ptr++ = P1;
                else
                    *s->pixel_ptr++ = P0;
            }
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(2);
        B[0] = *s->stream_ptr++;
        B[1] = *s->stream_ptr++;

        /* little-endian 16-bit flag word, one bit per 2x2 sub-block */
        flags = (B[1] << 8) | B[0];
        bitmask = 0x0001;
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, bitmask <<= 1) {
                if (flags & bitmask) {
                    *(s->pixel_ptr + x) = P1;
                    *(s->pixel_ptr + x + 1) = P1;
                    *(s->pixel_ptr + s->stride + x) = P1;
                    *(s->pixel_ptr + s->stride + x + 1) = P1;
                } else {
                    *(s->pixel_ptr + x) = P0;
                    *(s->pixel_ptr + x + 1) = P0;
                    *(s->pixel_ptr + s->stride + x) = P0;
                    *(s->pixel_ptr + s->stride + x + 1) = P0;
                }
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0x8: 2-color encoding for each 4x4 quadrant, or 2-color
 * encoding on either top/bottom or left/right halves.  The orderings
 * of the P pairs select the sub-mode:
 *   P[0] <= P[1]:             four quadrants, each with its own P pair.
 *   else, P[2] <= P[3]:       vertical split (left/right halves).
 *   else:                     horizontal split (top/bottom halves).
 * The 32-bit flag words interleave nibbles of the B bytes so that each
 * word covers 4 rows of one half; bitmask walks them LSB-first.
 * Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[8];
    unsigned char B[8];
    unsigned int flags = 0;
    unsigned int bitmask = 0;
    unsigned char P0 = 0, P1 = 0;
    int lower_half = 0;  /* offset into P[] for the active half */

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 12 more bytes */
        CHECK_STREAM_PTR(12);
        B[0] = *s->stream_ptr++;  B[1] = *s->stream_ptr++;
        P[2] = *s->stream_ptr++;  P[3] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++;  B[3] = *s->stream_ptr++;
        P[4] = *s->stream_ptr++;  P[5] = *s->stream_ptr++;
        B[4] = *s->stream_ptr++;  B[5] = *s->stream_ptr++;
        P[6] = *s->stream_ptr++;  P[7] = *s->stream_ptr++;
        B[6] = *s->stream_ptr++;  B[7] = *s->stream_ptr++;

        for (y = 0; y < 8; y++) {

            /* time to reload flags? */
            if (y == 0) {
                /* assemble flag bits for rows 0-3 from B[0]/B[4] (left
                 * quadrant) and B[1]/B[5] (right quadrant) nibbles */
                flags =
                    ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                    ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                    ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                    ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                bitmask = 0x00000001;
                lower_half = 0;  /* still on top half */
            } else if (y == 4) {
                /* rows 4-7 come from B[2]/B[6] and B[3]/B[7] */
                flags =
                    ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                    ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                    ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                    ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                bitmask = 0x00000001;
                lower_half = 2;
            }

            for (x = 0; x < 8; x++, bitmask <<= 1) {
                /* get the pixel values ready for this quadrant */
                if (x == 0) {
                    P0 = P[lower_half + 0];
                    P1 = P[lower_half + 1];
                } else if (x == 4) {
                    P0 = P[lower_half + 4];
                    P1 = P[lower_half + 5];
                }

                if (flags & bitmask)
                    *s->pixel_ptr++ = P1;
                else
                    *s->pixel_ptr++ = P0;
            }
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(10);
        B[0] = *s->stream_ptr++;  B[1] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++;  B[3] = *s->stream_ptr++;
        P[2] = *s->stream_ptr++;  P[3] = *s->stream_ptr++;
        B[4] = *s->stream_ptr++;  B[5] = *s->stream_ptr++;
        B[6] = *s->stream_ptr++;  B[7] = *s->stream_ptr++;

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 8; y++) {

                /* time to reload flags? */
                if (y == 0) {
                    flags =
                        ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                        ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                        ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                        ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                    bitmask = 0x00000001;
                } else if (y == 4) {
                    flags =
                        ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                        ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                        ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                        ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                    bitmask = 0x00000001;
                }

                for (x = 0; x < 8; x++, bitmask <<= 1) {
                    /* get the pixel values ready for this half */
                    if (x == 0) {
                        P0 = P[0];
                        P1 = P[1];
                    } else if (x == 4) {
                        P0 = P[2];
                        P1 = P[3];
                    }

                    if (flags & bitmask)
                        *s->pixel_ptr++ = P1;
                    else
                        *s->pixel_ptr++ = P0;
                }
                s->pixel_ptr += s->line_inc;
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {

                flags = B[y];
                if (y == 0) {
                    P0 = P[0];
                    P1 = P[1];
                } else if (y == 4) {
                    P0 = P[2];
                    P1 = P[3];
                }

                for (bitmask = 0x01; bitmask <= 0x80; bitmask <<= 1) {

                    if (flags & bitmask)
                        *s->pixel_ptr++ = P1;
                    else
                        *s->pixel_ptr++ = P0;
                }
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0x9: 4-color encoding.  Four palette indices P[0..3] are
 * read; the orderings of the two pairs select the sub-block size:
 *   P0<=P1 && P2<=P3: 2-bit index per pixel      (16 more bytes)
 *   P0<=P1 && P2> P3: 2-bit index per 2x2 block  ( 4 more bytes)
 *   P0> P1 && P2<=P3: 2-bit index per 2x1 block  ( 8 more bytes)
 *   P0> P1 && P2> P3: 2-bit index per 1x2 block  ( 8 more bytes)
 * Flag words are little-endian and consumed 2 bits at a time via
 * 'shifter'.  Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    unsigned char B[4];
    unsigned int flags = 0;
    int shifter = 0;
    unsigned char pix;

    /* 4-color encoding */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 4; y++)
        P[y] = *s->stream_ptr++;

    if ((P[0] <= P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each pixel, need 16 more bytes */
        CHECK_STREAM_PTR(16);

        for (y = 0; y < 8; y++) {
            /* get the next set of 8 2-bit flags */
            flags = (s->stream_ptr[1] << 8) | s->stream_ptr[0];
            s->stream_ptr += 2;
            for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                *s->pixel_ptr++ = P[(flags >> shifter) & 0x03];
            }
            s->pixel_ptr += s->line_inc;
        }

    } else if ((P[0] <= P[1]) && (P[2] > P[3])) {

        /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
        CHECK_STREAM_PTR(4);

        B[0] = *s->stream_ptr++;
        B[1] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++;
        B[3] = *s->stream_ptr++;
        flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
        shifter = 0;

        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(s->pixel_ptr + x) = pix;
                *(s->pixel_ptr + x + 1) = pix;
                *(s->pixel_ptr + s->stride + x) = pix;
                *(s->pixel_ptr + s->stride + x + 1) = pix;
            }
            s->pixel_ptr += s->stride * 2;
        }

    } else if ((P[0] > P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each 2x1 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y++) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                B[0] = *s->stream_ptr++;
                B[1] = *s->stream_ptr++;
                B[2] = *s->stream_ptr++;
                B[3] = *s->stream_ptr++;
                flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
                shifter = 0;
            }
            for (x = 0; x < 8; x += 2, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(s->pixel_ptr + x) = pix;
                *(s->pixel_ptr + x + 1) = pix;
            }
            s->pixel_ptr += s->stride;
        }

    } else {

        /* 1 of 4 colors for each 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y += 2) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                B[0] = *s->stream_ptr++;
                B[1] = *s->stream_ptr++;
                B[2] = *s->stream_ptr++;
                B[3] = *s->stream_ptr++;
                flags = (B[3] << 24) | (B[2] << 16) | (B[1] << 8) | B[0];
                shifter = 0;
            }
            for (x = 0; x < 8; x++, shifter += 2) {
                pix = P[(flags >> shifter) & 0x03];
                *(s->pixel_ptr + x) = pix;
                *(s->pixel_ptr + s->stride + x) = pix;
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/*
 * Opcode 0xA: 4-color encoding for each 4x4 quadrant, or 4-color
 * encoding on either top/bottom or left/right halves.  The orderings
 * select the sub-mode:
 *   P[0] <= P[1]:        four quadrants, 4 colors each (28 more bytes).
 *   else, P[4] <= P[5]:  vertical split (left/right),  20 more bytes.
 *   else:                horizontal split (top/bottom), 20 more bytes.
 * Each pixel consumes 2 bits from a 16-bit little-endian flag word.
 * Returns 0 on success, -1 on stream underrun.
 */
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[16];
    unsigned char B[16];
    int flags = 0;
    int shifter = 0;
    int index;
    int split;       /* offset into P[] selecting the right/bottom colors */
    int lower_half;  /* offset into P[] selecting the bottom quadrants */

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 4; y++)
        P[y] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 28 more bytes */
        CHECK_STREAM_PTR(28);

        for (y = 0; y < 4; y++)
            B[y] = *s->stream_ptr++;
        for (y = 4; y < 16; y += 4) {
            for (x = y; x < y + 4; x++)
                P[x] = *s->stream_ptr++;
            for (x = y; x < y + 4; x++)
                B[x] = *s->stream_ptr++;
        }

        for (y = 0; y < 8; y++) {

            lower_half = (y >= 4) ? 4 : 0;
            flags = (B[y + 8] << 8) | B[y];

            for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                split = (x >= 4) ? 8 : 0;
                index = split + lower_half + ((flags >> shifter) & 0x03);
                *s->pixel_ptr++ = P[index];
            }

            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* 4-color encoding for either left and right or top and bottom
         * halves; need 20 more bytes */
        CHECK_STREAM_PTR(20);

        for (y = 0; y < 8; y++)
            B[y] = *s->stream_ptr++;
        for (y = 4; y < 8; y++)
            P[y] = *s->stream_ptr++;
        for (y = 8; y < 16; y++)
            B[y] = *s->stream_ptr++;

        if (P[4] <= P[5]) {

            /* block is divided into left and right halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y + 8] << 8) | B[y];
                split = 0;

                for (x = 0, shifter = 0; x < 8; x++, shifter += 2) {
                    if (x == 4)
                        split = 4;
                    *s->pixel_ptr++ = P[split + ((flags >> shifter) & 0x03)];
                }

                s->pixel_ptr += s->line_inc;
            }

        } else {

            /* block is divided into top and bottom halves */
            split = 0;

            for (y = 0; y < 8; y++) {

                flags = (B[y * 2 + 1] << 8) | B[y * 2];
                if (y == 4)
                    split = 4;

                for (x = 0, shifter = 0; x < 8; x++, shifter += 2)
                    *s->pixel_ptr++ = P[split + ((flags >> shifter) & 0x03)];

                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
  572. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
  573. {
  574. int x, y;
  575. /* 64-color encoding (each pixel in block is a different color) */
  576. CHECK_STREAM_PTR(64);
  577. for (y = 0; y < 8; y++) {
  578. for (x = 0; x < 8; x++) {
  579. *s->pixel_ptr++ = *s->stream_ptr++;
  580. }
  581. s->pixel_ptr += s->line_inc;
  582. }
  583. /* report success */
  584. return 0;
  585. }
  586. static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
  587. {
  588. int x, y;
  589. unsigned char pix;
  590. /* 16-color block encoding: each 2x2 block is a different color */
  591. CHECK_STREAM_PTR(16);
  592. for (y = 0; y < 8; y += 2) {
  593. for (x = 0; x < 8; x += 2) {
  594. pix = *s->stream_ptr++;
  595. *(s->pixel_ptr + x) = pix;
  596. *(s->pixel_ptr + x + 1) = pix;
  597. *(s->pixel_ptr + s->stride + x) = pix;
  598. *(s->pixel_ptr + s->stride + x + 1) = pix;
  599. }
  600. s->pixel_ptr += s->stride * 2;
  601. }
  602. /* report success */
  603. return 0;
  604. }
  605. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
  606. {
  607. int x, y;
  608. unsigned char P[4];
  609. unsigned char index = 0;
  610. /* 4-color block encoding: each 4x4 block is a different color */
  611. CHECK_STREAM_PTR(4);
  612. for (y = 0; y < 4; y++)
  613. P[y] = *s->stream_ptr++;
  614. for (y = 0; y < 8; y++) {
  615. if (y < 4)
  616. index = 0;
  617. else
  618. index = 2;
  619. for (x = 0; x < 8; x++) {
  620. if (x == 4)
  621. index++;
  622. *s->pixel_ptr++ = P[index];
  623. }
  624. s->pixel_ptr += s->line_inc;
  625. }
  626. /* report success */
  627. return 0;
  628. }
  629. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
  630. {
  631. int x, y;
  632. unsigned char pix;
  633. /* 1-color encoding: the whole block is 1 solid color */
  634. CHECK_STREAM_PTR(1);
  635. pix = *s->stream_ptr++;
  636. for (y = 0; y < 8; y++) {
  637. for (x = 0; x < 8; x++) {
  638. *s->pixel_ptr++ = pix;
  639. }
  640. s->pixel_ptr += s->line_inc;
  641. }
  642. /* report success */
  643. return 0;
  644. }
  645. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
  646. {
  647. int x, y;
  648. unsigned char sample0, sample1;
  649. /* dithered encoding */
  650. CHECK_STREAM_PTR(2);
  651. sample0 = *s->stream_ptr++;
  652. sample1 = *s->stream_ptr++;
  653. for (y = 0; y < 8; y++) {
  654. for (x = 0; x < 8; x += 2) {
  655. if (y & 1) {
  656. *s->pixel_ptr++ = sample1;
  657. *s->pixel_ptr++ = sample0;
  658. } else {
  659. *s->pixel_ptr++ = sample0;
  660. *s->pixel_ptr++ = sample1;
  661. }
  662. }
  663. s->pixel_ptr += s->line_inc;
  664. }
  665. /* report success */
  666. return 0;
  667. }
  668. static int (*ipvideo_decode_block[16])(IpvideoContext *s);
  669. static void ipvideo_decode_opcodes(IpvideoContext *s)
  670. {
  671. int x, y;
  672. int index = 0;
  673. unsigned char opcode;
  674. int ret;
  675. int code_counts[16];
  676. static int frame = 0;
  677. debug_interplay("------------------ frame %d\n", frame);
  678. frame++;
  679. for (x = 0; x < 16; x++)
  680. code_counts[x] = 0;
  681. /* this is PAL8, so make the palette available */
  682. memcpy(s->current_frame.data[1], s->palette, PALETTE_COUNT * 4);
  683. s->stride = s->current_frame.linesize[0];
  684. s->stream_ptr = s->buf + 14; /* data starts 14 bytes in */
  685. s->stream_end = s->buf + s->size;
  686. s->line_inc = s->stride - 8;
  687. s->upper_motion_limit_offset = (s->avctx->height - 8) * s->stride
  688. + s->avctx->width - 8;
  689. s->dsp = s->dsp;
  690. for (y = 0; y < (s->stride * s->avctx->height); y += s->stride * 8) {
  691. for (x = y; x < y + s->avctx->width; x += 8) {
  692. /* bottom nibble first, then top nibble (which makes it
  693. * hard to use a GetBitcontext) */
  694. if (index & 1)
  695. opcode = s->decoding_map[index >> 1] >> 4;
  696. else
  697. opcode = s->decoding_map[index >> 1] & 0xF;
  698. index++;
  699. debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
  700. x - y, y / s->stride, opcode, s->stream_ptr);
  701. code_counts[opcode]++;
  702. s->pixel_ptr = s->current_frame.data[0] + x;
  703. ret = ipvideo_decode_block[opcode](s);
  704. if (ret != 0) {
  705. printf(" Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
  706. frame, x - y, y / s->stride);
  707. return;
  708. }
  709. }
  710. }
  711. if ((s->stream_ptr != s->stream_end) &&
  712. (s->stream_ptr + 1 != s->stream_end)) {
  713. printf (" Interplay video: decode finished with %d bytes left over\n",
  714. s->stream_end - s->stream_ptr);
  715. }
  716. }
  717. static int ipvideo_decode_init(AVCodecContext *avctx)
  718. {
  719. IpvideoContext *s = avctx->priv_data;
  720. s->avctx = avctx;
  721. if (s->avctx->extradata_size != sizeof(AVPaletteControl)) {
  722. printf (" Interplay video: expected extradata_size of %d\n",
  723. sizeof(AVPaletteControl));
  724. return -1;
  725. }
  726. avctx->pix_fmt = PIX_FMT_PAL8;
  727. avctx->has_b_frames = 0;
  728. dsputil_init(&s->dsp, avctx);
  729. /* decoding map contains 4 bits of information per 8x8 block */
  730. s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
  731. /* assign block decode functions */
  732. ipvideo_decode_block[0x0] = ipvideo_decode_block_opcode_0x0;
  733. ipvideo_decode_block[0x1] = ipvideo_decode_block_opcode_0x1;
  734. ipvideo_decode_block[0x2] = ipvideo_decode_block_opcode_0x2;
  735. ipvideo_decode_block[0x3] = ipvideo_decode_block_opcode_0x3;
  736. ipvideo_decode_block[0x4] = ipvideo_decode_block_opcode_0x4;
  737. ipvideo_decode_block[0x5] = ipvideo_decode_block_opcode_0x5;
  738. ipvideo_decode_block[0x6] = ipvideo_decode_block_opcode_0x6;
  739. ipvideo_decode_block[0x7] = ipvideo_decode_block_opcode_0x7;
  740. ipvideo_decode_block[0x8] = ipvideo_decode_block_opcode_0x8;
  741. ipvideo_decode_block[0x9] = ipvideo_decode_block_opcode_0x9;
  742. ipvideo_decode_block[0xA] = ipvideo_decode_block_opcode_0xA;
  743. ipvideo_decode_block[0xB] = ipvideo_decode_block_opcode_0xB;
  744. ipvideo_decode_block[0xC] = ipvideo_decode_block_opcode_0xC;
  745. ipvideo_decode_block[0xD] = ipvideo_decode_block_opcode_0xD;
  746. ipvideo_decode_block[0xE] = ipvideo_decode_block_opcode_0xE;
  747. ipvideo_decode_block[0xF] = ipvideo_decode_block_opcode_0xF;
  748. s->current_frame.data[0] = s->last_frame.data[0] =
  749. s->second_last_frame.data[0] = NULL;
  750. return 0;
  751. }
  752. static int ipvideo_decode_frame(AVCodecContext *avctx,
  753. void *data, int *data_size,
  754. uint8_t *buf, int buf_size)
  755. {
  756. IpvideoContext *s = avctx->priv_data;
  757. AVPaletteControl *palette_control = (AVPaletteControl *)avctx->extradata;
  758. if (palette_control->palette_changed) {
  759. /* load the new palette and reset the palette control */
  760. ipvideo_new_palette(s, palette_control->palette);
  761. palette_control->palette_changed = 0;
  762. }
  763. s->decoding_map = buf;
  764. s->buf = buf + s->decoding_map_size;
  765. s->size = buf_size - s->decoding_map_size;
  766. s->current_frame.reference = 3;
  767. if (avctx->get_buffer(avctx, &s->current_frame)) {
  768. printf (" Interplay Video: get_buffer() failed\n");
  769. return -1;
  770. }
  771. ipvideo_decode_opcodes(s);
  772. *data_size = sizeof(AVFrame);
  773. *(AVFrame*)data = s->current_frame;
  774. /* shuffle frames */
  775. if (s->second_last_frame.data[0])
  776. avctx->release_buffer(avctx, &s->second_last_frame);
  777. s->second_last_frame = s->last_frame;
  778. s->last_frame = s->current_frame;
  779. s->current_frame.data[0] = NULL; /* catch any access attempts */
  780. /* report that the buffer was completely consumed */
  781. return buf_size;
  782. }
  783. static int ipvideo_decode_end(AVCodecContext *avctx)
  784. {
  785. IpvideoContext *s = avctx->priv_data;
  786. /* release the last frame */
  787. if (s->last_frame.data[0])
  788. avctx->release_buffer(avctx, &s->last_frame);
  789. if (s->second_last_frame.data[0])
  790. avctx->release_buffer(avctx, &s->second_last_frame);
  791. return 0;
  792. }
/* Codec registration record.
 * NOTE(review): positional initializers — field labels below follow the
 * era's AVCodec layout; confirm against the matching avcodec.h. */
AVCodec interplay_video_decoder = {
    "interplayvideo",            /* name */
    CODEC_TYPE_VIDEO,            /* type */
    CODEC_ID_INTERPLAY_VIDEO,    /* id */
    sizeof(IpvideoContext),      /* priv_data_size */
    ipvideo_decode_init,         /* init */
    NULL,                        /* encode (decoder only) */
    ipvideo_decode_end,          /* close */
    ipvideo_decode_frame,        /* decode */
    CODEC_CAP_DR1,               /* capabilities */
};