/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before passing it on to this decoder.
 */
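
/*
 * Layout of the buffer handed over by the demuxer (see
 * ipvideo_decode_init() and ipvideo_decode_opcodes() below):
 *
 *   [ decoding map | video data chunk ]
 *
 * The decoding map holds one 4-bit opcode per 8x8 block, stored in raster
 * order, so it occupies width/8 * height/8 / 2 bytes.
 */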
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    int is_16bpp;
    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    const uint8_t *mv_ptr;
    const uint8_t *mv_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;
#define CHECK_STREAM_PTR(stream_ptr, stream_end, n) \
    if (stream_end - stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               stream_ptr + n, stream_end); \
        return -1; \
    }
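
/* Note: CHECK_STREAM_PTR() bails out of the calling opcode handler with -1
 * whenever fewer than n bytes remain between stream_ptr and stream_end, and
 * it relies on a variable s (the IpvideoContext) being in scope at the
 * call site. */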

static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
                        + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    if (src->data[0] == NULL) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                           s->current_frame.linesize[0], 8);
    return 0;
}

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }
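
    /* B packs the motion vector: values 0-55 give x in [8..14] and y in
     * [0..7]; values 56-255 give x in [-14..14] and y in [8..14]. Opcode
     * 0x3 below applies the same mapping with both components negated. */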
    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }
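
    /* the low nibble is the x component and the high nibble the y component,
     * each biased by -8, so both lie in [-8..7] */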
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;
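
    /* P[0] <= P[1] selects per-pixel mode (one flag byte per row, one bit
     * per pixel); otherwise each of 16 flag bits colors a 2x2 sub-block */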
    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 14);
        s->stream_ptr -= 2;
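
        /* rewind over P[0]/P[1]; the loop below re-reads them as the first
         * quadrant's colors together with that quadrant's flags */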
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr  += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;
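
    /* the two samples alternate in a checkerboard: even rows start with
     * sample[0], odd rows with sample[1] */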
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);
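
    /* in 16bpp mode the per-pixel vs. 2x2 choice is signalled by the top bit
     * of P[0] (unused by RGB555) rather than by comparing P[0] and P[1] as
     * the 8bpp decoder does */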
    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);
        s->stream_ptr -= 4;
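
        /* rewind over the two 16-bit colors; the loop below re-reads them as
         * the first quadrant's P[0]/P[1] */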
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = bytestream_get_le16(&s->stream_ptr);
                P[1] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 12);

        if (!(AV_RL16(s->stream_ptr + 4) & 0x8000)) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = bytestream_get_le16(&s->stream_ptr);
                    P[1]  = bytestream_get_le16(&s->stream_ptr);
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = bytestream_get_le16(&s->stream_ptr);
                    P[1] = bytestream_get_le16(&s->stream_ptr);
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (x = 0; x < 4; x++)
        P[x] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (!(AV_RL16(s->stream_ptr) & 0x8000)) {

        /* 4-color encoding for each quadrant */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 48);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = !(AV_RL16(s->stream_ptr + 16) & 0x8000);
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 128);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream_get_le16(&s->stream_ptr);
            P[1] = bytestream_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    pix = bytestream_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
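
/* Opcode dispatch tables, indexed by the 4-bit value read from the decoding
 * map. Opcodes 0x0-0x5 are pure motion/copy operations and are shared
 * between the 8bpp and 16bpp tables; slot 0xF of the 16bpp table reuses the
 * opcode 0x1 handler, as there is no 16bpp counterpart of the dithered
 * opcode 0xF. */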
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6,    ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8,    ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA,    ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC,    ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE,    ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;
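
    /* chunk layout: for PAL8 the opcode data begins 14 bytes into the video
     * chunk; for 16bpp the LE16 value at offset 14 gives the offset (relative
     * to that position) of a separate motion-vector byte stream, which also
     * marks the end of the main pixel-data stream starting at offset 16 */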
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

        s->stride = s->current_frame.linesize[0];
        s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
        s->stream_end = s->buf + s->size;
    } else {
        s->stride = s->current_frame.linesize[0] >> 1;
        s->stream_ptr = s->buf + 16;
        s->stream_end =
        s->mv_ptr = s->buf + 14 + AV_RL16(s->buf+14);
        s->mv_end = s->buf + s->size;
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                            x, y, opcode, s->stream_ptr);

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                               + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x*2
                               + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
    if (!s->is_16bpp && s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
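    /* i.e. (width/8 * height/8) blocks at half a byte each; for example a
     * 320x240 frame needs a 600-byte decoding map */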

    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (!s->is_16bpp && palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}

AVCodec ff_interplay_video_decoder = {
    "interplayvideo",
    AVMEDIA_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};