/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
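/* Illustrative note (not part of the original header): the decoding map
 * carries one 4-bit opcode per 8x8 block, so for a 320x200 stream it
 * occupies 320 * 200 / (8 * 8 * 2) = 1000 bytes, and the video data
 * follows it directly in the same packet; see the decoding_map_size
 * computation in ipvideo_decode_init() and the buffer split in
 * ipvideo_decode_frame() below. */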
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    int is_16bpp;
    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    const uint8_t *mv_ptr;
    const uint8_t *mv_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;

#define CHECK_STREAM_PTR(stream_ptr, stream_end, n) \
    if (stream_end - stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               stream_ptr + n, stream_end); \
        return -1; \
    }

static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
                       + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                           s->current_frame.linesize[0], 8);
    return 0;
}

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}
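/* Worked example for the motion byte mapping above (added for illustration,
 * not part of the original source): B = 79 takes the B >= 56 branch, giving
 * x = -14 + ((79 - 56) % 29) = 9 and y = 8 + ((79 - 56) / 29) = 8, so the
 * block is copied from 9 pixels right and 8 pixels down in the reference
 * frame. Opcode 0x3 below uses the same mapping with both signs negated. */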
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay ("  motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
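/* Worked example for the per-pixel branch above (illustration only, not in
 * the original source): with P[0] <= P[1], each of the 8 row bytes is read
 * LSB first, so a flags byte of 0xB4 (binary 10110100) paints the row, left
 * to right, as P[0] P[0] P[1] P[0] P[1] P[1] P[0] P[1]. The 0x100 sentinel
 * bit simply terminates the shift loop after 8 pixels. */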
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 14);
        s->stream_ptr -= 2;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
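/* Illustration for the 2-bit flag decoding above (added note, not from the
 * original source): in the per-pixel case, a row whose 16-bit flags word is
 * 0x001B (binary ...00011011) selects, from left to right, P[3], P[2], P[1]
 * and then P[0] for the remaining five pixels, since each pixel consumes the
 * two lowest bits before the word is shifted right. */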
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr  += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay ("  motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);
        s->stream_ptr -= 4;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = bytestream_get_le16(&s->stream_ptr);
                P[1] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 12);

        if (!(AV_RL16(s->stream_ptr + 4) & 0x8000)) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = bytestream_get_le16(&s->stream_ptr);
                    P[1]  = bytestream_get_le16(&s->stream_ptr);
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = bytestream_get_le16(&s->stream_ptr);
                    P[1] = bytestream_get_le16(&s->stream_ptr);
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (x = 0; x < 4; x++)
        P[x] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (!(AV_RL16(s->stream_ptr) & 0x8000)) {

        /* 4-color encoding for each quadrant */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 48);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = !(AV_RL16(s->stream_ptr + 16) & 0x8000);
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 128);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream_get_le16(&s->stream_ptr);
            P[1] = bytestream_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    pix = bytestream_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;

    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

        s->stride = s->current_frame.linesize[0];
        s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
        s->stream_end = s->buf + s->size;
    } else {
        s->stride = s->current_frame.linesize[0] >> 1;
        s->stream_ptr = s->buf + 16;
        s->stream_end =
        s->mv_ptr = s->buf + 14 + AV_RL16(s->buf+14);
        s->mv_end = s->buf + s->size;
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            debug_interplay("  block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                            x, y, opcode, s->stream_ptr);

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x*2
                              + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}
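/* Illustration of the decoding map layout consumed above (added note, not
 * from the original source): each map byte holds two 4-bit opcodes, and
 * because the bitstream reader is little-endian (ALT_BITSTREAM_READER_LE)
 * the low nibble is read first. A map byte of 0x84 therefore selects opcode
 * 0x4 for the first 8x8 block of the pair and opcode 0x8 for the second. */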
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
    if (!s->is_16bpp && s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (!s->is_16bpp && palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}

AVCodec interplay_video_decoder = {
    "interplayvideo",
    AVMEDIA_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};