/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    int is_16bpp;
    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    const uint8_t *mv_ptr;
    const uint8_t *mv_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;
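
/* Bounds-check helper used by every block decoder below: before reading n
 * bytes from either the pixel-data stream or (in 16bpp mode) the separate
 * motion vector stream, verify that n bytes remain before the stream's end;
 * on underrun, log an error and abort the block decoder with -1. */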
#define CHECK_STREAM_PTR(stream_ptr, stream_end, n) \
    if (stream_end - stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               stream_ptr + n, stream_end); \
        return -1; \
    }
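
/* Copy one 8x8 block from 'src' (the current, last or second-last frame) at
 * displacement (delta_x, delta_y) relative to the block currently being
 * decoded. The displacement is converted to a byte offset into the frame
 * plane (delta_x scaled by 2 in 16bpp mode) and validated against the range
 * [0, upper_motion_limit_offset] before dsputil's put_pixels routine does
 * the actual 8-line copy. */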
static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
                        + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    if (src->data[0] == NULL) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                           s->current_frame.linesize[0], 8);
    return 0;
}
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}
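
/* Opcodes 0x2 and 0x3 pack their displacement into a single byte B:
 * for B < 56, x = 8 + (B % 7) and y = B / 7; otherwise
 * x = -14 + ((B - 56) % 29) and y = 8 + ((B - 56) / 29).
 * For example, B = 79 gives (x, y) = (9, 8). Opcode 0x2 applies the vector
 * as-is against the second-last frame, while opcode 0x3 negates both
 * components and copies from the current frame. */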
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
        B = *s->stream_ptr++;
    } else {
        CHECK_STREAM_PTR(s->mv_ptr, s->mv_end, 1);
        B = *s->mv_ptr++;
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
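
/* Opcode 0x7: 2-color pattern fill. The ordering of the two color bytes
 * selects the sub-mode: if P[0] <= P[1], eight flag bytes follow and each
 * bit paints one pixel of its row (bit set -> P[1], bit clear -> P[0]);
 * otherwise a single 16-bit flag word follows and each bit paints one 2x2
 * cell of the 8x8 block. Opcode 0x8 extends the same idea to per-quadrant
 * and split-half fills. */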
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 14);
        s->stream_ptr -= 2;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];

                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];

                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
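
/* Opcode 0x9: 4-color pattern fill with 2-bit color indices. The relative
 * ordering of the two color pairs selects how much area each index covers:
 *   P[0] <= P[1] and P[2] <= P[3]  ->  one index per pixel (16 flag bytes)
 *   P[0] <= P[1] and P[2] >  P[3]  ->  one index per 2x2 cell (4 flag bytes)
 *   P[0] >  P[1] and P[2] <= P[3]  ->  one index per 2x1 cell (8 flag bytes)
 *   P[0] >  P[1] and P[2] >  P[3]  ->  one index per 1x2 cell (8 flag bytes)
 */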
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr  += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
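
/* 16bpp variants of the block decoders. Pixel values are 16-bit RGB555
 * words read little-endian from the stream rather than palette indices, so
 * the sub-mode of the pattern-fill opcodes cannot be signalled by comparing
 * color bytes; the high bit (0x8000) of the first color word plays that
 * role instead. The motion opcodes 0x2-0x4 also fetch their motion byte
 * from the separate mv_ptr stream in this mode (see above). */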
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

    P[0] = bytestream_get_le16(&s->stream_ptr);
    P[1] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);
        s->stream_ptr -= 4;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0]  = bytestream_get_le16(&s->stream_ptr);
                P[1]  = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 12);

        if (!(AV_RL16(s->stream_ptr + 4) & 0x8000)) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = bytestream_get_le16(&s->stream_ptr);
                    P[1]  = bytestream_get_le16(&s->stream_ptr);
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = bytestream_get_le16(&s->stream_ptr);
                    P[1] = bytestream_get_le16(&s->stream_ptr);
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];

                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (x = 0; x < 4; x++)
        P[x] = bytestream_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 24);

    if (!(AV_RL16(s->stream_ptr) & 0x8000)) {

        /* 4-color encoding for each quadrant */
        CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 48);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = !(AV_RL16(s->stream_ptr + 16) & 0x8000);
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                for (x = 0; x < 4; x++)
                    P[x] = bytestream_get_le16(&s->stream_ptr);
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 128);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 32);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 8);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream_get_le16(&s->stream_ptr);
            P[1] = bytestream_get_le16(&s->stream_ptr);
        }

        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(s->stream_ptr, s->stream_end, 2);
    pix = bytestream_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
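
/* Walk the frame block by block: the decoding map carries one 4-bit opcode
 * per 8x8 block (two per byte, consumed through the little-endian bitstream
 * reader), and each opcode dispatches through one of the tables above.
 * For 8bpp frames the pixel data starts 14 bytes into the video chunk; for
 * 16bpp frames it starts at byte 16, with a separate motion vector stream
 * at offset 14 + AV_RL16(buf + 14). */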
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    av_dlog(NULL, "------------------ frame %d\n", frame);
    frame++;

    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

        s->stride = s->current_frame.linesize[0];
        s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
        s->stream_end = s->buf + s->size;
    } else {
        s->stride = s->current_frame.linesize[0] >> 1;
        s->stream_ptr = s->buf + 16;
        s->stream_end =
        s->mv_ptr = s->buf + 14 + AV_RL16(s->buf + 14);
        s->mv_end = s->buf + s->size;
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            av_dlog(NULL, " block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                    x, y, opcode, s->stream_ptr);

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                              + y * s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x * 2
                              + y * s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
    if (!s->is_16bpp && s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    avcodec_get_frame_defaults(&s->second_last_frame);
    avcodec_get_frame_defaults(&s->last_frame);
    avcodec_get_frame_defaults(&s->current_frame);
    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}
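
/* Per-packet entry point. As described in the header comment, each packet
 * is a decoding map immediately followed by the video data, so the first
 * decoding_map_size bytes are split off before the opcodes are decoded.
 * The frame rotation at the end (current -> last -> second_last) keeps the
 * two previously decoded frames available to the motion opcodes. */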
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (!s->is_16bpp && palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}
AVCodec ff_interplay_video_decoder = {
    "interplayvideo",
    AVMEDIA_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};