You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1029 lines
31KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 the ffmpeg project
  4. *
  5. * This file is part of Libav.
  6. *
  7. * Libav is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * Libav is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with Libav; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  24. * For more information about the Interplay MVE format, visit:
  25. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  26. * This code is written in such a way that the identifiers match up
  27. * with the encoding descriptions in the document.
  28. *
  29. * This decoder presently only supports a PAL8 output colorspace.
  30. *
  31. * An Interplay video frame consists of 2 parts: The decoding map and
  32. * the video data. A demuxer must load these 2 parts together in a single
  33. * buffer before sending it through the stream to this decoder.
  34. */
  35. #include <stdio.h>
  36. #include <stdlib.h>
  37. #include <string.h>
  38. #include "avcodec.h"
  39. #include "bytestream.h"
  40. #include "dsputil.h"
  41. #define BITSTREAM_READER_LE
  42. #include "get_bits.h"
  43. #include "internal.h"
  44. #define PALETTE_COUNT 256
/* Persistent decoder state for one Interplay MVE video stream. */
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;          /* reference frame from 2 frames ago */
    AVFrame last_frame;                 /* reference frame from 1 frame ago */
    AVFrame current_frame;              /* frame currently being decoded */
    const unsigned char *decoding_map;  /* 4 bits of opcode per 8x8 block */
    int decoding_map_size;
    int is_16bpp;                       /* nonzero for RGB555 input, 0 for PAL8 */
    GetByteContext stream_ptr, mv_ptr;  /* pixel-data reader / motion-vector reader (16bpp only) */
    unsigned char *pixel_ptr;           /* destination of the block being decoded */
    int line_inc;                       /* stride minus 8: step to the next row within a block */
    int stride;                         /* row length in pixels (elements, not bytes) */
    int upper_motion_limit_offset;      /* highest legal source offset for copy_from() */
    uint32_t pal[256];                  /* most recently received palette */
} IpvideoContext;
  61. static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
  62. {
  63. int current_offset = s->pixel_ptr - s->current_frame.data[0];
  64. int motion_offset = current_offset + delta_y * s->current_frame.linesize[0]
  65. + delta_x * (1 + s->is_16bpp);
  66. if (motion_offset < 0) {
  67. av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
  68. return -1;
  69. } else if (motion_offset > s->upper_motion_limit_offset) {
  70. av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
  71. motion_offset, s->upper_motion_limit_offset);
  72. return -1;
  73. }
  74. if (src->data[0] == NULL) {
  75. av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
  76. return AVERROR(EINVAL);
  77. }
  78. s->dsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
  79. s->current_frame.linesize[0], 8);
  80. return 0;
  81. }
  82. static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
  83. {
  84. return copy_from(s, &s->last_frame, 0, 0);
  85. }
  86. static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
  87. {
  88. return copy_from(s, &s->second_last_frame, 0, 0);
  89. }
  90. static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
  91. {
  92. unsigned char B;
  93. int x, y;
  94. /* copy block from 2 frames ago using a motion vector; need 1 more byte */
  95. if (!s->is_16bpp) {
  96. B = bytestream2_get_byte(&s->stream_ptr);
  97. } else {
  98. B = bytestream2_get_byte(&s->mv_ptr);
  99. }
  100. if (B < 56) {
  101. x = 8 + (B % 7);
  102. y = B / 7;
  103. } else {
  104. x = -14 + ((B - 56) % 29);
  105. y = 8 + ((B - 56) / 29);
  106. }
  107. av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  108. return copy_from(s, &s->second_last_frame, x, y);
  109. }
  110. static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
  111. {
  112. unsigned char B;
  113. int x, y;
  114. /* copy 8x8 block from current frame from an up/left block */
  115. /* need 1 more byte for motion */
  116. if (!s->is_16bpp) {
  117. B = bytestream2_get_byte(&s->stream_ptr);
  118. } else {
  119. B = bytestream2_get_byte(&s->mv_ptr);
  120. }
  121. if (B < 56) {
  122. x = -(8 + (B % 7));
  123. y = -(B / 7);
  124. } else {
  125. x = -(-14 + ((B - 56) % 29));
  126. y = -( 8 + ((B - 56) / 29));
  127. }
  128. av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  129. return copy_from(s, &s->current_frame, x, y);
  130. }
  131. static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
  132. {
  133. int x, y;
  134. unsigned char B, BL, BH;
  135. /* copy a block from the previous frame; need 1 more byte */
  136. if (!s->is_16bpp) {
  137. B = bytestream2_get_byte(&s->stream_ptr);
  138. } else {
  139. B = bytestream2_get_byte(&s->mv_ptr);
  140. }
  141. BL = B & 0x0F;
  142. BH = (B >> 4) & 0x0F;
  143. x = -8 + BL;
  144. y = -8 + BH;
  145. av_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  146. return copy_from(s, &s->last_frame, x, y);
  147. }
  148. static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
  149. {
  150. signed char x, y;
  151. /* copy a block from the previous frame using an expanded range;
  152. * need 2 more bytes */
  153. x = bytestream2_get_byte(&s->stream_ptr);
  154. y = bytestream2_get_byte(&s->stream_ptr);
  155. av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
  156. return copy_from(s, &s->last_frame, x, y);
  157. }
  158. static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
  159. {
  160. /* mystery opcode? skip multiple blocks? */
  161. av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");
  162. /* report success */
  163. return 0;
  164. }
/* Opcode 0x7: 2-color encoding. P[0] <= P[1] selects per-pixel flags
 * (8 extra bytes); otherwise 16 flag bits paint 2x2 sub-blocks. */
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            /* 0x100 acts as a sentinel: the loop stops when only the
             * sentinel bit remains, i.e. after exactly 8 data bits */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream; each flag bit colors one
         * 2x2 sub-block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x8: 2-color encoding per 4x4 quadrant, or 2-color encoding of
 * vertical/horizontal halves; the comparison of each color pair selects
 * the sub-mode. Stream read order (flags before P[2]/P[3] in the half
 * modes) is significant. */
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* quadrant mode: a fresh color pair + 16 flag bits per 4x4 block;
         * y counts 16 rows of 4 pixels (left half first, then right) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        /* the 32 flag bits precede the second color pair in the stream */
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = P[2];
                    P[1] = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x9: 4-color encoding. The orderings of P[0]/P[1] and
 * P[2]/P[3] select per-pixel, 2x2, 2x1, or 1x2 granularity. */
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            /* horizontal pairs (2x1) */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            /* vertical pairs (1x2) */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0xA: 4-color encoding per 4x4 quadrant, or per half (vertical
 * or horizontal split, chosen by the second color set). */
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        /* the 64 flag bits precede the second set of 4 colors */
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  360. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
  361. {
  362. int y;
  363. /* 64-color encoding (each pixel in block is a different color) */
  364. for (y = 0; y < 8; y++) {
  365. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  366. s->pixel_ptr += s->stride;
  367. }
  368. /* report success */
  369. return 0;
  370. }
  371. static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
  372. {
  373. int x, y;
  374. /* 16-color block encoding: each 2x2 block is a different color */
  375. for (y = 0; y < 8; y += 2) {
  376. for (x = 0; x < 8; x += 2) {
  377. s->pixel_ptr[x ] =
  378. s->pixel_ptr[x + 1 ] =
  379. s->pixel_ptr[x + s->stride] =
  380. s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
  381. }
  382. s->pixel_ptr += s->stride * 2;
  383. }
  384. /* report success */
  385. return 0;
  386. }
  387. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
  388. {
  389. int y;
  390. unsigned char P[2];
  391. /* 4-color block encoding: each 4x4 block is a different color */
  392. for (y = 0; y < 8; y++) {
  393. if (!(y & 3)) {
  394. P[0] = bytestream2_get_byte(&s->stream_ptr);
  395. P[1] = bytestream2_get_byte(&s->stream_ptr);
  396. }
  397. memset(s->pixel_ptr, P[0], 4);
  398. memset(s->pixel_ptr + 4, P[1], 4);
  399. s->pixel_ptr += s->stride;
  400. }
  401. /* report success */
  402. return 0;
  403. }
  404. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
  405. {
  406. int y;
  407. unsigned char pix;
  408. /* 1-color encoding: the whole block is 1 solid color */
  409. pix = bytestream2_get_byte(&s->stream_ptr);
  410. for (y = 0; y < 8; y++) {
  411. memset(s->pixel_ptr, pix, 8);
  412. s->pixel_ptr += s->stride;
  413. }
  414. /* report success */
  415. return 0;
  416. }
  417. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
  418. {
  419. int x, y;
  420. unsigned char sample[2];
  421. /* dithered encoding */
  422. sample[0] = bytestream2_get_byte(&s->stream_ptr);
  423. sample[1] = bytestream2_get_byte(&s->stream_ptr);
  424. for (y = 0; y < 8; y++) {
  425. for (x = 0; x < 8; x += 2) {
  426. *s->pixel_ptr++ = sample[ y & 1 ];
  427. *s->pixel_ptr++ = sample[!(y & 1)];
  428. }
  429. s->pixel_ptr += s->line_inc;
  430. }
  431. /* report success */
  432. return 0;
  433. }
  434. static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s)
  435. {
  436. signed char x, y;
  437. /* copy a block from the second last frame using an expanded range */
  438. x = bytestream2_get_byte(&s->stream_ptr);
  439. y = bytestream2_get_byte(&s->stream_ptr);
  440. av_dlog(NULL, " motion bytes = %d, %d\n", x, y);
  441. return copy_from(s, &s->second_last_frame, x, y);
  442. }
/* Opcode 0x7 (16bpp): 2-color encoding; the high bit of P[0] (unused by
 * RGB555 pixels) selects per-pixel flags vs. 2x2 sub-block flags. */
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            /* 0x100 is a sentinel bit: the loop ends after exactly 8 data bits */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        /* each of the 16 flag bits colors one 2x2 sub-block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
/* Opcode 0x8 (16bpp): 2-color encoding per quadrant or per half; the
 * high bit of P[0]/P[2] selects the sub-mode (mirrors the 8bpp variant,
 * which compares the color pair instead). */
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        /* quadrant mode: fresh color pair + 16 flag bits per 4x4 block */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        /* the 32 flag bits precede the second color pair */
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2]  = bytestream2_get_le16(&s->stream_ptr);
        P[3]  = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x9 (16bpp): 4-color encoding; high bits of P[0] and P[2]
 * select per-pixel, 2x2, 2x1, or 1x2 granularity. */
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            /* horizontal pairs (2x1) */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            /* vertical pairs (1x2) */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0xA (16bpp): 4-color encoding per quadrant or per half; the
 * high bit of P[0]/P[4] selects the sub-mode. */
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        /* the 64 flag bits precede the second set of 4 colors */
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half (8 bytes = 4 uint16_t colors)
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  644. static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s)
  645. {
  646. int x, y;
  647. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  648. /* 64-color encoding (each pixel in block is a different color) */
  649. for (y = 0; y < 8; y++) {
  650. for (x = 0; x < 8; x++)
  651. pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
  652. pixel_ptr += s->stride;
  653. }
  654. /* report success */
  655. return 0;
  656. }
  657. static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s)
  658. {
  659. int x, y;
  660. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  661. /* 16-color block encoding: each 2x2 block is a different color */
  662. for (y = 0; y < 8; y += 2) {
  663. for (x = 0; x < 8; x += 2) {
  664. pixel_ptr[x ] =
  665. pixel_ptr[x + 1 ] =
  666. pixel_ptr[x + s->stride] =
  667. pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
  668. }
  669. pixel_ptr += s->stride * 2;
  670. }
  671. /* report success */
  672. return 0;
  673. }
  674. static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s)
  675. {
  676. int x, y;
  677. uint16_t P[2];
  678. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  679. /* 4-color block encoding: each 4x4 block is a different color */
  680. for (y = 0; y < 8; y++) {
  681. if (!(y & 3)) {
  682. P[0] = bytestream2_get_le16(&s->stream_ptr);
  683. P[1] = bytestream2_get_le16(&s->stream_ptr);
  684. }
  685. for (x = 0; x < 8; x++)
  686. pixel_ptr[x] = P[x >> 2];
  687. pixel_ptr += s->stride;
  688. }
  689. /* report success */
  690. return 0;
  691. }
  692. static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s)
  693. {
  694. int x, y;
  695. uint16_t pix;
  696. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  697. /* 1-color encoding: the whole block is 1 solid color */
  698. pix = bytestream2_get_le16(&s->stream_ptr);
  699. for (y = 0; y < 8; y++) {
  700. for (x = 0; x < 8; x++)
  701. pixel_ptr[x] = pix;
  702. pixel_ptr += s->stride;
  703. }
  704. /* report success */
  705. return 0;
  706. }
/* Dispatch table for PAL8 streams, indexed by the 4-bit block opcode. */
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
/* Dispatch table for 16bpp (RGB555) streams. Opcodes 0x0-0x5 share the
 * 8bpp handlers; note that slot 0xF reuses the 0x1 handler (copy from
 * the second-last frame) rather than a dithered variant. */
static int (* const ipvideo_decode_block16[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
/* Walk the decoding map and dispatch one opcode handler per 8x8 block
 * of the frame; decoding stops at the first block that reports an
 * error. */
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;   /* debug-only counter; NOTE(review): not thread-safe */
    GetBitContext gb;

    av_dlog(NULL, "------------------ frame %d\n", frame);
    frame++;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(s->current_frame.data[1], s->pal, AVPALETTE_SIZE);

        s->stride = s->current_frame.linesize[0];
    } else {
        /* stride is in 16-bit pixels, hence the halving */
        s->stride = s->current_frame.linesize[0] >> 1;
        /* motion-vector bytes live in a separate length-prefixed chunk;
         * mv_ptr reads it while stream_ptr skips past it */
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->current_frame.linesize[0]
                                    + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            /* one 4-bit opcode per 8x8 block (LE bitstream reader) */
            opcode = get_bits(&gb, 4);

            av_dlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = s->current_frame.data[0] + x
                                + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block[opcode](s);
            } else {
                s->pixel_ptr = s->current_frame.data[0] + x*2
                                + y*s->current_frame.linesize[0];
                ret = ipvideo_decode_block16[opcode](s);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x, y);
                return;
            }
        }
    }
    /* a single leftover byte is tolerated (padding); more is reported */
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_ERROR,
               "Interplay video: decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
  778. static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
  779. {
  780. IpvideoContext *s = avctx->priv_data;
  781. s->avctx = avctx;
  782. s->is_16bpp = avctx->bits_per_coded_sample == 16;
  783. avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
  784. ff_dsputil_init(&s->dsp, avctx);
  785. s->current_frame.data[0] = s->last_frame.data[0] =
  786. s->second_last_frame.data[0] = NULL;
  787. return 0;
  788. }
/* Decode one packet: split it into the decoding map and the pixel data,
 * decode all blocks, return the finished frame, and rotate the
 * reference-frame chain. */
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    /* NOTE(review): an undersized packet is silently consumed here
     * (no frame output, no error) — presumably intentional best-effort */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    s->current_frame.reference = 3;
    if (ff_get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    if (!s->is_16bpp) {
        /* a new palette, if any, arrives as packet side data */
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            s->current_frame.palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s);

    *got_frame = 1;
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL; /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
  829. static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
  830. {
  831. IpvideoContext *s = avctx->priv_data;
  832. /* release the last frame */
  833. if (s->last_frame.data[0])
  834. avctx->release_buffer(avctx, &s->last_frame);
  835. if (s->second_last_frame.data[0])
  836. avctx->release_buffer(avctx, &s->second_last_frame);
  837. return 0;
  838. }
/* Codec registration entry for the Interplay MVE video decoder. */
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_PARAM_CHANGE,
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};