You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1033 lines
31KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 the ffmpeg project
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  24. * For more information about the Interplay MVE format, visit:
  25. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  26. * This code is written in such a way that the identifiers match up
  27. * with the encoding descriptions in the document.
  28. *
  29. * This decoder presently only supports a PAL8 output colorspace.
  30. *
  31. * An Interplay video frame consists of 2 parts: The decoding map and
  32. * the video data. A demuxer must load these 2 parts together in a single
  33. * buffer before sending it through the stream to this decoder.
  34. */
  35. #include <stdio.h>
  36. #include <stdlib.h>
  37. #include <string.h>
  38. #include "avcodec.h"
  39. #include "bytestream.h"
  40. #include "hpeldsp.h"
  41. #define BITSTREAM_READER_LE
  42. #include "get_bits.h"
  43. #include "internal.h"
  44. #define PALETTE_COUNT 256
typedef struct IpvideoContext {
    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;        /* reference frame from 2 frames ago */
    AVFrame *last_frame;               /* reference frame from 1 frame ago */
    const unsigned char *decoding_map; /* 4 bits of opcode per 8x8 block */
    int decoding_map_size;
    int is_16bpp;                      /* nonzero: RGB555 output; zero: PAL8 */
    GetByteContext stream_ptr, mv_ptr; /* pixel-data stream / motion-vector sub-stream (16bpp only) */
    unsigned char *pixel_ptr;          /* current write position in the output frame */
    int line_inc;                      /* stride minus block width (8), in pixels */
    int stride;                        /* output row stride in pixels */
    int upper_motion_limit_offset;     /* max valid byte offset for motion copies */
    uint32_t pal[256];                 /* current PAL8 palette */
} IpvideoContext;
  60. static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
  61. {
  62. int current_offset = s->pixel_ptr - dst->data[0];
  63. int motion_offset = current_offset + delta_y * dst->linesize[0]
  64. + delta_x * (1 + s->is_16bpp);
  65. if (motion_offset < 0) {
  66. av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
  67. return AVERROR_INVALIDDATA;
  68. } else if (motion_offset > s->upper_motion_limit_offset) {
  69. av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
  70. motion_offset, s->upper_motion_limit_offset);
  71. return AVERROR_INVALIDDATA;
  72. }
  73. if (src->data[0] == NULL) {
  74. av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
  75. return AVERROR(EINVAL);
  76. }
  77. s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
  78. dst->linesize[0], 8);
  79. return 0;
  80. }
/* opcode 0x0: copy this block unchanged from the previous frame */
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}
/* opcode 0x1: copy this block unchanged from two frames back */
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}
  89. static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
  90. {
  91. unsigned char B;
  92. int x, y;
  93. /* copy block from 2 frames ago using a motion vector; need 1 more byte */
  94. if (!s->is_16bpp) {
  95. B = bytestream2_get_byte(&s->stream_ptr);
  96. } else {
  97. B = bytestream2_get_byte(&s->mv_ptr);
  98. }
  99. if (B < 56) {
  100. x = 8 + (B % 7);
  101. y = B / 7;
  102. } else {
  103. x = -14 + ((B - 56) % 29);
  104. y = 8 + ((B - 56) / 29);
  105. }
  106. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  107. return copy_from(s, s->second_last_frame, frame, x, y);
  108. }
  109. static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
  110. {
  111. unsigned char B;
  112. int x, y;
  113. /* copy 8x8 block from current frame from an up/left block */
  114. /* need 1 more byte for motion */
  115. if (!s->is_16bpp) {
  116. B = bytestream2_get_byte(&s->stream_ptr);
  117. } else {
  118. B = bytestream2_get_byte(&s->mv_ptr);
  119. }
  120. if (B < 56) {
  121. x = -(8 + (B % 7));
  122. y = -(B / 7);
  123. } else {
  124. x = -(-14 + ((B - 56) % 29));
  125. y = -( 8 + ((B - 56) / 29));
  126. }
  127. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  128. return copy_from(s, frame, frame, x, y);
  129. }
  130. static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
  131. {
  132. int x, y;
  133. unsigned char B, BL, BH;
  134. /* copy a block from the previous frame; need 1 more byte */
  135. if (!s->is_16bpp) {
  136. B = bytestream2_get_byte(&s->stream_ptr);
  137. } else {
  138. B = bytestream2_get_byte(&s->mv_ptr);
  139. }
  140. BL = B & 0x0F;
  141. BH = (B >> 4) & 0x0F;
  142. x = -8 + BL;
  143. y = -8 + BH;
  144. av_dlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  145. return copy_from(s, s->last_frame, frame, x, y);
  146. }
  147. static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
  148. {
  149. signed char x, y;
  150. /* copy a block from the previous frame using an expanded range;
  151. * need 2 more bytes */
  152. x = bytestream2_get_byte(&s->stream_ptr);
  153. y = bytestream2_get_byte(&s->stream_ptr);
  154. av_dlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  155. return copy_from(s, s->last_frame, frame, x, y);
  156. }
/* opcode 0x6 (8bpp): undocumented; nothing is decoded, but it is logged
 * loudly so samples exercising it can be collected */
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
    /* report success */
    return 0;
}
/**
 * Opcode 0x7: 2-color encoding. If P[0] <= P[1], each row is painted
 * from one flag byte (1 bit per pixel, 8 more bytes); otherwise a single
 * 16-bit flag word paints the block in 2x2 cells.
 */
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            /* the 0x100 sentinel bit terminates the shift loop after 8 pixels */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }
    } else {
        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x8: 2-color encoding per 4x4 quadrant, or 2-color encoding on
 * left/right or top/bottom halves. The ordering of P[0] vs P[1] (and of
 * P[2] vs P[3]) selects the layout — the exact stream-read order below
 * must not be changed.
 */
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* quadrant mode: walk 16 rows of 4 pixels (left half then right) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x9: 4-color encoding. The orderings of P[0]/P[1] and P[2]/P[3]
 * select the cell granularity: per-pixel, 2x2, 2x1, or 1x2 cells, each
 * consuming a different number of flag bytes.
 */
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {
            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            /* 2x1 (horizontal pair) cells */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 (vertical pair) cells */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0xA: 4-color encoding per 4x4 quadrant, or 4-color encoding on
 * left/right or top/bottom halves (selected by P[0] <= P[1], then by
 * P[4] <= P[5]). Reads from the stream are strictly ordered.
 */
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {
        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */
        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  363. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
  364. {
  365. int y;
  366. /* 64-color encoding (each pixel in block is a different color) */
  367. for (y = 0; y < 8; y++) {
  368. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  369. s->pixel_ptr += s->stride;
  370. }
  371. /* report success */
  372. return 0;
  373. }
  374. static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
  375. {
  376. int x, y;
  377. /* 16-color block encoding: each 2x2 block is a different color */
  378. for (y = 0; y < 8; y += 2) {
  379. for (x = 0; x < 8; x += 2) {
  380. s->pixel_ptr[x ] =
  381. s->pixel_ptr[x + 1 ] =
  382. s->pixel_ptr[x + s->stride] =
  383. s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
  384. }
  385. s->pixel_ptr += s->stride * 2;
  386. }
  387. /* report success */
  388. return 0;
  389. }
  390. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
  391. {
  392. int y;
  393. unsigned char P[2];
  394. /* 4-color block encoding: each 4x4 block is a different color */
  395. for (y = 0; y < 8; y++) {
  396. if (!(y & 3)) {
  397. P[0] = bytestream2_get_byte(&s->stream_ptr);
  398. P[1] = bytestream2_get_byte(&s->stream_ptr);
  399. }
  400. memset(s->pixel_ptr, P[0], 4);
  401. memset(s->pixel_ptr + 4, P[1], 4);
  402. s->pixel_ptr += s->stride;
  403. }
  404. /* report success */
  405. return 0;
  406. }
  407. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
  408. {
  409. int y;
  410. unsigned char pix;
  411. /* 1-color encoding: the whole block is 1 solid color */
  412. pix = bytestream2_get_byte(&s->stream_ptr);
  413. for (y = 0; y < 8; y++) {
  414. memset(s->pixel_ptr, pix, 8);
  415. s->pixel_ptr += s->stride;
  416. }
  417. /* report success */
  418. return 0;
  419. }
  420. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
  421. {
  422. int x, y;
  423. unsigned char sample[2];
  424. /* dithered encoding */
  425. sample[0] = bytestream2_get_byte(&s->stream_ptr);
  426. sample[1] = bytestream2_get_byte(&s->stream_ptr);
  427. for (y = 0; y < 8; y++) {
  428. for (x = 0; x < 8; x += 2) {
  429. *s->pixel_ptr++ = sample[ y & 1 ];
  430. *s->pixel_ptr++ = sample[!(y & 1)];
  431. }
  432. s->pixel_ptr += s->line_inc;
  433. }
  434. /* report success */
  435. return 0;
  436. }
  437. static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
  438. {
  439. signed char x, y;
  440. /* copy a block from the second last frame using an expanded range */
  441. x = bytestream2_get_byte(&s->stream_ptr);
  442. y = bytestream2_get_byte(&s->stream_ptr);
  443. av_dlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  444. return copy_from(s, s->second_last_frame, frame, x, y);
  445. }
/**
 * Opcode 0x7 (16bpp): 2-color encoding. The high bit of P[0] (unused by
 * RGB555) selects the layout instead of the P[0] <= P[1] byte comparison
 * used in the 8bpp variant.
 */
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* one flag byte per row, 1 bit per pixel */
        for (y = 0; y < 8; y++) {
            /* 0x100 sentinel terminates the shift loop after 8 pixels */
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }
    } else {
        /* one 16-bit flag word paints the block in 2x2 cells */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
/**
 * Opcode 0x8 (16bpp): 2-color encoding per 4x4 quadrant, or on
 * left/right or top/bottom halves; layout selected via the high bit of
 * P[0] / P[2] rather than byte comparisons. Stream-read order is strict.
 */
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* quadrant mode: 16 rows of 4 pixels (left half then right) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0x9 (16bpp): 4-color encoding; the high bits of P[0] and P[2]
 * select per-pixel, 2x2, 2x1, or 1x2 cell granularity.
 */
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {
            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            /* 2x1 (horizontal pair) cells */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 (vertical pair) cells */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/**
 * Opcode 0xA (16bpp): 4-color encoding per 4x4 quadrant, or on
 * left/right or top/bottom halves; layout selected by the high bits of
 * P[0] and P[4]. Stream-read order is strict.
 */
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {
        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */
        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                /* 4 uint16_t values = 8 bytes */
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  647. static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
  648. {
  649. int x, y;
  650. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  651. /* 64-color encoding (each pixel in block is a different color) */
  652. for (y = 0; y < 8; y++) {
  653. for (x = 0; x < 8; x++)
  654. pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
  655. pixel_ptr += s->stride;
  656. }
  657. /* report success */
  658. return 0;
  659. }
  660. static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
  661. {
  662. int x, y;
  663. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  664. /* 16-color block encoding: each 2x2 block is a different color */
  665. for (y = 0; y < 8; y += 2) {
  666. for (x = 0; x < 8; x += 2) {
  667. pixel_ptr[x ] =
  668. pixel_ptr[x + 1 ] =
  669. pixel_ptr[x + s->stride] =
  670. pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
  671. }
  672. pixel_ptr += s->stride * 2;
  673. }
  674. /* report success */
  675. return 0;
  676. }
  677. static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
  678. {
  679. int x, y;
  680. uint16_t P[2];
  681. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  682. /* 4-color block encoding: each 4x4 block is a different color */
  683. for (y = 0; y < 8; y++) {
  684. if (!(y & 3)) {
  685. P[0] = bytestream2_get_le16(&s->stream_ptr);
  686. P[1] = bytestream2_get_le16(&s->stream_ptr);
  687. }
  688. for (x = 0; x < 8; x++)
  689. pixel_ptr[x] = P[x >> 2];
  690. pixel_ptr += s->stride;
  691. }
  692. /* report success */
  693. return 0;
  694. }
  695. static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
  696. {
  697. int x, y;
  698. uint16_t pix;
  699. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  700. /* 1-color encoding: the whole block is 1 solid color */
  701. pix = bytestream2_get_le16(&s->stream_ptr);
  702. for (y = 0; y < 8; y++) {
  703. for (x = 0; x < 8; x++)
  704. pixel_ptr[x] = pix;
  705. pixel_ptr += s->stride;
  706. }
  707. /* report success */
  708. return 0;
  709. }
/* dispatch table for 8bpp (PAL8) blocks, indexed by the 4-bit opcode
 * taken from the decoding map */
static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
/* dispatch table for 16bpp (RGB555) blocks; opcodes 0x0-0x5 share the
 * 8bpp handlers, and 0xF reuses the 0x1 handler (copy from two frames
 * back) — there is no 16bpp dithered mode */
static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
/**
 * Walk the decoding map and dispatch one 4-bit opcode per 8x8 block,
 * left-to-right, top-to-bottom, consuming pixel data from stream_ptr.
 * Stops (with a log message) on the first failing block.
 */
static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1; /* stride in 16-bit pixels */
        /* motion bytes live in a separate sub-stream; its le16 length
         * prefix is consumed here so stream_ptr lands on pixel data */
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    /* byte offset of the last valid 8x8 motion source block */
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            av_dlog(s->avctx,
                    " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                               + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                               + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    /* one trailing byte is tolerated (alignment padding) */
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_ERROR,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
  778. static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
  779. {
  780. IpvideoContext *s = avctx->priv_data;
  781. s->avctx = avctx;
  782. s->is_16bpp = avctx->bits_per_coded_sample == 16;
  783. avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
  784. ff_hpeldsp_init(&s->hdsp, avctx->flags);
  785. s->last_frame = av_frame_alloc();
  786. s->second_last_frame = av_frame_alloc();
  787. if (!s->last_frame || !s->second_last_frame) {
  788. av_frame_free(&s->last_frame);
  789. av_frame_free(&s->second_last_frame);
  790. return AVERROR(ENOMEM);
  791. }
  792. return 0;
  793. }
/**
 * Decode one packet: the buffer holds the decoding map followed by the
 * pixel data. Reference frames are rotated after decoding so the next
 * packet sees this frame as last_frame.
 */
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    /* a parameter change invalidates both reference frames */
    if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
        av_frame_unref(s->last_frame);
        av_frame_unref(s->second_last_frame);
    }

    s->decoding_map = buf;
    bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
        return ret;

    if (!s->is_16bpp) {
        /* pick up any palette change delivered as packet side data */
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s, frame);

    *got_frame = 1;

    /* shuffle frames: second_last <- last <- current (by new reference) */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}
/* decoder teardown: release the two reference frames */
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);

    return 0;
}
/* codec registration; PARAM_CHANGE is needed because MVE streams can
 * change dimensions mid-stream (handled in ipvideo_decode_frame) */
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_PARAM_CHANGE,
};