You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1057 lines
32KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 The FFmpeg project
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  24. * For more information about the Interplay MVE format, visit:
  25. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  26. * This code is written in such a way that the identifiers match up
  27. * with the encoding descriptions in the document.
  28. *
  29. * This decoder presently only supports a PAL8 output colorspace.
  30. *
  31. * An Interplay video frame consists of 2 parts: The decoding map and
  32. * the video data. A demuxer must load these 2 parts together in a single
  33. * buffer before sending it through the stream to this decoder.
  34. */
  35. #include <stdio.h>
  36. #include <stdlib.h>
  37. #include <string.h>
  38. #include "libavutil/intreadwrite.h"
  39. #include "avcodec.h"
  40. #include "bytestream.h"
  41. #include "hpeldsp.h"
  42. #define BITSTREAM_READER_LE
  43. #include "get_bits.h"
  44. #include "internal.h"
  45. #define PALETTE_COUNT 256
/* Per-stream decoder state shared by all opcode handlers. */
typedef struct IpvideoContext {

    AVCodecContext *avctx;              /* parent codec context, used for logging and dimensions */
    HpelDSPContext hdsp;                /* half-pel DSP functions used for 8x8 block copies */
    AVFrame *second_last_frame;         /* reference frame from 2 frames ago (opcodes 0x1/0x2) */
    AVFrame *last_frame;                /* reference frame from 1 frame ago (opcodes 0x0/0x4/0x5) */
    const unsigned char *decoding_map;  /* 4 bits per 8x8 block; selects the opcode handler */
    int decoding_map_size;              /* size of decoding_map in bytes */

    int is_16bpp;                       /* nonzero for RGB555 streams, zero for PAL8 */
    GetByteContext stream_ptr;          /* reader over the video payload */
    GetByteContext mv_ptr;              /* reader over the motion-vector sub-stream (16bpp only) */
    unsigned char *pixel_ptr;           /* current 8x8 block's top-left pixel in the output plane */
    int line_inc;                       /* stride - 8: advance from end of one block row to start of next */
    int stride;                         /* plane stride in pixels (bytes for PAL8, uint16 units for RGB555) */
    int upper_motion_limit_offset;      /* largest valid byte offset for a motion-compensated copy source */

    uint32_t pal[256];                  /* most recent palette received via packet side data (PAL8 only) */
} IpvideoContext;
/* Copy one 8x8 block from @src into @dst at the current block position,
 * displaced by the motion vector (delta_x, delta_y) in pixels.
 * Returns 0 on success, AVERROR_INVALIDDATA if the source offset falls
 * outside the frame, or AVERROR(EINVAL) if @src has no data yet. */
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    /* byte offset of the current block within the destination plane */
    int current_offset = s->pixel_ptr - dst->data[0];
    /* byte offset of the copy source; 16bpp pixels are 2 bytes wide */
    int motion_offset = current_offset + delta_y * dst->linesize[0]
                       + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        /* reference frame was never decoded (e.g. stream starts mid-sequence) */
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    /* hpeldsp table index 1 copies 8 bytes per row (PAL8);
     * index 0 copies 16 bytes per row (8 RGB555 pixels) */
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}
/* Opcode 0x0: copy the block unchanged from the previous frame. */
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}
/* Opcode 0x1: copy the block unchanged from two frames ago. */
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}
  90. static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
  91. {
  92. unsigned char B;
  93. int x, y;
  94. /* copy block from 2 frames ago using a motion vector; need 1 more byte */
  95. if (!s->is_16bpp) {
  96. B = bytestream2_get_byte(&s->stream_ptr);
  97. } else {
  98. B = bytestream2_get_byte(&s->mv_ptr);
  99. }
  100. if (B < 56) {
  101. x = 8 + (B % 7);
  102. y = B / 7;
  103. } else {
  104. x = -14 + ((B - 56) % 29);
  105. y = 8 + ((B - 56) / 29);
  106. }
  107. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  108. return copy_from(s, s->second_last_frame, frame, x, y);
  109. }
  110. static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
  111. {
  112. unsigned char B;
  113. int x, y;
  114. /* copy 8x8 block from current frame from an up/left block */
  115. /* need 1 more byte for motion */
  116. if (!s->is_16bpp) {
  117. B = bytestream2_get_byte(&s->stream_ptr);
  118. } else {
  119. B = bytestream2_get_byte(&s->mv_ptr);
  120. }
  121. if (B < 56) {
  122. x = -(8 + (B % 7));
  123. y = -(B / 7);
  124. } else {
  125. x = -(-14 + ((B - 56) % 29));
  126. y = -( 8 + ((B - 56) / 29));
  127. }
  128. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  129. return copy_from(s, frame, frame, x, y);
  130. }
  131. static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
  132. {
  133. int x, y;
  134. unsigned char B, BL, BH;
  135. /* copy a block from the previous frame; need 1 more byte */
  136. if (!s->is_16bpp) {
  137. B = bytestream2_get_byte(&s->stream_ptr);
  138. } else {
  139. B = bytestream2_get_byte(&s->mv_ptr);
  140. }
  141. BL = B & 0x0F;
  142. BH = (B >> 4) & 0x0F;
  143. x = -8 + BL;
  144. y = -8 + BH;
  145. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  146. return copy_from(s, s->last_frame, frame, x, y);
  147. }
/* Opcode 0x5: copy a block from the previous frame using a full-range
 * motion vector of two signed bytes.  Consumes 2 bytes. */
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes; the narrowing assignment reinterprets each
     * byte as a signed 8-bit delta */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
/* Opcode 0x6 (8bpp): meaning unknown; never handled, only reported.
 * NOTE(review): 16bpp streams route this opcode to
 * ipvideo_decode_block_opcode_0x6_16 instead — see the dispatch tables. */
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
/* Opcode 0x7: 2-color encoding.  The ordering of the two palette indices
 * selects between per-pixel flags (P[0] <= P[1]) and per-2x2-block flags. */
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* need 8 more bytes from the stream: one flag byte per row, 1 bit
         * per pixel; OR-ing in 0x100 plants a sentinel bit so the inner
         * loop stops after exactly 8 shifts */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }
    } else {
        /* need 2 more bytes from the stream: 16 flag bits, one per 2x2
         * sub-block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x8: 2-color encoding with the 8x8 block split into four 4x4
 * quadrants, two 4x8 left/right halves, or two 8x4 top/bottom halves.
 * The relative order of the color bytes selects the layout. */
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        /* four quadrants, each with its own color pair and 16 flag bits;
         * y counts 4-pixel rows: 0-7 cover the left half top-to-bottom,
         * 8-15 the right half (the y == 7 adjustment moves the pointer) */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x9: 4-color encoding.  The orderings of the two color pairs
 * select the sub-block granularity covered by each 2-bit flag:
 * 1x1, 2x2, 2x1 or 1x2 pixels. */
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {
            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            /* 2x1 blocks: each flag colors a horizontal pixel pair */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 blocks: each flag colors a vertical pixel pair */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0xA: 4-color encoding applied per 4x4 quadrant, or per 4x8 /
 * 8x4 half, analogous to opcode 0x8 but with 2-bit flags. */
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {
        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        /* second color pair's ordering picks left/right vs top/bottom */
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */
        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  376. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
  377. {
  378. int y;
  379. /* 64-color encoding (each pixel in block is a different color) */
  380. for (y = 0; y < 8; y++) {
  381. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  382. s->pixel_ptr += s->stride;
  383. }
  384. /* report success */
  385. return 0;
  386. }
/* Opcode 0xC: 16-color encoding — one literal byte per 2x2 sub-block,
 * 16 bytes total. */
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
  403. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
  404. {
  405. int y;
  406. unsigned char P[2];
  407. if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
  408. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
  409. return AVERROR_INVALIDDATA;
  410. }
  411. /* 4-color block encoding: each 4x4 block is a different color */
  412. for (y = 0; y < 8; y++) {
  413. if (!(y & 3)) {
  414. P[0] = bytestream2_get_byte(&s->stream_ptr);
  415. P[1] = bytestream2_get_byte(&s->stream_ptr);
  416. }
  417. memset(s->pixel_ptr, P[0], 4);
  418. memset(s->pixel_ptr + 4, P[1], 4);
  419. s->pixel_ptr += s->stride;
  420. }
  421. /* report success */
  422. return 0;
  423. }
  424. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
  425. {
  426. int y;
  427. unsigned char pix;
  428. /* 1-color encoding: the whole block is 1 solid color */
  429. pix = bytestream2_get_byte(&s->stream_ptr);
  430. for (y = 0; y < 8; y++) {
  431. memset(s->pixel_ptr, pix, 8);
  432. s->pixel_ptr += s->stride;
  433. }
  434. /* report success */
  435. return 0;
  436. }
  437. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
  438. {
  439. int x, y;
  440. unsigned char sample[2];
  441. /* dithered encoding */
  442. sample[0] = bytestream2_get_byte(&s->stream_ptr);
  443. sample[1] = bytestream2_get_byte(&s->stream_ptr);
  444. for (y = 0; y < 8; y++) {
  445. for (x = 0; x < 8; x += 2) {
  446. *s->pixel_ptr++ = sample[ y & 1 ];
  447. *s->pixel_ptr++ = sample[!(y & 1)];
  448. }
  449. s->pixel_ptr += s->line_inc;
  450. }
  451. /* report success */
  452. return 0;
  453. }
/* Opcode 0x6 (16bpp): copy a block from two frames ago using a
 * full-range motion vector of two signed bytes. */
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range;
     * the narrowing assignment reinterprets each byte as signed */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
/* Opcode 0x7 (16bpp): 2-color encoding.  RGB555 values cannot be ordered
 * like palette indices, so the spare high bit of P[0] selects between
 * per-pixel flags (bit clear) and per-2x2-block flags (bit set). */
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* one flag byte per row, 1 bit per pixel; 0x100 is a sentinel
         * bit so the inner loop runs exactly 8 times */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }
    } else {
        /* 16 flag bits, one per 2x2 sub-block */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
/* Opcode 0x8 (16bpp): 2-color encoding per 4x4 quadrant or per 4x8/8x4
 * half; layout is selected by the high bit of the color values instead
 * of their relative order (cf. the 8bpp opcode 0x8 handler). */
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        /* four quadrants, each with its own color pair and 16 flag bits;
         * y counts 4-pixel rows, left half first, then right */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {
            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }
        } else {
            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0x9 (16bpp): 4-color encoding.  The high bits of P[0] and P[2]
 * select the sub-block granularity per 2-bit flag: 1x1, 2x2, 2x1 or 1x2. */
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {
            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }
        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            /* 2x1 blocks: each flag colors a horizontal pixel pair */
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            /* 1x2 blocks: each flag colors a vertical pixel pair */
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
/* Opcode 0xA (16bpp): 4-color encoding per 4x4 quadrant or per 4x8/8x4
 * half; layout is selected by the high bit of the colors (cf. 8bpp 0xA). */
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {
        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }
    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        /* second color set's high bit picks left/right vs top/bottom */
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */
        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                /* 8 bytes = four uint16_t colors */
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
  664. static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
  665. {
  666. int x, y;
  667. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  668. /* 64-color encoding (each pixel in block is a different color) */
  669. for (y = 0; y < 8; y++) {
  670. for (x = 0; x < 8; x++)
  671. pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
  672. pixel_ptr += s->stride;
  673. }
  674. /* report success */
  675. return 0;
  676. }
/* Opcode 0xC (16bpp): 16-color encoding — one literal 16-bit value per
 * 2x2 sub-block. */
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
  694. static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
  695. {
  696. int x, y;
  697. uint16_t P[2];
  698. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  699. /* 4-color block encoding: each 4x4 block is a different color */
  700. for (y = 0; y < 8; y++) {
  701. if (!(y & 3)) {
  702. P[0] = bytestream2_get_le16(&s->stream_ptr);
  703. P[1] = bytestream2_get_le16(&s->stream_ptr);
  704. }
  705. for (x = 0; x < 8; x++)
  706. pixel_ptr[x] = P[x >> 2];
  707. pixel_ptr += s->stride;
  708. }
  709. /* report success */
  710. return 0;
  711. }
  712. static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
  713. {
  714. int x, y;
  715. uint16_t pix;
  716. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  717. /* 1-color encoding: the whole block is 1 solid color */
  718. pix = bytestream2_get_le16(&s->stream_ptr);
  719. for (y = 0; y < 8; y++) {
  720. for (x = 0; x < 8; x++)
  721. pixel_ptr[x] = pix;
  722. pixel_ptr += s->stride;
  723. }
  724. /* report success */
  725. return 0;
  726. }
/* Dispatch table for 8bpp (PAL8) streams, indexed by the 4-bit opcode
 * read from the decoding map. */
static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
/* Dispatch table for 16bpp (RGB555) streams.  Opcodes 0x0-0x5 share the
 * generic handlers; 0x6-0xE have 16-bit variants; opcode 0xF maps to the
 * 0x1 handler (copy from two frames ago) rather than a dither handler. */
static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
/* Walk the decoding map (one 4-bit opcode per 8x8 block, raster order)
 * and dispatch the matching handler for every block of the frame.
 * Stops early and logs on the first handler failure. */
static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1; /* stride in uint16 pixels */
        /* the motion-vector bytes live in a separate sub-stream whose
         * length prefixes it; mv_ptr is advanced past that length field */
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    /* highest in-plane byte offset a motion-compensated copy may source from */
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            ff_tlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
/* Decoder init: pick the output pixel format from the coded bit depth,
 * set up the hpel DSP functions and allocate the two reference frames. */
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame) {
        /* free both: av_frame_free(NULL-holder) is a safe no-op */
        av_frame_free(&s->last_frame);
        av_frame_free(&s->second_last_frame);
        return AVERROR(ENOMEM);
    }

    return 0;
}
  811. static int ipvideo_decode_frame(AVCodecContext *avctx,
  812. void *data, int *got_frame,
  813. AVPacket *avpkt)
  814. {
  815. const uint8_t *buf = avpkt->data;
  816. int buf_size = avpkt->size;
  817. IpvideoContext *s = avctx->priv_data;
  818. AVFrame *frame = data;
  819. int ret;
  820. if (buf_size < 2)
  821. return AVERROR_INVALIDDATA;
  822. /* decoding map contains 4 bits of information per 8x8 block */
  823. s->decoding_map_size = AV_RL16(avpkt->data);
  824. /* compressed buffer needs to be large enough to at least hold an entire
  825. * decoding map */
  826. if (buf_size < s->decoding_map_size + 2)
  827. return buf_size;
  828. if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
  829. av_frame_unref(s->last_frame);
  830. av_frame_unref(s->second_last_frame);
  831. }
  832. s->decoding_map = buf + 2;
  833. bytestream2_init(&s->stream_ptr, buf + 2 + s->decoding_map_size,
  834. buf_size - s->decoding_map_size);
  835. if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
  836. return ret;
  837. if (!s->is_16bpp) {
  838. const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
  839. if (pal) {
  840. frame->palette_has_changed = 1;
  841. memcpy(s->pal, pal, AVPALETTE_SIZE);
  842. }
  843. }
  844. ipvideo_decode_opcodes(s, frame);
  845. *got_frame = 1;
  846. /* shuffle frames */
  847. av_frame_unref(s->second_last_frame);
  848. FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
  849. if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
  850. return ret;
  851. /* report that the buffer was completely consumed */
  852. return buf_size;
  853. }
/* Decoder teardown: release both reference frames. */
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);

    return 0;
}
/* Public decoder definition registered with libavcodec.
 * PARAM_CHANGE is advertised because the demuxer can resize mid-stream. */
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
};