You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1378 lines
43KB

  1. /*
  2. * Interplay MVE Video Decoder
  3. * Copyright (C) 2003 The FFmpeg project
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file
  23. * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
  24. * For more information about the Interplay MVE format, visit:
  25. * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
  26. * This code is written in such a way that the identifiers match up
  27. * with the encoding descriptions in the document.
  28. *
  29. * This decoder presently only supports a PAL8 output colorspace.
  30. *
  31. * An Interplay video frame consists of 2 parts: The decoding map and
  32. * the video data. A demuxer must load these 2 parts together in a single
  33. * buffer before sending it through the stream to this decoder.
  34. */
  35. #include <stdio.h>
  36. #include <stdlib.h>
  37. #include <string.h>
  38. #include "libavutil/intreadwrite.h"
  39. #define BITSTREAM_READER_LE
  40. #include "avcodec.h"
  41. #include "bytestream.h"
  42. #include "get_bits.h"
  43. #include "hpeldsp.h"
  44. #include "internal.h"
  45. #define PALETTE_COUNT 256
  46. typedef struct IpvideoContext {
  47. AVCodecContext *avctx;
  48. HpelDSPContext hdsp;
  49. AVFrame *second_last_frame;
  50. AVFrame *last_frame;
  51. /* For format 0x10 */
  52. AVFrame *cur_decode_frame;
  53. AVFrame *prev_decode_frame;
  54. const unsigned char *decoding_map;
  55. int decoding_map_size;
  56. const unsigned char *skip_map;
  57. int skip_map_size;
  58. int is_16bpp;
  59. GetByteContext stream_ptr, mv_ptr;
  60. unsigned char *pixel_ptr;
  61. int line_inc;
  62. int stride;
  63. int upper_motion_limit_offset;
  64. uint32_t pal[256];
  65. } IpvideoContext;
  66. static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
  67. {
  68. int width = dst->width;
  69. int current_offset = s->pixel_ptr - dst->data[0];
  70. int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
  71. int y = current_offset / dst->linesize[0];
  72. int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
  73. int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
  74. int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);
  75. if (motion_offset < 0) {
  76. av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
  77. return AVERROR_INVALIDDATA;
  78. } else if (motion_offset > s->upper_motion_limit_offset) {
  79. av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
  80. motion_offset, s->upper_motion_limit_offset);
  81. return AVERROR_INVALIDDATA;
  82. }
  83. if (!src->data[0]) {
  84. av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
  85. return AVERROR(EINVAL);
  86. }
  87. s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
  88. dst->linesize[0], 8);
  89. return 0;
  90. }
  91. static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
  92. {
  93. return copy_from(s, s->last_frame, frame, 0, 0);
  94. }
  95. static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
  96. {
  97. return copy_from(s, s->second_last_frame, frame, 0, 0);
  98. }
  99. static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
  100. {
  101. unsigned char B;
  102. int x, y;
  103. /* copy block from 2 frames ago using a motion vector; need 1 more byte */
  104. if (!s->is_16bpp) {
  105. B = bytestream2_get_byte(&s->stream_ptr);
  106. } else {
  107. B = bytestream2_get_byte(&s->mv_ptr);
  108. }
  109. if (B < 56) {
  110. x = 8 + (B % 7);
  111. y = B / 7;
  112. } else {
  113. x = -14 + ((B - 56) % 29);
  114. y = 8 + ((B - 56) / 29);
  115. }
  116. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  117. return copy_from(s, s->second_last_frame, frame, x, y);
  118. }
  119. static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
  120. {
  121. unsigned char B;
  122. int x, y;
  123. /* copy 8x8 block from current frame from an up/left block */
  124. /* need 1 more byte for motion */
  125. if (!s->is_16bpp) {
  126. B = bytestream2_get_byte(&s->stream_ptr);
  127. } else {
  128. B = bytestream2_get_byte(&s->mv_ptr);
  129. }
  130. if (B < 56) {
  131. x = -(8 + (B % 7));
  132. y = -(B / 7);
  133. } else {
  134. x = -(-14 + ((B - 56) % 29));
  135. y = -( 8 + ((B - 56) / 29));
  136. }
  137. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  138. return copy_from(s, frame, frame, x, y);
  139. }
  140. static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
  141. {
  142. int x, y;
  143. unsigned char B, BL, BH;
  144. /* copy a block from the previous frame; need 1 more byte */
  145. if (!s->is_16bpp) {
  146. B = bytestream2_get_byte(&s->stream_ptr);
  147. } else {
  148. B = bytestream2_get_byte(&s->mv_ptr);
  149. }
  150. BL = B & 0x0F;
  151. BH = (B >> 4) & 0x0F;
  152. x = -8 + BL;
  153. y = -8 + BH;
  154. ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
  155. return copy_from(s, s->last_frame, frame, x, y);
  156. }
  157. static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
  158. {
  159. signed char x, y;
  160. /* copy a block from the previous frame using an expanded range;
  161. * need 2 more bytes */
  162. x = bytestream2_get_byte(&s->stream_ptr);
  163. y = bytestream2_get_byte(&s->stream_ptr);
  164. ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  165. return copy_from(s, s->last_frame, frame, x, y);
  166. }
  167. static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
  168. {
  169. /* mystery opcode? skip multiple blocks? */
  170. av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
  171. /* report success */
  172. return 0;
  173. }
  174. static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
  175. {
  176. int x, y;
  177. unsigned char P[2];
  178. unsigned int flags;
  179. if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
  180. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
  181. return AVERROR_INVALIDDATA;
  182. }
  183. /* 2-color encoding */
  184. P[0] = bytestream2_get_byte(&s->stream_ptr);
  185. P[1] = bytestream2_get_byte(&s->stream_ptr);
  186. if (P[0] <= P[1]) {
  187. /* need 8 more bytes from the stream */
  188. for (y = 0; y < 8; y++) {
  189. flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
  190. for (; flags != 1; flags >>= 1)
  191. *s->pixel_ptr++ = P[flags & 1];
  192. s->pixel_ptr += s->line_inc;
  193. }
  194. } else {
  195. /* need 2 more bytes from the stream */
  196. flags = bytestream2_get_le16(&s->stream_ptr);
  197. for (y = 0; y < 8; y += 2) {
  198. for (x = 0; x < 8; x += 2, flags >>= 1) {
  199. s->pixel_ptr[x ] =
  200. s->pixel_ptr[x + 1 ] =
  201. s->pixel_ptr[x + s->stride] =
  202. s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
  203. }
  204. s->pixel_ptr += s->stride * 2;
  205. }
  206. }
  207. /* report success */
  208. return 0;
  209. }
  210. static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
  211. {
  212. int x, y;
  213. unsigned char P[4];
  214. unsigned int flags = 0;
  215. if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
  216. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
  217. return AVERROR_INVALIDDATA;
  218. }
  219. /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
  220. * either top and bottom or left and right halves */
  221. P[0] = bytestream2_get_byte(&s->stream_ptr);
  222. P[1] = bytestream2_get_byte(&s->stream_ptr);
  223. if (P[0] <= P[1]) {
  224. for (y = 0; y < 16; y++) {
  225. // new values for each 4x4 block
  226. if (!(y & 3)) {
  227. if (y) {
  228. P[0] = bytestream2_get_byte(&s->stream_ptr);
  229. P[1] = bytestream2_get_byte(&s->stream_ptr);
  230. }
  231. flags = bytestream2_get_le16(&s->stream_ptr);
  232. }
  233. for (x = 0; x < 4; x++, flags >>= 1)
  234. *s->pixel_ptr++ = P[flags & 1];
  235. s->pixel_ptr += s->stride - 4;
  236. // switch to right half
  237. if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
  238. }
  239. } else {
  240. flags = bytestream2_get_le32(&s->stream_ptr);
  241. P[2] = bytestream2_get_byte(&s->stream_ptr);
  242. P[3] = bytestream2_get_byte(&s->stream_ptr);
  243. if (P[2] <= P[3]) {
  244. /* vertical split; left & right halves are 2-color encoded */
  245. for (y = 0; y < 16; y++) {
  246. for (x = 0; x < 4; x++, flags >>= 1)
  247. *s->pixel_ptr++ = P[flags & 1];
  248. s->pixel_ptr += s->stride - 4;
  249. // switch to right half
  250. if (y == 7) {
  251. s->pixel_ptr -= 8 * s->stride - 4;
  252. P[0] = P[2];
  253. P[1] = P[3];
  254. flags = bytestream2_get_le32(&s->stream_ptr);
  255. }
  256. }
  257. } else {
  258. /* horizontal split; top & bottom halves are 2-color encoded */
  259. for (y = 0; y < 8; y++) {
  260. if (y == 4) {
  261. P[0] = P[2];
  262. P[1] = P[3];
  263. flags = bytestream2_get_le32(&s->stream_ptr);
  264. }
  265. for (x = 0; x < 8; x++, flags >>= 1)
  266. *s->pixel_ptr++ = P[flags & 1];
  267. s->pixel_ptr += s->line_inc;
  268. }
  269. }
  270. }
  271. /* report success */
  272. return 0;
  273. }
  274. static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
  275. {
  276. int x, y;
  277. unsigned char P[4];
  278. if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
  279. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
  280. return AVERROR_INVALIDDATA;
  281. }
  282. /* 4-color encoding */
  283. bytestream2_get_buffer(&s->stream_ptr, P, 4);
  284. if (P[0] <= P[1]) {
  285. if (P[2] <= P[3]) {
  286. /* 1 of 4 colors for each pixel, need 16 more bytes */
  287. for (y = 0; y < 8; y++) {
  288. /* get the next set of 8 2-bit flags */
  289. int flags = bytestream2_get_le16(&s->stream_ptr);
  290. for (x = 0; x < 8; x++, flags >>= 2)
  291. *s->pixel_ptr++ = P[flags & 0x03];
  292. s->pixel_ptr += s->line_inc;
  293. }
  294. } else {
  295. uint32_t flags;
  296. /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
  297. flags = bytestream2_get_le32(&s->stream_ptr);
  298. for (y = 0; y < 8; y += 2) {
  299. for (x = 0; x < 8; x += 2, flags >>= 2) {
  300. s->pixel_ptr[x ] =
  301. s->pixel_ptr[x + 1 ] =
  302. s->pixel_ptr[x + s->stride] =
  303. s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
  304. }
  305. s->pixel_ptr += s->stride * 2;
  306. }
  307. }
  308. } else {
  309. uint64_t flags;
  310. /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
  311. flags = bytestream2_get_le64(&s->stream_ptr);
  312. if (P[2] <= P[3]) {
  313. for (y = 0; y < 8; y++) {
  314. for (x = 0; x < 8; x += 2, flags >>= 2) {
  315. s->pixel_ptr[x ] =
  316. s->pixel_ptr[x + 1] = P[flags & 0x03];
  317. }
  318. s->pixel_ptr += s->stride;
  319. }
  320. } else {
  321. for (y = 0; y < 8; y += 2) {
  322. for (x = 0; x < 8; x++, flags >>= 2) {
  323. s->pixel_ptr[x ] =
  324. s->pixel_ptr[x + s->stride] = P[flags & 0x03];
  325. }
  326. s->pixel_ptr += s->stride * 2;
  327. }
  328. }
  329. }
  330. /* report success */
  331. return 0;
  332. }
  333. static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
  334. {
  335. int x, y;
  336. unsigned char P[8];
  337. int flags = 0;
  338. if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
  339. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
  340. return AVERROR_INVALIDDATA;
  341. }
  342. bytestream2_get_buffer(&s->stream_ptr, P, 4);
  343. /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
  344. * either top and bottom or left and right halves */
  345. if (P[0] <= P[1]) {
  346. /* 4-color encoding for each quadrant; need 32 bytes */
  347. for (y = 0; y < 16; y++) {
  348. // new values for each 4x4 block
  349. if (!(y & 3)) {
  350. if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
  351. flags = bytestream2_get_le32(&s->stream_ptr);
  352. }
  353. for (x = 0; x < 4; x++, flags >>= 2)
  354. *s->pixel_ptr++ = P[flags & 0x03];
  355. s->pixel_ptr += s->stride - 4;
  356. // switch to right half
  357. if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
  358. }
  359. } else {
  360. // vertical split?
  361. int vert;
  362. uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
  363. bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
  364. vert = P[4] <= P[5];
  365. /* 4-color encoding for either left and right or top and bottom
  366. * halves */
  367. for (y = 0; y < 16; y++) {
  368. for (x = 0; x < 4; x++, flags >>= 2)
  369. *s->pixel_ptr++ = P[flags & 0x03];
  370. if (vert) {
  371. s->pixel_ptr += s->stride - 4;
  372. // switch to right half
  373. if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
  374. } else if (y & 1) s->pixel_ptr += s->line_inc;
  375. // load values for second half
  376. if (y == 7) {
  377. memcpy(P, P + 4, 4);
  378. flags = bytestream2_get_le64(&s->stream_ptr);
  379. }
  380. }
  381. }
  382. /* report success */
  383. return 0;
  384. }
  385. static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
  386. {
  387. int y;
  388. /* 64-color encoding (each pixel in block is a different color) */
  389. for (y = 0; y < 8; y++) {
  390. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  391. s->pixel_ptr += s->stride;
  392. }
  393. /* report success */
  394. return 0;
  395. }
  396. static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
  397. {
  398. int x, y;
  399. /* 16-color block encoding: each 2x2 block is a different color */
  400. for (y = 0; y < 8; y += 2) {
  401. for (x = 0; x < 8; x += 2) {
  402. s->pixel_ptr[x ] =
  403. s->pixel_ptr[x + 1 ] =
  404. s->pixel_ptr[x + s->stride] =
  405. s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
  406. }
  407. s->pixel_ptr += s->stride * 2;
  408. }
  409. /* report success */
  410. return 0;
  411. }
  412. static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
  413. {
  414. int y;
  415. unsigned char P[2];
  416. if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
  417. av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
  418. return AVERROR_INVALIDDATA;
  419. }
  420. /* 4-color block encoding: each 4x4 block is a different color */
  421. for (y = 0; y < 8; y++) {
  422. if (!(y & 3)) {
  423. P[0] = bytestream2_get_byte(&s->stream_ptr);
  424. P[1] = bytestream2_get_byte(&s->stream_ptr);
  425. }
  426. memset(s->pixel_ptr, P[0], 4);
  427. memset(s->pixel_ptr + 4, P[1], 4);
  428. s->pixel_ptr += s->stride;
  429. }
  430. /* report success */
  431. return 0;
  432. }
  433. static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
  434. {
  435. int y;
  436. unsigned char pix;
  437. /* 1-color encoding: the whole block is 1 solid color */
  438. pix = bytestream2_get_byte(&s->stream_ptr);
  439. for (y = 0; y < 8; y++) {
  440. memset(s->pixel_ptr, pix, 8);
  441. s->pixel_ptr += s->stride;
  442. }
  443. /* report success */
  444. return 0;
  445. }
  446. static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
  447. {
  448. int x, y;
  449. unsigned char sample[2];
  450. /* dithered encoding */
  451. sample[0] = bytestream2_get_byte(&s->stream_ptr);
  452. sample[1] = bytestream2_get_byte(&s->stream_ptr);
  453. for (y = 0; y < 8; y++) {
  454. for (x = 0; x < 8; x += 2) {
  455. *s->pixel_ptr++ = sample[ y & 1 ];
  456. *s->pixel_ptr++ = sample[!(y & 1)];
  457. }
  458. s->pixel_ptr += s->line_inc;
  459. }
  460. /* report success */
  461. return 0;
  462. }
  463. static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
  464. {
  465. signed char x, y;
  466. /* copy a block from the second last frame using an expanded range */
  467. x = bytestream2_get_byte(&s->stream_ptr);
  468. y = bytestream2_get_byte(&s->stream_ptr);
  469. ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
  470. return copy_from(s, s->second_last_frame, frame, x, y);
  471. }
  472. static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
  473. {
  474. int x, y;
  475. uint16_t P[2];
  476. unsigned int flags;
  477. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  478. /* 2-color encoding */
  479. P[0] = bytestream2_get_le16(&s->stream_ptr);
  480. P[1] = bytestream2_get_le16(&s->stream_ptr);
  481. if (!(P[0] & 0x8000)) {
  482. for (y = 0; y < 8; y++) {
  483. flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
  484. for (; flags != 1; flags >>= 1)
  485. *pixel_ptr++ = P[flags & 1];
  486. pixel_ptr += s->line_inc;
  487. }
  488. } else {
  489. flags = bytestream2_get_le16(&s->stream_ptr);
  490. for (y = 0; y < 8; y += 2) {
  491. for (x = 0; x < 8; x += 2, flags >>= 1) {
  492. pixel_ptr[x ] =
  493. pixel_ptr[x + 1 ] =
  494. pixel_ptr[x + s->stride] =
  495. pixel_ptr[x + 1 + s->stride] = P[flags & 1];
  496. }
  497. pixel_ptr += s->stride * 2;
  498. }
  499. }
  500. return 0;
  501. }
  502. static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
  503. {
  504. int x, y;
  505. uint16_t P[4];
  506. unsigned int flags = 0;
  507. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  508. /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
  509. * either top and bottom or left and right halves */
  510. P[0] = bytestream2_get_le16(&s->stream_ptr);
  511. P[1] = bytestream2_get_le16(&s->stream_ptr);
  512. if (!(P[0] & 0x8000)) {
  513. for (y = 0; y < 16; y++) {
  514. // new values for each 4x4 block
  515. if (!(y & 3)) {
  516. if (y) {
  517. P[0] = bytestream2_get_le16(&s->stream_ptr);
  518. P[1] = bytestream2_get_le16(&s->stream_ptr);
  519. }
  520. flags = bytestream2_get_le16(&s->stream_ptr);
  521. }
  522. for (x = 0; x < 4; x++, flags >>= 1)
  523. *pixel_ptr++ = P[flags & 1];
  524. pixel_ptr += s->stride - 4;
  525. // switch to right half
  526. if (y == 7) pixel_ptr -= 8 * s->stride - 4;
  527. }
  528. } else {
  529. flags = bytestream2_get_le32(&s->stream_ptr);
  530. P[2] = bytestream2_get_le16(&s->stream_ptr);
  531. P[3] = bytestream2_get_le16(&s->stream_ptr);
  532. if (!(P[2] & 0x8000)) {
  533. /* vertical split; left & right halves are 2-color encoded */
  534. for (y = 0; y < 16; y++) {
  535. for (x = 0; x < 4; x++, flags >>= 1)
  536. *pixel_ptr++ = P[flags & 1];
  537. pixel_ptr += s->stride - 4;
  538. // switch to right half
  539. if (y == 7) {
  540. pixel_ptr -= 8 * s->stride - 4;
  541. P[0] = P[2];
  542. P[1] = P[3];
  543. flags = bytestream2_get_le32(&s->stream_ptr);
  544. }
  545. }
  546. } else {
  547. /* horizontal split; top & bottom halves are 2-color encoded */
  548. for (y = 0; y < 8; y++) {
  549. if (y == 4) {
  550. P[0] = P[2];
  551. P[1] = P[3];
  552. flags = bytestream2_get_le32(&s->stream_ptr);
  553. }
  554. for (x = 0; x < 8; x++, flags >>= 1)
  555. *pixel_ptr++ = P[flags & 1];
  556. pixel_ptr += s->line_inc;
  557. }
  558. }
  559. }
  560. /* report success */
  561. return 0;
  562. }
  563. static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
  564. {
  565. int x, y;
  566. uint16_t P[4];
  567. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  568. /* 4-color encoding */
  569. for (x = 0; x < 4; x++)
  570. P[x] = bytestream2_get_le16(&s->stream_ptr);
  571. if (!(P[0] & 0x8000)) {
  572. if (!(P[2] & 0x8000)) {
  573. /* 1 of 4 colors for each pixel */
  574. for (y = 0; y < 8; y++) {
  575. /* get the next set of 8 2-bit flags */
  576. int flags = bytestream2_get_le16(&s->stream_ptr);
  577. for (x = 0; x < 8; x++, flags >>= 2)
  578. *pixel_ptr++ = P[flags & 0x03];
  579. pixel_ptr += s->line_inc;
  580. }
  581. } else {
  582. uint32_t flags;
  583. /* 1 of 4 colors for each 2x2 block */
  584. flags = bytestream2_get_le32(&s->stream_ptr);
  585. for (y = 0; y < 8; y += 2) {
  586. for (x = 0; x < 8; x += 2, flags >>= 2) {
  587. pixel_ptr[x ] =
  588. pixel_ptr[x + 1 ] =
  589. pixel_ptr[x + s->stride] =
  590. pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
  591. }
  592. pixel_ptr += s->stride * 2;
  593. }
  594. }
  595. } else {
  596. uint64_t flags;
  597. /* 1 of 4 colors for each 2x1 or 1x2 block */
  598. flags = bytestream2_get_le64(&s->stream_ptr);
  599. if (!(P[2] & 0x8000)) {
  600. for (y = 0; y < 8; y++) {
  601. for (x = 0; x < 8; x += 2, flags >>= 2) {
  602. pixel_ptr[x ] =
  603. pixel_ptr[x + 1] = P[flags & 0x03];
  604. }
  605. pixel_ptr += s->stride;
  606. }
  607. } else {
  608. for (y = 0; y < 8; y += 2) {
  609. for (x = 0; x < 8; x++, flags >>= 2) {
  610. pixel_ptr[x ] =
  611. pixel_ptr[x + s->stride] = P[flags & 0x03];
  612. }
  613. pixel_ptr += s->stride * 2;
  614. }
  615. }
  616. }
  617. /* report success */
  618. return 0;
  619. }
  620. static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
  621. {
  622. int x, y;
  623. uint16_t P[8];
  624. int flags = 0;
  625. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  626. for (x = 0; x < 4; x++)
  627. P[x] = bytestream2_get_le16(&s->stream_ptr);
  628. /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
  629. * either top and bottom or left and right halves */
  630. if (!(P[0] & 0x8000)) {
  631. /* 4-color encoding for each quadrant */
  632. for (y = 0; y < 16; y++) {
  633. // new values for each 4x4 block
  634. if (!(y & 3)) {
  635. if (y)
  636. for (x = 0; x < 4; x++)
  637. P[x] = bytestream2_get_le16(&s->stream_ptr);
  638. flags = bytestream2_get_le32(&s->stream_ptr);
  639. }
  640. for (x = 0; x < 4; x++, flags >>= 2)
  641. *pixel_ptr++ = P[flags & 0x03];
  642. pixel_ptr += s->stride - 4;
  643. // switch to right half
  644. if (y == 7) pixel_ptr -= 8 * s->stride - 4;
  645. }
  646. } else {
  647. // vertical split?
  648. int vert;
  649. uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
  650. for (x = 4; x < 8; x++)
  651. P[x] = bytestream2_get_le16(&s->stream_ptr);
  652. vert = !(P[4] & 0x8000);
  653. /* 4-color encoding for either left and right or top and bottom
  654. * halves */
  655. for (y = 0; y < 16; y++) {
  656. for (x = 0; x < 4; x++, flags >>= 2)
  657. *pixel_ptr++ = P[flags & 0x03];
  658. if (vert) {
  659. pixel_ptr += s->stride - 4;
  660. // switch to right half
  661. if (y == 7) pixel_ptr -= 8 * s->stride - 4;
  662. } else if (y & 1) pixel_ptr += s->line_inc;
  663. // load values for second half
  664. if (y == 7) {
  665. memcpy(P, P + 4, 8);
  666. flags = bytestream2_get_le64(&s->stream_ptr);
  667. }
  668. }
  669. }
  670. /* report success */
  671. return 0;
  672. }
  673. static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
  674. {
  675. int x, y;
  676. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  677. /* 64-color encoding (each pixel in block is a different color) */
  678. for (y = 0; y < 8; y++) {
  679. for (x = 0; x < 8; x++)
  680. pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
  681. pixel_ptr += s->stride;
  682. }
  683. /* report success */
  684. return 0;
  685. }
  686. static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
  687. {
  688. int x, y;
  689. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  690. /* 16-color block encoding: each 2x2 block is a different color */
  691. for (y = 0; y < 8; y += 2) {
  692. for (x = 0; x < 8; x += 2) {
  693. pixel_ptr[x ] =
  694. pixel_ptr[x + 1 ] =
  695. pixel_ptr[x + s->stride] =
  696. pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
  697. }
  698. pixel_ptr += s->stride * 2;
  699. }
  700. /* report success */
  701. return 0;
  702. }
  703. static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
  704. {
  705. int x, y;
  706. uint16_t P[2];
  707. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  708. /* 4-color block encoding: each 4x4 block is a different color */
  709. for (y = 0; y < 8; y++) {
  710. if (!(y & 3)) {
  711. P[0] = bytestream2_get_le16(&s->stream_ptr);
  712. P[1] = bytestream2_get_le16(&s->stream_ptr);
  713. }
  714. for (x = 0; x < 8; x++)
  715. pixel_ptr[x] = P[x >> 2];
  716. pixel_ptr += s->stride;
  717. }
  718. /* report success */
  719. return 0;
  720. }
  721. static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
  722. {
  723. int x, y;
  724. uint16_t pix;
  725. uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
  726. /* 1-color encoding: the whole block is 1 solid color */
  727. pix = bytestream2_get_le16(&s->stream_ptr);
  728. for (y = 0; y < 8; y++) {
  729. for (x = 0; x < 8; x++)
  730. pixel_ptr[x] = pix;
  731. pixel_ptr += s->stride;
  732. }
  733. /* report success */
  734. return 0;
  735. }
  736. static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
  737. ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
  738. ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
  739. ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
  740. ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
  741. ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
  742. ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
  743. ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
  744. ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
  745. };
  746. static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
  747. ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
  748. ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
  749. ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
  750. ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
  751. ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
  752. ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
  753. ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
  754. ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
  755. };
  756. static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
  757. {
  758. int line;
  759. if (!opcode) {
  760. for (line = 0; line < 8; ++line) {
  761. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  762. s->pixel_ptr += s->stride;
  763. }
  764. } else {
  765. /* Don't try to copy second_last_frame data on the first frames */
  766. if (s->avctx->frame_number > 2)
  767. copy_from(s, s->second_last_frame, frame, 0, 0);
  768. }
  769. }
  770. static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
  771. {
  772. int off_x, off_y;
  773. if (opcode < 0) {
  774. off_x = ((uint16_t)opcode - 0xC000) % frame->width;
  775. off_y = ((uint16_t)opcode - 0xC000) / frame->width;
  776. copy_from(s, s->last_frame, frame, off_x, off_y);
  777. } else if (opcode > 0) {
  778. off_x = ((uint16_t)opcode - 0x4000) % frame->width;
  779. off_y = ((uint16_t)opcode - 0x4000) / frame->width;
  780. copy_from(s, frame, frame, off_x, off_y);
  781. }
  782. }
  783. static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
  784. ipvideo_format_06_firstpass, ipvideo_format_06_secondpass,
  785. };
  786. static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
  787. {
  788. int pass, x, y;
  789. int16_t opcode;
  790. GetByteContext decoding_map_ptr;
  791. /* this is PAL8, so make the palette available */
  792. memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
  793. s->stride = frame->linesize[0];
  794. s->line_inc = s->stride - 8;
  795. s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
  796. + (s->avctx->width - 8) * (1 + s->is_16bpp);
  797. bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
  798. for (pass = 0; pass < 2; ++pass) {
  799. bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
  800. for (y = 0; y < s->avctx->height; y += 8) {
  801. for (x = 0; x < s->avctx->width; x += 8) {
  802. opcode = bytestream2_get_le16(&decoding_map_ptr);
  803. ff_tlog(s->avctx,
  804. " block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
  805. x, y, opcode, bytestream2_tell(&s->stream_ptr));
  806. s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
  807. ipvideo_format_06_passes[pass](s, frame, opcode);
  808. }
  809. }
  810. }
  811. if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
  812. av_log(s->avctx, AV_LOG_DEBUG,
  813. "decode finished with %d bytes left over\n",
  814. bytestream2_get_bytes_left(&s->stream_ptr));
  815. }
  816. }
  817. static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
  818. {
  819. int line;
  820. if (!opcode) {
  821. for (line = 0; line < 8; ++line) {
  822. bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
  823. s->pixel_ptr += s->stride;
  824. }
  825. }
  826. }
  827. static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
  828. {
  829. int off_x, off_y;
  830. if (opcode < 0) {
  831. off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
  832. off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
  833. copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
  834. } else if (opcode > 0) {
  835. off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
  836. off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
  837. copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
  838. }
  839. }
  840. static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
  841. ipvideo_format_10_firstpass, ipvideo_format_10_secondpass,
  842. };
/*
 * Decode a format 0x10 frame.
 *
 * The frame is first rendered into an internal scratch frame
 * (s->cur_decode_frame) in two passes over the decoding map, then
 * composited 8x8-block-wise into the output frame: blocks flagged by the
 * skip map come from the scratch frame, the rest are carried over from
 * the previous output frame.
 *
 * The skip map is consumed as 16-bit LE words used like a shift register:
 * after each block the current word is doubled (skip *= 2), moving the
 * next flag bit into the sign position; a coded block is signalled by
 * skip being negative (or otherwise <= 0) but not the bare end marker
 * -0x8000 and not 0, either of which triggers loading the next word.
 * NOTE(review): this bit-serial interpretation is inferred from the code
 * shape — confirm against the MVE format documentation.
 */
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y, changed_block;
    int16_t opcode, skip;
    GetByteContext decoding_map_ptr;
    GetByteContext skip_map_ptr;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
    bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);

    /* two passes over the same maps; see ipvideo_format_10_passes */
    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
        skip = bytestream2_get_le16(&skip_map_ptr);

        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];

                /* advance through the skip map until this block's flag is found */
                while (skip <= 0) {
                    if (skip != -0x8000 && skip) {
                        /* coded block: fetch its opcode and run the current pass */
                        opcode = bytestream2_get_le16(&decoding_map_ptr);
                        ipvideo_format_10_passes[pass](s, frame, opcode);
                        break;
                    }
                    /* word exhausted (or zero): reload, bailing out on truncated input */
                    if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                        return;
                    skip = bytestream2_get_le16(&skip_map_ptr);
                }
                skip *= 2; /* shift the next flag bit toward the sign position */
            }
        }
    }

    /* composition pass: build the visible frame from scratch + previous output */
    bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
    skip = bytestream2_get_le16(&skip_map_ptr);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            changed_block = 0;
            s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];

            while (skip <= 0) {
                if (skip != -0x8000 && skip) {
                    changed_block = 1;
                    break;
                }
                if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                    return;
                skip = bytestream2_get_le16(&skip_map_ptr);
            }

            if (changed_block) {
                copy_from(s, s->cur_decode_frame, frame, 0, 0);
            } else {
                /* Don't try to copy last_frame data on the first frame */
                if (s->avctx->frame_number)
                    copy_from(s, s->last_frame, frame, 0, 0);
            }
            skip *= 2;
        }
    }

    /* the freshly built scratch frame becomes next frame's reference */
    FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
  911. static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
  912. {
  913. int x, y;
  914. unsigned char opcode;
  915. int ret;
  916. GetBitContext gb;
  917. bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
  918. if (!s->is_16bpp) {
  919. /* this is PAL8, so make the palette available */
  920. memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
  921. s->stride = frame->linesize[0];
  922. } else {
  923. s->stride = frame->linesize[0] >> 1;
  924. s->mv_ptr = s->stream_ptr;
  925. bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
  926. }
  927. s->line_inc = s->stride - 8;
  928. s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
  929. + (s->avctx->width - 8) * (1 + s->is_16bpp);
  930. init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
  931. for (y = 0; y < s->avctx->height; y += 8) {
  932. for (x = 0; x < s->avctx->width; x += 8) {
  933. if (get_bits_left(&gb) < 4)
  934. return;
  935. opcode = get_bits(&gb, 4);
  936. ff_tlog(s->avctx,
  937. " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
  938. x, y, opcode, bytestream2_tell(&s->stream_ptr));
  939. if (!s->is_16bpp) {
  940. s->pixel_ptr = frame->data[0] + x
  941. + y*frame->linesize[0];
  942. ret = ipvideo_decode_block[opcode](s, frame);
  943. } else {
  944. s->pixel_ptr = frame->data[0] + x*2
  945. + y*frame->linesize[0];
  946. ret = ipvideo_decode_block16[opcode](s, frame);
  947. }
  948. if (ret != 0) {
  949. av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
  950. s->avctx->frame_number, x, y);
  951. return;
  952. }
  953. }
  954. }
  955. if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
  956. av_log(s->avctx, AV_LOG_DEBUG,
  957. "decode finished with %d bytes left over\n",
  958. bytestream2_get_bytes_left(&s->stream_ptr));
  959. }
  960. }
  961. static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
  962. {
  963. IpvideoContext *s = avctx->priv_data;
  964. s->avctx = avctx;
  965. s->is_16bpp = avctx->bits_per_coded_sample == 16;
  966. avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
  967. ff_hpeldsp_init(&s->hdsp, avctx->flags);
  968. s->last_frame = av_frame_alloc();
  969. s->second_last_frame = av_frame_alloc();
  970. s->cur_decode_frame = av_frame_alloc();
  971. s->prev_decode_frame = av_frame_alloc();
  972. if (!s->last_frame || !s->second_last_frame ||
  973. !s->cur_decode_frame || !s->prev_decode_frame) {
  974. return AVERROR(ENOMEM);
  975. }
  976. s->cur_decode_frame->width = avctx->width;
  977. s->prev_decode_frame->width = avctx->width;
  978. s->cur_decode_frame->height = avctx->height;
  979. s->prev_decode_frame->height = avctx->height;
  980. s->cur_decode_frame->format = avctx->pix_fmt;
  981. s->prev_decode_frame->format = avctx->pix_fmt;
  982. return 0;
  983. }
/*
 * Decode one Interplay MVE video packet into an AVFrame.
 *
 * Packet layout (8-byte header, little-endian fields):
 *   byte 0     frame format (0x06, 0x10 or 0x11)
 *   byte 1     "send buffer" flag: non-zero if this frame is to be shown
 *   bytes 2-3  size of the video (pixel/opcode) data
 *   bytes 4-5  size of the decoding map
 *   bytes 6-7  size of the skip map (only meaningful for format 0x10)
 *
 * Returns buf_size (packet fully consumed) on success, or a negative
 * AVERROR code.
 */
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;
    int send_buffer;
    int frame_format;
    int video_data_size;

    /* a parameter change invalidates all kept frames; drop them so the
     * scratch frames are re-allocated with the new geometry below */
    if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
        av_frame_unref(s->last_frame);
        av_frame_unref(s->second_last_frame);
        av_frame_unref(s->cur_decode_frame);
        av_frame_unref(s->prev_decode_frame);
    }

    /* lazily (re-)allocate the internal scratch frames used by format 0x10 */
    if (!s->cur_decode_frame->data[0]) {
        ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
        if (ret < 0)
            return ret;

        ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
        if (ret < 0) {
            av_frame_unref(s->cur_decode_frame);
            return ret;
        }
    }

    if (buf_size < 8)
        return AVERROR_INVALIDDATA;

    frame_format         = AV_RL8(buf);
    send_buffer          = AV_RL8(buf + 1);
    video_data_size      = AV_RL16(buf + 2);
    s->decoding_map_size = AV_RL16(buf + 4);
    s->skip_map_size     = AV_RL16(buf + 6);

    switch (frame_format) {
    case 0x06:
        /* format 0x06 must not carry separate maps — its decoding map is
         * embedded at the top of the pixel data instead */
        if (s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }
        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }
        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }
        /* Decoding map for 0x06 frame format is at the top of pixeldata */
        s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
        s->decoding_map = buf + 8 + 14; /* 14 bits of op data */
        video_data_size -= s->decoding_map_size + 14;
        if (video_data_size <= 0 || s->decoding_map_size == 0)
            return AVERROR_INVALIDDATA;

        if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);

        break;

    case 0x10:
        /* format 0x10 requires both a decoding map and a skip map,
         * stored after the video data */
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }
        if (! s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }
        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;
        s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;

        break;

    case 0x11:
        /* format 0x11: decoding map after the video data, no skip map */
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }
        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;

        break;

    default:
        /* NOTE(review): no return here — an unsupported frame type still
         * falls through to the buffer allocation and frame shuffle below
         * (the second switch simply decodes nothing). Confirm this
         * fall-through is intentional. */
        av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
    }

    /* ensure we can't overread the packet */
    if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
        av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
        return ret;

    /* PAL8 path: pick up any palette delivered as packet side data */
    if (!s->is_16bpp) {
        buffer_size_t size;
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, &size);
        if (pal && size == AVPALETTE_SIZE) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        } else if (pal) {
            av_log(avctx, AV_LOG_ERROR, "Palette size %d is wrong\n", size);
        }
    }

    switch (frame_format) {
    case 0x06:
        ipvideo_decode_format_06_opcodes(s, frame);
        break;
    case 0x10:
        ipvideo_decode_format_10_opcodes(s, frame);
        break;
    case 0x11:
        ipvideo_decode_format_11_opcodes(s, frame);
        break;
    }

    /* frames with a zero send flag are decoded (they update references)
     * but not returned to the caller */
    *got_frame = send_buffer;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}
  1116. static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
  1117. {
  1118. IpvideoContext *s = avctx->priv_data;
  1119. av_frame_free(&s->last_frame);
  1120. av_frame_free(&s->second_last_frame);
  1121. av_frame_free(&s->cur_decode_frame);
  1122. av_frame_free(&s->prev_decode_frame);
  1123. return 0;
  1124. }
/* Codec registration: direct rendering allowed, mid-stream parameter
 * changes accepted, and close() is invoked even when init() fails
 * (FF_CODEC_CAP_INIT_CLEANUP). */
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
};