/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/interplayvideo.c
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
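
/*
 * Buffer layout, as implied by the parsing code below: the packet starts with
 * the decoding map (width * height / (8 * 8 * 2) bytes, i.e. one 4-bit opcode
 * per 8x8 block), immediately followed by the video data; the per-block
 * parameter/pixel stream begins 14 bytes into that video part.
 */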
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define ALT_BITSTREAM_READER_LE
#include "get_bits.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;
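
/* Bail out of the current block decoder if fewer than n bytes remain in the
 * opcode/parameter stream. */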
#define CHECK_STREAM_PTR(n) \
    if (s->stream_end - s->stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               s->stream_ptr + n, s->stream_end); \
        return -1; \
    }
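
/* Copy one 8x8 block into the current frame from the reference frame src,
 * displaced by (delta_x, delta_y), after checking that the source block lies
 * within the frame bounds. */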
static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->stride + delta_x;

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return -1;
    }

    s->dsp.put_pixels_tab[1][0](s->pixel_ptr, src->data[0] + motion_offset, s->stride, 8);

    return 0;
}
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}
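
/* Opcodes 0x2 and 0x3 pack a motion vector into one byte: values below 56 map
 * to x = 8 + (B % 7), y = B / 7 (a vector to the right and slightly down);
 * values 56..255 map to x = -14 + ((B - 56) % 29), y = 8 + ((B - 56) / 29).
 * Opcode 0x3 negates the resulting vector. */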
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    CHECK_STREAM_PTR(1);

    B = *s->stream_ptr++;
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
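
/* Opcode 0x7: 2-color encoding. If P[0] <= P[1], each of the 64 pixels gets
 * its own 1-bit flag choosing between the two colors; otherwise a single
 * 16-bit flag word paints the block as sixteen 2x2 cells. */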
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++ | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        CHECK_STREAM_PTR(14);
        s->stream_ptr -= 2;

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                flags = bytestream_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            flags = bytestream_get_le32(&s->stream_ptr);

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];

                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0] = *s->stream_ptr++; P[1] = *s->stream_ptr++;
                    flags = bytestream_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++ | 0x100;

                for (; flags != 1; flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            CHECK_STREAM_PTR(16);

            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            CHECK_STREAM_PTR(4);

            flags = bytestream_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        flags = bytestream_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];
    int flags = 0;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(24);

    if (s->stream_ptr[0] <= s->stream_ptr[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        CHECK_STREAM_PTR(32);

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert = s->stream_ptr[12] <= s->stream_ptr[13];
        uint64_t flags = 0;

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            // load values for each half
            if (!(y & 7)) {
                memcpy(P, s->stream_ptr, 4);
                s->stream_ptr += 4;
                flags = bytestream_get_le64(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(4);

    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = *s->stream_ptr++;
            P[1] = *s->stream_ptr++;
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
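
/* Dispatch table: the 4-bit opcode read from the decoding map selects one of
 * these 16 block decoders. */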
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
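
/* Walk the frame one 8x8 block at a time, left to right and top to bottom,
 * reading a 4-bit opcode per block from the decoding map and invoking the
 * matching block decoder from the table above. */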
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    unsigned char opcode;
    int ret;
    static int frame = 0;
    GetBitContext gb;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;

    /* this is PAL8, so make the palette available */
    memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

    s->stride = s->current_frame.linesize[0];
    s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
    s->stream_end = s->buf + s->size;
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->stride
                                   + s->avctx->width - 8;

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < (s->stride * s->avctx->height); y += s->stride * 8) {
        for (x = y; x < y + s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                            x - y, y / s->stride, opcode, s->stream_ptr);

            s->pixel_ptr = s->current_frame.data[0] + x;
            ret = ipvideo_decode_block[opcode](s);
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x - y, y / s->stride);
                return;
            }
        }
    }
    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    if (s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    avctx->pix_fmt = PIX_FMT_PAL8;
    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    s->current_frame.data[0] = s->last_frame.data[0] =
    s->second_last_frame.data[0] = NULL;

    return 0;
}
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}
AVCodec interplay_video_decoder = {
    "interplayvideo",
    CODEC_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};