/*
 * Motion Pixels Video Decoder
 * Copyright (c) 2008 Gregory Montoir (cyx@users.sourceforge.net)
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "avcodec.h"
#include "get_bits.h"
#include "dsputil.h"

#define MAX_HUFF_CODES 16

#include "motionpixels_tablegen.h"

typedef struct HuffCode {
    int code;
    uint8_t size;
    uint8_t delta;
} HuffCode;

typedef struct MotionPixelsContext {
    AVCodecContext *avctx;
    AVFrame frame;
    DSPContext dsp;
    uint8_t *changes_map;
    int offset_bits_len;
    int codes_count, current_codes_count;
    int max_codes_bits;
    HuffCode codes[MAX_HUFF_CODES];
    VLC vlc;
    YuvPixel *vpt, *hpt;
    uint8_t gradient_scale[3];
    uint8_t *bswapbuf;
    int bswapbuf_size;
} MotionPixelsContext;

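/* Set up the decoder: validate the extradata, build the shared RGB555<->YUV
 * lookup tables, and allocate the change map plus the vertical (per-row) and
 * horizontal (per-4x4-block) predictor tables. */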
static av_cold int mp_decode_init(AVCodecContext *avctx)
{
    MotionPixelsContext *mp = avctx->priv_data;
    int w4 = (avctx->width  + 3) & ~3;
    int h4 = (avctx->height + 3) & ~3;

    if (avctx->extradata_size < 2) {
        av_log(avctx, AV_LOG_ERROR, "extradata too small\n");
        return AVERROR_INVALIDDATA;
    }

    motionpixels_tableinit();
    mp->avctx = avctx;
    ff_dsputil_init(&mp->dsp, avctx);
    mp->changes_map = av_mallocz(avctx->width * h4);
    mp->offset_bits_len = av_log2(avctx->width * avctx->height) + 1;
    mp->vpt = av_mallocz(avctx->height * sizeof(YuvPixel));
    mp->hpt = av_mallocz(h4 * w4 / 16 * sizeof(YuvPixel));
    avctx->pix_fmt = AV_PIX_FMT_RGB555;
    avcodec_get_frame_defaults(&mp->frame);
    return 0;
}

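/* Read 'count' changed rectangles from the bitstream. Each entry carries a
 * linear pixel offset, a width and a height (bits_len bits each, stored minus
 * one) and, when read_color is set, a 15-bit RGB555 fill color. The run width
 * is recorded per row in changes_map; colored rectangles are painted straight
 * into the frame. */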
static void mp_read_changes_map(MotionPixelsContext *mp, GetBitContext *gb, int count, int bits_len, int read_color)
{
    uint16_t *pixels;
    int offset, w, h, color = 0, x, y, i;

    while (count--) {
        offset = get_bits_long(gb, mp->offset_bits_len);
        w      = get_bits(gb, bits_len) + 1;
        h      = get_bits(gb, bits_len) + 1;
        if (read_color)
            color = get_bits(gb, 15);
        x = offset % mp->avctx->width;
        y = offset / mp->avctx->width;
        if (y >= mp->avctx->height)
            continue;
        w = FFMIN(w, mp->avctx->width  - x);
        h = FFMIN(h, mp->avctx->height - y);
        pixels = (uint16_t *)&mp->frame.data[0][y * mp->frame.linesize[0] + x * 2];
        while (h--) {
            mp->changes_map[offset] = w;
            if (read_color)
                for (i = 0; i < w; ++i)
                    pixels[i] = color;
            offset += mp->avctx->width;
            pixels += mp->frame.linesize[0] / 2;
        }
    }
}

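/* Recursively rebuild the Huffman code set from the bitstream: a 1 bit splits
 * the current code into two longer codes (one branch is parsed by recursion,
 * the other continues in the loop); a 0 bit terminates and stores the
 * accumulated code. */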
static int mp_get_code(MotionPixelsContext *mp, GetBitContext *gb, int size, int code)
{
    while (get_bits1(gb)) {
        ++size;
        if (size > mp->max_codes_bits) {
            av_log(mp->avctx, AV_LOG_ERROR, "invalid code size %d/%d\n", size, mp->max_codes_bits);
            return AVERROR_INVALIDDATA;
        }
        code <<= 1;
        if (mp_get_code(mp, gb, size, code + 1) < 0)
            return AVERROR_INVALIDDATA;
    }
    if (mp->current_codes_count >= MAX_HUFF_CODES) {
        av_log(mp->avctx, AV_LOG_ERROR, "too many codes\n");
        return AVERROR_INVALIDDATA;
    }
    mp->codes[mp->current_codes_count  ].code = code;
    mp->codes[mp->current_codes_count++].size = size;
    return 0;
}

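/* Read the Huffman table description: with a single code only its 4-bit delta
 * is stored; otherwise the maximum code length and one 4-bit delta per code
 * are read, and the code set itself is rebuilt with mp_get_code(). */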
static int mp_read_codes_table(MotionPixelsContext *mp, GetBitContext *gb)
{
    if (mp->codes_count == 1) {
        mp->codes[0].delta = get_bits(gb, 4);
    } else {
        int i;
        int ret;

        mp->max_codes_bits = get_bits(gb, 4);
        for (i = 0; i < mp->codes_count; ++i)
            mp->codes[i].delta = get_bits(gb, 4);
        mp->current_codes_count = 0;
        if ((ret = mp_get_code(mp, gb, 0, 0)) < 0)
            return ret;
        if (mp->current_codes_count < mp->codes_count) {
            av_log(mp->avctx, AV_LOG_ERROR, "too few codes\n");
            return AVERROR_INVALIDDATA;
        }
    }
    return 0;
}

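/* Turn a decoded symbol into a signed delta for one YUV component: the delta
 * is (v - 7) scaled by the running gradient scale; symbols 0 and 14 set the
 * scale to 2 for the next delta of that component, anything else resets it
 * to 1. */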
static int mp_gradient(MotionPixelsContext *mp, int component, int v)
{
    int delta;

    delta = (v - 7) * mp->gradient_scale[component];
    mp->gradient_scale[component] = (v == 0 || v == 14) ? 2 : 1;
    return delta;
}

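/* Pixel access helpers: convert between the RGB555 frame buffer and the
 * decoder's YuvPixel working values via the precomputed lookup tables. */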
static YuvPixel mp_get_yuv_from_rgb(MotionPixelsContext *mp, int x, int y)
{
    int color;

    color = *(uint16_t *)&mp->frame.data[0][y * mp->frame.linesize[0] + x * 2];
    return mp_rgb_yuv_table[color];
}

static void mp_set_rgb_from_yuv(MotionPixelsContext *mp, int x, int y, const YuvPixel *p)
{
    int color;

    color = mp_yuv_to_rgb(p->y, p->v, p->u, 1);
    *(uint16_t *)&mp->frame.data[0][y * mp->frame.linesize[0] + x * 2] = color;
}

static int mp_get_vlc(MotionPixelsContext *mp, GetBitContext *gb)
{
    int i;

    i = (mp->codes_count == 1) ? 0 : get_vlc2(gb, mp->vlc.table, mp->max_codes_bits, 1);
    return mp->codes[i].delta;
}

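/* Decode one scanline. Runs marked in changes_map were already painted from
 * the changes map and are skipped, reseeding the predictor from the last
 * pixel of the run; all other pixels are predicted from the previous value
 * plus Huffman-coded gradient deltas. One chroma sample per 4x4 block is
 * carried through the hpt table. */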
static void mp_decode_line(MotionPixelsContext *mp, GetBitContext *gb, int y)
{
    YuvPixel p;
    const int y0 = y * mp->avctx->width;
    int w, i, x = 0;

    p = mp->vpt[y];
    if (mp->changes_map[y0 + x] == 0) {
        memset(mp->gradient_scale, 1, sizeof(mp->gradient_scale));
        ++x;
    }
    while (x < mp->avctx->width) {
        w = mp->changes_map[y0 + x];
        if (w != 0) {
            if ((y & 3) == 0) {
                if (mp->changes_map[y0 + x + mp->avctx->width]     < w ||
                    mp->changes_map[y0 + x + mp->avctx->width * 2] < w ||
                    mp->changes_map[y0 + x + mp->avctx->width * 3] < w) {
                    for (i = (x + 3) & ~3; i < x + w; i += 4) {
                        mp->hpt[((y / 4) * mp->avctx->width + i) / 4] = mp_get_yuv_from_rgb(mp, i, y);
                    }
                }
            }
            x += w;
            memset(mp->gradient_scale, 1, sizeof(mp->gradient_scale));
            p = mp_get_yuv_from_rgb(mp, x - 1, y);
        } else {
            p.y += mp_gradient(mp, 0, mp_get_vlc(mp, gb));
            p.y = av_clip(p.y, 0, 31);
            if ((x & 3) == 0) {
                if ((y & 3) == 0) {
                    p.v += mp_gradient(mp, 1, mp_get_vlc(mp, gb));
                    p.v = av_clip(p.v, -32, 31);
                    p.u += mp_gradient(mp, 2, mp_get_vlc(mp, gb));
                    p.u = av_clip(p.u, -32, 31);
                    mp->hpt[((y / 4) * mp->avctx->width + x) / 4] = p;
                } else {
                    p.v = mp->hpt[((y / 4) * mp->avctx->width + x) / 4].v;
                    p.u = mp->hpt[((y / 4) * mp->avctx->width + x) / 4].u;
                }
            }
            mp_set_rgb_from_yuv(mp, x, y, &p);
            ++x;
        }
    }
}

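/* Decode the whole frame: first the leftmost column top to bottom, seeding
 * the per-row predictors in vpt, then every scanline, even rows first and
 * odd rows second. */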
static void mp_decode_frame_helper(MotionPixelsContext *mp, GetBitContext *gb)
{
    YuvPixel p;
    int y, y0;

    av_assert1(mp->changes_map[0]);
    for (y = 0; y < mp->avctx->height; ++y) {
        if (mp->changes_map[y * mp->avctx->width] != 0) {
            memset(mp->gradient_scale, 1, sizeof(mp->gradient_scale));
            p = mp_get_yuv_from_rgb(mp, 0, y);
        } else {
            p.y += mp_gradient(mp, 0, mp_get_vlc(mp, gb));
            p.y = av_clip(p.y, 0, 31);
            if ((y & 3) == 0) {
                p.v += mp_gradient(mp, 1, mp_get_vlc(mp, gb));
                p.v = av_clip(p.v, -32, 31);
                p.u += mp_gradient(mp, 2, mp_get_vlc(mp, gb));
                p.u = av_clip(p.u, -32, 31);
            }
            mp->vpt[y] = p;
            mp_set_rgb_from_yuv(mp, 0, y, &p);
        }
    }
    for (y0 = 0; y0 < 2; ++y0)
        for (y = y0; y < mp->avctx->height; y += 2)
            mp_decode_line(mp, gb, y);
}

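/* Decode one packet. The payload is stored as little-endian 32-bit words but
 * is read MSB first, so it is byte-swapped into bswapbuf before bit reading.
 * The frame is reconstructed on top of the previous one: the changes maps are
 * applied first, then the Huffman table is read and the remaining pixels are
 * decoded. */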
static int mp_decode_frame(AVCodecContext *avctx,
                           void *data, int *got_frame,
                           AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MotionPixelsContext *mp = avctx->priv_data;
    GetBitContext gb;
    int i, count1, count2, sz, ret;

    mp->frame.reference = 3;
    mp->frame.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE | FF_BUFFER_HINTS_REUSABLE;
    if ((ret = avctx->reget_buffer(avctx, &mp->frame)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
        return ret;
    }

    /* le32 bitstream msb first */
    av_fast_malloc(&mp->bswapbuf, &mp->bswapbuf_size, buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!mp->bswapbuf)
        return AVERROR(ENOMEM);
    mp->dsp.bswap_buf((uint32_t *)mp->bswapbuf, (const uint32_t *)buf, buf_size / 4);
    if (buf_size & 3)
        memcpy(mp->bswapbuf + (buf_size & ~3), buf + (buf_size & ~3), buf_size & 3);
    memset(mp->bswapbuf + buf_size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
    init_get_bits(&gb, mp->bswapbuf, buf_size * 8);

    memset(mp->changes_map, 0, avctx->width * avctx->height);
    for (i = !(avctx->extradata[1] & 2); i < 2; ++i) {
        count1 = get_bits(&gb, 12);
        count2 = get_bits(&gb, 12);
        mp_read_changes_map(mp, &gb, count1, 8, i);
        mp_read_changes_map(mp, &gb, count2, 4, i);
    }

    mp->codes_count = get_bits(&gb, 4);
    if (mp->codes_count == 0)
        goto end;

    if (mp->changes_map[0] == 0) {
        *(uint16_t *)mp->frame.data[0] = get_bits(&gb, 15);
        mp->changes_map[0] = 1;
    }
    if (mp_read_codes_table(mp, &gb) < 0)
        goto end;

    sz = get_bits(&gb, 18);
    if (avctx->extradata[0] != 5)
        sz += get_bits(&gb, 18);
    if (sz == 0)
        goto end;

    if (mp->max_codes_bits <= 0)
        goto end;
    if (init_vlc(&mp->vlc, mp->max_codes_bits, mp->codes_count,
                 &mp->codes[0].size, sizeof(HuffCode), 1,
                 &mp->codes[0].code, sizeof(HuffCode), 4, 0))
        goto end;
    mp_decode_frame_helper(mp, &gb);
    ff_free_vlc(&mp->vlc);

end:
    *got_frame       = 1;
    *(AVFrame *)data = mp->frame;
    return buf_size;
}

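/* Free the decoder's buffers and release the reference frame. */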
static av_cold int mp_decode_end(AVCodecContext *avctx)
{
    MotionPixelsContext *mp = avctx->priv_data;

    av_freep(&mp->changes_map);
    av_freep(&mp->vpt);
    av_freep(&mp->hpt);
    av_freep(&mp->bswapbuf);
    if (mp->frame.data[0])
        avctx->release_buffer(avctx, &mp->frame);

    return 0;
}

AVCodec ff_motionpixels_decoder = {
    .name           = "motionpixels",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MOTIONPIXELS,
    .priv_data_size = sizeof(MotionPixelsContext),
    .init           = mp_decode_init,
    .close          = mp_decode_end,
    .decode         = mp_decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("Motion Pixels video"),
};