/*
 * Flash Compatible Streaming Format demuxer
 * Copyright (c) 2000 Fabrice Bellard
 * Copyright (c) 2003 Tinic Uro
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "libavutil/channel_layout.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "swf.h"
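
/* Mapping from the 4-bit SWF sound-format code carried in DefineSound and
 * SoundStreamHead tags to FFmpeg codec IDs. */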
static const AVCodecTag swf_audio_codec_tags[] = {
    { AV_CODEC_ID_PCM_S16LE,  0x00 },
    { AV_CODEC_ID_ADPCM_SWF,  0x01 },
    { AV_CODEC_ID_MP3,        0x02 },
    { AV_CODEC_ID_PCM_S16LE,  0x03 },
//  { AV_CODEC_ID_NELLYMOSER, 0x06 },
    { AV_CODEC_ID_NONE,       0 },
};
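
/* Read one SWF tag record header. The upper 10 bits of the 16-bit header hold
 * the tag type and the lower 6 bits a short length; a length field of 0x3f
 * means the real length follows as a 32-bit little-endian value. */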
static int get_swf_tag(AVIOContext *pb, int *len_ptr)
{
    int tag, len;

    if (url_feof(pb))
        return AVERROR_EOF;

    tag = avio_rl16(pb);
    len = tag & 0x3f;
    tag = tag >> 6;
    if (len == 0x3f) {
        len = avio_rl32(pb);
    }
    *len_ptr = len;
    return tag;
}
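
/* A valid SWF file starts with the signature "FWS" (uncompressed) or
 * "CWS" (zlib-compressed body). */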
static int swf_probe(AVProbeData *p)
{
    /* check file header */
    if ((p->buf[0] == 'F' || p->buf[0] == 'C') && p->buf[1] == 'W' &&
        p->buf[2] == 'S')
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
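
/* Read callback for the decompressing AVIOContext set up for "CWS" files:
 * refill the zlib input buffer from the underlying byte stream and inflate
 * into the buffer handed in by the generic avio layer. */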
#if CONFIG_ZLIB
static int zlib_refill(void *opaque, uint8_t *buf, int buf_size)
{
    AVFormatContext *s = opaque;
    SWFContext *swf = s->priv_data;
    z_stream *z = &swf->zstream;
    int ret;

retry:
    if (!z->avail_in) {
        int n = avio_read(s->pb, swf->zbuf_in, ZBUF_SIZE);
        if (n < 0)
            return n;
        z->next_in  = swf->zbuf_in;
        z->avail_in = n;
    }

    z->next_out  = buf;
    z->avail_out = buf_size;

    ret = inflate(z, Z_NO_FLUSH);
    if (ret < 0)
        return AVERROR(EINVAL);
    if (ret == Z_STREAM_END)
        return AVERROR_EOF;

    if (buf_size - z->avail_out == 0)
        goto retry;

    return buf_size - z->avail_out;
}
#endif
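
/* Parse the fixed part of the SWF header: signature and file length, the
 * frame-size RECT (skipped), the 8.8 fixed-point frame rate and the frame
 * count. For "CWS" files everything after the first 8 bytes is routed through
 * the zlib-backed AVIOContext. Streams are declared later, on the fly, so the
 * context is flagged AVFMTCTX_NOHEADER. */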
static int swf_read_header(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    int nbits, len, tag;

    tag = avio_rb32(pb) & 0xffffff00;
    avio_rl32(pb);

    if (tag == MKBETAG('C', 'W', 'S', 0)) {
        av_log(s, AV_LOG_INFO, "SWF compressed file detected\n");
#if CONFIG_ZLIB
        swf->zbuf_in  = av_malloc(ZBUF_SIZE);
        swf->zbuf_out = av_malloc(ZBUF_SIZE);
        swf->zpb = avio_alloc_context(swf->zbuf_out, ZBUF_SIZE, 0, s,
                                      zlib_refill, NULL, NULL);
        if (!swf->zbuf_in || !swf->zbuf_out || !swf->zpb)
            return AVERROR(ENOMEM);
        swf->zpb->seekable = 0;
        if (inflateInit(&swf->zstream) != Z_OK) {
            av_log(s, AV_LOG_ERROR, "Unable to init zlib context\n");
            return AVERROR(EINVAL);
        }
        pb = swf->zpb;
#else
        av_log(s, AV_LOG_ERROR, "zlib support is required to read SWF compressed files\n");
        return AVERROR(EIO);
#endif
    } else if (tag != MKBETAG('F', 'W', 'S', 0))
        return AVERROR(EIO);

    /* skip rectangle size */
    nbits = avio_r8(pb) >> 3;
    len = (4 * nbits - 3 + 7) / 8;
    avio_skip(pb, len);
    swf->frame_rate = avio_rl16(pb); /* 8.8 fixed */
    avio_rl16(pb); /* frame count */

    swf->samples_per_frame = 0;
    s->ctx_flags |= AVFMTCTX_NOHEADER;
    return 0;
}
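
/* Demultiplexing loop: walk SWF tags until one of them yields a packet.
 * Stream-definition tags (DefineVideoStream, SoundStreamHead, DefineSound)
 * create AVStreams lazily; data tags (VideoFrame, SoundStreamBlock,
 * DefineBitsJPEG2, DefineBitsLossless) are turned into packets for those
 * streams. Everything else is skipped via its declared length. */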
static int swf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *vst = NULL, *ast = NULL, *st = 0;
    int tag, len, i, frame, v, res;

#if CONFIG_ZLIB
    if (swf->zpb)
        pb = swf->zpb;
#endif

    for (;;) {
        uint64_t pos = avio_tell(pb);
        tag = get_swf_tag(pb, &len);
        if (tag < 0)
            return tag;
        if (len < 0) {
            av_log(s, AV_LOG_ERROR, "invalid tag length: %d\n", len);
            return AVERROR_INVALIDDATA;
        }
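        /* DefineVideoStream: register a video stream for this character id
         * (unless one already exists), skip the remaining stream parameters
         * and pick the codec from the shared ff_swf_codec_tags table. */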
        if (tag == TAG_VIDEOSTREAM) {
            int ch_id = avio_rl16(pb);
            len -= 2;

            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO && st->id == ch_id)
                    goto skip;
            }

            avio_rl16(pb);
            avio_rl16(pb);
            avio_rl16(pb);
            avio_r8(pb);
            /* Check for FLV1 */
            vst = avformat_new_stream(s, NULL);
            if (!vst)
                return AVERROR(ENOMEM);
            vst->id = ch_id;
            vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
            vst->codec->codec_id   = ff_codec_get_id(ff_swf_codec_tags, avio_r8(pb));
            avpriv_set_pts_info(vst, 16, 256, swf->frame_rate);
            len -= 8;
        } else if (tag == TAG_STREAMHEAD || tag == TAG_STREAMHEAD2) {
            /* streaming found */
            int sample_rate_code;

            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO && st->id == -1)
                    goto skip;
            }

            avio_r8(pb);
            v = avio_r8(pb);
            swf->samples_per_frame = avio_rl16(pb);
            ast = avformat_new_stream(s, NULL);
            if (!ast)
                return AVERROR(ENOMEM);
            ast->id = -1; /* -1 to avoid clash with video stream ch_id */
            if (v & 1) {
                ast->codec->channels       = 2;
                ast->codec->channel_layout = AV_CH_LAYOUT_STEREO;
            } else {
                ast->codec->channels       = 1;
                ast->codec->channel_layout = AV_CH_LAYOUT_MONO;
            }
            ast->codec->codec_type = AVMEDIA_TYPE_AUDIO;
            ast->codec->codec_id   = ff_codec_get_id(swf_audio_codec_tags, (v >> 4) & 15);
            ast->need_parsing = AVSTREAM_PARSE_FULL;
            sample_rate_code = (v >> 2) & 3;
            ast->codec->sample_rate = 44100 >> (3 - sample_rate_code);
            avpriv_set_pts_info(ast, 64, 1, ast->codec->sample_rate);
            len -= 4;
        } else if (tag == TAG_DEFINESOUND) {
            /* audio stream */
            int sample_rate_code;
            int ch_id = avio_rl16(pb);

            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO && st->id == ch_id)
                    goto skip;
            }

            // FIXME: 8-bit uncompressed PCM audio will be interpreted as 16-bit
            // FIXME: The entire audio stream is stored in a single chunk/tag. Normally,
            // these are smaller audio streams in DEFINESOUND tags, but it's technically
            // possible they could be huge. Break it up into multiple packets if it's big.
            v = avio_r8(pb);
            ast = avformat_new_stream(s, NULL);
            if (!ast)
                return AVERROR(ENOMEM);
            ast->id = ch_id;
            ast->codec->channels   = 1 + (v & 1);
            ast->codec->codec_type = AVMEDIA_TYPE_AUDIO;
            ast->codec->codec_id   = ff_codec_get_id(swf_audio_codec_tags, (v >> 4) & 15);
            ast->need_parsing = AVSTREAM_PARSE_FULL;
            sample_rate_code = (v >> 2) & 3;
            ast->codec->sample_rate = 44100 >> (3 - sample_rate_code);
            avpriv_set_pts_info(ast, 64, 1, ast->codec->sample_rate);
            ast->duration = avio_rl32(pb); // number of samples
            if (((v >> 4) & 15) == 2) { // MP3 sound data record
                ast->skip_samples = avio_rl16(pb);
                len -= 2;
            }
            len -= 7;
            if ((res = av_get_packet(pb, pkt, len)) < 0)
                return res;
            pkt->pos = pos;
            pkt->stream_index = ast->index;
            return pkt->size;
        } else if (tag == TAG_VIDEOFRAME) {
            int ch_id = avio_rl16(pb);
            len -= 2;
            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO && st->id == ch_id) {
                    frame = avio_rl16(pb);
                    len -= 2;
                    if (len <= 0)
                        goto skip;
                    if ((res = av_get_packet(pb, pkt, len)) < 0)
                        return res;
                    pkt->pos = pos;
                    pkt->pts = frame;
                    pkt->stream_index = st->index;
                    return pkt->size;
                }
            }
        } else if (tag == TAG_DEFINEBITSLOSSLESS || tag == TAG_DEFINEBITSLOSSLESS2) {
#if CONFIG_ZLIB
            long out_len;
            uint8_t *buf = NULL, *zbuf = NULL, *pal;
            uint32_t colormap[AVPALETTE_COUNT] = {0};
            const int alpha_bmp = tag == TAG_DEFINEBITSLOSSLESS2;
            const int colormapbpp = 3 + alpha_bmp;
            int linesize, colormapsize = 0;

            const int ch_id   = avio_rl16(pb);
            const int bmp_fmt = avio_r8(pb);
            const int width   = avio_rl16(pb);
            const int height  = avio_rl16(pb);
            len -= 2+1+2+2;

            switch (bmp_fmt) {
            case 3: // PAL-8
                linesize = width;
                colormapsize = avio_r8(pb) + 1;
                len--;
                break;
            case 4: // RGB15
                linesize = width * 2;
                break;
            case 5: // RGB24 (0RGB)
                linesize = width * 4;
                break;
            default:
                av_log(s, AV_LOG_ERROR, "invalid bitmap format %d, skipped\n", bmp_fmt);
                goto bitmap_end_skip;
            }

            linesize = FFALIGN(linesize, 4);

            if (av_image_check_size(width, height, 0, s) < 0 ||
                linesize >= INT_MAX / height ||
                linesize * height >= INT_MAX - colormapsize * colormapbpp) {
                av_log(s, AV_LOG_ERROR, "invalid frame size %dx%d\n", width, height);
                goto bitmap_end_skip;
            }

            out_len = colormapsize * colormapbpp + linesize * height;

            av_dlog(s, "bitmap: ch=%d fmt=%d %dx%d (linesize=%d) len=%d->%ld pal=%d\n",
                    ch_id, bmp_fmt, width, height, linesize, len, out_len, colormapsize);

            zbuf = av_malloc(len);
            buf  = av_malloc(out_len);
            if (!zbuf || !buf) {
                res = AVERROR(ENOMEM);
                goto bitmap_end;
            }

            len = avio_read(pb, zbuf, len);
            if (len < 0 || (res = uncompress(buf, &out_len, zbuf, len)) != Z_OK) {
                av_log(s, AV_LOG_WARNING, "Failed to uncompress one bitmap\n");
                goto bitmap_end_skip;
            }

            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_id == AV_CODEC_ID_RAWVIDEO && st->id == -3)
                    break;
            }
            if (i == s->nb_streams) {
                vst = avformat_new_stream(s, NULL);
                if (!vst) {
                    res = AVERROR(ENOMEM);
                    goto bitmap_end;
                }
                vst->id = -3; /* -3 to avoid clash with video stream and audio stream */
                vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
                vst->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
                avpriv_set_pts_info(vst, 64, 256, swf->frame_rate);
                st = vst;
            }
            st->codec->width  = width;
            st->codec->height = height;

            if ((res = av_new_packet(pkt, out_len - colormapsize * colormapbpp)) < 0)
                goto bitmap_end;
            pkt->pos = pos;
            pkt->stream_index = st->index;

            switch (bmp_fmt) {
            case 3:
                st->codec->pix_fmt = AV_PIX_FMT_PAL8;
                for (i = 0; i < colormapsize; i++)
                    if (alpha_bmp) colormap[i] = buf[3]<<24 | AV_RB24(buf + 4*i);
                    else           colormap[i] = 0xffU <<24 | AV_RB24(buf + 3*i);
                pal = av_packet_new_side_data(pkt, AV_PKT_DATA_PALETTE, AVPALETTE_SIZE);
                if (!pal) {
                    res = AVERROR(ENOMEM);
                    goto bitmap_end;
                }
                memcpy(pal, colormap, AVPALETTE_SIZE);
                break;
            case 4:
                st->codec->pix_fmt = AV_PIX_FMT_RGB555;
                break;
            case 5:
                st->codec->pix_fmt = alpha_bmp ? AV_PIX_FMT_ARGB : AV_PIX_FMT_0RGB;
                break;
            default:
                av_assert0(0);
            }

            if (linesize * height > pkt->size) {
                res = AVERROR_INVALIDDATA;
                goto bitmap_end;
            }
            memcpy(pkt->data, buf + colormapsize*colormapbpp, linesize * height);

            res = pkt->size;

bitmap_end:
            av_freep(&zbuf);
            av_freep(&buf);
            return res;
bitmap_end_skip:
            av_freep(&zbuf);
            av_freep(&buf);
#else
            av_log(s, AV_LOG_ERROR, "this file requires zlib support compiled in\n");
#endif
        } else if (tag == TAG_STREAMBLOCK) {
            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO && st->id == -1) {
                    if (st->codec->codec_id == AV_CODEC_ID_MP3) {
                        avio_skip(pb, 4);
                        len -= 4;
                        if (len <= 0)
                            goto skip;
                        if ((res = av_get_packet(pb, pkt, len)) < 0)
                            return res;
                    } else { // ADPCM, PCM
                        if (len <= 0)
                            goto skip;
                        if ((res = av_get_packet(pb, pkt, len)) < 0)
                            return res;
                    }
                    pkt->pos          = pos;
                    pkt->stream_index = st->index;
                    return pkt->size;
                }
            }
        } else if (tag == TAG_JPEG2) {
            for (i = 0; i < s->nb_streams; i++) {
                st = s->streams[i];
                if (st->codec->codec_id == AV_CODEC_ID_MJPEG && st->id == -2)
                    break;
            }
            if (i == s->nb_streams) {
                vst = avformat_new_stream(s, NULL);
                if (!vst)
                    return AVERROR(ENOMEM);
                vst->id = -2; /* -2 to avoid clash with video stream and audio stream */
                vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
                vst->codec->codec_id   = AV_CODEC_ID_MJPEG;
                avpriv_set_pts_info(vst, 64, 256, swf->frame_rate);
                st = vst;
            }
            avio_rl16(pb); /* BITMAP_ID */
            len -= 2;
            if (len < 4)
                goto skip;
            if ((res = av_new_packet(pkt, len)) < 0)
                return res;
            avio_read(pb, pkt->data, 4);
            if (AV_RB32(pkt->data) == 0xffd8ffd9 ||
                AV_RB32(pkt->data) == 0xffd9ffd8) {
                /* old SWF files containing SOI/EOI as data start */
                /* files created by swink have reversed tag */
                pkt->size -= 4;
                avio_read(pb, pkt->data, pkt->size);
            } else {
                avio_read(pb, pkt->data + 4, pkt->size - 4);
            }
            pkt->pos          = pos;
            pkt->stream_index = st->index;
            return pkt->size;
        } else {
            av_log(s, AV_LOG_DEBUG, "Unknown tag: %d\n", tag);
        }
    skip:
        if (len < 0)
            av_log(s, AV_LOG_WARNING, "Clipping len %d\n", len);
        len = FFMAX(0, len);
        avio_skip(pb, len);
    }
}
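
/* Free the zlib decompression state and the buffers allocated in
 * swf_read_header() for "CWS" files. */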
#if CONFIG_ZLIB
static av_cold int swf_read_close(AVFormatContext *avctx)
{
    SWFContext *s = avctx->priv_data;
    inflateEnd(&s->zstream);
    av_freep(&s->zbuf_in);
    av_freep(&s->zbuf_out);
    av_freep(&s->zpb);
    return 0;
}
#endif
AVInputFormat ff_swf_demuxer = {
    .name           = "swf",
    .long_name      = NULL_IF_CONFIG_SMALL("SWF (ShockWave Flash)"),
    .priv_data_size = sizeof(SWFContext),
    .read_probe     = swf_probe,
    .read_header    = swf_read_header,
    .read_packet    = swf_read_packet,
#if CONFIG_ZLIB
    .read_close     = swf_read_close,
#endif
};