/*
 * ASF compatible demuxer
 * Copyright (c) 2000, 2001 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <inttypes.h>

#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/bswap.h"
#include "libavutil/common.h"
#include "libavutil/dict.h"
#include "libavutil/internal.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "avformat.h"
#include "avio_internal.h"
#include "avlanguage.h"
#include "id3v2.h"
#include "internal.h"
#include "riff.h"
#include "asf.h"
#include "asfcrypt.h"
typedef struct {
    const AVClass *class;
    int asfid2avid[128];            ///< conversion table from ASF stream ID to AVStream ID
    ASFStream streams[128];         ///< maximum number of streams; it's not that big
    uint32_t stream_bitrates[128];  ///< max number of streams, bitrate for each (for streaming)
    AVRational dar[128];
    char stream_languages[128][6];  ///< max number of streams, language for each (RFC1766, e.g. en-US)
    /* non-streamed additional info */
    /* packet filling */
    int packet_size_left;
    /* only for reading */
    uint64_t data_offset;           ///< beginning of the first data packet
    uint64_t data_object_offset;    ///< data object offset (excl. GUID & size)
    uint64_t data_object_size;      ///< size of the data object
    int index_read;

    ASFMainHeader hdr;

    int packet_flags;
    int packet_property;
    int packet_timestamp;
    int packet_segsizetype;
    int packet_segments;
    int packet_seq;
    int packet_replic_size;
    int packet_key_frame;
    int packet_padsize;
    unsigned int packet_frag_offset;
    unsigned int packet_frag_size;
    int64_t packet_frag_timestamp;
    int packet_multi_size;
    int packet_time_delta;
    int packet_time_start;
    int64_t packet_pos;

    int stream_index;

    ASFStream *asf_st;              ///< currently decoded stream

    int no_resync_search;
} ASFContext;
static const AVOption options[] = {
    { "no_resync_search", "Don't try to resynchronize by looking for a certain optional start code", offsetof(ASFContext, no_resync_search), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};

static const AVClass asf_class = {
    .class_name = "asf demuxer",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
#undef NDEBUG
#include <assert.h>

#define ASF_MAX_STREAMS 127
#define FRAME_HEADER_SIZE 16
// FIXME: FRAME_HEADER_SIZE may be different (17 is known to be too large).
#ifdef DEBUG
static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
    0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
};

#define PRINT_IF_GUID(g, cmp) \
    if (!ff_guidcmp(g, &cmp)) \
        av_dlog(NULL, "(GUID: %s) ", # cmp)

static void print_guid(ff_asf_guid *g)
{
    int i;
    PRINT_IF_GUID(g, ff_asf_header);
    else PRINT_IF_GUID(g, ff_asf_file_header);
    else PRINT_IF_GUID(g, ff_asf_stream_header);
    else PRINT_IF_GUID(g, ff_asf_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_video_stream);
    else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_command_stream);
    else PRINT_IF_GUID(g, ff_asf_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
    else PRINT_IF_GUID(g, ff_asf_data_header);
    else PRINT_IF_GUID(g, ff_asf_simple_index_header);
    else PRINT_IF_GUID(g, ff_asf_head1_guid);
    else PRINT_IF_GUID(g, ff_asf_head2_guid);
    else PRINT_IF_GUID(g, ff_asf_my_guid);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
    else PRINT_IF_GUID(g, ff_asf_extended_content_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_metadata_header);
    else PRINT_IF_GUID(g, ff_asf_metadata_library_header);
    else PRINT_IF_GUID(g, ff_asf_marker_header);
    else PRINT_IF_GUID(g, stream_bitrate_guid);
    else PRINT_IF_GUID(g, ff_asf_language_guid);
    else
        av_dlog(NULL, "(GUID: unknown) ");
    for (i = 0; i < 16; i++)
        av_dlog(NULL, " 0x%02x,", (*g)[i]);
    av_dlog(NULL, "}\n");
}
#undef PRINT_IF_GUID
#else
#define print_guid(g)
#endif
static int asf_probe(AVProbeData *pd)
{
    /* check file header */
    if (!ff_guidcmp(pd->buf, &ff_asf_header))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
/* size of type 2 (BOOL) is 32bit for "Extended Content Description Object"
 * but 16 bit for "Metadata Object" and "Metadata Library Object" */
static int get_value(AVIOContext *pb, int type, int type2_size)
{
    switch (type) {
    case 2:
        return (type2_size == 32) ? avio_rl32(pb) : avio_rl16(pb);
    case 3:
        return avio_rl32(pb);
    case 4:
        return avio_rl64(pb);
    case 5:
        return avio_rl16(pb);
    default:
        return INT_MIN;
    }
}
/* MSDN claims that this should be "compatible with the ID3 frame, APIC",
 * but in reality this is only loosely similar */
static int asf_read_picture(AVFormatContext *s, int len)
{
    AVPacket pkt          = { 0 };
    const CodecMime *mime = ff_id3v2_mime_tags;
    enum AVCodecID id     = AV_CODEC_ID_NONE;
    char mimetype[64];
    uint8_t *desc = NULL;
    AVStream *st  = NULL;
    int ret, type, picsize, desc_len;

    /* type + picsize + mime + desc */
    if (len < 1 + 4 + 2 + 2) {
        av_log(s, AV_LOG_ERROR, "Invalid attached picture size: %d.\n", len);
        return AVERROR_INVALIDDATA;
    }

    /* picture type */
    type = avio_r8(s->pb);
    len--;
    if (type >= FF_ARRAY_ELEMS(ff_id3v2_picture_types) || type < 0) {
        av_log(s, AV_LOG_WARNING, "Unknown attached picture type: %d.\n", type);
        type = 0;
    }

    /* picture data size */
    picsize = avio_rl32(s->pb);
    len    -= 4;

    /* picture MIME type */
    len -= avio_get_str16le(s->pb, len, mimetype, sizeof(mimetype));
    while (mime->id != AV_CODEC_ID_NONE) {
        if (!strncmp(mime->str, mimetype, sizeof(mimetype))) {
            id = mime->id;
            break;
        }
        mime++;
    }
    if (id == AV_CODEC_ID_NONE) {
        av_log(s, AV_LOG_ERROR, "Unknown attached picture mimetype: %s.\n",
               mimetype);
        return 0;
    }

    if (picsize >= len) {
        av_log(s, AV_LOG_ERROR, "Invalid attached picture data size: %d >= %d.\n",
               picsize, len);
        return AVERROR_INVALIDDATA;
    }

    /* picture description */
    desc_len = (len - picsize) * 2 + 1;
    desc     = av_malloc(desc_len);
    if (!desc)
        return AVERROR(ENOMEM);
    len -= avio_get_str16le(s->pb, len - picsize, desc, desc_len);

    ret = av_get_packet(s->pb, &pkt, picsize);
    if (ret < 0)
        goto fail;

    st = avformat_new_stream(s, NULL);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    st->disposition              |= AV_DISPOSITION_ATTACHED_PIC;
    st->codec->codec_type         = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id           = id;
    st->attached_pic              = pkt;
    st->attached_pic.stream_index = st->index;
    st->attached_pic.flags       |= AV_PKT_FLAG_KEY;

    if (*desc)
        av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL);
    else
        av_freep(&desc);

    av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0);

    return 0;

fail:
    av_freep(&desc);
    av_free_packet(&pkt);
    return ret;
}
static void get_id3_tag(AVFormatContext *s, int len)
{
    ID3v2ExtraMeta *id3v2_extra_meta = NULL;

    ff_id3v2_read(s, ID3v2_DEFAULT_MAGIC, &id3v2_extra_meta, len);
    if (id3v2_extra_meta)
        ff_id3v2_parse_apic(s, &id3v2_extra_meta);
    ff_id3v2_free_extra_meta(&id3v2_extra_meta);
}
static void get_tag(AVFormatContext *s, const char *key, int type, int len, int type2_size)
{
    char *value;
    int64_t off = avio_tell(s->pb);
#define LEN 22

    if ((unsigned)len >= (UINT_MAX - LEN) / 2)
        return;

    value = av_malloc(2 * len + LEN);
    if (!value)
        goto finish;

    if (type == 0) {         // UTF16-LE
        avio_get_str16le(s->pb, len, value, 2 * len + 1);
    } else if (type == -1) { // ASCII
        avio_read(s->pb, value, len);
        value[len] = 0;
    } else if (type == 1) {  // byte array
        if (!strcmp(key, "WM/Picture")) { // handle cover art
            asf_read_picture(s, len);
        } else if (!strcmp(key, "ID3")) { // handle ID3 tag
            get_id3_tag(s, len);
        } else {
            av_log(s, AV_LOG_VERBOSE, "Unsupported byte array in tag %s.\n", key);
        }
        goto finish;
    } else if (type > 1 && type <= 5) { // boolean or DWORD or QWORD or WORD
        uint64_t num = get_value(s->pb, type, type2_size);
        snprintf(value, LEN, "%"PRIu64, num);
    } else if (type == 6) { // (don't) handle GUID
        av_log(s, AV_LOG_DEBUG, "Unsupported GUID value in tag %s.\n", key);
        goto finish;
    } else {
        av_log(s, AV_LOG_DEBUG,
               "Unsupported value type %d in tag %s.\n", type, key);
        goto finish;
    }
    if (*value)
        av_dict_set(&s->metadata, key, value, 0);

finish:
    av_freep(&value);
    avio_seek(s->pb, off + len, SEEK_SET);
}
static int asf_read_file_properties(AVFormatContext *s, int64_t size)
{
    ASFContext *asf = s->priv_data;
    AVIOContext *pb = s->pb;

    ff_get_guid(pb, &asf->hdr.guid);
    asf->hdr.file_size   = avio_rl64(pb);
    asf->hdr.create_time = avio_rl64(pb);
    avio_rl64(pb); /* number of packets */
    asf->hdr.play_time   = avio_rl64(pb);
    asf->hdr.send_time   = avio_rl64(pb);
    asf->hdr.preroll     = avio_rl32(pb);
    asf->hdr.ignore      = avio_rl32(pb);
    asf->hdr.flags       = avio_rl32(pb);
    asf->hdr.min_pktsize = avio_rl32(pb);
    asf->hdr.max_pktsize = avio_rl32(pb);
    if (asf->hdr.min_pktsize >= (1U << 29))
        return AVERROR_INVALIDDATA;
    asf->hdr.max_bitrate = avio_rl32(pb);
    s->packet_size = asf->hdr.max_pktsize;

    return 0;
}
static int asf_read_stream_properties(AVFormatContext *s, int64_t size)
{
    ASFContext *asf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st;
    ASFStream *asf_st;
    ff_asf_guid g;
    enum AVMediaType type;
    int type_specific_size, sizeX;
    unsigned int tag1;
    int64_t pos1, pos2, start_time;
    int test_for_ext_stream_audio, is_dvr_ms_audio = 0;

    if (s->nb_streams == ASF_MAX_STREAMS) {
        av_log(s, AV_LOG_ERROR, "too many streams\n");
        return AVERROR(EINVAL);
    }

    pos1 = avio_tell(pb);

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    avpriv_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
    start_time = asf->hdr.preroll;

    if (!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
        int64_t fsize = avio_size(pb);
        if (fsize <= 0 || (int64_t)asf->hdr.file_size <= 0 ||
            FFABS(fsize - (int64_t)asf->hdr.file_size) / (float)FFMIN(fsize, asf->hdr.file_size) < 0.05)
            st->duration = asf->hdr.play_time /
                           (10000000 / 1000) - start_time;
    }
    ff_get_guid(pb, &g);

    test_for_ext_stream_audio = 0;
    if (!ff_guidcmp(&g, &ff_asf_audio_stream)) {
        type = AVMEDIA_TYPE_AUDIO;
    } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) {
        type = AVMEDIA_TYPE_VIDEO;
    } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) {
        type = AVMEDIA_TYPE_VIDEO;
        st->codec->codec_id = AV_CODEC_ID_MJPEG;
    } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) {
        type = AVMEDIA_TYPE_DATA;
    } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
        test_for_ext_stream_audio = 1;
        type = AVMEDIA_TYPE_UNKNOWN;
    } else {
        return -1;
    }
    ff_get_guid(pb, &g);
    avio_skip(pb, 8); /* total_size */
    type_specific_size = avio_rl32(pb);
    avio_rl32(pb);
    st->id = avio_rl16(pb) & 0x7f; /* stream id */
    // mapping of asf ID to AV stream ID;
    asf->asfid2avid[st->id] = s->nb_streams - 1;
    asf_st = &asf->streams[st->id];

    avio_rl32(pb);

    if (test_for_ext_stream_audio) {
        ff_get_guid(pb, &g);
        if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
            type            = AVMEDIA_TYPE_AUDIO;
            is_dvr_ms_audio = 1;
            ff_get_guid(pb, &g);
            avio_rl32(pb);
            avio_rl32(pb);
            avio_rl32(pb);
            ff_get_guid(pb, &g);
            avio_rl32(pb);
        }
    }

    st->codec->codec_type = type;
    if (type == AVMEDIA_TYPE_AUDIO) {
        int ret = ff_get_wav_header(pb, st->codec, type_specific_size);
        if (ret < 0)
            return ret;
        if (is_dvr_ms_audio) {
            // codec_id and codec_tag are unreliable in dvr_ms
            // files. Set them later by probing stream.
            st->request_probe    = 1;
            st->codec->codec_tag = 0;
        }
        if (st->codec->codec_id == AV_CODEC_ID_AAC)
            st->need_parsing = AVSTREAM_PARSE_NONE;
        else
            st->need_parsing = AVSTREAM_PARSE_FULL;
        /* We have to init the frame size at some point .... */
        pos2 = avio_tell(pb);
        if (size >= (pos2 + 8 - pos1 + 24)) {
            asf_st->ds_span        = avio_r8(pb);
            asf_st->ds_packet_size = avio_rl16(pb);
            asf_st->ds_chunk_size  = avio_rl16(pb);
            avio_rl16(pb); // ds_data_size
            avio_r8(pb);   // ds_silence_data
        }
        if (asf_st->ds_span > 1) {
            if (!asf_st->ds_chunk_size ||
                (asf_st->ds_packet_size / asf_st->ds_chunk_size <= 1) ||
                asf_st->ds_packet_size % asf_st->ds_chunk_size)
                asf_st->ds_span = 0; // disable descrambling
        }
    } else if (type == AVMEDIA_TYPE_VIDEO &&
               size - (avio_tell(pb) - pos1 + 24) >= 51) {
        avio_rl32(pb);
        avio_rl32(pb);
        avio_r8(pb);
        avio_rl16(pb); /* size */
        sizeX             = avio_rl32(pb); /* size */
        st->codec->width  = avio_rl32(pb);
        st->codec->height = avio_rl32(pb);
        /* not available for asf */
        avio_rl16(pb); /* panes */
        st->codec->bits_per_coded_sample = avio_rl16(pb); /* depth */
        tag1 = avio_rl32(pb);
        avio_skip(pb, 20);
        if (sizeX > 40) {
            st->codec->extradata_size = ffio_limit(pb, sizeX - 40);
            st->codec->extradata      = av_mallocz(st->codec->extradata_size +
                                                   FF_INPUT_BUFFER_PADDING_SIZE);
            if (!st->codec->extradata)
                return AVERROR(ENOMEM);
            avio_read(pb, st->codec->extradata, st->codec->extradata_size);
        }

        /* Extract palette from extradata if bpp <= 8 */
        /* This code assumes that extradata contains only palette */
        /* This is true for all paletted codecs implemented in libavcodec */
        if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
#if HAVE_BIGENDIAN
            int i;
            for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE) / 4; i++)
                asf_st->palette[i] = av_bswap32(((uint32_t *)st->codec->extradata)[i]);
#else
            memcpy(asf_st->palette, st->codec->extradata,
                   FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
#endif
            asf_st->palette_changed = 1;
        }

        st->codec->codec_tag = tag1;
        st->codec->codec_id  = ff_codec_get_id(ff_codec_bmp_tags, tag1);
        if (tag1 == MKTAG('D', 'V', 'R', ' ')) {
            st->need_parsing = AVSTREAM_PARSE_FULL;
            /* issue658 contains wrong w/h and MS even puts a fake seq header
             * with wrong w/h in extradata while a correct one is in the stream.
             * maximum lameness */
            st->codec->width  =
            st->codec->height = 0;
            av_freep(&st->codec->extradata);
            st->codec->extradata_size = 0;
        }
        if (st->codec->codec_id == AV_CODEC_ID_H264)
            st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
        if (st->codec->codec_id == AV_CODEC_ID_MPEG4)
            st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
    }
    pos2 = avio_tell(pb);
    avio_skip(pb, size - (pos2 - pos1 + 24));

    return 0;
}
static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size)
{
    ASFContext *asf = s->priv_data;
    AVIOContext *pb = s->pb;
    ff_asf_guid g;
    int ext_len, payload_ext_ct, stream_ct, i;
    uint32_t leak_rate, stream_num;
    unsigned int stream_languageid_index;

    avio_rl64(pb); // starttime
    avio_rl64(pb); // endtime
    leak_rate = avio_rl32(pb); // leak-datarate
    avio_rl32(pb); // bucket-datasize
    avio_rl32(pb); // init-bucket-fullness
    avio_rl32(pb); // alt-leak-datarate
    avio_rl32(pb); // alt-bucket-datasize
    avio_rl32(pb); // alt-init-bucket-fullness
    avio_rl32(pb); // max-object-size
    avio_rl32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
    stream_num = avio_rl16(pb); // stream-num

    stream_languageid_index = avio_rl16(pb); // stream-language-id-index
    if (stream_num < 128)
        asf->streams[stream_num].stream_language_index = stream_languageid_index;

    avio_rl64(pb); // avg frametime in 100ns units
    stream_ct      = avio_rl16(pb); // stream-name-count
    payload_ext_ct = avio_rl16(pb); // payload-extension-system-count

    if (stream_num < 128) {
        asf->stream_bitrates[stream_num] = leak_rate;
        asf->streams[stream_num].payload_ext_ct = 0;
    }

    for (i = 0; i < stream_ct; i++) {
        avio_rl16(pb);
        ext_len = avio_rl16(pb);
        avio_skip(pb, ext_len);
    }

    for (i = 0; i < payload_ext_ct; i++) {
        int size;
        ff_get_guid(pb, &g);
        size    = avio_rl16(pb);
        ext_len = avio_rl32(pb);
        avio_skip(pb, ext_len);
        if (stream_num < 128 && i < FF_ARRAY_ELEMS(asf->streams[stream_num].payload)) {
            ASFPayload *p = &asf->streams[stream_num].payload[i];
            p->type = g[0];
            p->size = size;
            av_log(s, AV_LOG_DEBUG, "Payload extension %x %d\n", g[0], p->size);
            asf->streams[stream_num].payload_ext_ct++;
        }
    }

    return 0;
}
static int asf_read_content_desc(AVFormatContext *s, int64_t size)
{
    AVIOContext *pb = s->pb;
    int len1, len2, len3, len4, len5;

    len1 = avio_rl16(pb);
    len2 = avio_rl16(pb);
    len3 = avio_rl16(pb);
    len4 = avio_rl16(pb);
    len5 = avio_rl16(pb);
    get_tag(s, "title", 0, len1, 32);
    get_tag(s, "author", 0, len2, 32);
    get_tag(s, "copyright", 0, len3, 32);
    get_tag(s, "comment", 0, len4, 32);
    avio_skip(pb, len5);

    return 0;
}
static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size)
{
    AVIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int desc_count, i, ret;

    desc_count = avio_rl16(pb);
    for (i = 0; i < desc_count; i++) {
        int name_len, value_type, value_len;
        char name[1024];

        name_len = avio_rl16(pb);
        if (name_len % 2) // must be even, broken lavf versions wrote len-1
            name_len += 1;
        if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
            avio_skip(pb, name_len - ret);
        value_type = avio_rl16(pb);
        value_len  = avio_rl16(pb);
        if (!value_type && value_len % 2)
            value_len += 1;
        /* My sample has that stream set to 0; maybe that means the container.
         * ASF stream count starts at 1, so I am using 0 for the container value
         * since it is otherwise unused. */
        if (!strcmp(name, "AspectRatioX"))
            asf->dar[0].num = get_value(s->pb, value_type, 32);
        else if (!strcmp(name, "AspectRatioY"))
            asf->dar[0].den = get_value(s->pb, value_type, 32);
        else
            get_tag(s, name, value_type, value_len, 32);
    }

    return 0;
}
static int asf_read_language_list(AVFormatContext *s, int64_t size)
{
    AVIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int j, ret;
    int stream_count = avio_rl16(pb);

    for (j = 0; j < stream_count; j++) {
        char lang[6];
        unsigned int lang_len = avio_r8(pb);
        if ((ret = avio_get_str16le(pb, lang_len, lang,
                                    sizeof(lang))) < lang_len)
            avio_skip(pb, lang_len - ret);
        if (j < 128)
            av_strlcpy(asf->stream_languages[j], lang,
                       sizeof(*asf->stream_languages));
    }

    return 0;
}
static int asf_read_metadata(AVFormatContext *s, int64_t size)
{
    AVIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int n, stream_num, name_len, value_len;
    int ret, i;

    n = avio_rl16(pb);

    for (i = 0; i < n; i++) {
        char name[1024];
        int value_type;

        avio_rl16(pb); // lang_list_index
        stream_num = avio_rl16(pb);
        name_len   = avio_rl16(pb);
        value_type = avio_rl16(pb); /* value_type */
        value_len  = avio_rl32(pb);

        if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
            avio_skip(pb, name_len - ret);
        av_dlog(s, "%d stream %d name_len %2d type %d len %4d <%s>\n",
                i, stream_num, name_len, value_type, value_len, name);

        if (!strcmp(name, "AspectRatioX")) {
            int aspect_x = get_value(s->pb, value_type, 16);
            if (stream_num < 128)
                asf->dar[stream_num].num = aspect_x;
        } else if (!strcmp(name, "AspectRatioY")) {
            int aspect_y = get_value(s->pb, value_type, 16);
            if (stream_num < 128)
                asf->dar[stream_num].den = aspect_y;
        } else {
            get_tag(s, name, value_type, value_len, 16);
        }
    }

    return 0;
}
static int asf_read_marker(AVFormatContext *s, int64_t size)
{
    AVIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int i, count, name_len, ret;
    char name[1024];

    avio_rl64(pb); // reserved 16 bytes
    avio_rl64(pb); // ...
    count = avio_rl32(pb);    // markers count
    avio_rl16(pb);            // reserved 2 bytes
    name_len = avio_rl16(pb); // name length
    for (i = 0; i < name_len; i++)
        avio_r8(pb); // skip the name

    for (i = 0; i < count; i++) {
        int64_t pres_time;
        int name_len;

        avio_rl64(pb);             // offset, 8 bytes
        pres_time = avio_rl64(pb); // presentation time
        pres_time -= asf->hdr.preroll * 10000;
        avio_rl16(pb);             // entry length
        avio_rl32(pb);             // send time
        avio_rl32(pb);             // flags
        name_len = avio_rl32(pb);  // name length
        if ((ret = avio_get_str16le(pb, name_len * 2, name,
                                    sizeof(name))) < name_len)
            avio_skip(pb, name_len - ret);
        avpriv_new_chapter(s, i, (AVRational) { 1, 10000000 }, pres_time,
                           AV_NOPTS_VALUE, name);
    }

    return 0;
}
static int asf_read_header(AVFormatContext *s)
{
    ASFContext *asf = s->priv_data;
    ff_asf_guid g;
    AVIOContext *pb = s->pb;
    int i;
    int64_t gsize;

    ff_get_guid(pb, &g);
    if (ff_guidcmp(&g, &ff_asf_header))
        return AVERROR_INVALIDDATA;
    avio_rl64(pb);
    avio_rl32(pb);
    avio_r8(pb);
    avio_r8(pb);
    memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
    for (i = 0; i < 128; i++)
        asf->streams[i].stream_language_index = 128; // invalid stream index means no language info

    for (;;) {
        uint64_t gpos = avio_tell(pb);
        ff_get_guid(pb, &g);
        gsize = avio_rl64(pb);
        print_guid(&g);
        if (!ff_guidcmp(&g, &ff_asf_data_header)) {
            asf->data_object_offset = avio_tell(pb);
            /* If not streaming, gsize is not unlimited (how?),
             * and there is enough space in the file.. */
            if (!(asf->hdr.flags & 0x01) && gsize >= 100)
                asf->data_object_size = gsize - 24;
            else
                asf->data_object_size = (uint64_t)-1;
            break;
        }
        if (gsize < 24)
            return AVERROR_INVALIDDATA;
        if (!ff_guidcmp(&g, &ff_asf_file_header)) {
            int ret = asf_read_file_properties(s, gsize);
            if (ret < 0)
                return ret;
        } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) {
            int ret = asf_read_stream_properties(s, gsize);
            if (ret < 0)
                return ret;
        } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) {
            asf_read_content_desc(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) {
            asf_read_language_list(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) {
            asf_read_ext_content_desc(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) {
            asf_read_metadata(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_metadata_library_header)) {
            asf_read_metadata(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
            asf_read_ext_stream_properties(s, gsize);
            // there could be an optional stream properties object to follow;
            // if so, the next iteration will pick it up
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
            ff_get_guid(pb, &g);
            avio_skip(pb, 6);
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) {
            asf_read_marker(s, gsize);
        } else if (url_feof(pb)) {
            return AVERROR_EOF;
        } else {
            if (!s->keylen) {
                if (!ff_guidcmp(&g, &ff_asf_content_encryption)) {
                    unsigned int len;
                    AVPacket pkt;
                    av_log(s, AV_LOG_WARNING,
                           "DRM protected stream detected, decoding will likely fail!\n");
                    len = avio_rl32(pb);
                    av_log(s, AV_LOG_DEBUG, "Secret data:\n");
                    av_get_packet(pb, &pkt, len);
                    av_hex_dump_log(s, AV_LOG_DEBUG, pkt.data, pkt.size);
                    av_free_packet(&pkt);
                    len = avio_rl32(pb);
                    get_tag(s, "ASF_Protection_Type", -1, len, 32);
                    len = avio_rl32(pb);
                    get_tag(s, "ASF_Key_ID", -1, len, 32);
                    len = avio_rl32(pb);
                    get_tag(s, "ASF_License_URL", -1, len, 32);
                } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) {
                    av_log(s, AV_LOG_WARNING,
                           "Ext DRM protected stream detected, decoding will likely fail!\n");
                    av_dict_set(&s->metadata, "encryption", "ASF Extended Content Encryption", 0);
                } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) {
                    av_log(s, AV_LOG_INFO, "Digital signature detected!\n");
                }
            }
        }
        if (avio_tell(pb) != gpos + gsize)
            av_log(s, AV_LOG_DEBUG,
                   "gpos mismatch our pos=%"PRIu64", end=%"PRId64"\n",
                   avio_tell(pb) - gpos, gsize);
        avio_seek(pb, gpos + gsize, SEEK_SET);
    }
    ff_get_guid(pb, &g);
    avio_rl64(pb);
    avio_r8(pb);
    avio_r8(pb);
    if (url_feof(pb))
        return AVERROR_EOF;
    asf->data_offset      = avio_tell(pb);
    asf->packet_size_left = 0;

    for (i = 0; i < 128; i++) {
        int stream_num = asf->asfid2avid[i];
        if (stream_num >= 0) {
            AVStream *st = s->streams[stream_num];
            if (!st->codec->bit_rate)
                st->codec->bit_rate = asf->stream_bitrates[i];
            if (asf->dar[i].num > 0 && asf->dar[i].den > 0) {
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          asf->dar[i].num, asf->dar[i].den, INT_MAX);
            } else if ((asf->dar[0].num > 0) && (asf->dar[0].den > 0) &&
                       // Use ASF container value if the stream doesn't set AR.
                       (st->codec->codec_type == AVMEDIA_TYPE_VIDEO))
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          asf->dar[0].num, asf->dar[0].den, INT_MAX);

            av_dlog(s, "i=%d, st->codec->codec_type:%d, asf->dar %d:%d sar=%d:%d\n",
                    i, st->codec->codec_type, asf->dar[i].num, asf->dar[i].den,
                    st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);

            // copy and convert language codes to the frontend
            if (asf->streams[i].stream_language_index < 128) {
                const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
                if (rfc1766 && strlen(rfc1766) > 1) {
                    const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
                    const char *iso6392 = av_convert_lang_to(primary_tag,
                                                             AV_LANG_ISO639_2_BIBL);
                    if (iso6392)
                        av_dict_set(&st->metadata, "language", iso6392, 0);
                }
            }
        }
    }
    ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv);

    return 0;
}
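/* ASF stores many packet-header fields with a variable width selected by a
 * 2-bit "length type": 3 means a 32-bit value follows, 2 a 16-bit value,
 * 1 an 8-bit value, and 0 means the field is absent and a default is used.
 * DO_2BITS reads such a field and adds the bytes consumed to rsize. */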
#define DO_2BITS(bits, var, defval)         \
    switch (bits & 3) {                     \
    case 3:                                 \
        var = avio_rl32(pb);                \
        rsize += 4;                         \
        break;                              \
    case 2:                                 \
        var = avio_rl16(pb);                \
        rsize += 2;                         \
        break;                              \
    case 1:                                 \
        var = avio_r8(pb);                  \
        rsize++;                            \
        break;                              \
    default:                                \
        var = defval;                       \
        break;                              \
    }
/**
 * Load a single ASF packet into the demuxer.
 * @param s demux context
 * @param pb context to read data from
 * @return 0 on success, <0 on error
 */
static int asf_get_packet(AVFormatContext *s, AVIOContext *pb)
{
    ASFContext *asf = s->priv_data;
    uint32_t packet_length, padsize;
    int rsize = 8;
    int c, d, e, off;

    // if we do not know packet size, allow skipping up to 32 kB
    off = 32768;
    if (asf->no_resync_search)
        off = 3;
    else if (s->packet_size > 0)
        off = (avio_tell(pb) - s->data_offset) % s->packet_size + 3;

    c = d = e = -1;
    while (off-- > 0) {
        c = d;
        d = e;
        e = avio_r8(pb);
        if (c == 0x82 && !d && !e)
            break;
    }
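    /* Data packets in a file typically start with the error correction flags
     * byte 0x82 (error correction data present, two bytes of EC data)
     * followed by two zero bytes; the scan above resynchronizes on that
     * pattern, and the checks below verify it. */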
    if (c != 0x82) {
        /* This code allows handling of -EAGAIN at packet boundaries (i.e.
         * if the packet sync code above triggers -EAGAIN). This does not
         * imply complete -EAGAIN handling support at random positions in
         * the stream. */
        if (pb->error == AVERROR(EAGAIN))
            return AVERROR(EAGAIN);
        if (!url_feof(pb))
            av_log(s, AV_LOG_ERROR,
                   "ff asf bad header %x at:%"PRId64"\n", c, avio_tell(pb));
    }
    if ((c & 0x8f) == 0x82) {
        if (d || e) {
            if (!url_feof(pb))
                av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
            return AVERROR_INVALIDDATA;
        }
        c = avio_r8(pb);
        d = avio_r8(pb);
        rsize += 3;
    } else if (!url_feof(pb)) {
        avio_seek(pb, -1, SEEK_CUR); // FIXME
    }

    asf->packet_flags    = c;
    asf->packet_property = d;

    DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
    DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
    DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length

    // the following checks prevent overflows and infinite loops
    if (!packet_length || packet_length >= (1U << 29)) {
        av_log(s, AV_LOG_ERROR,
               "invalid packet_length %"PRIu32" at:%"PRId64"\n",
               packet_length, avio_tell(pb));
        return AVERROR_INVALIDDATA;
    }
    if (padsize >= packet_length) {
        av_log(s, AV_LOG_ERROR,
               "invalid padsize %"PRIu32" at:%"PRId64"\n", padsize, avio_tell(pb));
        return AVERROR_INVALIDDATA;
    }

    asf->packet_timestamp = avio_rl32(pb);
    avio_rl16(pb); /* duration */
    // rsize has at least 11 bytes which have to be present

    if (asf->packet_flags & 0x01) {
        asf->packet_segsizetype = avio_r8(pb);
        rsize++;
        asf->packet_segments = asf->packet_segsizetype & 0x3f;
    } else {
        asf->packet_segments    = 1;
        asf->packet_segsizetype = 0x80;
    }

    if (rsize > packet_length - padsize) {
        asf->packet_size_left = 0;
        av_log(s, AV_LOG_ERROR,
               "invalid packet header length %d for pktlen %"PRIu32"-%"PRIu32" at %"PRId64"\n",
               rsize, packet_length, padsize, avio_tell(pb));
        return AVERROR_INVALIDDATA;
    }

    asf->packet_size_left = packet_length - padsize - rsize;
    if (packet_length < asf->hdr.min_pktsize)
        padsize += asf->hdr.min_pktsize - packet_length;
    asf->packet_padsize = padsize;
    av_dlog(s, "packet: size=%d padsize=%d left=%d\n",
            s->packet_size, asf->packet_padsize, asf->packet_size_left);
    return 0;
}
/**
 * Parse the header of one payload fragment within the current ASF packet.
 *
 * @return <0 if error
 */
static int asf_read_frame_header(AVFormatContext *s, AVIOContext *pb)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asfst;
    int rsize = 1;
    int num = avio_r8(pb);
    int i;
    int64_t ts0, ts1 av_unused;

    asf->packet_segments--;
    asf->packet_key_frame = num >> 7;
    asf->stream_index     = asf->asfid2avid[num & 0x7f];
    asfst                 = &asf->streams[num & 0x7f];
    // sequence should be ignored!
    DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
    DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
    DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
    av_dlog(asf, "key:%d stream:%d seq:%d offset:%d replic_size:%d\n",
            asf->packet_key_frame, asf->stream_index, asf->packet_seq,
            asf->packet_frag_offset, asf->packet_replic_size);
    if (rsize + (int64_t)asf->packet_replic_size > asf->packet_size_left) {
        av_log(s, AV_LOG_ERROR, "packet_replic_size %d is invalid\n",
               asf->packet_replic_size);
        return AVERROR_INVALIDDATA;
    }
    if (asf->packet_replic_size >= 8) {
        int64_t end = avio_tell(pb) + asf->packet_replic_size;
        AVRational aspect;
        asfst->packet_obj_size = avio_rl32(pb);
        if (asfst->packet_obj_size >= (1 << 24) || asfst->packet_obj_size <= 0) {
            av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
            asfst->packet_obj_size = 0;
            return AVERROR_INVALIDDATA;
        }
        asf->packet_frag_timestamp = avio_rl32(pb); // timestamp

        for (i = 0; i < asfst->payload_ext_ct; i++) {
            ASFPayload *p = &asfst->payload[i];
            int size = p->size;
            int64_t payend;
            if (size == 0xFFFF)
                size = avio_rl16(pb);
            payend = avio_tell(pb) + size;
            if (payend > end) {
                av_log(s, AV_LOG_ERROR, "too long payload\n");
                break;
            }
            switch (p->type) {
            case 0x50:
//              duration = avio_rl16(pb);
                break;
            case 0x54:
                aspect.num = avio_r8(pb);
                aspect.den = avio_r8(pb);
                if (aspect.num > 0 && aspect.den > 0 && asf->stream_index >= 0) {
                    s->streams[asf->stream_index]->sample_aspect_ratio = aspect;
                }
                break;
            case 0x2A:
                avio_skip(pb, 8);
                ts0 = avio_rl64(pb);
                ts1 = avio_rl64(pb);
                if (ts0 != -1)
                    asf->packet_frag_timestamp = ts0 / 10000;
                else
                    asf->packet_frag_timestamp = AV_NOPTS_VALUE;
                break;
            case 0x5B:
            case 0xB7:
            case 0xCC:
            case 0xC0:
            case 0xA0:
                // unknown
                break;
            }
            avio_seek(pb, payend, SEEK_SET);
        }

        avio_seek(pb, end, SEEK_SET);
        rsize += asf->packet_replic_size; // FIXME - check validity
    } else if (asf->packet_replic_size == 1) {
        // multipacket - frag_offset is beginning timestamp
        asf->packet_time_start     = asf->packet_frag_offset;
        asf->packet_frag_offset    = 0;
        asf->packet_frag_timestamp = asf->packet_timestamp;

        asf->packet_time_delta = avio_r8(pb);
        rsize++;
    } else if (asf->packet_replic_size != 0) {
        av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n",
               asf->packet_replic_size);
        return AVERROR_INVALIDDATA;
    }
    if (asf->packet_flags & 0x01) {
        DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
        if (rsize > asf->packet_size_left) {
            av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n");
            return AVERROR_INVALIDDATA;
        } else if (asf->packet_frag_size > asf->packet_size_left - rsize) {
            if (asf->packet_frag_size > asf->packet_size_left - rsize + asf->packet_padsize) {
                av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid (%d-%d)\n",
                       asf->packet_size_left, rsize);
                return AVERROR_INVALIDDATA;
            } else {
                int diff = asf->packet_frag_size - (asf->packet_size_left - rsize);
                asf->packet_size_left += diff;
                asf->packet_padsize   -= diff;
            }
        }
    } else {
        asf->packet_frag_size = asf->packet_size_left - rsize;
    }
    if (asf->packet_replic_size == 1) {
        asf->packet_multi_size = asf->packet_frag_size;
        if (asf->packet_multi_size > asf->packet_size_left)
            return AVERROR_INVALIDDATA;
    }
    asf->packet_size_left -= rsize;

    return 0;
}
/**
 * Parse data from individual ASF packets (which were previously loaded
 * with asf_get_packet()).
 * @param s demux context
 * @param pb context to read data from
 * @param pkt pointer to store packet data into
 * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF
 *         packets need to be loaded (through asf_get_packet())
 */
static int asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pkt)
{
    ASFContext *asf   = s->priv_data;
    ASFStream *asf_st = 0;
    for (;;) {
        int ret;
        if (url_feof(pb))
            return AVERROR_EOF;
        if (asf->packet_size_left < FRAME_HEADER_SIZE) {
            int ret = asf->packet_size_left + asf->packet_padsize;
            assert(ret >= 0);
            /* fail safe */
            avio_skip(pb, ret);

            asf->packet_pos = avio_tell(pb);
            if (asf->data_object_size != (uint64_t)-1 &&
                (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
                return AVERROR_EOF; /* Do not exceed the size of the data object */
            return 1;
        }
        if (asf->packet_time_start == 0) {
            if (asf_read_frame_header(s, pb) < 0) {
                asf->packet_time_start = asf->packet_segments = 0;
                continue;
            }
            if (asf->stream_index < 0 ||
                s->streams[asf->stream_index]->discard >= AVDISCARD_ALL ||
                (!asf->packet_key_frame &&
                 (s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY ||
                  asf->streams[s->streams[asf->stream_index]->id].skip_to_key))) {
                asf->packet_time_start = 0;
                /* unhandled packet (should not happen) */
                avio_skip(pb, asf->packet_frag_size);
                asf->packet_size_left -= asf->packet_frag_size;
                if (asf->stream_index < 0)
                    av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n",
                           asf->packet_frag_size);
                continue;
            }
            asf->asf_st = &asf->streams[s->streams[asf->stream_index]->id];
            asf->asf_st->skip_to_key = 0;
        }
        asf_st = asf->asf_st;
        av_assert0(asf_st);

        if (!asf_st->frag_offset && asf->packet_frag_offset) {
            av_dlog(s, "skipping asf data pkt with fragment offset for "
                    "stream:%d, expected:%d but got %d from pkt)\n",
                    asf->stream_index, asf_st->frag_offset,
                    asf->packet_frag_offset);
            avio_skip(pb, asf->packet_frag_size);
            asf->packet_size_left -= asf->packet_frag_size;
            continue;
        }

        if (asf->packet_replic_size == 1) {
            // frag_offset is here used as the beginning timestamp
            asf->packet_frag_timestamp = asf->packet_time_start;
            asf->packet_time_start    += asf->packet_time_delta;
            asf_st->packet_obj_size    = asf->packet_frag_size = avio_r8(pb);
            asf->packet_size_left--;
            asf->packet_multi_size--;
            if (asf->packet_multi_size < asf_st->packet_obj_size) {
                asf->packet_time_start = 0;
                avio_skip(pb, asf->packet_multi_size);
                asf->packet_size_left -= asf->packet_multi_size;
                continue;
            }
            asf->packet_multi_size -= asf_st->packet_obj_size;
        }

        if (asf_st->pkt.size != asf_st->packet_obj_size ||
            // FIXME is this condition sufficient?
            asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) {
            if (asf_st->pkt.data) {
                av_log(s, AV_LOG_INFO,
                       "freeing incomplete packet size %d, new %d\n",
                       asf_st->pkt.size, asf_st->packet_obj_size);
                asf_st->frag_offset = 0;
                av_free_packet(&asf_st->pkt);
            }
            /* new packet */
            av_new_packet(&asf_st->pkt, asf_st->packet_obj_size);
            asf_st->seq              = asf->packet_seq;
            asf_st->pkt.dts          = asf->packet_frag_timestamp - asf->hdr.preroll;
            asf_st->pkt.stream_index = asf->stream_index;
            asf_st->pkt.pos          = asf_st->packet_pos = asf->packet_pos;
            asf_st->pkt_clean        = 0;

            if (asf_st->pkt.data && asf_st->palette_changed) {
                uint8_t *pal;
                pal = av_packet_new_side_data(&asf_st->pkt, AV_PKT_DATA_PALETTE,
                                              AVPALETTE_SIZE);
                if (!pal) {
                    av_log(s, AV_LOG_ERROR, "Cannot append palette to packet\n");
                } else {
                    memcpy(pal, asf_st->palette, AVPALETTE_SIZE);
                    asf_st->palette_changed = 0;
                }
            }
            av_dlog(asf, "new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
                    asf->stream_index, asf->packet_key_frame,
                    asf_st->pkt.flags & AV_PKT_FLAG_KEY,
                    s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO,
                    asf_st->packet_obj_size);
            if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
                asf->packet_key_frame = 1;
            if (asf->packet_key_frame)
                asf_st->pkt.flags |= AV_PKT_FLAG_KEY;
        }

        /* read data */
        av_dlog(asf, "READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
                s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
                asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
        asf->packet_size_left -= asf->packet_frag_size;
        if (asf->packet_size_left < 0)
            continue;

        if (asf->packet_frag_offset >= asf_st->pkt.size ||
            asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset) {
            av_log(s, AV_LOG_ERROR,
                   "packet fragment position invalid %u,%u not in %u\n",
                   asf->packet_frag_offset, asf->packet_frag_size,
                   asf_st->pkt.size);
            continue;
        }

        if (asf->packet_frag_offset != asf_st->frag_offset && !asf_st->pkt_clean) {
            memset(asf_st->pkt.data + asf_st->frag_offset, 0,
                   asf_st->pkt.size - asf_st->frag_offset);
            asf_st->pkt_clean = 1;
        }

        ret = avio_read(pb, asf_st->pkt.data + asf->packet_frag_offset,
                        asf->packet_frag_size);
        if (ret != asf->packet_frag_size) {
            if (ret < 0 || asf->packet_frag_offset + ret == 0)
                return ret < 0 ? ret : AVERROR_EOF;
            if (asf_st->ds_span > 1) {
                // scrambling, we can either drop it completely or fill the remainder
                // TODO: should we fill the whole packet instead of just the current
                // fragment?
                memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0,
                       asf->packet_frag_size - ret);
                ret = asf->packet_frag_size;
            } else {
                // no scrambling, so we can return partial packets
                av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret);
            }
        }
        if (s->key && s->keylen == 20)
            ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
                            ret);
        asf_st->frag_offset += ret;
        /* test if whole packet is read */
        if (asf_st->frag_offset == asf_st->pkt.size) {
            // workaround for macroshit radio DVR-MS files
            if (s->streams[asf->stream_index]->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO &&
                asf_st->pkt.size > 100) {
                int i;
                for (i = 0; i < asf_st->pkt.size && !asf_st->pkt.data[i]; i++)
                    ;
                if (i == asf_st->pkt.size) {
                    av_log(s, AV_LOG_DEBUG, "discarding ms fart\n");
                    asf_st->frag_offset = 0;
                    av_free_packet(&asf_st->pkt);
                    continue;
                }
            }
            /* return packet */
            if (asf_st->ds_span > 1) {
                if (asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span) {
                    av_log(s, AV_LOG_ERROR,
                           "pkt.size != ds_packet_size * ds_span (%d %d %d)\n",
                           asf_st->pkt.size, asf_st->ds_packet_size,
                           asf_st->ds_span);
                } else {
                    /* packet descrambling */
                    AVBufferRef *buf = av_buffer_alloc(asf_st->pkt.size +
                                                       FF_INPUT_BUFFER_PADDING_SIZE);
                    if (buf) {
                        uint8_t *newdata = buf->data;
                        int offset = 0;
                        memset(newdata + asf_st->pkt.size, 0,
                               FF_INPUT_BUFFER_PADDING_SIZE);
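                        /* The scrambled packet consists of ds_span sub-packets
                         * of ds_packet_size bytes, interleaved in chunks of
                         * ds_chunk_size bytes; copy each chunk back to its
                         * linear position to undo the interleaving. */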
                        while (offset < asf_st->pkt.size) {
                            int off = offset / asf_st->ds_chunk_size;
                            int row = off / asf_st->ds_span;
                            int col = off % asf_st->ds_span;
                            int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
                            assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
                            assert(idx + 1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
                            memcpy(newdata + offset,
                                   asf_st->pkt.data + idx * asf_st->ds_chunk_size,
                                   asf_st->ds_chunk_size);
                            offset += asf_st->ds_chunk_size;
                        }
                        av_buffer_unref(&asf_st->pkt.buf);
                        asf_st->pkt.buf  = buf;
                        asf_st->pkt.data = buf->data;
                    }
                }
            }
            asf_st->frag_offset = 0;
            *pkt = asf_st->pkt;
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
            asf_st->pkt.destruct = NULL;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            asf_st->pkt.buf  = 0;
            asf_st->pkt.size = 0;
            asf_st->pkt.data = 0;
            asf_st->pkt.side_data_elems = 0;
            asf_st->pkt.side_data = NULL;
            break; // packet completed
        }
    }
    return 0;
}
static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;

    for (;;) {
        int ret;

        /* parse cached packets, if any */
        if ((ret = asf_parse_packet(s, s->pb, pkt)) <= 0)
            return ret;
        if ((ret = asf_get_packet(s, s->pb)) < 0)
            assert(asf->packet_size_left < FRAME_HEADER_SIZE ||
                   asf->packet_segments < 1);
        asf->packet_time_start = 0;
    }
}
// Added to support seeking after packets have been read
// If information is not reset, read_packet fails due to
// leftover information from previous reads
static void asf_reset_header(AVFormatContext *s)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asf_st;
    int i;

    asf->packet_size_left      = 0;
    asf->packet_flags          = 0;
    asf->packet_property       = 0;
    asf->packet_timestamp      = 0;
    asf->packet_segsizetype    = 0;
    asf->packet_segments       = 0;
    asf->packet_seq            = 0;
    asf->packet_replic_size    = 0;
    asf->packet_key_frame      = 0;
    asf->packet_padsize        = 0;
    asf->packet_frag_offset    = 0;
    asf->packet_frag_size      = 0;
    asf->packet_frag_timestamp = 0;
    asf->packet_multi_size     = 0;
    asf->packet_time_delta     = 0;
    asf->packet_time_start     = 0;

    for (i = 0; i < 128; i++) {
        asf_st = &asf->streams[i];
        av_free_packet(&asf_st->pkt);
        asf_st->packet_obj_size = 0;
        asf_st->frag_offset = 0;
        asf_st->seq         = 0;
    }
    asf->asf_st = NULL;
}
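/* After a seek, mark every video stream so that its payloads are discarded
 * until the next key frame is seen (checked in asf_parse_packet()). */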
static void skip_to_key(AVFormatContext *s)
{
    ASFContext *asf = s->priv_data;
    int i;

    for (i = 0; i < 128; i++) {
        int j = asf->asfid2avid[i];
        ASFStream *asf_st = &asf->streams[i];
        if (j < 0 || s->streams[j]->codec->codec_type != AVMEDIA_TYPE_VIDEO)
            continue;
        asf_st->skip_to_key = 1;
    }
}
static int asf_read_close(AVFormatContext *s)
{
    asf_reset_header(s);

    return 0;
}
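/* read_timestamp callback used via ff_seek_frame_binary(): starting from a
 * packet-aligned position, read frames until a key frame of the requested
 * stream is found, then return its timestamp and update *ppos. */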
static int64_t asf_read_pts(AVFormatContext *s, int stream_index,
                            int64_t *ppos, int64_t pos_limit)
{
    ASFContext *asf = s->priv_data;
    AVPacket pkt1, *pkt = &pkt1;
    ASFStream *asf_st;
    int64_t pts;
    int64_t pos = *ppos;
    int i;
    int64_t start_pos[ASF_MAX_STREAMS];

    for (i = 0; i < s->nb_streams; i++)
        start_pos[i] = pos;

    if (s->packet_size > 0)
        pos = (pos + s->packet_size - 1 - s->data_offset) /
              s->packet_size * s->packet_size +
              s->data_offset;
    *ppos = pos;
    if (avio_seek(s->pb, pos, SEEK_SET) < 0)
        return AV_NOPTS_VALUE;

    ff_read_frame_flush(s);
    asf_reset_header(s);
    for (;;) {
        if (av_read_frame(s, pkt) < 0) {
            av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
            return AV_NOPTS_VALUE;
        }

        pts = pkt->dts;

        av_free_packet(pkt);
        if (pkt->flags & AV_PKT_FLAG_KEY) {
            i = pkt->stream_index;

            asf_st = &asf->streams[s->streams[i]->id];

//          assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
            pos = asf_st->packet_pos;

            av_add_index_entry(s->streams[i], pos, pts, pkt->size,
                               pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
            start_pos[i] = asf_st->packet_pos + 1;

            if (pkt->stream_index == stream_index)
                break;
        }
    }

    *ppos = pos;
    return pts;
}
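/* Locate the Simple Index Object that may follow the Data Object and use its
 * entries (one packet number per index interval) to build an index of
 * key-frame positions for seeking. */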
static int asf_build_simple_index(AVFormatContext *s, int stream_index)
{
    ff_asf_guid g;
    ASFContext *asf = s->priv_data;
    int64_t current_pos = avio_tell(s->pb);
    int ret = 0;

    if ((ret = avio_seek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET)) < 0) {
        return ret;
    }

    if ((ret = ff_get_guid(s->pb, &g)) < 0)
        goto end;

    /* the data object can be followed by other top-level objects,
     * skip them until the simple index object is reached */
    while (ff_guidcmp(&g, &ff_asf_simple_index_header)) {
        int64_t gsize = avio_rl64(s->pb);
        if (gsize < 24 || url_feof(s->pb)) {
            goto end;
        }
        avio_skip(s->pb, gsize - 24);
        if ((ret = ff_get_guid(s->pb, &g)) < 0)
            goto end;
    }

    {
        int64_t itime, last_pos = -1;
        int pct, ict;
        int i;
        int64_t av_unused gsize = avio_rl64(s->pb);
        if ((ret = ff_get_guid(s->pb, &g)) < 0)
            goto end;
        itime = avio_rl64(s->pb);
        pct   = avio_rl32(s->pb);
        ict   = avio_rl32(s->pb);
        av_log(s, AV_LOG_DEBUG,
               "itime:0x%"PRIx64", pct:%d, ict:%d\n", itime, pct, ict);

        for (i = 0; i < ict; i++) {
            int pktnum        = avio_rl32(s->pb);
            int pktct         = avio_rl16(s->pb);
            int64_t pos       = s->data_offset + s->packet_size * (int64_t)pktnum;
            int64_t index_pts = FFMAX(av_rescale(itime, i, 10000) - asf->hdr.preroll, 0);

            if (pos != last_pos) {
                av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d pts: %"PRId64"\n",
                       pktnum, pktct, index_pts);
                av_add_index_entry(s->streams[stream_index], pos, index_pts,
                                   s->packet_size, 0, AVINDEX_KEYFRAME);
                last_pos = pos;
            }
        }
        asf->index_read = ict > 1;
    }
end:
//  if (url_feof(s->pb)) {
//      ret = 0;
//  }
    avio_seek(s->pb, current_pos, SEEK_SET);
    return ret;
}
static int asf_read_seek(AVFormatContext *s, int stream_index,
                         int64_t pts, int flags)
{
    ASFContext *asf = s->priv_data;
    AVStream *st    = s->streams[stream_index];
    int ret = 0;

    if (s->packet_size <= 0)
        return -1;

    /* Try using the protocol's read_seek if available */
    if (s->pb) {
        int ret = avio_seek_time(s->pb, stream_index, pts, flags);
        if (ret >= 0)
            asf_reset_header(s);
        if (ret != AVERROR(ENOSYS))
            return ret;
    }

    /* explicitly handle the case of seeking to 0 */
    if (!pts) {
        asf_reset_header(s);
        avio_seek(s->pb, s->data_offset, SEEK_SET);
        return 0;
    }

    if (!asf->index_read) {
        ret = asf_build_simple_index(s, stream_index);
        if (ret < 0)
            asf->index_read = -1;
    }

    if (asf->index_read > 0 && st->index_entries) {
        int index = av_index_search_timestamp(st, pts, flags);
        if (index >= 0) {
            /* find the position */
            uint64_t pos = st->index_entries[index].pos;

            /* do the seek */
            av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
            if (avio_seek(s->pb, pos, SEEK_SET) < 0)
                return -1;
            asf_reset_header(s);
            skip_to_key(s);
            return 0;
        }
    }
    /* no index or seeking by index failed */
    if (ff_seek_frame_binary(s, stream_index, pts, flags) < 0)
        return -1;
    asf_reset_header(s);
    skip_to_key(s);
    return 0;
}
AVInputFormat ff_asf_demuxer = {
    .name           = "asf",
    .long_name      = NULL_IF_CONFIG_SMALL("ASF (Advanced / Active Streaming Format)"),
    .priv_data_size = sizeof(ASFContext),
    .read_probe     = asf_probe,
    .read_header    = asf_read_header,
    .read_packet    = asf_read_packet,
    .read_close     = asf_read_close,
    .read_seek      = asf_read_seek,
    .read_timestamp = asf_read_pts,
    .flags          = AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH,
    .priv_class     = &asf_class,
};