  1. /*
  2. * ASF compatible demuxer
  3. * Copyright (c) 2000, 2001 Fabrice Bellard
  4. *
  5. * This file is part of Libav.
  6. *
  7. * Libav is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * Libav is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with Libav; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. //#define DEBUG
  22. #include "libavutil/attributes.h"
  23. #include "libavutil/bswap.h"
  24. #include "libavutil/common.h"
  25. #include "libavutil/avstring.h"
  26. #include "libavutil/dict.h"
  27. #include "libavutil/mathematics.h"
  28. #include "libavutil/opt.h"
  29. #include "avformat.h"
  30. #include "internal.h"
  31. #include "avio_internal.h"
  32. #include "id3v2.h"
  33. #include "riff.h"
  34. #include "asf.h"
  35. #include "asfcrypt.h"
  36. #include "avlanguage.h"
  37. typedef struct {
  38. const AVClass *class;
  39. int asfid2avid[128]; ///< conversion table from asf ID to AVStream ID
  40. ASFStream streams[128]; ///< max number of streams; it's not that big
  41. uint32_t stream_bitrates[128]; ///< max number of streams, bitrate for each (for streaming)
  42. AVRational dar[128];
  43. char stream_languages[128][6]; ///< max number of streams, language for each (RFC1766, e.g. en-US)
  44. /* non-streamed additional info */
  45. /* packet filling */
  46. int packet_size_left;
  47. /* only for reading */
  48. uint64_t data_offset; ///< beginning of the first data packet
  49. uint64_t data_object_offset; ///< data object offset (excl. GUID & size)
  50. uint64_t data_object_size; ///< size of the data object
  51. int index_read;
  52. ASFMainHeader hdr;
  53. int packet_flags;
  54. int packet_property;
  55. int packet_timestamp;
  56. int packet_segsizetype;
  57. int packet_segments;
  58. int packet_seq;
  59. int packet_replic_size;
  60. int packet_key_frame;
  61. int packet_padsize;
  62. unsigned int packet_frag_offset;
  63. unsigned int packet_frag_size;
  64. int64_t packet_frag_timestamp;
  65. int packet_multi_size;
  66. int packet_obj_size;
  67. int packet_time_delta;
  68. int packet_time_start;
  69. int64_t packet_pos;
  70. int stream_index;
  71. ASFStream* asf_st; ///< currently decoded stream
  72. int no_resync_search;
  73. } ASFContext;
  74. static const AVOption options[] = {
  75. {"no_resync_search", "Don't try to resynchronize by looking for a certain optional start code", offsetof(ASFContext, no_resync_search), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
  76. { NULL },
  77. };
  78. static const AVClass asf_class = {
  79. .class_name = "asf demuxer",
  80. .item_name = av_default_item_name,
  81. .option = options,
  82. .version = LIBAVUTIL_VERSION_INT,
  83. };
  84. #undef NDEBUG
  85. #include <assert.h>
  86. #define ASF_MAX_STREAMS 127
  87. #define FRAME_HEADER_SIZE 17
  88. // Fix Me! FRAME_HEADER_SIZE may be different.
  89. static const ff_asf_guid index_guid = {
  90. 0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb
  91. };
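/* For reference (a hedged reading of the byte order above): these bytes spell the
 * GUID 33000890-E5B1-11CF-89F4-00A0C90349CB, which appears to be the ASF Simple
 * Index Object identifier that asf_build_simple_index() looks for further below. */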
  92. #ifdef DEBUG
  93. static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
  94. 0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
  95. };
  96. #define PRINT_IF_GUID(g,cmp) \
  97. if (!ff_guidcmp(g, &cmp)) \
  98. av_dlog(NULL, "(GUID: %s) ", #cmp)
  99. static void print_guid(const ff_asf_guid *g)
  100. {
  101. int i;
  102. PRINT_IF_GUID(g, ff_asf_header);
  103. else PRINT_IF_GUID(g, ff_asf_file_header);
  104. else PRINT_IF_GUID(g, ff_asf_stream_header);
  105. else PRINT_IF_GUID(g, ff_asf_audio_stream);
  106. else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
  107. else PRINT_IF_GUID(g, ff_asf_video_stream);
  108. else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
  109. else PRINT_IF_GUID(g, ff_asf_command_stream);
  110. else PRINT_IF_GUID(g, ff_asf_comment_header);
  111. else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
  112. else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
  113. else PRINT_IF_GUID(g, ff_asf_data_header);
  114. else PRINT_IF_GUID(g, index_guid);
  115. else PRINT_IF_GUID(g, ff_asf_head1_guid);
  116. else PRINT_IF_GUID(g, ff_asf_head2_guid);
  117. else PRINT_IF_GUID(g, ff_asf_my_guid);
  118. else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
  119. else PRINT_IF_GUID(g, ff_asf_extended_content_header);
  120. else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
  121. else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
  122. else PRINT_IF_GUID(g, ff_asf_metadata_header);
  123. else PRINT_IF_GUID(g, ff_asf_marker_header);
  124. else PRINT_IF_GUID(g, stream_bitrate_guid);
  125. else PRINT_IF_GUID(g, ff_asf_language_guid);
  126. else
  127. av_dlog(NULL, "(GUID: unknown) ");
  128. for(i=0;i<16;i++)
  129. av_dlog(NULL, " 0x%02x,", (*g)[i]);
  130. av_dlog(NULL, "}\n");
  131. }
  132. #undef PRINT_IF_GUID
  133. #else
  134. #define print_guid(g)
  135. #endif
  136. void ff_get_guid(AVIOContext *s, ff_asf_guid *g)
  137. {
  138. assert(sizeof(*g) == 16);
  139. avio_read(s, *g, sizeof(*g));
  140. }
  141. static int asf_probe(AVProbeData *pd)
  142. {
  143. /* check file header */
  144. if (!ff_guidcmp(pd->buf, &ff_asf_header))
  145. return AVPROBE_SCORE_MAX;
  146. else
  147. return 0;
  148. }
  149. static int get_value(AVIOContext *pb, int type){
  150. switch(type){
  151. case 2: return avio_rl32(pb);
  152. case 3: return avio_rl32(pb);
  153. case 4: return avio_rl64(pb);
  154. case 5: return avio_rl16(pb);
  155. default:return INT_MIN;
  156. }
  157. }
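/* A hedged summary of the extended-content descriptor value types handled here and
 * in get_tag() below: 0 = UTF-16LE string, 1 = byte array, 2 = BOOL (stored as a
 * DWORD), 3 = DWORD, 4 = QWORD, 5 = WORD. get_value() only covers the numeric
 * types 2-5; any other type yields INT_MIN. For example, a type-5 value occupies
 * two bytes and is read with avio_rl16(). */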
  158. /* MSDN claims that this should be "compatible with the ID3 frame, APIC",
  159. * but in reality this is only loosely similar */
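/* As far as can be inferred from the parsing below, the WM/Picture blob is laid
 * out as: picture type (1 byte), picture data size (DWORD, LE), MIME type
 * (null-terminated UTF-16LE string), description (null-terminated UTF-16LE
 * string), then the raw picture bytes. */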
  160. static int asf_read_picture(AVFormatContext *s, int len)
  161. {
  162. AVPacket pkt = { 0 };
  163. const CodecMime *mime = ff_id3v2_mime_tags;
  164. enum AVCodecID id = AV_CODEC_ID_NONE;
  165. char mimetype[64];
  166. uint8_t *desc = NULL;
  167. ASFStream *ast = NULL;
  168. AVStream *st = NULL;
  169. int ret, type, picsize, desc_len;
  170. /* type + picsize + mime + desc */
  171. if (len < 1 + 4 + 2 + 2) {
  172. av_log(s, AV_LOG_ERROR, "Invalid attached picture size: %d.\n", len);
  173. return AVERROR_INVALIDDATA;
  174. }
  175. /* picture type */
  176. type = avio_r8(s->pb);
  177. len--;
  178. if (type >= FF_ARRAY_ELEMS(ff_id3v2_picture_types) || type < 0) {
  179. av_log(s, AV_LOG_WARNING, "Unknown attached picture type: %d.\n", type);
  180. type = 0;
  181. }
  182. /* picture data size */
  183. picsize = avio_rl32(s->pb);
  184. len -= 4;
  185. /* picture MIME type */
  186. len -= avio_get_str16le(s->pb, len, mimetype, sizeof(mimetype));
  187. while (mime->id != AV_CODEC_ID_NONE) {
  188. if (!strncmp(mime->str, mimetype, sizeof(mimetype))) {
  189. id = mime->id;
  190. break;
  191. }
  192. mime++;
  193. }
  194. if (id == AV_CODEC_ID_NONE) {
  195. av_log(s, AV_LOG_ERROR, "Unknown attached picture mimetype: %s.\n",
  196. mimetype);
  197. return 0;
  198. }
  199. if (picsize >= len) {
  200. av_log(s, AV_LOG_ERROR, "Invalid attached picture data size: %d >= %d.\n",
  201. picsize, len);
  202. return AVERROR_INVALIDDATA;
  203. }
  204. /* picture description */
  205. desc_len = (len - picsize) * 2 + 1;
  206. desc = av_malloc(desc_len);
  207. if (!desc)
  208. return AVERROR(ENOMEM);
  209. len -= avio_get_str16le(s->pb, len - picsize, desc, desc_len);
  210. ret = av_get_packet(s->pb, &pkt, picsize);
  211. if (ret < 0)
  212. goto fail;
  213. st = avformat_new_stream(s, NULL);
  214. ast = av_mallocz(sizeof(*ast));
  215. if (!st || !ast) {
  216. ret = AVERROR(ENOMEM);
  217. goto fail;
  218. }
  219. st->priv_data = ast;
  220. st->disposition |= AV_DISPOSITION_ATTACHED_PIC;
  221. st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
  222. st->codec->codec_id = id;
  223. st->attached_pic = pkt;
  224. st->attached_pic.stream_index = st->index;
  225. st->attached_pic.flags |= AV_PKT_FLAG_KEY;
  226. if (*desc)
  227. av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL);
  228. else
  229. av_freep(&desc);
  230. av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0);
  231. return 0;
  232. fail:
  233. av_freep(&ast);
  234. av_freep(&desc);
  235. av_free_packet(&pkt);
  236. return ret;
  237. }
  238. static void get_tag(AVFormatContext *s, const char *key, int type, int len)
  239. {
  240. char *value;
  241. int64_t off = avio_tell(s->pb);
  242. if ((unsigned)len >= (UINT_MAX - 1)/2)
  243. return;
  244. value = av_malloc(2*len+1);
  245. if (!value)
  246. goto finish;
  247. if (type == 0) { // UTF16-LE
  248. avio_get_str16le(s->pb, len, value, 2*len + 1);
  249. } else if (type > 1 && type <= 5) { // boolean or DWORD or QWORD or WORD
  250. uint64_t num = get_value(s->pb, type);
  251. snprintf(value, len, "%"PRIu64, num);
  252. } else if (type == 1 && !strcmp(key, "WM/Picture")) { // handle cover art
  253. asf_read_picture(s, len);
  254. goto finish;
  255. } else {
  256. av_log(s, AV_LOG_DEBUG, "Unsupported value type %d in tag %s.\n", type, key);
  257. goto finish;
  258. }
  259. if (*value)
  260. av_dict_set(&s->metadata, key, value, 0);
  261. finish:
  262. av_freep(&value);
  263. avio_seek(s->pb, off + len, SEEK_SET);
  264. }
  265. static int asf_read_file_properties(AVFormatContext *s, int64_t size)
  266. {
  267. ASFContext *asf = s->priv_data;
  268. AVIOContext *pb = s->pb;
  269. ff_get_guid(pb, &asf->hdr.guid);
  270. asf->hdr.file_size = avio_rl64(pb);
  271. asf->hdr.create_time = avio_rl64(pb);
  272. avio_rl64(pb); /* number of packets */
  273. asf->hdr.play_time = avio_rl64(pb);
  274. asf->hdr.send_time = avio_rl64(pb);
  275. asf->hdr.preroll = avio_rl32(pb);
  276. asf->hdr.ignore = avio_rl32(pb);
  277. asf->hdr.flags = avio_rl32(pb);
  278. asf->hdr.min_pktsize = avio_rl32(pb);
  279. asf->hdr.max_pktsize = avio_rl32(pb);
  280. if (asf->hdr.min_pktsize >= (1U<<29))
  281. return AVERROR_INVALIDDATA;
  282. asf->hdr.max_bitrate = avio_rl32(pb);
  283. s->packet_size = asf->hdr.max_pktsize;
  284. return 0;
  285. }
  286. static int asf_read_stream_properties(AVFormatContext *s, int64_t size)
  287. {
  288. ASFContext *asf = s->priv_data;
  289. AVIOContext *pb = s->pb;
  290. AVStream *st;
  291. ASFStream *asf_st;
  292. ff_asf_guid g;
  293. enum AVMediaType type;
  294. int type_specific_size, sizeX;
  295. unsigned int tag1;
  296. int64_t pos1, pos2, start_time;
  297. int test_for_ext_stream_audio, is_dvr_ms_audio=0;
  298. if (s->nb_streams == ASF_MAX_STREAMS) {
  299. av_log(s, AV_LOG_ERROR, "too many streams\n");
  300. return AVERROR(EINVAL);
  301. }
  302. pos1 = avio_tell(pb);
  303. st = avformat_new_stream(s, NULL);
  304. if (!st)
  305. return AVERROR(ENOMEM);
  306. avpriv_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
  307. asf_st = av_mallocz(sizeof(ASFStream));
  308. if (!asf_st)
  309. return AVERROR(ENOMEM);
  310. st->priv_data = asf_st;
  311. st->start_time = 0;
  312. start_time = asf->hdr.preroll;
  313. asf_st->stream_language_index = 128; // invalid stream index means no language info
  314. if(!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
  315. st->duration = asf->hdr.play_time /
  316. (10000000 / 1000) - start_time;
  317. }
  318. ff_get_guid(pb, &g);
  319. test_for_ext_stream_audio = 0;
  320. if (!ff_guidcmp(&g, &ff_asf_audio_stream)) {
  321. type = AVMEDIA_TYPE_AUDIO;
  322. } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) {
  323. type = AVMEDIA_TYPE_VIDEO;
  324. } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) {
  325. type = AVMEDIA_TYPE_VIDEO;
  326. st->codec->codec_id = AV_CODEC_ID_MJPEG;
  327. } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) {
  328. type = AVMEDIA_TYPE_DATA;
  329. } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
  330. test_for_ext_stream_audio = 1;
  331. type = AVMEDIA_TYPE_UNKNOWN;
  332. } else {
  333. return -1;
  334. }
  335. ff_get_guid(pb, &g);
  336. avio_skip(pb, 8); /* total_size */
  337. type_specific_size = avio_rl32(pb);
  338. avio_rl32(pb);
  339. st->id = avio_rl16(pb) & 0x7f; /* stream id */
  340. // mapping of asf ID to AV stream ID;
  341. asf->asfid2avid[st->id] = s->nb_streams - 1;
  342. avio_rl32(pb);
  343. if (test_for_ext_stream_audio) {
  344. ff_get_guid(pb, &g);
  345. if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
  346. type = AVMEDIA_TYPE_AUDIO;
  347. is_dvr_ms_audio=1;
  348. ff_get_guid(pb, &g);
  349. avio_rl32(pb);
  350. avio_rl32(pb);
  351. avio_rl32(pb);
  352. ff_get_guid(pb, &g);
  353. avio_rl32(pb);
  354. }
  355. }
  356. st->codec->codec_type = type;
  357. if (type == AVMEDIA_TYPE_AUDIO) {
  358. int ret = ff_get_wav_header(pb, st->codec, type_specific_size);
  359. if (ret < 0)
  360. return ret;
  361. if (is_dvr_ms_audio) {
  362. // codec_id and codec_tag are unreliable in dvr_ms
  363. // files. Set them later by probing the stream.
  364. st->codec->codec_id = AV_CODEC_ID_PROBE;
  365. st->codec->codec_tag = 0;
  366. }
  367. if (st->codec->codec_id == AV_CODEC_ID_AAC) {
  368. st->need_parsing = AVSTREAM_PARSE_NONE;
  369. } else {
  370. st->need_parsing = AVSTREAM_PARSE_FULL;
  371. }
  372. /* We have to init the frame size at some point .... */
  373. pos2 = avio_tell(pb);
  374. if (size >= (pos2 + 8 - pos1 + 24)) {
  375. asf_st->ds_span = avio_r8(pb);
  376. asf_st->ds_packet_size = avio_rl16(pb);
  377. asf_st->ds_chunk_size = avio_rl16(pb);
  378. avio_rl16(pb); //ds_data_size
  379. avio_r8(pb); //ds_silence_data
  380. }
  381. if (asf_st->ds_span > 1) {
  382. if (!asf_st->ds_chunk_size
  383. || (asf_st->ds_packet_size/asf_st->ds_chunk_size <= 1)
  384. || asf_st->ds_packet_size % asf_st->ds_chunk_size)
  385. asf_st->ds_span = 0; // disable descrambling
  386. }
  387. } else if (type == AVMEDIA_TYPE_VIDEO &&
  388. size - (avio_tell(pb) - pos1 + 24) >= 51) {
  389. avio_rl32(pb);
  390. avio_rl32(pb);
  391. avio_r8(pb);
  392. avio_rl16(pb); /* size */
  393. sizeX= avio_rl32(pb); /* size */
  394. st->codec->width = avio_rl32(pb);
  395. st->codec->height = avio_rl32(pb);
  396. /* not available for asf */
  397. avio_rl16(pb); /* planes */
  398. st->codec->bits_per_coded_sample = avio_rl16(pb); /* depth */
  399. tag1 = avio_rl32(pb);
  400. avio_skip(pb, 20);
  401. if (sizeX > 40) {
  402. st->codec->extradata_size = sizeX - 40;
  403. st->codec->extradata = av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
  404. avio_read(pb, st->codec->extradata, st->codec->extradata_size);
  405. }
  406. /* Extract palette from extradata if bpp <= 8 */
  407. /* This code assumes that extradata contains only palette */
  408. /* This is true for all paletted codecs implemented in libavcodec */
  409. if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
  410. #if HAVE_BIGENDIAN
  411. int i;
  412. for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)/4; i++)
  413. asf_st->palette[i] = av_bswap32(((uint32_t*)st->codec->extradata)[i]);
  414. #else
  415. memcpy(asf_st->palette, st->codec->extradata,
  416. FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
  417. #endif
  418. asf_st->palette_changed = 1;
  419. }
  420. st->codec->codec_tag = tag1;
  421. st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1);
  422. if(tag1 == MKTAG('D', 'V', 'R', ' ')){
  423. st->need_parsing = AVSTREAM_PARSE_FULL;
  424. // issue658 contains wrong w/h and MS even puts a fake seq header with wrong w/h in extradata while a correct one is in the stream. maximum lameness
  425. st->codec->width =
  426. st->codec->height = 0;
  427. av_freep(&st->codec->extradata);
  428. st->codec->extradata_size=0;
  429. }
  430. if(st->codec->codec_id == AV_CODEC_ID_H264)
  431. st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
  432. }
  433. pos2 = avio_tell(pb);
  434. avio_skip(pb, size - (pos2 - pos1 + 24));
  435. return 0;
  436. }
  437. static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size)
  438. {
  439. ASFContext *asf = s->priv_data;
  440. AVIOContext *pb = s->pb;
  441. ff_asf_guid g;
  442. int ext_len, payload_ext_ct, stream_ct, i;
  443. uint32_t leak_rate, stream_num;
  444. unsigned int stream_languageid_index;
  445. avio_rl64(pb); // starttime
  446. avio_rl64(pb); // endtime
  447. leak_rate = avio_rl32(pb); // leak-datarate
  448. avio_rl32(pb); // bucket-datasize
  449. avio_rl32(pb); // init-bucket-fullness
  450. avio_rl32(pb); // alt-leak-datarate
  451. avio_rl32(pb); // alt-bucket-datasize
  452. avio_rl32(pb); // alt-init-bucket-fullness
  453. avio_rl32(pb); // max-object-size
  454. avio_rl32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
  455. stream_num = avio_rl16(pb); // stream-num
  456. stream_languageid_index = avio_rl16(pb); // stream-language-id-index
  457. if (stream_num < 128)
  458. asf->streams[stream_num].stream_language_index = stream_languageid_index;
  459. avio_rl64(pb); // avg frametime in 100ns units
  460. stream_ct = avio_rl16(pb); //stream-name-count
  461. payload_ext_ct = avio_rl16(pb); //payload-extension-system-count
  462. if (stream_num < 128)
  463. asf->stream_bitrates[stream_num] = leak_rate;
  464. for (i=0; i<stream_ct; i++){
  465. avio_rl16(pb);
  466. ext_len = avio_rl16(pb);
  467. avio_skip(pb, ext_len);
  468. }
  469. for (i=0; i<payload_ext_ct; i++){
  470. ff_get_guid(pb, &g);
  471. avio_skip(pb, 2);
  472. ext_len=avio_rl32(pb);
  473. avio_skip(pb, ext_len);
  474. }
  475. return 0;
  476. }
  477. static int asf_read_content_desc(AVFormatContext *s, int64_t size)
  478. {
  479. AVIOContext *pb = s->pb;
  480. int len1, len2, len3, len4, len5;
  481. len1 = avio_rl16(pb);
  482. len2 = avio_rl16(pb);
  483. len3 = avio_rl16(pb);
  484. len4 = avio_rl16(pb);
  485. len5 = avio_rl16(pb);
  486. get_tag(s, "title" , 0, len1);
  487. get_tag(s, "author" , 0, len2);
  488. get_tag(s, "copyright", 0, len3);
  489. get_tag(s, "comment" , 0, len4);
  490. avio_skip(pb, len5);
  491. return 0;
  492. }
  493. static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size)
  494. {
  495. AVIOContext *pb = s->pb;
  496. ASFContext *asf = s->priv_data;
  497. int desc_count, i, ret;
  498. desc_count = avio_rl16(pb);
  499. for(i=0;i<desc_count;i++) {
  500. int name_len,value_type,value_len;
  501. char name[1024];
  502. name_len = avio_rl16(pb);
  503. if (name_len%2) // must be even, broken lavf versions wrote len-1
  504. name_len += 1;
  505. if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
  506. avio_skip(pb, name_len - ret);
  507. value_type = avio_rl16(pb);
  508. value_len = avio_rl16(pb);
  509. if (!value_type && value_len%2)
  510. value_len += 1;
  511. /**
  512. * My sample has that stream set to 0; maybe that means the container.
  513. * ASF stream counts start at 1, so 0 is used as the container value since it's unused.
  514. */
  515. if (!strcmp(name, "AspectRatioX")){
  516. asf->dar[0].num= get_value(s->pb, value_type);
  517. } else if(!strcmp(name, "AspectRatioY")){
  518. asf->dar[0].den= get_value(s->pb, value_type);
  519. } else
  520. get_tag(s, name, value_type, value_len);
  521. }
  522. return 0;
  523. }
  524. static int asf_read_language_list(AVFormatContext *s, int64_t size)
  525. {
  526. AVIOContext *pb = s->pb;
  527. ASFContext *asf = s->priv_data;
  528. int j, ret;
  529. int stream_count = avio_rl16(pb);
  530. for(j = 0; j < stream_count; j++) {
  531. char lang[6];
  532. unsigned int lang_len = avio_r8(pb);
  533. if ((ret = avio_get_str16le(pb, lang_len, lang, sizeof(lang))) < lang_len)
  534. avio_skip(pb, lang_len - ret);
  535. if (j < 128)
  536. av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
  537. }
  538. return 0;
  539. }
  540. static int asf_read_metadata(AVFormatContext *s, int64_t size)
  541. {
  542. AVIOContext *pb = s->pb;
  543. ASFContext *asf = s->priv_data;
  544. int n, stream_num, name_len, value_len, value_num;
  545. int ret, i;
  546. n = avio_rl16(pb);
  547. for(i=0;i<n;i++) {
  548. char name[1024];
  549. int av_unused value_type;
  550. avio_rl16(pb); //lang_list_index
  551. stream_num= avio_rl16(pb);
  552. name_len= avio_rl16(pb);
  553. value_type = avio_rl16(pb); /* value_type */
  554. value_len= avio_rl32(pb);
  555. if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
  556. avio_skip(pb, name_len - ret);
  557. av_dlog(s, "%d %d %d %d %d <%s>\n",
  558. i, stream_num, name_len, value_type, value_len, name);
  559. value_num= avio_rl16(pb); // we should use get_value() here, but it does not work: type 2 is le16 here but le32 elsewhere
  560. avio_skip(pb, value_len - 2);
  561. if(stream_num<128){
  562. if (!strcmp(name, "AspectRatioX")) asf->dar[stream_num].num= value_num;
  563. else if(!strcmp(name, "AspectRatioY")) asf->dar[stream_num].den= value_num;
  564. }
  565. }
  566. return 0;
  567. }
  568. static int asf_read_marker(AVFormatContext *s, int64_t size)
  569. {
  570. AVIOContext *pb = s->pb;
  571. int i, count, name_len, ret;
  572. char name[1024];
  573. avio_rl64(pb); // reserved 16 bytes
  574. avio_rl64(pb); // ...
  575. count = avio_rl32(pb); // markers count
  576. avio_rl16(pb); // reserved 2 bytes
  577. name_len = avio_rl16(pb); // name length
  578. for(i=0;i<name_len;i++){
  579. avio_r8(pb); // skip the name
  580. }
  581. for(i=0;i<count;i++){
  582. int64_t pres_time;
  583. int name_len;
  584. avio_rl64(pb); // offset, 8 bytes
  585. pres_time = avio_rl64(pb); // presentation time
  586. avio_rl16(pb); // entry length
  587. avio_rl32(pb); // send time
  588. avio_rl32(pb); // flags
  589. name_len = avio_rl32(pb); // name length
  590. if ((ret = avio_get_str16le(pb, name_len * 2, name, sizeof(name))) < name_len)
  591. avio_skip(pb, name_len - ret);
  592. avpriv_new_chapter(s, i, (AVRational){1, 10000000}, pres_time, AV_NOPTS_VALUE, name );
  593. }
  594. return 0;
  595. }
  596. static int asf_read_header(AVFormatContext *s)
  597. {
  598. ASFContext *asf = s->priv_data;
  599. ff_asf_guid g;
  600. AVIOContext *pb = s->pb;
  601. int i;
  602. int64_t gsize;
  603. ff_get_guid(pb, &g);
  604. if (ff_guidcmp(&g, &ff_asf_header))
  605. return -1;
  606. avio_rl64(pb);
  607. avio_rl32(pb);
  608. avio_r8(pb);
  609. avio_r8(pb);
  610. memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
  611. for(;;) {
  612. uint64_t gpos= avio_tell(pb);
  613. ff_get_guid(pb, &g);
  614. gsize = avio_rl64(pb);
  615. print_guid(&g);
  616. if (!ff_guidcmp(&g, &ff_asf_data_header)) {
  617. asf->data_object_offset = avio_tell(pb);
  618. // if not streaming, gsize is not unlimited (how?), and there is enough space in the file..
  619. if (!(asf->hdr.flags & 0x01) && gsize >= 100) {
  620. asf->data_object_size = gsize - 24;
  621. } else {
  622. asf->data_object_size = (uint64_t)-1;
  623. }
  624. break;
  625. }
  626. if (gsize < 24)
  627. return -1;
  628. if (!ff_guidcmp(&g, &ff_asf_file_header)) {
  629. int ret = asf_read_file_properties(s, gsize);
  630. if (ret < 0)
  631. return ret;
  632. } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) {
  633. asf_read_stream_properties(s, gsize);
  634. } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) {
  635. asf_read_content_desc(s, gsize);
  636. } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) {
  637. asf_read_language_list(s, gsize);
  638. } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) {
  639. asf_read_ext_content_desc(s, gsize);
  640. } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) {
  641. asf_read_metadata(s, gsize);
  642. } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
  643. asf_read_ext_stream_properties(s, gsize);
  644. // there could be an optional stream properties object to follow;
  645. // if so, the next iteration will pick it up
  646. continue;
  647. } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
  648. ff_get_guid(pb, &g);
  649. avio_skip(pb, 6);
  650. continue;
  651. } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) {
  652. asf_read_marker(s, gsize);
  653. } else if (pb->eof_reached) {
  654. return -1;
  655. } else {
  656. if (!s->keylen) {
  657. if (!ff_guidcmp(&g, &ff_asf_content_encryption)) {
  658. av_log(s, AV_LOG_WARNING, "DRM protected stream detected, decoding will likely fail!\n");
  659. } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) {
  660. av_log(s, AV_LOG_WARNING, "Ext DRM protected stream detected, decoding will likely fail!\n");
  661. } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) {
  662. av_log(s, AV_LOG_WARNING, "Digital signature detected, decoding will likely fail!\n");
  663. }
  664. }
  665. }
  666. if(avio_tell(pb) != gpos + gsize)
  667. av_log(s, AV_LOG_DEBUG, "gpos mismatch our pos=%"PRIu64", end=%"PRIu64"\n", avio_tell(pb)-gpos, gsize);
  668. avio_seek(pb, gpos + gsize, SEEK_SET);
  669. }
  670. ff_get_guid(pb, &g);
  671. avio_rl64(pb);
  672. avio_r8(pb);
  673. avio_r8(pb);
  674. if (pb->eof_reached)
  675. return -1;
  676. asf->data_offset = avio_tell(pb);
  677. asf->packet_size_left = 0;
  678. for(i=0; i<128; i++){
  679. int stream_num= asf->asfid2avid[i];
  680. if(stream_num>=0){
  681. AVStream *st = s->streams[stream_num];
  682. if (!st->codec->bit_rate)
  683. st->codec->bit_rate = asf->stream_bitrates[i];
  684. if (asf->dar[i].num > 0 && asf->dar[i].den > 0){
  685. av_reduce(&st->sample_aspect_ratio.num,
  686. &st->sample_aspect_ratio.den,
  687. asf->dar[i].num, asf->dar[i].den, INT_MAX);
  688. } else if ((asf->dar[0].num > 0) && (asf->dar[0].den > 0) && (st->codec->codec_type==AVMEDIA_TYPE_VIDEO)) // Use the ASF container value if the stream doesn't have an AR set.
  689. av_reduce(&st->sample_aspect_ratio.num,
  690. &st->sample_aspect_ratio.den,
  691. asf->dar[0].num, asf->dar[0].den, INT_MAX);
  692. av_dlog(s, "i=%d, st->codec->codec_type:%d, asf->dar %d:%d sar=%d:%d\n",
  693. i, st->codec->codec_type, asf->dar[i].num, asf->dar[i].den,
  694. st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);
  695. // copy and convert language codes to the frontend
  696. if (asf->streams[i].stream_language_index < 128) {
  697. const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
  698. if (rfc1766 && strlen(rfc1766) > 1) {
  699. const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
  700. const char *iso6392 = av_convert_lang_to(primary_tag, AV_LANG_ISO639_2_BIBL);
  701. if (iso6392)
  702. av_dict_set(&st->metadata, "language", iso6392, 0);
  703. }
  704. }
  705. }
  706. }
  707. ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv);
  708. return 0;
  709. }
  710. #define DO_2BITS(bits, var, defval) \
  711. switch (bits & 3) \
  712. { \
  713. case 3: var = avio_rl32(pb); rsize += 4; break; \
  714. case 2: var = avio_rl16(pb); rsize += 2; break; \
  715. case 1: var = avio_r8(pb); rsize++; break; \
  716. default: var = defval; break; \
  717. }
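/* A worked example of the 2-bit length-type fields this macro decodes (hedged
 * against the flag layout implied by the shifts used below): in the packet flags
 * byte read in ff_asf_get_packet(), bits 5-6 select the packet length coding,
 * bits 3-4 the padding length coding, bits 1-2 the sequence coding, and bit 0
 * marks multiple payloads. A coding of 0 means "absent, use the default",
 * 1 = BYTE, 2 = WORD, 3 = DWORD. So a flags byte of 0x5D (0101 1101b) would mean:
 * multiple payloads present, sequence stored as a WORD, padding length as a
 * DWORD, and packet length as a WORD. */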
  718. /**
  719. * Load a single ASF packet into the demuxer.
  720. * @param s demux context
  721. * @param pb context to read data from
  722. * @return 0 on success, <0 on error
  723. */
  724. static int ff_asf_get_packet(AVFormatContext *s, AVIOContext *pb)
  725. {
  726. ASFContext *asf = s->priv_data;
  727. uint32_t packet_length, padsize;
  728. int rsize = 8;
  729. int c, d, e, off;
  730. // if we do not know packet size, allow skipping up to 32 kB
  731. off= 32768;
  732. if (asf->no_resync_search)
  733. off = 3;
  734. else if (s->packet_size > 0)
  735. off= (avio_tell(pb) - s->data_offset) % s->packet_size + 3;
  736. c=d=e=-1;
  737. while(off-- > 0){
  738. c=d; d=e;
  739. e= avio_r8(pb);
  740. if(c == 0x82 && !d && !e)
  741. break;
  742. }
  743. if (c != 0x82) {
  744. /**
  745. * This code allows handling of -EAGAIN at packet boundaries (i.e.
  746. * if the packet sync code above triggers -EAGAIN). This does not
  747. * imply complete -EAGAIN handling support at random positions in
  748. * the stream.
  749. */
  750. if (pb->error == AVERROR(EAGAIN))
  751. return AVERROR(EAGAIN);
  752. if (!pb->eof_reached)
  753. av_log(s, AV_LOG_ERROR, "ff asf bad header %x at:%"PRId64"\n", c, avio_tell(pb));
  754. }
  755. if ((c & 0x8f) == 0x82) {
  756. if (d || e) {
  757. if (!pb->eof_reached)
  758. av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
  759. return -1;
  760. }
  761. c= avio_r8(pb);
  762. d= avio_r8(pb);
  763. rsize+=3;
  764. } else if (!pb->eof_reached) {
  765. avio_seek(pb, -1, SEEK_CUR); //FIXME
  766. }
  767. asf->packet_flags = c;
  768. asf->packet_property = d;
  769. DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
  770. DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
  771. DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length
  772. //the following checks prevent overflows and infinite loops
  773. if(!packet_length || packet_length >= (1U<<29)){
  774. av_log(s, AV_LOG_ERROR, "invalid packet_length %d at:%"PRId64"\n", packet_length, avio_tell(pb));
  775. return -1;
  776. }
  777. if(padsize >= packet_length){
  778. av_log(s, AV_LOG_ERROR, "invalid padsize %d at:%"PRId64"\n", padsize, avio_tell(pb));
  779. return -1;
  780. }
  781. asf->packet_timestamp = avio_rl32(pb);
  782. avio_rl16(pb); /* duration */
  783. // rsize now accounts for at least 11 header bytes which have to be present
  784. if (asf->packet_flags & 0x01) {
  785. asf->packet_segsizetype = avio_r8(pb); rsize++;
  786. asf->packet_segments = asf->packet_segsizetype & 0x3f;
  787. } else {
  788. asf->packet_segments = 1;
  789. asf->packet_segsizetype = 0x80;
  790. }
  791. if (rsize > packet_length - padsize) {
  792. asf->packet_size_left = 0;
  793. av_log(s, AV_LOG_ERROR,
  794. "invalid packet header length %d for pktlen %d-%d at %"PRId64"\n",
  795. rsize, packet_length, padsize, avio_tell(pb));
  796. return -1;
  797. }
  798. asf->packet_size_left = packet_length - padsize - rsize;
  799. if (packet_length < asf->hdr.min_pktsize)
  800. padsize += asf->hdr.min_pktsize - packet_length;
  801. asf->packet_padsize = padsize;
  802. av_dlog(s, "packet: size=%d padsize=%d left=%d\n", s->packet_size, asf->packet_padsize, asf->packet_size_left);
  803. return 0;
  804. }
  805. /**
  806. * Parse the header of the next payload (fragment) in the current ASF packet.
  807. * @return <0 if error
  808. */
  809. static int asf_read_frame_header(AVFormatContext *s, AVIOContext *pb){
  810. ASFContext *asf = s->priv_data;
  811. int rsize = 1;
  812. int num = avio_r8(pb);
  813. int64_t ts0;
  814. asf->packet_segments--;
  815. asf->packet_key_frame = num >> 7;
  816. asf->stream_index = asf->asfid2avid[num & 0x7f];
  817. // sequence should be ignored!
  818. DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
  819. DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
  820. DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
  821. av_dlog(asf, "key:%d stream:%d seq:%d offset:%d replic_size:%d\n",
  822. asf->packet_key_frame, asf->stream_index, asf->packet_seq,
  823. asf->packet_frag_offset, asf->packet_replic_size);
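/* What follows is the replicated data attached to this payload. Judging from the
 * reads below: when packet_replic_size >= 8, the first DWORD is the media object
 * size and the second DWORD its presentation time in ms; when it is exactly 1,
 * the payload is "compressed", the offset field above actually carries the
 * presentation time, and the single replicated byte is the per-subpayload time
 * delta. */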
  824. if (asf->packet_replic_size >= 8) {
  825. asf->packet_obj_size = avio_rl32(pb);
  826. if(asf->packet_obj_size >= (1<<24) || asf->packet_obj_size <= 0){
  827. av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
  828. return -1;
  829. }
  830. asf->packet_frag_timestamp = avio_rl32(pb); // timestamp
  831. if(asf->packet_replic_size >= 8+38+4){
  832. avio_skip(pb, 10);
  833. ts0= avio_rl64(pb);
  834. avio_skip(pb, 8);
  835. avio_skip(pb, 12);
  836. avio_rl32(pb);
  837. avio_skip(pb, asf->packet_replic_size - 8 - 38 - 4);
  838. if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000;
  839. else asf->packet_frag_timestamp= AV_NOPTS_VALUE;
  840. }else
  841. avio_skip(pb, asf->packet_replic_size - 8);
  842. rsize += asf->packet_replic_size; // FIXME - check validity
  843. } else if (asf->packet_replic_size==1){
  844. // multipacket - frag_offset is beginning timestamp
  845. asf->packet_time_start = asf->packet_frag_offset;
  846. asf->packet_frag_offset = 0;
  847. asf->packet_frag_timestamp = asf->packet_timestamp;
  848. asf->packet_time_delta = avio_r8(pb);
  849. rsize++;
  850. }else if(asf->packet_replic_size!=0){
  851. av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", asf->packet_replic_size);
  852. return -1;
  853. }
  854. if (asf->packet_flags & 0x01) {
  855. DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
  856. if (rsize > asf->packet_size_left) {
  857. av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n");
  858. return -1;
  859. } else if(asf->packet_frag_size > asf->packet_size_left - rsize){
  860. if (asf->packet_frag_size > asf->packet_size_left - rsize + asf->packet_padsize) {
  861. av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid (%d-%d)\n", asf->packet_size_left, rsize);
  862. return -1;
  863. } else {
  864. int diff = asf->packet_frag_size - (asf->packet_size_left - rsize);
  865. asf->packet_size_left += diff;
  866. asf->packet_padsize -= diff;
  867. }
  868. }
  869. } else {
  870. if (rsize > asf->packet_size_left) {
  871. av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n");
  872. return -1;
  873. }
  874. asf->packet_frag_size = asf->packet_size_left - rsize;
  875. }
  876. if (asf->packet_replic_size == 1) {
  877. asf->packet_multi_size = asf->packet_frag_size;
  878. if (asf->packet_multi_size > asf->packet_size_left)
  879. return -1;
  880. }
  881. asf->packet_size_left -= rsize;
  882. return 0;
  883. }
  884. /**
  885. * Parse data from individual ASF packets (which were previously loaded
  886. * with asf_get_packet()).
  887. * @param s demux context
  888. * @param pb context to read data from
  889. * @param pkt pointer to store packet data into
  890. * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF
  891. * packets need to be loaded (through asf_get_packet())
  892. */
  893. static int ff_asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pkt)
  894. {
  895. ASFContext *asf = s->priv_data;
  896. ASFStream *asf_st = 0;
  897. for (;;) {
  898. int ret;
  899. if(pb->eof_reached)
  900. return AVERROR_EOF;
  901. if (asf->packet_size_left < FRAME_HEADER_SIZE
  902. || asf->packet_segments < 1) {
  903. //asf->packet_size_left <= asf->packet_padsize) {
  904. int ret = asf->packet_size_left + asf->packet_padsize;
  905. assert(ret>=0);
  906. /* fail safe */
  907. avio_skip(pb, ret);
  908. asf->packet_pos= avio_tell(pb);
  909. if (asf->data_object_size != (uint64_t)-1 &&
  910. (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
  911. return AVERROR_EOF; /* Do not exceed the size of the data object */
  912. return 1;
  913. }
  914. if (asf->packet_time_start == 0) {
  915. if(asf_read_frame_header(s, pb) < 0){
  916. asf->packet_segments= 0;
  917. continue;
  918. }
  919. if (asf->stream_index < 0
  920. || s->streams[asf->stream_index]->discard >= AVDISCARD_ALL
  921. || (!asf->packet_key_frame && s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)
  922. ) {
  923. asf->packet_time_start = 0;
  924. /* unhandled packet (should not happen) */
  925. avio_skip(pb, asf->packet_frag_size);
  926. asf->packet_size_left -= asf->packet_frag_size;
  927. if(asf->stream_index < 0)
  928. av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
  929. continue;
  930. }
  931. asf->asf_st = s->streams[asf->stream_index]->priv_data;
  932. }
  933. asf_st = asf->asf_st;
  934. if (asf->packet_replic_size == 1) {
  935. // frag_offset is here used as the beginning timestamp
  936. asf->packet_frag_timestamp = asf->packet_time_start;
  937. asf->packet_time_start += asf->packet_time_delta;
  938. asf->packet_obj_size = asf->packet_frag_size = avio_r8(pb);
  939. asf->packet_size_left--;
  940. asf->packet_multi_size--;
  941. if (asf->packet_multi_size < asf->packet_obj_size)
  942. {
  943. asf->packet_time_start = 0;
  944. avio_skip(pb, asf->packet_multi_size);
  945. asf->packet_size_left -= asf->packet_multi_size;
  946. continue;
  947. }
  948. asf->packet_multi_size -= asf->packet_obj_size;
  949. }
  950. if( /*asf->packet_frag_size == asf->packet_obj_size*/
  951. asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size
  952. && asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size){
  953. av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n",
  954. asf_st->frag_offset, asf->packet_frag_size,
  955. asf->packet_obj_size, asf_st->pkt.size);
  956. asf->packet_obj_size= asf_st->pkt.size;
  957. }
  958. if ( asf_st->pkt.size != asf->packet_obj_size
  959. || asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { //FIXME is this condition sufficient?
  960. if(asf_st->pkt.data){
  961. av_log(s, AV_LOG_INFO, "freeing incomplete packet size %d, new %d\n", asf_st->pkt.size, asf->packet_obj_size);
  962. asf_st->frag_offset = 0;
  963. av_free_packet(&asf_st->pkt);
  964. }
  965. /* new packet */
  966. av_new_packet(&asf_st->pkt, asf->packet_obj_size);
  967. asf_st->seq = asf->packet_seq;
  968. asf_st->pkt.dts = asf->packet_frag_timestamp - asf->hdr.preroll;
  969. asf_st->pkt.stream_index = asf->stream_index;
  970. asf_st->pkt.pos =
  971. asf_st->packet_pos= asf->packet_pos;
  972. if (asf_st->pkt.data && asf_st->palette_changed) {
  973. uint8_t *pal;
  974. pal = av_packet_new_side_data(&asf_st->pkt, AV_PKT_DATA_PALETTE,
  975. AVPALETTE_SIZE);
  976. if (!pal) {
  977. av_log(s, AV_LOG_ERROR, "Cannot append palette to packet\n");
  978. } else {
  979. memcpy(pal, asf_st->palette, AVPALETTE_SIZE);
  980. asf_st->palette_changed = 0;
  981. }
  982. }
  983. av_dlog(asf, "new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
  984. asf->stream_index, asf->packet_key_frame,
  985. asf_st->pkt.flags & AV_PKT_FLAG_KEY,
  986. s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO,
  987. asf->packet_obj_size);
  988. if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
  989. asf->packet_key_frame = 1;
  990. if (asf->packet_key_frame)
  991. asf_st->pkt.flags |= AV_PKT_FLAG_KEY;
  992. }
  993. /* read data */
  994. av_dlog(asf, "READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
  995. s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
  996. asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
  997. asf->packet_size_left -= asf->packet_frag_size;
  998. if (asf->packet_size_left < 0)
  999. continue;
  1000. if( asf->packet_frag_offset >= asf_st->pkt.size
  1001. || asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset){
  1002. av_log(s, AV_LOG_ERROR, "packet fragment position invalid %u,%u not in %u\n",
  1003. asf->packet_frag_offset, asf->packet_frag_size, asf_st->pkt.size);
  1004. continue;
  1005. }
  1006. ret = avio_read(pb, asf_st->pkt.data + asf->packet_frag_offset,
  1007. asf->packet_frag_size);
  1008. if (ret != asf->packet_frag_size) {
  1009. if (ret < 0 || asf->packet_frag_offset + ret == 0)
  1010. return ret < 0 ? ret : AVERROR_EOF;
  1011. if (asf_st->ds_span > 1) {
  1012. // scrambling, we can either drop it completely or fill the remainder
  1013. // TODO: should we fill the whole packet instead of just the current
  1014. // fragment?
  1015. memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0,
  1016. asf->packet_frag_size - ret);
  1017. ret = asf->packet_frag_size;
  1018. } else
  1019. // no scrambling, so we can return partial packets
  1020. av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret);
  1021. }
  1022. if (s->key && s->keylen == 20)
  1023. ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
  1024. ret);
  1025. asf_st->frag_offset += ret;
  1026. /* test if whole packet is read */
  1027. if (asf_st->frag_offset == asf_st->pkt.size) {
  1028. //workaround for macroshit radio DVR-MS files
  1029. if( s->streams[asf->stream_index]->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO
  1030. && asf_st->pkt.size > 100){
  1031. int i;
  1032. for(i=0; i<asf_st->pkt.size && !asf_st->pkt.data[i]; i++);
  1033. if(i == asf_st->pkt.size){
  1034. av_log(s, AV_LOG_DEBUG, "discarding ms fart\n");
  1035. asf_st->frag_offset = 0;
  1036. av_free_packet(&asf_st->pkt);
  1037. continue;
  1038. }
  1039. }
  1040. /* return packet */
  1041. if (asf_st->ds_span > 1) {
  1042. if(asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span){
  1043. av_log(s, AV_LOG_ERROR, "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", asf_st->pkt.size, asf_st->ds_packet_size, asf_st->ds_span);
  1044. }else{
  1045. /* packet descrambling */
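/* A small hedged illustration of the de-interleave performed here: output chunk n
 * is copied from input chunk (n / ds_span) + (n % ds_span) * (ds_packet_size /
 * ds_chunk_size). With ds_span = 3, ds_packet_size = 6 and ds_chunk_size = 2, the
 * output therefore reads input chunks in the order 0, 3, 6, 1, 4, 7, 2, 5, 8 --
 * effectively transposing a 3x3 matrix of chunks. */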
  1046. uint8_t *newdata = av_malloc(asf_st->pkt.size + FF_INPUT_BUFFER_PADDING_SIZE);
  1047. if (newdata) {
  1048. int offset = 0;
  1049. memset(newdata + asf_st->pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
  1050. while (offset < asf_st->pkt.size) {
  1051. int off = offset / asf_st->ds_chunk_size;
  1052. int row = off / asf_st->ds_span;
  1053. int col = off % asf_st->ds_span;
  1054. int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
  1055. assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
  1056. assert(idx+1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
  1057. memcpy(newdata + offset,
  1058. asf_st->pkt.data + idx * asf_st->ds_chunk_size,
  1059. asf_st->ds_chunk_size);
  1060. offset += asf_st->ds_chunk_size;
  1061. }
  1062. av_free(asf_st->pkt.data);
  1063. asf_st->pkt.data = newdata;
  1064. }
  1065. }
  1066. }
  1067. asf_st->frag_offset = 0;
  1068. *pkt= asf_st->pkt;
  1069. asf_st->pkt.size = 0;
  1070. asf_st->pkt.data = 0;
  1071. asf_st->pkt.side_data_elems = 0;
  1072. asf_st->pkt.side_data = NULL;
  1073. break; // packet completed
  1074. }
  1075. }
  1076. return 0;
  1077. }
  1078. static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
  1079. {
  1080. ASFContext *asf = s->priv_data;
  1081. for (;;) {
  1082. int ret;
  1083. /* parse cached packets, if any */
  1084. if ((ret = ff_asf_parse_packet(s, s->pb, pkt)) <= 0)
  1085. return ret;
  1086. if ((ret = ff_asf_get_packet(s, s->pb)) < 0)
  1087. assert(asf->packet_size_left < FRAME_HEADER_SIZE || asf->packet_segments < 1);
  1088. asf->packet_time_start = 0;
  1089. }
  1090. }
  1091. // Added to support seeking after packets have been read
  1092. // If information is not reset, read_packet fails due to
  1093. // leftover information from previous reads
  1094. static void asf_reset_header(AVFormatContext *s)
  1095. {
  1096. ASFContext *asf = s->priv_data;
  1097. ASFStream *asf_st;
  1098. int i;
  1099. asf->packet_size_left = 0;
  1100. asf->packet_segments = 0;
  1101. asf->packet_flags = 0;
  1102. asf->packet_property = 0;
  1103. asf->packet_timestamp = 0;
  1104. asf->packet_segsizetype = 0;
  1105. asf->packet_segments = 0;
  1106. asf->packet_seq = 0;
  1107. asf->packet_replic_size = 0;
  1108. asf->packet_key_frame = 0;
  1109. asf->packet_padsize = 0;
  1110. asf->packet_frag_offset = 0;
  1111. asf->packet_frag_size = 0;
  1112. asf->packet_frag_timestamp = 0;
  1113. asf->packet_multi_size = 0;
  1114. asf->packet_obj_size = 0;
  1115. asf->packet_time_delta = 0;
  1116. asf->packet_time_start = 0;
  1117. for(i=0; i<s->nb_streams; i++){
  1118. asf_st= s->streams[i]->priv_data;
  1119. av_free_packet(&asf_st->pkt);
  1120. asf_st->frag_offset=0;
  1121. asf_st->seq=0;
  1122. }
  1123. asf->asf_st= NULL;
  1124. }
  1125. static int asf_read_close(AVFormatContext *s)
  1126. {
  1127. asf_reset_header(s);
  1128. return 0;
  1129. }
  1130. static int64_t asf_read_pts(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit)
  1131. {
  1132. AVPacket pkt1, *pkt = &pkt1;
  1133. ASFStream *asf_st;
  1134. int64_t pts;
  1135. int64_t pos= *ppos;
  1136. int i;
  1137. int64_t start_pos[ASF_MAX_STREAMS];
  1138. for(i=0; i<s->nb_streams; i++){
  1139. start_pos[i]= pos;
  1140. }
  1141. if (s->packet_size > 0)
  1142. pos= (pos+s->packet_size-1-s->data_offset)/s->packet_size*s->packet_size+ s->data_offset;
  1143. *ppos= pos;
  1144. avio_seek(s->pb, pos, SEEK_SET);
  1145. asf_reset_header(s);
  1146. for(;;){
  1147. if (asf_read_packet(s, pkt) < 0){
  1148. av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
  1149. return AV_NOPTS_VALUE;
  1150. }
  1151. pts = pkt->dts;
  1152. av_free_packet(pkt);
  1153. if(pkt->flags&AV_PKT_FLAG_KEY){
  1154. i= pkt->stream_index;
  1155. asf_st= s->streams[i]->priv_data;
  1156. // assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
  1157. pos= asf_st->packet_pos;
  1158. av_add_index_entry(s->streams[i], pos, pts, pkt->size, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
  1159. start_pos[i]= asf_st->packet_pos + 1;
  1160. if(pkt->stream_index == stream_index)
  1161. break;
  1162. }
  1163. }
  1164. *ppos= pos;
  1165. return pts;
  1166. }
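/* The function below walks the top-level objects that follow the data object and,
 * once the simple index object is found, parses what appear to be its fields: a
 * file ID GUID, the time interval per index entry ("itime", in 100 ns units), the
 * maximum packet count ("pct") and the entry count ("ict"); each entry then holds
 * a packet number (DWORD) and a packet span (WORD). Entry i is registered at a
 * pts of roughly i * itime / 10000 ms, minus the preroll. */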
  1167. static void asf_build_simple_index(AVFormatContext *s, int stream_index)
  1168. {
  1169. ff_asf_guid g;
  1170. ASFContext *asf = s->priv_data;
  1171. int64_t current_pos= avio_tell(s->pb);
  1172. int i;
  1173. avio_seek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET);
  1174. ff_get_guid(s->pb, &g);
  1175. /* the data object can be followed by other top-level objects,
  1176. skip them until the simple index object is reached */
  1177. while (ff_guidcmp(&g, &index_guid)) {
  1178. int64_t gsize= avio_rl64(s->pb);
  1179. if (gsize < 24 || s->pb->eof_reached) {
  1180. avio_seek(s->pb, current_pos, SEEK_SET);
  1181. return;
  1182. }
  1183. avio_skip(s->pb, gsize-24);
  1184. ff_get_guid(s->pb, &g);
  1185. }
  1186. {
  1187. int64_t itime, last_pos=-1;
  1188. int pct, ict;
  1189. int64_t av_unused gsize= avio_rl64(s->pb);
  1190. ff_get_guid(s->pb, &g);
  1191. itime=avio_rl64(s->pb);
  1192. pct=avio_rl32(s->pb);
  1193. ict=avio_rl32(s->pb);
  1194. av_log(s, AV_LOG_DEBUG, "itime:0x%"PRIx64", pct:%d, ict:%d\n",itime,pct,ict);
  1195. for (i=0;i<ict;i++){
  1196. int pktnum=avio_rl32(s->pb);
  1197. int pktct =avio_rl16(s->pb);
  1198. int64_t pos = s->data_offset + s->packet_size*(int64_t)pktnum;
  1199. int64_t index_pts= FFMAX(av_rescale(itime, i, 10000) - asf->hdr.preroll, 0);
  1200. if(pos != last_pos){
  1201. av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d pts: %"PRId64"\n", pktnum, pktct, index_pts);
  1202. av_add_index_entry(s->streams[stream_index], pos, index_pts, s->packet_size, 0, AVINDEX_KEYFRAME);
  1203. last_pos=pos;
  1204. }
  1205. }
  1206. asf->index_read= ict > 0;
  1207. }
  1208. avio_seek(s->pb, current_pos, SEEK_SET);
  1209. }
  1210. static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags)
  1211. {
  1212. ASFContext *asf = s->priv_data;
  1213. AVStream *st = s->streams[stream_index];
  1214. int64_t pos;
  1215. int index;
  1216. if (s->packet_size <= 0)
  1217. return -1;
  1218. /* Try using the protocol's read_seek if available */
  1219. if(s->pb) {
  1220. int ret = avio_seek_time(s->pb, stream_index, pts, flags);
  1221. if(ret >= 0)
  1222. asf_reset_header(s);
  1223. if (ret != AVERROR(ENOSYS))
  1224. return ret;
  1225. }
  1226. if (!asf->index_read)
  1227. asf_build_simple_index(s, stream_index);
  1228. if((asf->index_read && st->index_entries)){
  1229. index= av_index_search_timestamp(st, pts, flags);
  1230. if(index >= 0) {
  1231. /* find the position */
  1232. pos = st->index_entries[index].pos;
  1233. /* do the seek */
  1234. av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
  1235. avio_seek(s->pb, pos, SEEK_SET);
  1236. asf_reset_header(s);
  1237. return 0;
  1238. }
  1239. }
  1240. /* no index or seeking by index failed */
  1241. if (ff_seek_frame_binary(s, stream_index, pts, flags) < 0)
  1242. return -1;
  1243. asf_reset_header(s);
  1244. return 0;
  1245. }
  1246. AVInputFormat ff_asf_demuxer = {
  1247. .name = "asf",
  1248. .long_name = NULL_IF_CONFIG_SMALL("ASF (Advanced / Active Streaming Format)"),
  1249. .priv_data_size = sizeof(ASFContext),
  1250. .read_probe = asf_probe,
  1251. .read_header = asf_read_header,
  1252. .read_packet = asf_read_packet,
  1253. .read_close = asf_read_close,
  1254. .read_seek = asf_read_seek,
  1255. .read_timestamp = asf_read_pts,
  1256. .flags = AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH,
  1257. .priv_class = &asf_class,
  1258. };