  1. /*
  2. * ASF compatible demuxer
  3. * Copyright (c) 2000, 2001 Fabrice Bellard
  4. *
  5. * This file is part of Libav.
  6. *
  7. * Libav is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * Libav is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with Libav; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. //#define DEBUG
  22. #include "libavutil/bswap.h"
  23. #include "libavutil/common.h"
  24. #include "libavutil/avstring.h"
  25. #include "libavutil/dict.h"
  26. #include "libavutil/mathematics.h"
  27. #include "libavcodec/mpegaudio.h"
  28. #include "avformat.h"
  29. #include "internal.h"
  30. #include "avio_internal.h"
  31. #include "riff.h"
  32. #include "asf.h"
  33. #include "asfcrypt.h"
  34. #include "avlanguage.h"
  35. typedef struct {
36. int asfid2avid[128]; ///< conversion table from ASF stream ID to AVStream ID
37. ASFStream streams[128]; ///< one entry per possible stream; the maximum is not that big
38. uint32_t stream_bitrates[128]; ///< max number of streams, bitrate for each (for streaming)
39. AVRational dar[128];
40. char stream_languages[128][6]; ///< max number of streams, language for each (RFC 1766, e.g. en-US)
41. /* non-streamed additional info */
  42. /* packet filling */
  43. int packet_size_left;
  44. /* only for reading */
  45. uint64_t data_offset; ///< beginning of the first data packet
  46. uint64_t data_object_offset; ///< data object offset (excl. GUID & size)
  47. uint64_t data_object_size; ///< size of the data object
  48. int index_read;
  49. ASFMainHeader hdr;
  50. int packet_flags;
  51. int packet_property;
  52. int packet_timestamp;
  53. int packet_segsizetype;
  54. int packet_segments;
  55. int packet_seq;
  56. int packet_replic_size;
  57. int packet_key_frame;
  58. int packet_padsize;
  59. unsigned int packet_frag_offset;
  60. unsigned int packet_frag_size;
  61. int64_t packet_frag_timestamp;
  62. int packet_multi_size;
  63. int packet_obj_size;
  64. int packet_time_delta;
  65. int packet_time_start;
  66. int64_t packet_pos;
  67. int stream_index;
  68. ASFStream* asf_st; ///< currently decoded stream
  69. } ASFContext;
  70. #undef NDEBUG
  71. #include <assert.h>
  72. #define ASF_MAX_STREAMS 127
  73. #define FRAME_HEADER_SIZE 17
74. // FIXME: FRAME_HEADER_SIZE may be different.
  75. static const ff_asf_guid index_guid = {
  76. 0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb
  77. };
  78. #ifdef DEBUG
  79. static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
  80. 0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
  81. };
  82. #define PRINT_IF_GUID(g,cmp) \
  83. if (!ff_guidcmp(g, &cmp)) \
  84. av_dlog(NULL, "(GUID: %s) ", #cmp)
  85. static void print_guid(const ff_asf_guid *g)
  86. {
  87. int i;
  88. PRINT_IF_GUID(g, ff_asf_header);
  89. else PRINT_IF_GUID(g, ff_asf_file_header);
  90. else PRINT_IF_GUID(g, ff_asf_stream_header);
  91. else PRINT_IF_GUID(g, ff_asf_audio_stream);
  92. else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
  93. else PRINT_IF_GUID(g, ff_asf_video_stream);
  94. else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
  95. else PRINT_IF_GUID(g, ff_asf_command_stream);
  96. else PRINT_IF_GUID(g, ff_asf_comment_header);
  97. else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
  98. else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
  99. else PRINT_IF_GUID(g, ff_asf_data_header);
  100. else PRINT_IF_GUID(g, index_guid);
  101. else PRINT_IF_GUID(g, ff_asf_head1_guid);
  102. else PRINT_IF_GUID(g, ff_asf_head2_guid);
  103. else PRINT_IF_GUID(g, ff_asf_my_guid);
  104. else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
  105. else PRINT_IF_GUID(g, ff_asf_extended_content_header);
  106. else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
  107. else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
  108. else PRINT_IF_GUID(g, ff_asf_metadata_header);
  109. else PRINT_IF_GUID(g, ff_asf_marker_header);
  110. else PRINT_IF_GUID(g, stream_bitrate_guid);
  111. else PRINT_IF_GUID(g, ff_asf_language_guid);
  112. else
  113. av_dlog(NULL, "(GUID: unknown) ");
  114. for(i=0;i<16;i++)
  115. av_dlog(NULL, " 0x%02x,", (*g)[i]);
  116. av_dlog(NULL, "}\n");
  117. }
  118. #undef PRINT_IF_GUID
  119. #else
  120. #define print_guid(g)
  121. #endif
  122. void ff_get_guid(AVIOContext *s, ff_asf_guid *g)
  123. {
  124. assert(sizeof(*g) == 16);
  125. avio_read(s, *g, sizeof(*g));
  126. }
  127. static int asf_probe(AVProbeData *pd)
  128. {
  129. /* check file header */
  130. if (!ff_guidcmp(pd->buf, &ff_asf_header))
  131. return AVPROBE_SCORE_MAX;
  132. else
  133. return 0;
  134. }
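/* Read one attribute value of the given ASF value type: 2 (BOOL) and 3 (DWORD) are read as 32-bit,
   4 (QWORD) as 64-bit, 5 (WORD) as 16-bit; any other type yields INT_MIN. */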
  135. static int get_value(AVIOContext *pb, int type){
  136. switch(type){
  137. case 2: return avio_rl32(pb);
  138. case 3: return avio_rl32(pb);
  139. case 4: return avio_rl64(pb);
  140. case 5: return avio_rl16(pb);
  141. default:return INT_MIN;
  142. }
  143. }
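/* Read a single tag value (UTF-16LE string or numeric type) and store it in the
   format context's metadata dictionary under the given key. */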
  144. static void get_tag(AVFormatContext *s, const char *key, int type, int len)
  145. {
  146. char *value;
  147. int64_t off = avio_tell(s->pb);
  148. if ((unsigned)len >= (UINT_MAX - 1)/2)
  149. return;
  150. value = av_malloc(2*len+1);
  151. if (!value)
  152. goto finish;
  153. if (type == 0) { // UTF16-LE
  154. avio_get_str16le(s->pb, len, value, 2*len + 1);
  155. } else if (type > 1 && type <= 5) { // boolean or DWORD or QWORD or WORD
  156. uint64_t num = get_value(s->pb, type);
  157. snprintf(value, len, "%"PRIu64, num);
  158. } else {
  159. av_log(s, AV_LOG_DEBUG, "Unsupported value type %d in tag %s.\n", type, key);
  160. goto finish;
  161. }
  162. if (*value)
  163. av_dict_set(&s->metadata, key, value, 0);
  164. finish:
  165. av_freep(&value);
  166. avio_seek(s->pb, off + len, SEEK_SET);
  167. }
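/* Parse the File Properties Object: global file size, timing, flags and packet size limits. */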
  168. static int asf_read_file_properties(AVFormatContext *s, int64_t size)
  169. {
  170. ASFContext *asf = s->priv_data;
  171. AVIOContext *pb = s->pb;
  172. ff_get_guid(pb, &asf->hdr.guid);
  173. asf->hdr.file_size = avio_rl64(pb);
  174. asf->hdr.create_time = avio_rl64(pb);
  175. avio_rl64(pb); /* number of packets */
  176. asf->hdr.play_time = avio_rl64(pb);
  177. asf->hdr.send_time = avio_rl64(pb);
  178. asf->hdr.preroll = avio_rl32(pb);
  179. asf->hdr.ignore = avio_rl32(pb);
  180. asf->hdr.flags = avio_rl32(pb);
  181. asf->hdr.min_pktsize = avio_rl32(pb);
  182. asf->hdr.max_pktsize = avio_rl32(pb);
  183. asf->hdr.max_bitrate = avio_rl32(pb);
  184. s->packet_size = asf->hdr.max_pktsize;
  185. return 0;
  186. }
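/* Parse a Stream Properties Object and set up the corresponding AVStream
   (codec parameters, extradata, palette, audio descrambling parameters). */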
  187. static int asf_read_stream_properties(AVFormatContext *s, int64_t size)
  188. {
  189. ASFContext *asf = s->priv_data;
  190. AVIOContext *pb = s->pb;
  191. AVStream *st;
  192. ASFStream *asf_st;
  193. ff_asf_guid g;
  194. enum AVMediaType type;
  195. int type_specific_size, sizeX;
  196. unsigned int tag1;
  197. int64_t pos1, pos2, start_time;
  198. int test_for_ext_stream_audio, is_dvr_ms_audio=0;
  199. if (s->nb_streams == ASF_MAX_STREAMS) {
  200. av_log(s, AV_LOG_ERROR, "too many streams\n");
  201. return AVERROR(EINVAL);
  202. }
  203. pos1 = avio_tell(pb);
  204. st = avformat_new_stream(s, NULL);
  205. if (!st)
  206. return AVERROR(ENOMEM);
  207. avpriv_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
  208. asf_st = av_mallocz(sizeof(ASFStream));
  209. if (!asf_st)
  210. return AVERROR(ENOMEM);
  211. st->priv_data = asf_st;
  212. st->start_time = 0;
  213. start_time = asf->hdr.preroll;
  214. asf_st->stream_language_index = 128; // invalid stream index means no language info
  215. if(!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
  216. st->duration = asf->hdr.play_time /
  217. (10000000 / 1000) - start_time;
  218. }
  219. ff_get_guid(pb, &g);
  220. test_for_ext_stream_audio = 0;
  221. if (!ff_guidcmp(&g, &ff_asf_audio_stream)) {
  222. type = AVMEDIA_TYPE_AUDIO;
  223. } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) {
  224. type = AVMEDIA_TYPE_VIDEO;
  225. } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) {
  226. type = AVMEDIA_TYPE_VIDEO;
  227. st->codec->codec_id = CODEC_ID_MJPEG;
  228. } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) {
  229. type = AVMEDIA_TYPE_DATA;
  230. } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
  231. test_for_ext_stream_audio = 1;
  232. type = AVMEDIA_TYPE_UNKNOWN;
  233. } else {
  234. return -1;
  235. }
  236. ff_get_guid(pb, &g);
  237. avio_skip(pb, 8); /* total_size */
  238. type_specific_size = avio_rl32(pb);
  239. avio_rl32(pb);
  240. st->id = avio_rl16(pb) & 0x7f; /* stream id */
241. // mapping of ASF stream ID to AVStream index
  242. asf->asfid2avid[st->id] = s->nb_streams - 1;
  243. avio_rl32(pb);
  244. if (test_for_ext_stream_audio) {
  245. ff_get_guid(pb, &g);
  246. if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
  247. type = AVMEDIA_TYPE_AUDIO;
  248. is_dvr_ms_audio=1;
  249. ff_get_guid(pb, &g);
  250. avio_rl32(pb);
  251. avio_rl32(pb);
  252. avio_rl32(pb);
  253. ff_get_guid(pb, &g);
  254. avio_rl32(pb);
  255. }
  256. }
  257. st->codec->codec_type = type;
  258. if (type == AVMEDIA_TYPE_AUDIO) {
  259. int ret = ff_get_wav_header(pb, st->codec, type_specific_size);
  260. if (ret < 0)
  261. return ret;
  262. if (is_dvr_ms_audio) {
  263. // codec_id and codec_tag are unreliable in dvr_ms
  264. // files. Set them later by probing stream.
  265. st->codec->codec_id = CODEC_ID_PROBE;
  266. st->codec->codec_tag = 0;
  267. }
  268. if (st->codec->codec_id == CODEC_ID_AAC) {
  269. st->need_parsing = AVSTREAM_PARSE_NONE;
  270. } else {
  271. st->need_parsing = AVSTREAM_PARSE_FULL;
  272. }
  273. /* We have to init the frame size at some point .... */
  274. pos2 = avio_tell(pb);
  275. if (size >= (pos2 + 8 - pos1 + 24)) {
  276. asf_st->ds_span = avio_r8(pb);
  277. asf_st->ds_packet_size = avio_rl16(pb);
  278. asf_st->ds_chunk_size = avio_rl16(pb);
  279. avio_rl16(pb); //ds_data_size
  280. avio_r8(pb); //ds_silence_data
  281. }
  282. //printf("Descrambling: ps:%d cs:%d ds:%d s:%d sd:%d\n",
  283. // asf_st->ds_packet_size, asf_st->ds_chunk_size,
  284. // asf_st->ds_data_size, asf_st->ds_span, asf_st->ds_silence_data);
  285. if (asf_st->ds_span > 1) {
  286. if (!asf_st->ds_chunk_size
  287. || (asf_st->ds_packet_size/asf_st->ds_chunk_size <= 1)
  288. || asf_st->ds_packet_size % asf_st->ds_chunk_size)
  289. asf_st->ds_span = 0; // disable descrambling
  290. }
  291. switch (st->codec->codec_id) {
  292. case CODEC_ID_MP3:
  293. st->codec->frame_size = MPA_FRAME_SIZE;
  294. break;
  295. case CODEC_ID_PCM_S16LE:
  296. case CODEC_ID_PCM_S16BE:
  297. case CODEC_ID_PCM_U16LE:
  298. case CODEC_ID_PCM_U16BE:
  299. case CODEC_ID_PCM_S8:
  300. case CODEC_ID_PCM_U8:
  301. case CODEC_ID_PCM_ALAW:
  302. case CODEC_ID_PCM_MULAW:
  303. st->codec->frame_size = 1;
  304. break;
  305. default:
  306. /* This is probably wrong, but it prevents a crash later */
  307. st->codec->frame_size = 1;
  308. break;
  309. }
  310. } else if (type == AVMEDIA_TYPE_VIDEO &&
  311. size - (avio_tell(pb) - pos1 + 24) >= 51) {
  312. avio_rl32(pb);
  313. avio_rl32(pb);
  314. avio_r8(pb);
  315. avio_rl16(pb); /* size */
  316. sizeX= avio_rl32(pb); /* size */
  317. st->codec->width = avio_rl32(pb);
  318. st->codec->height = avio_rl32(pb);
  319. /* not available for asf */
320. avio_rl16(pb); /* planes */
  321. st->codec->bits_per_coded_sample = avio_rl16(pb); /* depth */
  322. tag1 = avio_rl32(pb);
  323. avio_skip(pb, 20);
  324. // av_log(s, AV_LOG_DEBUG, "size:%d tsize:%d sizeX:%d\n", size, total_size, sizeX);
  325. if (sizeX > 40) {
  326. st->codec->extradata_size = sizeX - 40;
  327. st->codec->extradata = av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
  328. avio_read(pb, st->codec->extradata, st->codec->extradata_size);
  329. }
  330. /* Extract palette from extradata if bpp <= 8 */
  331. /* This code assumes that extradata contains only palette */
  332. /* This is true for all paletted codecs implemented in libavcodec */
  333. if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
  334. int av_unused i;
  335. #if HAVE_BIGENDIAN
  336. for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)/4; i++)
  337. asf_st->palette[i] = av_bswap32(((uint32_t*)st->codec->extradata)[i]);
  338. #else
  339. memcpy(asf_st->palette, st->codec->extradata,
  340. FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
  341. #endif
  342. asf_st->palette_changed = 1;
  343. }
  344. st->codec->codec_tag = tag1;
  345. st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1);
  346. if(tag1 == MKTAG('D', 'V', 'R', ' ')){
  347. st->need_parsing = AVSTREAM_PARSE_FULL;
348. // issue658 contains a wrong w/h and MS even puts a fake sequence header with a wrong w/h in the extradata, while a correct one is in the stream
  349. st->codec->width =
  350. st->codec->height = 0;
  351. av_freep(&st->codec->extradata);
  352. st->codec->extradata_size=0;
  353. }
  354. if(st->codec->codec_id == CODEC_ID_H264)
  355. st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
  356. }
  357. pos2 = avio_tell(pb);
  358. avio_skip(pb, size - (pos2 - pos1 + 24));
  359. return 0;
  360. }
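/* Parse an Extended Stream Properties Object: record the per-stream bitrate and language index,
   and skip the stream names and payload extension systems. */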
  361. static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size)
  362. {
  363. ASFContext *asf = s->priv_data;
  364. AVIOContext *pb = s->pb;
  365. ff_asf_guid g;
  366. int ext_len, payload_ext_ct, stream_ct, i;
  367. uint32_t leak_rate, stream_num;
  368. unsigned int stream_languageid_index;
  369. avio_rl64(pb); // starttime
  370. avio_rl64(pb); // endtime
  371. leak_rate = avio_rl32(pb); // leak-datarate
  372. avio_rl32(pb); // bucket-datasize
  373. avio_rl32(pb); // init-bucket-fullness
  374. avio_rl32(pb); // alt-leak-datarate
  375. avio_rl32(pb); // alt-bucket-datasize
  376. avio_rl32(pb); // alt-init-bucket-fullness
  377. avio_rl32(pb); // max-object-size
  378. avio_rl32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
  379. stream_num = avio_rl16(pb); // stream-num
  380. stream_languageid_index = avio_rl16(pb); // stream-language-id-index
  381. if (stream_num < 128)
  382. asf->streams[stream_num].stream_language_index = stream_languageid_index;
  383. avio_rl64(pb); // avg frametime in 100ns units
  384. stream_ct = avio_rl16(pb); //stream-name-count
  385. payload_ext_ct = avio_rl16(pb); //payload-extension-system-count
  386. if (stream_num < 128)
  387. asf->stream_bitrates[stream_num] = leak_rate;
  388. for (i=0; i<stream_ct; i++){
  389. avio_rl16(pb);
  390. ext_len = avio_rl16(pb);
  391. avio_skip(pb, ext_len);
  392. }
  393. for (i=0; i<payload_ext_ct; i++){
  394. ff_get_guid(pb, &g);
  395. avio_skip(pb, 2);
  396. ext_len=avio_rl32(pb);
  397. avio_skip(pb, ext_len);
  398. }
  399. return 0;
  400. }
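/* Parse the Content Description Object: title, author, copyright and comment;
   the data for the fifth length field is skipped. */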
  401. static int asf_read_content_desc(AVFormatContext *s, int64_t size)
  402. {
  403. AVIOContext *pb = s->pb;
  404. int len1, len2, len3, len4, len5;
  405. len1 = avio_rl16(pb);
  406. len2 = avio_rl16(pb);
  407. len3 = avio_rl16(pb);
  408. len4 = avio_rl16(pb);
  409. len5 = avio_rl16(pb);
  410. get_tag(s, "title" , 0, len1);
  411. get_tag(s, "author" , 0, len2);
  412. get_tag(s, "copyright", 0, len3);
  413. get_tag(s, "comment" , 0, len4);
  414. avio_skip(pb, len5);
  415. return 0;
  416. }
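/* Parse the Extended Content Description Object: arbitrary name/value pairs,
   with AspectRatioX/Y handled specially as the container-level aspect ratio. */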
  417. static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size)
  418. {
  419. AVIOContext *pb = s->pb;
  420. ASFContext *asf = s->priv_data;
  421. int desc_count, i, ret;
  422. desc_count = avio_rl16(pb);
  423. for(i=0;i<desc_count;i++) {
  424. int name_len,value_type,value_len;
  425. char name[1024];
  426. name_len = avio_rl16(pb);
  427. if (name_len%2) // must be even, broken lavf versions wrote len-1
  428. name_len += 1;
  429. if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
  430. avio_skip(pb, name_len - ret);
  431. value_type = avio_rl16(pb);
  432. value_len = avio_rl16(pb);
  433. if (!value_type && value_len%2)
  434. value_len += 1;
435. /**
436. * My sample has the stream number set to 0, which may refer to the container itself.
437. * ASF stream numbering starts at 1, so 0 is used here for the container value since it is otherwise unused.
438. */
  439. if (!strcmp(name, "AspectRatioX")){
  440. asf->dar[0].num= get_value(s->pb, value_type);
  441. } else if(!strcmp(name, "AspectRatioY")){
  442. asf->dar[0].den= get_value(s->pb, value_type);
  443. } else
  444. get_tag(s, name, value_type, value_len);
  445. }
  446. return 0;
  447. }
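/* Parse the Language List Object: one language tag (RFC 1766) per entry, stored per list index. */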
  448. static int asf_read_language_list(AVFormatContext *s, int64_t size)
  449. {
  450. AVIOContext *pb = s->pb;
  451. ASFContext *asf = s->priv_data;
  452. int j, ret;
  453. int stream_count = avio_rl16(pb);
  454. for(j = 0; j < stream_count; j++) {
  455. char lang[6];
  456. unsigned int lang_len = avio_r8(pb);
  457. if ((ret = avio_get_str16le(pb, lang_len, lang, sizeof(lang))) < lang_len)
  458. avio_skip(pb, lang_len - ret);
  459. if (j < 128)
  460. av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
  461. }
  462. return 0;
  463. }
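/* Parse the Metadata Object; only the per-stream AspectRatioX/Y entries are currently used. */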
  464. static int asf_read_metadata(AVFormatContext *s, int64_t size)
  465. {
  466. AVIOContext *pb = s->pb;
  467. ASFContext *asf = s->priv_data;
  468. int n, stream_num, name_len, value_len, value_num;
  469. int ret, i;
  470. n = avio_rl16(pb);
  471. for(i=0;i<n;i++) {
  472. char name[1024];
  473. avio_rl16(pb); //lang_list_index
  474. stream_num= avio_rl16(pb);
  475. name_len= avio_rl16(pb);
  476. avio_skip(pb, 2); /* value_type */
  477. value_len= avio_rl32(pb);
  478. if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
  479. avio_skip(pb, name_len - ret);
  480. //av_log(s, AV_LOG_ERROR, "%d %d %d %d %d <%s>\n", i, stream_num, name_len, value_type, value_len, name);
481. value_num= avio_rl16(pb); // we should use get_value() here, but it does not work: type 2 is le16 here but le32 elsewhere
  482. avio_skip(pb, value_len - 2);
  483. if(stream_num<128){
  484. if (!strcmp(name, "AspectRatioX")) asf->dar[stream_num].num= value_num;
  485. else if(!strcmp(name, "AspectRatioY")) asf->dar[stream_num].den= value_num;
  486. }
  487. }
  488. return 0;
  489. }
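/* Parse the Marker Object and expose each marker as a chapter. */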
  490. static int asf_read_marker(AVFormatContext *s, int64_t size)
  491. {
  492. AVIOContext *pb = s->pb;
  493. int i, count, name_len, ret;
  494. char name[1024];
  495. avio_rl64(pb); // reserved 16 bytes
  496. avio_rl64(pb); // ...
  497. count = avio_rl32(pb); // markers count
  498. avio_rl16(pb); // reserved 2 bytes
  499. name_len = avio_rl16(pb); // name length
  500. for(i=0;i<name_len;i++){
  501. avio_r8(pb); // skip the name
  502. }
  503. for(i=0;i<count;i++){
  504. int64_t pres_time;
  505. int name_len;
  506. avio_rl64(pb); // offset, 8 bytes
  507. pres_time = avio_rl64(pb); // presentation time
  508. avio_rl16(pb); // entry length
  509. avio_rl32(pb); // send time
  510. avio_rl32(pb); // flags
  511. name_len = avio_rl32(pb); // name length
  512. if ((ret = avio_get_str16le(pb, name_len * 2, name, sizeof(name))) < name_len)
  513. avio_skip(pb, name_len - ret);
  514. avpriv_new_chapter(s, i, (AVRational){1, 10000000}, pres_time, AV_NOPTS_VALUE, name );
  515. }
  516. return 0;
  517. }
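/* Read the top-level Header Object and all sub-objects up to the start of the Data Object,
   then set up per-stream bitrate, aspect ratio and language metadata. */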
  518. static int asf_read_header(AVFormatContext *s, AVFormatParameters *ap)
  519. {
  520. ASFContext *asf = s->priv_data;
  521. ff_asf_guid g;
  522. AVIOContext *pb = s->pb;
  523. int i;
  524. int64_t gsize;
  525. ff_get_guid(pb, &g);
  526. if (ff_guidcmp(&g, &ff_asf_header))
  527. return -1;
  528. avio_rl64(pb);
  529. avio_rl32(pb);
  530. avio_r8(pb);
  531. avio_r8(pb);
  532. memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
  533. for(;;) {
  534. uint64_t gpos= avio_tell(pb);
  535. ff_get_guid(pb, &g);
  536. gsize = avio_rl64(pb);
  537. av_dlog(s, "%08"PRIx64": ", gpos);
  538. print_guid(&g);
  539. av_dlog(s, " size=0x%"PRIx64"\n", gsize);
  540. if (!ff_guidcmp(&g, &ff_asf_data_header)) {
  541. asf->data_object_offset = avio_tell(pb);
542. // if not streaming, gsize is not unlimited (how?) and there is enough space in the file
  543. if (!(asf->hdr.flags & 0x01) && gsize >= 100) {
  544. asf->data_object_size = gsize - 24;
  545. } else {
  546. asf->data_object_size = (uint64_t)-1;
  547. }
  548. break;
  549. }
  550. if (gsize < 24)
  551. return -1;
  552. if (!ff_guidcmp(&g, &ff_asf_file_header)) {
  553. asf_read_file_properties(s, gsize);
  554. } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) {
  555. asf_read_stream_properties(s, gsize);
  556. } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) {
  557. asf_read_content_desc(s, gsize);
  558. } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) {
  559. asf_read_language_list(s, gsize);
  560. } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) {
  561. asf_read_ext_content_desc(s, gsize);
  562. } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) {
  563. asf_read_metadata(s, gsize);
  564. } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
  565. asf_read_ext_stream_properties(s, gsize);
566. // there could be an optional stream properties object to follow
  567. // if so the next iteration will pick it up
  568. continue;
  569. } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
  570. ff_get_guid(pb, &g);
  571. avio_skip(pb, 6);
  572. continue;
  573. } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) {
  574. asf_read_marker(s, gsize);
  575. } else if (pb->eof_reached) {
  576. return -1;
  577. } else {
  578. if (!s->keylen) {
  579. if (!ff_guidcmp(&g, &ff_asf_content_encryption)) {
  580. av_log(s, AV_LOG_WARNING, "DRM protected stream detected, decoding will likely fail!\n");
  581. } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) {
  582. av_log(s, AV_LOG_WARNING, "Ext DRM protected stream detected, decoding will likely fail!\n");
  583. } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) {
  584. av_log(s, AV_LOG_WARNING, "Digital signature detected, decoding will likely fail!\n");
  585. }
  586. }
  587. }
  588. if(avio_tell(pb) != gpos + gsize)
  589. av_log(s, AV_LOG_DEBUG, "gpos mismatch our pos=%"PRIu64", end=%"PRIu64"\n", avio_tell(pb)-gpos, gsize);
  590. avio_seek(pb, gpos + gsize, SEEK_SET);
  591. }
  592. ff_get_guid(pb, &g);
  593. avio_rl64(pb);
  594. avio_r8(pb);
  595. avio_r8(pb);
  596. if (pb->eof_reached)
  597. return -1;
  598. asf->data_offset = avio_tell(pb);
  599. asf->packet_size_left = 0;
  600. for(i=0; i<128; i++){
  601. int stream_num= asf->asfid2avid[i];
  602. if(stream_num>=0){
  603. AVStream *st = s->streams[stream_num];
  604. if (!st->codec->bit_rate)
  605. st->codec->bit_rate = asf->stream_bitrates[i];
  606. if (asf->dar[i].num > 0 && asf->dar[i].den > 0){
  607. av_reduce(&st->sample_aspect_ratio.num,
  608. &st->sample_aspect_ratio.den,
  609. asf->dar[i].num, asf->dar[i].den, INT_MAX);
610. } else if ((asf->dar[0].num > 0) && (asf->dar[0].den > 0) && (st->codec->codec_type==AVMEDIA_TYPE_VIDEO)) // use the ASF container value if the stream has no aspect ratio set
  611. av_reduce(&st->sample_aspect_ratio.num,
  612. &st->sample_aspect_ratio.den,
  613. asf->dar[0].num, asf->dar[0].den, INT_MAX);
  614. //av_log(s, AV_LOG_INFO, "i=%d, st->codec->codec_type:%d, dar %d:%d sar=%d:%d\n", i, st->codec->codec_type, dar[i].num, dar[i].den, st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);
  615. // copy and convert language codes to the frontend
  616. if (asf->streams[i].stream_language_index < 128) {
  617. const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
  618. if (rfc1766 && strlen(rfc1766) > 1) {
  619. const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
  620. const char *iso6392 = av_convert_lang_to(primary_tag, AV_LANG_ISO639_2_BIBL);
  621. if (iso6392)
  622. av_dict_set(&st->metadata, "language", iso6392, 0);
  623. }
  624. }
  625. }
  626. }
  627. ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv);
  628. return 0;
  629. }
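/* Read a field whose length is selected by a 2-bit type: 3 = DWORD, 2 = WORD, 1 = BYTE,
   0 = field not present (use the default value). */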
  630. #define DO_2BITS(bits, var, defval) \
  631. switch (bits & 3) \
  632. { \
  633. case 3: var = avio_rl32(pb); rsize += 4; break; \
  634. case 2: var = avio_rl16(pb); rsize += 2; break; \
  635. case 1: var = avio_r8(pb); rsize++; break; \
  636. default: var = defval; break; \
  637. }
  638. /**
  639. * Load a single ASF packet into the demuxer.
  640. * @param s demux context
  641. * @param pb context to read data from
  642. * @return 0 on success, <0 on error
  643. */
  644. static int ff_asf_get_packet(AVFormatContext *s, AVIOContext *pb)
  645. {
  646. ASFContext *asf = s->priv_data;
  647. uint32_t packet_length, padsize;
  648. int rsize = 8;
  649. int c, d, e, off;
  650. // if we do not know packet size, allow skipping up to 32 kB
  651. off= 32768;
  652. if (s->packet_size > 0)
  653. off= (avio_tell(pb) - s->data_offset) % s->packet_size + 3;
  654. c=d=e=-1;
  655. while(off-- > 0){
  656. c=d; d=e;
  657. e= avio_r8(pb);
  658. if(c == 0x82 && !d && !e)
  659. break;
  660. }
  661. if (c != 0x82) {
  662. /**
  663. * This code allows handling of -EAGAIN at packet boundaries (i.e.
  664. * if the packet sync code above triggers -EAGAIN). This does not
  665. * imply complete -EAGAIN handling support at random positions in
  666. * the stream.
  667. */
  668. if (pb->error == AVERROR(EAGAIN))
  669. return AVERROR(EAGAIN);
  670. if (!pb->eof_reached)
  671. av_log(s, AV_LOG_ERROR, "ff asf bad header %x at:%"PRId64"\n", c, avio_tell(pb));
  672. }
  673. if ((c & 0x8f) == 0x82) {
  674. if (d || e) {
  675. if (!pb->eof_reached)
  676. av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
  677. return -1;
  678. }
  679. c= avio_r8(pb);
  680. d= avio_r8(pb);
  681. rsize+=3;
  682. }else{
  683. avio_seek(pb, -1, SEEK_CUR); //FIXME
  684. }
  685. asf->packet_flags = c;
  686. asf->packet_property = d;
  687. DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
  688. DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
  689. DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length
  690. //the following checks prevent overflows and infinite loops
  691. if(!packet_length || packet_length >= (1U<<29)){
  692. av_log(s, AV_LOG_ERROR, "invalid packet_length %d at:%"PRId64"\n", packet_length, avio_tell(pb));
  693. return -1;
  694. }
  695. if(padsize >= packet_length){
  696. av_log(s, AV_LOG_ERROR, "invalid padsize %d at:%"PRId64"\n", padsize, avio_tell(pb));
  697. return -1;
  698. }
  699. asf->packet_timestamp = avio_rl32(pb);
  700. avio_rl16(pb); /* duration */
701. // rsize now accounts for the at least 11 bytes that have to be present
  702. if (asf->packet_flags & 0x01) {
  703. asf->packet_segsizetype = avio_r8(pb); rsize++;
  704. asf->packet_segments = asf->packet_segsizetype & 0x3f;
  705. } else {
  706. asf->packet_segments = 1;
  707. asf->packet_segsizetype = 0x80;
  708. }
  709. asf->packet_size_left = packet_length - padsize - rsize;
  710. if (packet_length < asf->hdr.min_pktsize)
  711. padsize += asf->hdr.min_pktsize - packet_length;
  712. asf->packet_padsize = padsize;
  713. av_dlog(s, "packet: size=%d padsize=%d left=%d\n", s->packet_size, asf->packet_padsize, asf->packet_size_left);
  714. return 0;
  715. }
716. /**
717. * Parse the header of one payload (fragment) inside the current ASF packet.
718. * @return <0 if error
719. */
  720. static int asf_read_frame_header(AVFormatContext *s, AVIOContext *pb){
  721. ASFContext *asf = s->priv_data;
  722. int rsize = 1;
  723. int num = avio_r8(pb);
  724. int64_t ts0;
  725. asf->packet_segments--;
  726. asf->packet_key_frame = num >> 7;
  727. asf->stream_index = asf->asfid2avid[num & 0x7f];
  728. // sequence should be ignored!
  729. DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
  730. DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
  731. DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
  732. //printf("key:%d stream:%d seq:%d offset:%d replic_size:%d\n", asf->packet_key_frame, asf->stream_index, asf->packet_seq, //asf->packet_frag_offset, asf->packet_replic_size);
  733. if (asf->packet_replic_size >= 8) {
  734. asf->packet_obj_size = avio_rl32(pb);
  735. if(asf->packet_obj_size >= (1<<24) || asf->packet_obj_size <= 0){
  736. av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
  737. return -1;
  738. }
  739. asf->packet_frag_timestamp = avio_rl32(pb); // timestamp
  740. if(asf->packet_replic_size >= 8+38+4){
  741. // for(i=0; i<asf->packet_replic_size-8; i++)
  742. // av_log(s, AV_LOG_DEBUG, "%02X ",avio_r8(pb));
  743. // av_log(s, AV_LOG_DEBUG, "\n");
  744. avio_skip(pb, 10);
  745. ts0= avio_rl64(pb);
746. avio_skip(pb, 8);
  747. avio_skip(pb, 12);
  748. avio_rl32(pb);
  749. avio_skip(pb, asf->packet_replic_size - 8 - 38 - 4);
  750. if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000;
  751. else asf->packet_frag_timestamp= AV_NOPTS_VALUE;
  752. }else
  753. avio_skip(pb, asf->packet_replic_size - 8);
  754. rsize += asf->packet_replic_size; // FIXME - check validity
  755. } else if (asf->packet_replic_size==1){
  756. // multipacket - frag_offset is beginning timestamp
  757. asf->packet_time_start = asf->packet_frag_offset;
  758. asf->packet_frag_offset = 0;
  759. asf->packet_frag_timestamp = asf->packet_timestamp;
  760. asf->packet_time_delta = avio_r8(pb);
  761. rsize++;
  762. }else if(asf->packet_replic_size!=0){
  763. av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", asf->packet_replic_size);
  764. return -1;
  765. }
  766. if (asf->packet_flags & 0x01) {
  767. DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
  768. if (rsize > asf->packet_size_left) {
  769. av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n");
  770. return -1;
  771. } else if(asf->packet_frag_size > asf->packet_size_left - rsize){
  772. if (asf->packet_frag_size > asf->packet_size_left - rsize + asf->packet_padsize) {
  773. av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid (%d-%d)\n", asf->packet_size_left, rsize);
  774. return -1;
  775. } else {
  776. int diff = asf->packet_frag_size - (asf->packet_size_left - rsize);
  777. asf->packet_size_left += diff;
  778. asf->packet_padsize -= diff;
  779. }
  780. }
  781. //printf("Fragsize %d\n", asf->packet_frag_size);
  782. } else {
  783. if (rsize > asf->packet_size_left) {
  784. av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n");
  785. return -1;
  786. }
  787. asf->packet_frag_size = asf->packet_size_left - rsize;
  788. //printf("Using rest %d %d %d\n", asf->packet_frag_size, asf->packet_size_left, rsize);
  789. }
  790. if (asf->packet_replic_size == 1) {
  791. asf->packet_multi_size = asf->packet_frag_size;
  792. if (asf->packet_multi_size > asf->packet_size_left)
  793. return -1;
  794. }
  795. asf->packet_size_left -= rsize;
  796. //printf("___objsize____ %d %d rs:%d\n", asf->packet_obj_size, asf->packet_frag_offset, rsize);
  797. return 0;
  798. }
  799. /**
  800. * Parse data from individual ASF packets (which were previously loaded
  801. * with asf_get_packet()).
  802. * @param s demux context
  803. * @param pb context to read data from
  804. * @param pkt pointer to store packet data into
  805. * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF
  806. * packets need to be loaded (through asf_get_packet())
  807. */
  808. static int ff_asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pkt)
  809. {
  810. ASFContext *asf = s->priv_data;
  811. ASFStream *asf_st = 0;
  812. for (;;) {
  813. int ret;
  814. if(pb->eof_reached)
  815. return AVERROR_EOF;
  816. if (asf->packet_size_left < FRAME_HEADER_SIZE
  817. || asf->packet_segments < 1) {
  818. //asf->packet_size_left <= asf->packet_padsize) {
  819. int ret = asf->packet_size_left + asf->packet_padsize;
  820. //printf("PacketLeftSize:%d Pad:%d Pos:%"PRId64"\n", asf->packet_size_left, asf->packet_padsize, avio_tell(pb));
  821. assert(ret>=0);
  822. /* fail safe */
  823. avio_skip(pb, ret);
  824. asf->packet_pos= avio_tell(pb);
  825. if (asf->data_object_size != (uint64_t)-1 &&
  826. (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
  827. return AVERROR_EOF; /* Do not exceed the size of the data object */
  828. return 1;
  829. }
  830. if (asf->packet_time_start == 0) {
  831. if(asf_read_frame_header(s, pb) < 0){
  832. asf->packet_segments= 0;
  833. continue;
  834. }
  835. if (asf->stream_index < 0
  836. || s->streams[asf->stream_index]->discard >= AVDISCARD_ALL
  837. || (!asf->packet_key_frame && s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)
  838. ) {
  839. asf->packet_time_start = 0;
  840. /* unhandled packet (should not happen) */
  841. avio_skip(pb, asf->packet_frag_size);
  842. asf->packet_size_left -= asf->packet_frag_size;
  843. if(asf->stream_index < 0)
  844. av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
  845. continue;
  846. }
  847. asf->asf_st = s->streams[asf->stream_index]->priv_data;
  848. }
  849. asf_st = asf->asf_st;
  850. if (asf->packet_replic_size == 1) {
  851. // frag_offset is here used as the beginning timestamp
  852. asf->packet_frag_timestamp = asf->packet_time_start;
  853. asf->packet_time_start += asf->packet_time_delta;
  854. asf->packet_obj_size = asf->packet_frag_size = avio_r8(pb);
  855. asf->packet_size_left--;
  856. asf->packet_multi_size--;
  857. if (asf->packet_multi_size < asf->packet_obj_size)
  858. {
  859. asf->packet_time_start = 0;
  860. avio_skip(pb, asf->packet_multi_size);
  861. asf->packet_size_left -= asf->packet_multi_size;
  862. continue;
  863. }
  864. asf->packet_multi_size -= asf->packet_obj_size;
  865. //printf("COMPRESS size %d %d %d ms:%d\n", asf->packet_obj_size, asf->packet_frag_timestamp, asf->packet_size_left, asf->packet_multi_size);
  866. }
  867. if( /*asf->packet_frag_size == asf->packet_obj_size*/
  868. asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size
  869. && asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size){
  870. av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n",
  871. asf_st->frag_offset, asf->packet_frag_size,
  872. asf->packet_obj_size, asf_st->pkt.size);
  873. asf->packet_obj_size= asf_st->pkt.size;
  874. }
  875. if ( asf_st->pkt.size != asf->packet_obj_size
  876. || asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { //FIXME is this condition sufficient?
  877. if(asf_st->pkt.data){
  878. av_log(s, AV_LOG_INFO, "freeing incomplete packet size %d, new %d\n", asf_st->pkt.size, asf->packet_obj_size);
  879. asf_st->frag_offset = 0;
  880. av_free_packet(&asf_st->pkt);
  881. }
  882. /* new packet */
  883. av_new_packet(&asf_st->pkt, asf->packet_obj_size);
  884. asf_st->seq = asf->packet_seq;
  885. asf_st->pkt.dts = asf->packet_frag_timestamp - asf->hdr.preroll;
  886. asf_st->pkt.stream_index = asf->stream_index;
  887. asf_st->pkt.pos =
  888. asf_st->packet_pos= asf->packet_pos;
  889. if (asf_st->pkt.data && asf_st->palette_changed) {
  890. uint8_t *pal;
  891. pal = av_packet_new_side_data(&asf_st->pkt, AV_PKT_DATA_PALETTE,
  892. AVPALETTE_SIZE);
  893. if (!pal) {
  894. av_log(s, AV_LOG_ERROR, "Cannot append palette to packet\n");
  895. } else {
  896. memcpy(pal, asf_st->palette, AVPALETTE_SIZE);
  897. asf_st->palette_changed = 0;
  898. }
  899. }
  900. //printf("new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
  901. //asf->stream_index, asf->packet_key_frame, asf_st->pkt.flags & AV_PKT_FLAG_KEY,
  902. //s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO, asf->packet_obj_size);
  903. if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
  904. asf->packet_key_frame = 1;
  905. if (asf->packet_key_frame)
  906. asf_st->pkt.flags |= AV_PKT_FLAG_KEY;
  907. }
  908. /* read data */
  909. //printf("READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
  910. // s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
  911. // asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
  912. asf->packet_size_left -= asf->packet_frag_size;
  913. if (asf->packet_size_left < 0)
  914. continue;
  915. if( asf->packet_frag_offset >= asf_st->pkt.size
  916. || asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset){
  917. av_log(s, AV_LOG_ERROR, "packet fragment position invalid %u,%u not in %u\n",
  918. asf->packet_frag_offset, asf->packet_frag_size, asf_st->pkt.size);
  919. continue;
  920. }
  921. ret = avio_read(pb, asf_st->pkt.data + asf->packet_frag_offset,
  922. asf->packet_frag_size);
  923. if (ret != asf->packet_frag_size) {
  924. if (ret < 0 || asf->packet_frag_offset + ret == 0)
  925. return ret < 0 ? ret : AVERROR_EOF;
  926. if (asf_st->ds_span > 1) {
  927. // scrambling, we can either drop it completely or fill the remainder
  928. // TODO: should we fill the whole packet instead of just the current
  929. // fragment?
  930. memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0,
  931. asf->packet_frag_size - ret);
  932. ret = asf->packet_frag_size;
  933. } else
  934. // no scrambling, so we can return partial packets
  935. av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret);
  936. }
  937. if (s->key && s->keylen == 20)
  938. ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
  939. ret);
  940. asf_st->frag_offset += ret;
  941. /* test if whole packet is read */
  942. if (asf_st->frag_offset == asf_st->pkt.size) {
943. // workaround for broken radio DVR-MS files whose MPEG-2 video packets consist entirely of zero bytes
  944. if( s->streams[asf->stream_index]->codec->codec_id == CODEC_ID_MPEG2VIDEO
  945. && asf_st->pkt.size > 100){
  946. int i;
  947. for(i=0; i<asf_st->pkt.size && !asf_st->pkt.data[i]; i++);
  948. if(i == asf_st->pkt.size){
949. av_log(s, AV_LOG_DEBUG, "discarding all-zero DVR-MS packet\n");
  950. asf_st->frag_offset = 0;
  951. av_free_packet(&asf_st->pkt);
  952. continue;
  953. }
  954. }
  955. /* return packet */
  956. if (asf_st->ds_span > 1) {
  957. if(asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span){
  958. av_log(s, AV_LOG_ERROR, "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", asf_st->pkt.size, asf_st->ds_packet_size, asf_st->ds_span);
  959. }else{
  960. /* packet descrambling */
  961. uint8_t *newdata = av_malloc(asf_st->pkt.size + FF_INPUT_BUFFER_PADDING_SIZE);
  962. if (newdata) {
  963. int offset = 0;
  964. memset(newdata + asf_st->pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
  965. while (offset < asf_st->pkt.size) {
  966. int off = offset / asf_st->ds_chunk_size;
  967. int row = off / asf_st->ds_span;
  968. int col = off % asf_st->ds_span;
  969. int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
  970. //printf("off:%d row:%d col:%d idx:%d\n", off, row, col, idx);
  971. assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
  972. assert(idx+1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
  973. memcpy(newdata + offset,
  974. asf_st->pkt.data + idx * asf_st->ds_chunk_size,
  975. asf_st->ds_chunk_size);
  976. offset += asf_st->ds_chunk_size;
  977. }
  978. av_free(asf_st->pkt.data);
  979. asf_st->pkt.data = newdata;
  980. }
  981. }
  982. }
  983. asf_st->frag_offset = 0;
  984. *pkt= asf_st->pkt;
  985. //printf("packet %d %d\n", asf_st->pkt.size, asf->packet_frag_size);
  986. asf_st->pkt.size = 0;
  987. asf_st->pkt.data = 0;
  988. break; // packet completed
  989. }
  990. }
  991. return 0;
  992. }
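/* Demuxer read_packet callback: parse payloads from the ASF packet already loaded,
   and load the next ASF packet whenever the current one is exhausted. */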
  993. static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
  994. {
  995. ASFContext *asf = s->priv_data;
  996. for (;;) {
  997. int ret;
  998. /* parse cached packets, if any */
  999. if ((ret = ff_asf_parse_packet(s, s->pb, pkt)) <= 0)
  1000. return ret;
  1001. if ((ret = ff_asf_get_packet(s, s->pb)) < 0)
  1002. assert(asf->packet_size_left < FRAME_HEADER_SIZE || asf->packet_segments < 1);
  1003. asf->packet_time_start = 0;
  1004. }
  1005. }
  1006. // Added to support seeking after packets have been read
  1007. // If information is not reset, read_packet fails due to
  1008. // leftover information from previous reads
  1009. static void asf_reset_header(AVFormatContext *s)
  1010. {
  1011. ASFContext *asf = s->priv_data;
  1012. ASFStream *asf_st;
  1013. int i;
  1014. asf->packet_size_left = 0;
  1015. asf->packet_segments = 0;
  1016. asf->packet_flags = 0;
  1017. asf->packet_property = 0;
  1018. asf->packet_timestamp = 0;
  1019. asf->packet_segsizetype = 0;
  1020. asf->packet_segments = 0;
  1021. asf->packet_seq = 0;
  1022. asf->packet_replic_size = 0;
  1023. asf->packet_key_frame = 0;
  1024. asf->packet_padsize = 0;
  1025. asf->packet_frag_offset = 0;
  1026. asf->packet_frag_size = 0;
  1027. asf->packet_frag_timestamp = 0;
  1028. asf->packet_multi_size = 0;
  1029. asf->packet_obj_size = 0;
  1030. asf->packet_time_delta = 0;
  1031. asf->packet_time_start = 0;
  1032. for(i=0; i<s->nb_streams; i++){
  1033. asf_st= s->streams[i]->priv_data;
  1034. av_free_packet(&asf_st->pkt);
  1035. asf_st->frag_offset=0;
  1036. asf_st->seq=0;
  1037. }
  1038. asf->asf_st= NULL;
  1039. }
  1040. static int asf_read_close(AVFormatContext *s)
  1041. {
  1042. asf_reset_header(s);
  1043. return 0;
  1044. }
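/* Return the timestamp of the next keyframe at or after *ppos for the given stream, adding index
   entries for the keyframes encountered on the way (used by the generic binary-search seek code). */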
  1045. static int64_t asf_read_pts(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit)
  1046. {
  1047. AVPacket pkt1, *pkt = &pkt1;
  1048. ASFStream *asf_st;
  1049. int64_t pts;
  1050. int64_t pos= *ppos;
  1051. int i;
  1052. int64_t start_pos[ASF_MAX_STREAMS];
  1053. for(i=0; i<s->nb_streams; i++){
  1054. start_pos[i]= pos;
  1055. }
  1056. if (s->packet_size > 0)
  1057. pos= (pos+s->packet_size-1-s->data_offset)/s->packet_size*s->packet_size+ s->data_offset;
  1058. *ppos= pos;
  1059. avio_seek(s->pb, pos, SEEK_SET);
  1060. //printf("asf_read_pts\n");
  1061. asf_reset_header(s);
  1062. for(;;){
  1063. if (asf_read_packet(s, pkt) < 0){
  1064. av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
  1065. return AV_NOPTS_VALUE;
  1066. }
  1067. pts = pkt->dts;
  1068. av_free_packet(pkt);
  1069. if(pkt->flags&AV_PKT_FLAG_KEY){
  1070. i= pkt->stream_index;
  1071. asf_st= s->streams[i]->priv_data;
  1072. // assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
  1073. pos= asf_st->packet_pos;
  1074. av_add_index_entry(s->streams[i], pos, pts, pkt->size, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
  1075. start_pos[i]= asf_st->packet_pos + 1;
  1076. if(pkt->stream_index == stream_index)
  1077. break;
  1078. }
  1079. }
  1080. *ppos= pos;
  1081. //printf("found keyframe at %"PRId64" stream %d stamp:%"PRId64"\n", *ppos, stream_index, pts);
  1082. return pts;
  1083. }
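/* Build a seek index for the given stream from the Simple Index Object that may follow the Data Object. */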
  1084. static void asf_build_simple_index(AVFormatContext *s, int stream_index)
  1085. {
  1086. ff_asf_guid g;
  1087. ASFContext *asf = s->priv_data;
  1088. int64_t current_pos= avio_tell(s->pb);
  1089. int i;
  1090. avio_seek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET);
  1091. ff_get_guid(s->pb, &g);
  1092. /* the data object can be followed by other top-level objects,
  1093. skip them until the simple index object is reached */
  1094. while (ff_guidcmp(&g, &index_guid)) {
  1095. int64_t gsize= avio_rl64(s->pb);
  1096. if (gsize < 24 || s->pb->eof_reached) {
  1097. avio_seek(s->pb, current_pos, SEEK_SET);
  1098. return;
  1099. }
  1100. avio_skip(s->pb, gsize-24);
  1101. ff_get_guid(s->pb, &g);
  1102. }
  1103. {
  1104. int64_t itime, last_pos=-1;
  1105. int pct, ict;
  1106. int64_t av_unused gsize= avio_rl64(s->pb);
  1107. ff_get_guid(s->pb, &g);
  1108. itime=avio_rl64(s->pb);
  1109. pct=avio_rl32(s->pb);
  1110. ict=avio_rl32(s->pb);
  1111. av_log(s, AV_LOG_DEBUG, "itime:0x%"PRIx64", pct:%d, ict:%d\n",itime,pct,ict);
  1112. for (i=0;i<ict;i++){
  1113. int pktnum=avio_rl32(s->pb);
  1114. int pktct =avio_rl16(s->pb);
  1115. int64_t pos = s->data_offset + s->packet_size*(int64_t)pktnum;
  1116. int64_t index_pts= FFMAX(av_rescale(itime, i, 10000) - asf->hdr.preroll, 0);
  1117. if(pos != last_pos){
  1118. av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d pts: %"PRId64"\n", pktnum, pktct, index_pts);
  1119. av_add_index_entry(s->streams[stream_index], pos, index_pts, s->packet_size, 0, AVINDEX_KEYFRAME);
  1120. last_pos=pos;
  1121. }
  1122. }
  1123. asf->index_read= 1;
  1124. }
  1125. avio_seek(s->pb, current_pos, SEEK_SET);
  1126. }
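/* Seek: try the protocol-level read_seek first, then the simple index if present,
   and finally fall back to a binary search over timestamps. */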
  1127. static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags)
  1128. {
  1129. ASFContext *asf = s->priv_data;
  1130. AVStream *st = s->streams[stream_index];
  1131. int64_t pos;
  1132. int index;
  1133. if (s->packet_size <= 0)
  1134. return -1;
  1135. /* Try using the protocol's read_seek if available */
  1136. if(s->pb) {
  1137. int ret = avio_seek_time(s->pb, stream_index, pts, flags);
  1138. if(ret >= 0)
  1139. asf_reset_header(s);
  1140. if (ret != AVERROR(ENOSYS))
  1141. return ret;
  1142. }
  1143. if (!asf->index_read)
  1144. asf_build_simple_index(s, stream_index);
  1145. if((asf->index_read && st->index_entries)){
  1146. index= av_index_search_timestamp(st, pts, flags);
  1147. if(index >= 0) {
  1148. /* find the position */
  1149. pos = st->index_entries[index].pos;
  1150. /* do the seek */
  1151. av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
  1152. avio_seek(s->pb, pos, SEEK_SET);
  1153. asf_reset_header(s);
  1154. return 0;
  1155. }
  1156. }
  1157. /* no index or seeking by index failed */
  1158. if (ff_seek_frame_binary(s, stream_index, pts, flags) < 0)
  1159. return -1;
  1160. asf_reset_header(s);
  1161. return 0;
  1162. }
  1163. AVInputFormat ff_asf_demuxer = {
  1164. .name = "asf",
  1165. .long_name = NULL_IF_CONFIG_SMALL("ASF format"),
  1166. .priv_data_size = sizeof(ASFContext),
  1167. .read_probe = asf_probe,
  1168. .read_header = asf_read_header,
  1169. .read_packet = asf_read_packet,
  1170. .read_close = asf_read_close,
  1171. .read_seek = asf_read_seek,
  1172. .read_timestamp = asf_read_pts,
  1173. .flags = AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH,
  1174. };