/*
 * ASF compatible demuxer
 * Copyright (c) 2000, 2001 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

//#define DEBUG

#include "libavutil/common.h"
#include "libavutil/avstring.h"
#include "libavcodec/mpegaudio.h"
#include "avformat.h"
#include "riff.h"
#include "asf.h"
#include "asfcrypt.h"
#include "avlanguage.h"

void ff_mms_set_stream_selection(URLContext *h, AVFormatContext *format);

#undef NDEBUG
#include <assert.h>

#define ASF_MAX_STREAMS 127
#define FRAME_HEADER_SIZE 17
// Fix Me! FRAME_HEADER_SIZE may be different.

static const ff_asf_guid index_guid = {
    0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb
};

static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
    0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
};

/**********************************/
/* decoding */

int ff_guidcmp(const void *g1, const void *g2)
{
    return memcmp(g1, g2, sizeof(ff_asf_guid));
}

#ifdef DEBUG
#define PRINT_IF_GUID(g,cmp) \
if (!ff_guidcmp(g, &cmp)) \
    dprintf(NULL, "(GUID: %s) ", #cmp)

static void print_guid(const ff_asf_guid *g)
{
    int i;
    PRINT_IF_GUID(g, ff_asf_header);
    else PRINT_IF_GUID(g, ff_asf_file_header);
    else PRINT_IF_GUID(g, ff_asf_stream_header);
    else PRINT_IF_GUID(g, ff_asf_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_video_stream);
    else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_command_stream);
    else PRINT_IF_GUID(g, ff_asf_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
    else PRINT_IF_GUID(g, ff_asf_data_header);
    else PRINT_IF_GUID(g, index_guid);
    else PRINT_IF_GUID(g, ff_asf_head1_guid);
    else PRINT_IF_GUID(g, ff_asf_head2_guid);
    else PRINT_IF_GUID(g, ff_asf_my_guid);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
    else PRINT_IF_GUID(g, ff_asf_extended_content_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_metadata_header);
    else PRINT_IF_GUID(g, ff_asf_marker_header);
    else PRINT_IF_GUID(g, stream_bitrate_guid);
    else PRINT_IF_GUID(g, ff_asf_language_guid);
    else
        dprintf(NULL, "(GUID: unknown) ");
    for(i=0;i<16;i++)
        dprintf(NULL, " 0x%02x,", (*g)[i]);
    dprintf(NULL, "}\n");
}
#undef PRINT_IF_GUID
#else
#define print_guid(g)
#endif

void ff_get_guid(ByteIOContext *s, ff_asf_guid *g)
{
    assert(sizeof(*g) == 16);
    get_buffer(s, *g, sizeof(*g));
}

#if 0
static void get_str16(ByteIOContext *pb, char *buf, int buf_size)
{
    int len, c;
    char *q;

    len = get_le16(pb);
    q = buf;
    while (len > 0) {
        c = get_le16(pb);
        if ((q - buf) < buf_size - 1)
            *q++ = c;
        len--;
    }
    *q = '\0';
}
#endif
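
/* Reads len bytes of UTF-16LE text from pb and stores it as UTF-8 in buf,
 * writing at most buf_size - 1 bytes plus a terminating NUL; any trailing
 * input byte that does not form a full UTF-16 code unit is skipped. */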
static void get_str16_nolen(ByteIOContext *pb, int len, char *buf, int buf_size)
{
    char* q = buf;
    while (len > 1) {
        uint8_t tmp;
        uint32_t ch;

        GET_UTF16(ch, (len -= 2) >= 0 ? get_le16(pb) : 0, break;)
        PUT_UTF8(ch, tmp, if (q - buf < buf_size - 1) *q++ = tmp;)
    }
    if (len > 0)
        url_fskip(pb, len);
    *q = '\0';
}

static int asf_probe(AVProbeData *pd)
{
    /* check file header */
    if (!ff_guidcmp(pd->buf, &ff_asf_header))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
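
/* Value types used by the extended content description and metadata objects:
 * 0 = UTF-16LE string, 2 = BOOL stored as a DWORD, 3 = DWORD, 4 = QWORD,
 * 5 = WORD. get_value() handles the numeric types and returns INT_MIN for
 * anything else; other types are skipped by get_tag(). */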
static int get_value(ByteIOContext *pb, int type){
    switch(type){
        case 2: return get_le32(pb);
        case 3: return get_le32(pb);
        case 4: return get_le64(pb);
        case 5: return get_le16(pb);
        default:return INT_MIN;
    }
}

static void get_tag(AVFormatContext *s, const char *key, int type, int len)
{
    char *value;

    if ((unsigned)len >= (UINT_MAX - 1)/2)
        return;

    value = av_malloc(2*len+1);
    if (!value)
        return;

    if (type == 0) {         // UTF16-LE
        get_str16_nolen(s->pb, len, value, 2*len + 1);
    } else if (type > 1 && type <= 5) {  // boolean or DWORD or QWORD or WORD
        uint64_t num = get_value(s->pb, type);
        snprintf(value, len, "%"PRIu64, num);
    } else {
        url_fskip(s->pb, len);
        av_freep(&value);
        av_log(s, AV_LOG_DEBUG, "Unsupported value type %d in tag %s.\n", type, key);
        return;
    }

    av_metadata_set2(&s->metadata, key, value, 0);
    av_freep(&value);
}
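
/* The ASF header is a sequence of objects, each introduced by a 16-byte GUID
 * and a 64-bit little-endian object size. asf_read_header() walks these
 * objects, dispatching on the GUID, and seeks to the next object boundary
 * after each one until the data object is reached. */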
static int asf_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    ASFContext *asf = s->priv_data;
    ff_asf_guid g;
    ByteIOContext *pb = s->pb;
    AVStream *st;
    ASFStream *asf_st;
    int size, i;
    int64_t gsize;
    AVRational dar[128];
    uint32_t bitrate[128];

    memset(dar, 0, sizeof(dar));
    memset(bitrate, 0, sizeof(bitrate));

    ff_get_guid(pb, &g);
    if (ff_guidcmp(&g, &ff_asf_header))
        return -1;
    get_le64(pb);
    get_le32(pb);
    get_byte(pb);
    get_byte(pb);
    memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
    for(;;) {
        uint64_t gpos= url_ftell(pb);
        ff_get_guid(pb, &g);
        gsize = get_le64(pb);
        dprintf(s, "%08"PRIx64": ", gpos);
        print_guid(&g);
        dprintf(s, " size=0x%"PRIx64"\n", gsize);
        if (!ff_guidcmp(&g, &ff_asf_data_header)) {
            asf->data_object_offset = url_ftell(pb);
            // if not streaming, gsize is not unlimited (how?), and there is enough space in the file..
            if (!(asf->hdr.flags & 0x01) && gsize >= 100) {
                asf->data_object_size = gsize - 24;
            } else {
                asf->data_object_size = (uint64_t)-1;
            }
            break;
        }
        if (gsize < 24)
            return -1;
        if (!ff_guidcmp(&g, &ff_asf_file_header)) {
            ff_get_guid(pb, &asf->hdr.guid);
            asf->hdr.file_size   = get_le64(pb);
            asf->hdr.create_time = get_le64(pb);
            asf->nb_packets      = get_le64(pb);
            asf->hdr.play_time   = get_le64(pb);
            asf->hdr.send_time   = get_le64(pb);
            asf->hdr.preroll     = get_le32(pb);
            asf->hdr.ignore      = get_le32(pb);
            asf->hdr.flags       = get_le32(pb);
            asf->hdr.min_pktsize = get_le32(pb);
            asf->hdr.max_pktsize = get_le32(pb);
            asf->hdr.max_bitrate = get_le32(pb);
            s->packet_size = asf->hdr.max_pktsize;
        } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) {
            enum AVMediaType type;
            int type_specific_size, sizeX;
            uint64_t total_size;
            unsigned int tag1;
            int64_t pos1, pos2, start_time;
            int test_for_ext_stream_audio, is_dvr_ms_audio=0;

            if (s->nb_streams == ASF_MAX_STREAMS) {
                av_log(s, AV_LOG_ERROR, "too many streams\n");
                return AVERROR(EINVAL);
            }

            pos1 = url_ftell(pb);

            st = av_new_stream(s, 0);
            if (!st)
                return AVERROR(ENOMEM);
            av_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
            asf_st = av_mallocz(sizeof(ASFStream));
            if (!asf_st)
                return AVERROR(ENOMEM);
            st->priv_data = asf_st;
            start_time = asf->hdr.preroll;

            asf_st->stream_language_index = 128; // invalid stream index means no language info

            if(!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
                st->duration = asf->hdr.play_time /
                    (10000000 / 1000) - start_time;
            }
            ff_get_guid(pb, &g);

            test_for_ext_stream_audio = 0;
            if (!ff_guidcmp(&g, &ff_asf_audio_stream)) {
                type = AVMEDIA_TYPE_AUDIO;
            } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) {
                type = AVMEDIA_TYPE_VIDEO;
            } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) {
                type = AVMEDIA_TYPE_VIDEO;
                st->codec->codec_id = CODEC_ID_MJPEG;
            } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) {
                type = AVMEDIA_TYPE_DATA;
            } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
                test_for_ext_stream_audio = 1;
                type = AVMEDIA_TYPE_UNKNOWN;
            } else {
                return -1;
            }
            ff_get_guid(pb, &g);
            total_size = get_le64(pb);
            type_specific_size = get_le32(pb);
            get_le32(pb);
            st->id = get_le16(pb) & 0x7f; /* stream id */
            // mapping of asf ID to AV stream ID;
            asf->asfid2avid[st->id] = s->nb_streams - 1;

            get_le32(pb);

            if (test_for_ext_stream_audio) {
                ff_get_guid(pb, &g);
                if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
                    type = AVMEDIA_TYPE_AUDIO;
                    is_dvr_ms_audio=1;
                    ff_get_guid(pb, &g);
                    get_le32(pb);
                    get_le32(pb);
                    get_le32(pb);
                    ff_get_guid(pb, &g);
                    get_le32(pb);
                }
            }

            st->codec->codec_type = type;
            if (type == AVMEDIA_TYPE_AUDIO) {
                ff_get_wav_header(pb, st->codec, type_specific_size);
                if (is_dvr_ms_audio) {
                    // codec_id and codec_tag are unreliable in dvr_ms
                    // files. Set them later by probing stream.
                    st->codec->codec_id = CODEC_ID_PROBE;
                    st->codec->codec_tag = 0;
                }
                if (st->codec->codec_id == CODEC_ID_AAC) {
                    st->need_parsing = AVSTREAM_PARSE_NONE;
                } else {
                    st->need_parsing = AVSTREAM_PARSE_FULL;
                }
                /* We have to init the frame size at some point .... */
                pos2 = url_ftell(pb);
                if (gsize >= (pos2 + 8 - pos1 + 24)) {
                    asf_st->ds_span = get_byte(pb);
                    asf_st->ds_packet_size = get_le16(pb);
                    asf_st->ds_chunk_size = get_le16(pb);
                    get_le16(pb); //ds_data_size
                    get_byte(pb); //ds_silence_data
                }
                //printf("Descrambling: ps:%d cs:%d ds:%d s:%d sd:%d\n",
                //       asf_st->ds_packet_size, asf_st->ds_chunk_size,
                //       asf_st->ds_data_size, asf_st->ds_span, asf_st->ds_silence_data);
                if (asf_st->ds_span > 1) {
                    if (!asf_st->ds_chunk_size
                        || (asf_st->ds_packet_size/asf_st->ds_chunk_size <= 1)
                        || asf_st->ds_packet_size % asf_st->ds_chunk_size)
                        asf_st->ds_span = 0; // disable descrambling
                }
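                /* ds_span > 1 means the audio payload is interleaved: it is
                 * split into ds_chunk_size-byte chunks spread across ds_span
                 * blocks of ds_packet_size bytes each, and has to be
                 * descrambled when the packet is assembled (see
                 * ff_asf_parse_packet()). */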
                switch (st->codec->codec_id) {
                case CODEC_ID_MP3:
                    st->codec->frame_size = MPA_FRAME_SIZE;
                    break;
                case CODEC_ID_PCM_S16LE:
                case CODEC_ID_PCM_S16BE:
                case CODEC_ID_PCM_U16LE:
                case CODEC_ID_PCM_U16BE:
                case CODEC_ID_PCM_S8:
                case CODEC_ID_PCM_U8:
                case CODEC_ID_PCM_ALAW:
                case CODEC_ID_PCM_MULAW:
                    st->codec->frame_size = 1;
                    break;
                default:
                    /* This is probably wrong, but it prevents a crash later */
                    st->codec->frame_size = 1;
                    break;
                }
            } else if (type == AVMEDIA_TYPE_VIDEO &&
                       gsize - (url_ftell(pb) - pos1 + 24) >= 51) {
                get_le32(pb);
                get_le32(pb);
                get_byte(pb);
                size = get_le16(pb); /* size */
                sizeX= get_le32(pb); /* size */
                st->codec->width = get_le32(pb);
                st->codec->height = get_le32(pb);
                /* not available for asf */
                get_le16(pb); /* panes */
                st->codec->bits_per_coded_sample = get_le16(pb); /* depth */
                tag1 = get_le32(pb);
                url_fskip(pb, 20);
//                av_log(s, AV_LOG_DEBUG, "size:%d tsize:%d sizeX:%d\n", size, total_size, sizeX);
                size= sizeX;
                if (size > 40) {
                    st->codec->extradata_size = size - 40;
                    st->codec->extradata = av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                    get_buffer(pb, st->codec->extradata, st->codec->extradata_size);
                }

                /* Extract palette from extradata if bpp <= 8 */
                /* This code assumes that extradata contains only palette */
                /* This is true for all paletted codecs implemented in ffmpeg */
                if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
                    st->codec->palctrl = av_mallocz(sizeof(AVPaletteControl));
#if HAVE_BIGENDIAN
                    for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)/4; i++)
                        st->codec->palctrl->palette[i] = av_bswap32(((uint32_t*)st->codec->extradata)[i]);
#else
                    memcpy(st->codec->palctrl->palette, st->codec->extradata,
                           FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
#endif
                    st->codec->palctrl->palette_changed = 1;
                }

                st->codec->codec_tag = tag1;
                st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1);
                if(tag1 == MKTAG('D', 'V', 'R', ' ')){
                    st->need_parsing = AVSTREAM_PARSE_FULL;
                    // issue658 contains wrong w/h and MS even puts a fake seq header
                    // with wrong w/h in extradata while a correct one is in the stream.
                    // maximum lameness
                    st->codec->width  =
                    st->codec->height = 0;
                    av_freep(&st->codec->extradata);
                    st->codec->extradata_size=0;
                }
                if(st->codec->codec_id == CODEC_ID_H264)
                    st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
            }
            pos2 = url_ftell(pb);
            url_fskip(pb, gsize - (pos2 - pos1 + 24));
        } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) {
            int len1, len2, len3, len4, len5;

            len1 = get_le16(pb);
            len2 = get_le16(pb);
            len3 = get_le16(pb);
            len4 = get_le16(pb);
            len5 = get_le16(pb);
            get_tag(s, "title"    , 0, len1);
            get_tag(s, "author"   , 0, len2);
            get_tag(s, "copyright", 0, len3);
            get_tag(s, "comment"  , 0, len4);
            url_fskip(pb, len5);
        } else if (!ff_guidcmp(&g, &stream_bitrate_guid)) {
            int stream_count = get_le16(pb);
            int j;

//            av_log(s, AV_LOG_ERROR, "stream bitrate properties\n");
//            av_log(s, AV_LOG_ERROR, "streams %d\n", streams);
            for(j = 0; j < stream_count; j++) {
                int flags, bitrate, stream_id;

                flags= get_le16(pb);
                bitrate= get_le32(pb);
                stream_id= (flags & 0x7f);
//                av_log(s, AV_LOG_ERROR, "flags: 0x%x stream id %d, bitrate %d\n", flags, stream_id, bitrate);
                asf->stream_bitrates[stream_id]= bitrate;
            }
        } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) {
            int j;
            int stream_count = get_le16(pb);
            for(j = 0; j < stream_count; j++) {
                char lang[6];
                unsigned int lang_len = get_byte(pb);
                get_str16_nolen(pb, lang_len, lang, sizeof(lang));
                if (j < 128)
                    av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
            }
        } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) {
            int desc_count, i;

            desc_count = get_le16(pb);
            for(i=0;i<desc_count;i++) {
                int name_len,value_type,value_len;
                char name[1024];

                name_len = get_le16(pb);
                if (name_len%2)     // must be even, broken lavf versions wrote len-1
                    name_len += 1;
                get_str16_nolen(pb, name_len, name, sizeof(name));
                value_type = get_le16(pb);
                value_len  = get_le16(pb);
                if (!value_type && value_len%2)
                    value_len += 1;
                /* My sample has the stream number set to 0; maybe that means the
                 * container. ASF stream numbering starts at 1, so 0 is used here
                 * for the container value since it is otherwise unused. */
                if (!strcmp(name, "AspectRatioX")){
                    dar[0].num= get_value(s->pb, value_type);
                } else if(!strcmp(name, "AspectRatioY")){
                    dar[0].den= get_value(s->pb, value_type);
                } else
                    get_tag(s, name, value_type, value_len);
            }
        } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) {
            int n, stream_num, name_len, value_len, value_type, value_num;
            n = get_le16(pb);

            for(i=0;i<n;i++) {
                char name[1024];

                get_le16(pb); //lang_list_index
                stream_num= get_le16(pb);
                name_len=   get_le16(pb);
                value_type= get_le16(pb);
                value_len=  get_le32(pb);

                get_str16_nolen(pb, name_len, name, sizeof(name));
//av_log(s, AV_LOG_ERROR, "%d %d %d %d %d <%s>\n", i, stream_num, name_len, value_type, value_len, name);
                value_num= get_le16(pb); // we should use get_value() here, but it does not work:
                                         // type 2 is le16 here but le32 elsewhere
                url_fskip(pb, value_len - 2);

                if(stream_num<128){
                    if     (!strcmp(name, "AspectRatioX")) dar[stream_num].num= value_num;
                    else if(!strcmp(name, "AspectRatioY")) dar[stream_num].den= value_num;
                }
            }
        } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
            int ext_len, payload_ext_ct, stream_ct;
            uint32_t ext_d, leak_rate, stream_num;
            unsigned int stream_languageid_index;

            get_le64(pb); // starttime
            get_le64(pb); // endtime
            leak_rate = get_le32(pb); // leak-datarate
            get_le32(pb); // bucket-datasize
            get_le32(pb); // init-bucket-fullness
            get_le32(pb); // alt-leak-datarate
            get_le32(pb); // alt-bucket-datasize
            get_le32(pb); // alt-init-bucket-fullness
            get_le32(pb); // max-object-size
            get_le32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
            stream_num = get_le16(pb); // stream-num

            stream_languageid_index = get_le16(pb); // stream-language-id-index
            if (stream_num < 128)
                asf->streams[stream_num].stream_language_index = stream_languageid_index;

            get_le64(pb); // avg frametime in 100ns units
            stream_ct = get_le16(pb); //stream-name-count
            payload_ext_ct = get_le16(pb); //payload-extension-system-count

            if (stream_num < 128)
                bitrate[stream_num] = leak_rate;

            for (i=0; i<stream_ct; i++){
                get_le16(pb);
                ext_len = get_le16(pb);
                url_fseek(pb, ext_len, SEEK_CUR);
            }

            for (i=0; i<payload_ext_ct; i++){
                ff_get_guid(pb, &g);
                ext_d=get_le16(pb);
                ext_len=get_le32(pb);
                url_fseek(pb, ext_len, SEEK_CUR);
            }

            // there could be an optional stream properties object to follow;
            // if so, the next iteration will pick it up
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
            int v1, v2;
            ff_get_guid(pb, &g);
            v1 = get_le32(pb);
            v2 = get_le16(pb);
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) {
            int i, count, name_len;
            char name[1024];

            get_le64(pb);            // reserved 16 bytes
            get_le64(pb);            // ...
            count = get_le32(pb);    // markers count
            get_le16(pb);            // reserved 2 bytes
            name_len = get_le16(pb); // name length
            for(i=0;i<name_len;i++){
                get_byte(pb); // skip the name
            }

            for(i=0;i<count;i++){
                int64_t pres_time;
                int name_len;

                get_le64(pb);             // offset, 8 bytes
                pres_time = get_le64(pb); // presentation time
                get_le16(pb);             // entry length
                get_le32(pb);             // send time
                get_le32(pb);             // flags
                name_len = get_le32(pb);  // name length
                get_str16_nolen(pb, name_len * 2, name, sizeof(name));
                ff_new_chapter(s, i, (AVRational){1, 10000000}, pres_time, AV_NOPTS_VALUE, name );
            }
#if 0
        } else if (!ff_guidcmp(&g, &ff_asf_codec_comment_header)) {
            int len, v1, n, num;
            char str[256], *q;
            char tag[16];

            ff_get_guid(pb, &g);
            print_guid(&g);

            n = get_le32(pb);
            for(i=0;i<n;i++) {
                num = get_le16(pb); /* stream number */
                get_str16(pb, str, sizeof(str));
                get_str16(pb, str, sizeof(str));
                len = get_le16(pb);
                q = tag;
                while (len > 0) {
                    v1 = get_byte(pb);
                    if ((q - tag) < sizeof(tag) - 1)
                        *q++ = v1;
                    len--;
                }
                *q = '\0';
            }
#endif
        } else if (url_feof(pb)) {
            return -1;
        } else {
            if (!s->keylen) {
                if (!ff_guidcmp(&g, &ff_asf_content_encryption)) {
                    av_log(s, AV_LOG_WARNING, "DRM protected stream detected, decoding will likely fail!\n");
                } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) {
                    av_log(s, AV_LOG_WARNING, "Ext DRM protected stream detected, decoding will likely fail!\n");
                } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) {
                    av_log(s, AV_LOG_WARNING, "Digital signature detected, decoding will likely fail!\n");
                }
            }
        }
        if(url_ftell(pb) != gpos + gsize)
            av_log(s, AV_LOG_DEBUG, "gpos mismatch our pos=%"PRIu64", end=%"PRIu64"\n", url_ftell(pb)-gpos, gsize);
        url_fseek(pb, gpos + gsize, SEEK_SET);
    }
    ff_get_guid(pb, &g);
    get_le64(pb);
    get_byte(pb);
    get_byte(pb);
    if (url_feof(pb))
        return -1;
    asf->data_offset = url_ftell(pb);
    asf->packet_size_left = 0;

    for(i=0; i<128; i++){
        int stream_num= asf->asfid2avid[i];
        if(stream_num>=0){
            AVStream *st = s->streams[stream_num];
            if (!st->codec->bit_rate)
                st->codec->bit_rate = bitrate[i];
            if (dar[i].num > 0 && dar[i].den > 0){
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          dar[i].num, dar[i].den, INT_MAX);
            } else if ((dar[0].num > 0) && (dar[0].den > 0) && (st->codec->codec_type==AVMEDIA_TYPE_VIDEO)) // Use the ASF container value if the stream doesn't have its AR set.
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          dar[0].num, dar[0].den, INT_MAX);
//av_log(s, AV_LOG_INFO, "i=%d, st->codec->codec_type:%d, dar %d:%d sar=%d:%d\n", i, st->codec->codec_type, dar[i].num, dar[i].den, st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);

            // copy and convert language codes to the frontend
            if (asf->streams[i].stream_language_index < 128) {
                const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
                if (rfc1766 && strlen(rfc1766) > 1) {
                    const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
                    const char *iso6392 = av_convert_lang_to(primary_tag, AV_LANG_ISO639_2_BIBL);
                    if (iso6392)
                        av_metadata_set2(&st->metadata, "language", iso6392, 0);
                }
            }
        }
    }

    ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv);

    return 0;
}
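
/* DO_2BITS reads a field whose width is given by a 2-bit length code:
 * 3 -> 32 bits, 2 -> 16 bits, 1 -> 8 bits, 0 -> field absent (use defval).
 * rsize is advanced by the number of bytes actually consumed. */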
#define DO_2BITS(bits, var, defval) \
    switch (bits & 3) \
    { \
    case 3: var = get_le32(pb); rsize += 4; break; \
    case 2: var = get_le16(pb); rsize += 2; break; \
    case 1: var = get_byte(pb); rsize++; break; \
    default: var = defval; break; \
    }

/**
 * Load a single ASF packet into the demuxer.
 * @param s demux context
 * @param pb context to read data from
 * @return 0 on success, <0 on error
 */
static int ff_asf_get_packet(AVFormatContext *s, ByteIOContext *pb)
{
    ASFContext *asf = s->priv_data;
    uint32_t packet_length, padsize;
    int rsize = 8;
    int c, d, e, off;

    // if we do not know packet size, allow skipping up to 32 kB
    off= 32768;
    if (s->packet_size > 0)
        off= (url_ftell(pb) - s->data_offset) % s->packet_size + 3;
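
    /* Resynchronize on the packet start: scan for the 0x82 marker byte
     * followed by two zero bytes, the pattern the demuxer expects at the
     * beginning of every ASF data packet. */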
    c=d=e=-1;
    while(off-- > 0){
        c=d; d=e;
        e= get_byte(pb);
        if(c == 0x82 && !d && !e)
            break;
    }

    if (c != 0x82) {
        /**
         * This code allows handling of -EAGAIN at packet boundaries (i.e.
         * if the packet sync code above triggers -EAGAIN). This does not
         * imply complete -EAGAIN handling support at random positions in
         * the stream.
         */
        if (url_ferror(pb) == AVERROR(EAGAIN))
            return AVERROR(EAGAIN);
        if (!url_feof(pb))
            av_log(s, AV_LOG_ERROR, "ff asf bad header %x at:%"PRId64"\n", c, url_ftell(pb));
    }
    if ((c & 0x8f) == 0x82) {
        if (d || e) {
            if (!url_feof(pb))
                av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
            return -1;
        }
        c= get_byte(pb);
        d= get_byte(pb);
        rsize+=3;
    }else{
        url_fseek(pb, -1, SEEK_CUR); //FIXME
    }

    asf->packet_flags    = c;
    asf->packet_property = d;

    DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
    DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
    DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length

    //the following checks prevent overflows and infinite loops
    if(!packet_length || packet_length >= (1U<<29)){
        av_log(s, AV_LOG_ERROR, "invalid packet_length %d at:%"PRId64"\n", packet_length, url_ftell(pb));
        return -1;
    }
    if(padsize >= packet_length){
        av_log(s, AV_LOG_ERROR, "invalid padsize %d at:%"PRId64"\n", padsize, url_ftell(pb));
        return -1;
    }

    asf->packet_timestamp = get_le32(pb);
    get_le16(pb); /* duration */
    // rsize has at least 11 bytes which have to be present

    if (asf->packet_flags & 0x01) {
        asf->packet_segsizetype = get_byte(pb); rsize++;
        asf->packet_segments = asf->packet_segsizetype & 0x3f;
    } else {
        asf->packet_segments = 1;
        asf->packet_segsizetype = 0x80;
    }
    asf->packet_size_left = packet_length - padsize - rsize;
    if (packet_length < asf->hdr.min_pktsize)
        padsize += asf->hdr.min_pktsize - packet_length;
    asf->packet_padsize = padsize;
    dprintf(s, "packet: size=%d padsize=%d left=%d\n", s->packet_size, asf->packet_padsize, asf->packet_size_left);
    return 0;
}

/**
 * Parse the header of a single payload (fragment) within the current ASF
 * packet and set up the fragment state (stream, object size, fragment
 * offset/size and timestamps).
 * @return <0 if error
 */
static int asf_read_frame_header(AVFormatContext *s, ByteIOContext *pb){
    ASFContext *asf = s->priv_data;
    int rsize = 1;
    int num = get_byte(pb);
    int64_t ts0, ts1;

    asf->packet_segments--;
    asf->packet_key_frame = num >> 7;
    asf->stream_index = asf->asfid2avid[num & 0x7f];
    // sequence should be ignored!
    DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
    DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
    DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
//printf("key:%d stream:%d seq:%d offset:%d replic_size:%d\n", asf->packet_key_frame, asf->stream_index, asf->packet_seq, asf->packet_frag_offset, asf->packet_replic_size);
    if (asf->packet_replic_size >= 8) {
        asf->packet_obj_size = get_le32(pb);
        if(asf->packet_obj_size >= (1<<24) || asf->packet_obj_size <= 0){
            av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
            return -1;
        }
        asf->packet_frag_timestamp = get_le32(pb); // timestamp
        if(asf->packet_replic_size >= 8+38+4){
//            for(i=0; i<asf->packet_replic_size-8; i++)
//                av_log(s, AV_LOG_DEBUG, "%02X ",get_byte(pb));
//            av_log(s, AV_LOG_DEBUG, "\n");
            url_fskip(pb, 10);
            ts0= get_le64(pb);
            ts1= get_le64(pb);
            url_fskip(pb, 12);
            get_le32(pb);
            url_fskip(pb, asf->packet_replic_size - 8 - 38 - 4);
            if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000;
            else         asf->packet_frag_timestamp= AV_NOPTS_VALUE;
        }else
            url_fskip(pb, asf->packet_replic_size - 8);
        rsize += asf->packet_replic_size; // FIXME - check validity
    } else if (asf->packet_replic_size==1){
        // multipacket - frag_offset is beginning timestamp
        asf->packet_time_start = asf->packet_frag_offset;
        asf->packet_frag_offset = 0;
        asf->packet_frag_timestamp = asf->packet_timestamp;

        asf->packet_time_delta = get_byte(pb);
        rsize++;
    }else if(asf->packet_replic_size!=0){
        av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", asf->packet_replic_size);
        return -1;
    }
    if (asf->packet_flags & 0x01) {
        DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
        if(asf->packet_frag_size > asf->packet_size_left - rsize){
            av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid\n");
            return -1;
        }
        //printf("Fragsize %d\n", asf->packet_frag_size);
    } else {
        asf->packet_frag_size = asf->packet_size_left - rsize;
        //printf("Using rest %d %d %d\n", asf->packet_frag_size, asf->packet_size_left, rsize);
    }
    if (asf->packet_replic_size == 1) {
        asf->packet_multi_size = asf->packet_frag_size;
        if (asf->packet_multi_size > asf->packet_size_left)
            return -1;
    }
    asf->packet_size_left -= rsize;
    //printf("___objsize____ %d %d rs:%d\n", asf->packet_obj_size, asf->packet_frag_offset, rsize);

    return 0;
}

/**
 * Parse data from individual ASF packets (which were previously loaded
 * with asf_get_packet()).
 * @param s demux context
 * @param pb context to read data from
 * @param pkt pointer to store packet data into
 * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF
 *         packets need to be loaded (through asf_get_packet())
 */
static int ff_asf_parse_packet(AVFormatContext *s, ByteIOContext *pb, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asf_st = 0;
    for (;;) {
        int ret;
        if(url_feof(pb))
            return AVERROR_EOF;
        if (asf->packet_size_left < FRAME_HEADER_SIZE
            || asf->packet_segments < 1) {
            //asf->packet_size_left <= asf->packet_padsize) {
            int ret = asf->packet_size_left + asf->packet_padsize;
            //printf("PacketLeftSize:%d Pad:%d Pos:%"PRId64"\n", asf->packet_size_left, asf->packet_padsize, url_ftell(pb));
            assert(ret>=0);
            /* fail safe */
            url_fskip(pb, ret);

            asf->packet_pos= url_ftell(pb);
            if (asf->data_object_size != (uint64_t)-1 &&
                (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
                return AVERROR_EOF; /* Do not exceed the size of the data object */
            return 1;
        }
        if (asf->packet_time_start == 0) {
            if(asf_read_frame_header(s, pb) < 0){
                asf->packet_segments= 0;
                continue;
            }
            if (asf->stream_index < 0
                || s->streams[asf->stream_index]->discard >= AVDISCARD_ALL
                || (!asf->packet_key_frame && s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)
                ) {
                asf->packet_time_start = 0;
                /* unhandled packet (should not happen) */
                url_fskip(pb, asf->packet_frag_size);
                asf->packet_size_left -= asf->packet_frag_size;
                if(asf->stream_index < 0)
                    av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
                continue;
            }
            asf->asf_st = s->streams[asf->stream_index]->priv_data;
        }
        asf_st = asf->asf_st;

        if (asf->packet_replic_size == 1) {
            // frag_offset is here used as the beginning timestamp
            asf->packet_frag_timestamp = asf->packet_time_start;
            asf->packet_time_start += asf->packet_time_delta;
            asf->packet_obj_size = asf->packet_frag_size = get_byte(pb);
            asf->packet_size_left--;
            asf->packet_multi_size--;
            if (asf->packet_multi_size < asf->packet_obj_size)
            {
                asf->packet_time_start = 0;
                url_fskip(pb, asf->packet_multi_size);
                asf->packet_size_left -= asf->packet_multi_size;
                continue;
            }
            asf->packet_multi_size -= asf->packet_obj_size;
            //printf("COMPRESS size %d %d %d ms:%d\n", asf->packet_obj_size, asf->packet_frag_timestamp, asf->packet_size_left, asf->packet_multi_size);
        }
        if( /*asf->packet_frag_size == asf->packet_obj_size*/
            asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size
            && asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size){
            av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n",
                   asf_st->frag_offset, asf->packet_frag_size,
                   asf->packet_obj_size, asf_st->pkt.size);
            asf->packet_obj_size= asf_st->pkt.size;
        }
        if (   asf_st->pkt.size != asf->packet_obj_size
            || asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { //FIXME is this condition sufficient?
            if(asf_st->pkt.data){
                av_log(s, AV_LOG_INFO, "freeing incomplete packet size %d, new %d\n", asf_st->pkt.size, asf->packet_obj_size);
                asf_st->frag_offset = 0;
                av_free_packet(&asf_st->pkt);
            }
            /* new packet */
            av_new_packet(&asf_st->pkt, asf->packet_obj_size);
            asf_st->seq = asf->packet_seq;
            asf_st->pkt.dts = asf->packet_frag_timestamp;
            asf_st->pkt.stream_index = asf->stream_index;
            asf_st->pkt.pos =
            asf_st->packet_pos= asf->packet_pos;
            //printf("new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
            //asf->stream_index, asf->packet_key_frame, asf_st->pkt.flags & AV_PKT_FLAG_KEY,
            //s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO, asf->packet_obj_size);
            if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
                asf->packet_key_frame = 1;
            if (asf->packet_key_frame)
                asf_st->pkt.flags |= AV_PKT_FLAG_KEY;
        }

        /* read data */
        //printf("READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
        //       s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
        //       asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
        asf->packet_size_left -= asf->packet_frag_size;
        if (asf->packet_size_left < 0)
            continue;

        if(   asf->packet_frag_offset >= asf_st->pkt.size
           || asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset){
            av_log(s, AV_LOG_ERROR, "packet fragment position invalid %u,%u not in %u\n",
                   asf->packet_frag_offset, asf->packet_frag_size, asf_st->pkt.size);
            continue;
        }

        ret = get_buffer(pb, asf_st->pkt.data + asf->packet_frag_offset,
                         asf->packet_frag_size);
        if (ret != asf->packet_frag_size) {
            if (ret < 0 || asf->packet_frag_offset + ret == 0)
                return ret < 0 ? ret : AVERROR_EOF;
            if (asf_st->ds_span > 1) {
                // scrambling, we can either drop it completely or fill the remainder
                // TODO: should we fill the whole packet instead of just the current
                // fragment?
                memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0,
                       asf->packet_frag_size - ret);
                ret = asf->packet_frag_size;
            } else
                // no scrambling, so we can return partial packets
                av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret);
        }
        if (s->key && s->keylen == 20)
            ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
                            ret);
        asf_st->frag_offset += ret;
        /* test if whole packet is read */
        if (asf_st->frag_offset == asf_st->pkt.size) {
            //workaround for macroshit radio DVR-MS files
            if(   s->streams[asf->stream_index]->codec->codec_id == CODEC_ID_MPEG2VIDEO
               && asf_st->pkt.size > 100){
                int i;
                for(i=0; i<asf_st->pkt.size && !asf_st->pkt.data[i]; i++);
                if(i == asf_st->pkt.size){
                    av_log(s, AV_LOG_DEBUG, "discarding ms fart\n");
                    asf_st->frag_offset = 0;
                    av_free_packet(&asf_st->pkt);
                    continue;
                }
            }

            /* return packet */
            if (asf_st->ds_span > 1) {
                if(asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span){
                    av_log(s, AV_LOG_ERROR, "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", asf_st->pkt.size, asf_st->ds_packet_size, asf_st->ds_span);
                }else{
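                    /* The scrambled payload is made up of ds_chunk_size-byte
                     * chunks stored grouped by column (one ds_packet_size
                     * block per column); rebuild it in row order so the data
                     * comes out contiguous. */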
                    /* packet descrambling */
                    uint8_t *newdata = av_malloc(asf_st->pkt.size + FF_INPUT_BUFFER_PADDING_SIZE);
                    if (newdata) {
                        int offset = 0;
                        memset(newdata + asf_st->pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
                        while (offset < asf_st->pkt.size) {
                            int off = offset / asf_st->ds_chunk_size;
                            int row = off / asf_st->ds_span;
                            int col = off % asf_st->ds_span;
                            int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
                            //printf("off:%d row:%d col:%d idx:%d\n", off, row, col, idx);
                            assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
                            assert(idx+1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
                            memcpy(newdata + offset,
                                   asf_st->pkt.data + idx * asf_st->ds_chunk_size,
                                   asf_st->ds_chunk_size);
                            offset += asf_st->ds_chunk_size;
                        }
                        av_free(asf_st->pkt.data);
                        asf_st->pkt.data = newdata;
                    }
                }
            }
            asf_st->frag_offset = 0;
            *pkt= asf_st->pkt;
            //printf("packet %d %d\n", asf_st->pkt.size, asf->packet_frag_size);
            asf_st->pkt.size = 0;
            asf_st->pkt.data = 0;
            break; // packet completed
        }
    }
    return 0;
}

static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;

    for (;;) {
        int ret;

        /* parse cached packets, if any */
        if ((ret = ff_asf_parse_packet(s, s->pb, pkt)) <= 0)
            return ret;
        if ((ret = ff_asf_get_packet(s, s->pb)) < 0)
            assert(asf->packet_size_left < FRAME_HEADER_SIZE || asf->packet_segments < 1);
        asf->packet_time_start = 0;
    }

    return 0;
}

// Added to support seeking after packets have been read
// If information is not reset, read_packet fails due to
// leftover information from previous reads
static void asf_reset_header(AVFormatContext *s)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asf_st;
    int i;

    asf->packet_nb_frames = 0;
    asf->packet_size_left = 0;
    asf->packet_segments = 0;
    asf->packet_flags = 0;
    asf->packet_property = 0;
    asf->packet_timestamp = 0;
    asf->packet_segsizetype = 0;
    asf->packet_segments = 0;
    asf->packet_seq = 0;
    asf->packet_replic_size = 0;
    asf->packet_key_frame = 0;
    asf->packet_padsize = 0;
    asf->packet_frag_offset = 0;
    asf->packet_frag_size = 0;
    asf->packet_frag_timestamp = 0;
    asf->packet_multi_size = 0;
    asf->packet_obj_size = 0;
    asf->packet_time_delta = 0;
    asf->packet_time_start = 0;

    for(i=0; i<s->nb_streams; i++){
        asf_st= s->streams[i]->priv_data;
        av_free_packet(&asf_st->pkt);
        asf_st->frag_offset=0;
        asf_st->seq=0;
    }
    asf->asf_st= NULL;
}

static int asf_read_close(AVFormatContext *s)
{
    int i;

    asf_reset_header(s);
    for(i=0;i<s->nb_streams;i++) {
        AVStream *st = s->streams[i];
        av_free(st->codec->palctrl);
    }
    return 0;
}
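
/* Read frames starting at *ppos (rounded up to a packet boundary) until a
 * key frame for stream_index is found; return its pts, write the position of
 * the packet containing it back into *ppos, and add index entries for the
 * key frames seen along the way. */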
static int64_t asf_read_pts(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit)
{
    AVPacket pkt1, *pkt = &pkt1;
    ASFStream *asf_st;
    int64_t pts;
    int64_t pos= *ppos;
    int i;
    int64_t start_pos[ASF_MAX_STREAMS];

    for(i=0; i<s->nb_streams; i++){
        start_pos[i]= pos;
    }

    if (s->packet_size > 0)
        pos= (pos+s->packet_size-1-s->data_offset)/s->packet_size*s->packet_size+ s->data_offset;
    *ppos= pos;
    url_fseek(s->pb, pos, SEEK_SET);

//printf("asf_read_pts\n");
    asf_reset_header(s);
    for(;;){
        if (av_read_frame(s, pkt) < 0){
            av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
            return AV_NOPTS_VALUE;
        }

        pts= pkt->pts;

        av_free_packet(pkt);
        if(pkt->flags&AV_PKT_FLAG_KEY){
            i= pkt->stream_index;

            asf_st= s->streams[i]->priv_data;

//            assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
            pos= asf_st->packet_pos;

            av_add_index_entry(s->streams[i], pos, pts, pkt->size, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
            start_pos[i]= asf_st->packet_pos + 1;

            if(pkt->stream_index == stream_index)
                break;
        }
    }

    *ppos= pos;
//printf("found keyframe at %"PRId64" stream %d stamp:%"PRId64"\n", *ppos, stream_index, pts);

    return pts;
}
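
/* Locate the simple index object that follows the data object and turn it
 * into index entries: entry i maps the timestamp i*itime/10000 to the packet
 * number pktnum it points at, skipping duplicate packet positions. */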
static void asf_build_simple_index(AVFormatContext *s, int stream_index)
{
    ff_asf_guid g;
    ASFContext *asf = s->priv_data;
    int64_t current_pos= url_ftell(s->pb);
    int i;

    url_fseek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET);
    ff_get_guid(s->pb, &g);

    /* the data object can be followed by other top-level objects,
       skip them until the simple index object is reached */
    while (ff_guidcmp(&g, &index_guid)) {
        int64_t gsize= get_le64(s->pb);
        if (gsize < 24 || url_feof(s->pb)) {
            url_fseek(s->pb, current_pos, SEEK_SET);
            return;
        }
        url_fseek(s->pb, gsize-24, SEEK_CUR);
        ff_get_guid(s->pb, &g);
    }

    {
        int64_t itime, last_pos=-1;
        int pct, ict;
        int64_t av_unused gsize= get_le64(s->pb);
        ff_get_guid(s->pb, &g);
        itime=get_le64(s->pb);
        pct=get_le32(s->pb);
        ict=get_le32(s->pb);
        av_log(s, AV_LOG_DEBUG, "itime:0x%"PRIx64", pct:%d, ict:%d\n",itime,pct,ict);

        for (i=0;i<ict;i++){
            int pktnum=get_le32(s->pb);
            int pktct =get_le16(s->pb);
            int64_t pos      = s->data_offset + s->packet_size*(int64_t)pktnum;
            int64_t index_pts= av_rescale(itime, i, 10000);

            if(pos != last_pos){
                av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d\n", pktnum, pktct);
                av_add_index_entry(s->streams[stream_index], pos, index_pts, s->packet_size, 0, AVINDEX_KEYFRAME);
                last_pos=pos;
            }
        }
        asf->index_read= 1;
    }
    url_fseek(s->pb, current_pos, SEEK_SET);
}

static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags)
{
    ASFContext *asf = s->priv_data;
    AVStream *st = s->streams[stream_index];
    int64_t pos;
    int index;

    if (s->packet_size <= 0)
        return -1;

    /* Try using the protocol's read_seek if available */
    if(s->pb) {
        int ret = av_url_read_fseek(s->pb, stream_index, pts, flags);
        if(ret >= 0)
            asf_reset_header(s);
        if (ret != AVERROR(ENOSYS))
            return ret;
    }

    if (!asf->index_read)
        asf_build_simple_index(s, stream_index);

    if(!(asf->index_read && st->index_entries)){
        if(av_seek_frame_binary(s, stream_index, pts, flags)<0)
            return -1;
    }else{
        index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        /* find the position */
        pos = st->index_entries[index].pos;

        // various attempts to find key frame have failed so far
        //    asf_reset_header(s);
        //    url_fseek(s->pb, pos, SEEK_SET);
        //    key_pos = pos;
        //    for(i=0;i<16;i++){
        //        pos = url_ftell(s->pb);
        //        if (av_read_frame(s, &pkt) < 0){
        //            av_log(s, AV_LOG_INFO, "seek failed\n");
        //            return -1;
        //        }
        //        asf_st = s->streams[stream_index]->priv_data;
        //        pos += st->parser->frame_offset;
        //
        //        if (pkt.size > b) {
        //            b = pkt.size;
        //            key_pos = pos;
        //        }
        //
        //        av_free_packet(&pkt);
        //    }

        /* do the seek */
        av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
        url_fseek(s->pb, pos, SEEK_SET);
    }
    asf_reset_header(s);
    return 0;
}

AVInputFormat asf_demuxer = {
    "asf",
    NULL_IF_CONFIG_SMALL("ASF format"),
    sizeof(ASFContext),
    asf_probe,
    asf_read_header,
    asf_read_packet,
    asf_read_close,
    asf_read_seek,
    asf_read_pts,
};