/*
 * ASF compatible demuxer
 * Copyright (c) 2000, 2001 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

//#define DEBUG

#include "libavutil/common.h"
#include "libavutil/avstring.h"
#include "libavcodec/mpegaudio.h"
#include "avformat.h"
#include "riff.h"
#include "asf.h"
#include "asfcrypt.h"
#include "avlanguage.h"

void ff_mms_set_stream_selection(URLContext *h, AVFormatContext *format);

#undef NDEBUG
#include <assert.h>

#define FRAME_HEADER_SIZE 17
// Fix Me! FRAME_HEADER_SIZE may be different.

static const ff_asf_guid index_guid = {
    0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb
};

static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
    0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
};

/**********************************/
/* decoding */

static int guidcmp(const void *g1, const void *g2)
{
    return memcmp(g1, g2, sizeof(ff_asf_guid));
}

#ifdef DEBUG
#define PRINT_IF_GUID(g,cmp) \
if (!guidcmp(g, &cmp)) \
    dprintf(NULL, "(GUID: %s) ", #cmp)

static void print_guid(const ff_asf_guid *g)
{
    int i;
    PRINT_IF_GUID(g, ff_asf_header);
    else PRINT_IF_GUID(g, ff_asf_file_header);
    else PRINT_IF_GUID(g, ff_asf_stream_header);
    else PRINT_IF_GUID(g, ff_asf_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_video_stream);
    else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_command_stream);
    else PRINT_IF_GUID(g, ff_asf_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
    else PRINT_IF_GUID(g, ff_asf_data_header);
    else PRINT_IF_GUID(g, index_guid);
    else PRINT_IF_GUID(g, ff_asf_head1_guid);
    else PRINT_IF_GUID(g, ff_asf_head2_guid);
    else PRINT_IF_GUID(g, ff_asf_my_guid);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
    else PRINT_IF_GUID(g, ff_asf_extended_content_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_metadata_header);
    else PRINT_IF_GUID(g, stream_bitrate_guid);
    else PRINT_IF_GUID(g, ff_asf_language_guid);
    else
        dprintf(NULL, "(GUID: unknown) ");
    for(i=0;i<16;i++)
        dprintf(NULL, " 0x%02x,", (*g)[i]);
    dprintf(NULL, "}\n");
}
#undef PRINT_IF_GUID
#else
#define print_guid(g)
#endif

static void get_guid(ByteIOContext *s, ff_asf_guid *g)
{
    assert(sizeof(*g) == 16);
    get_buffer(s, *g, sizeof(*g));
}

#if 0
static void get_str16(ByteIOContext *pb, char *buf, int buf_size)
{
    int len, c;
    char *q;

    len = get_le16(pb);
    q = buf;
    while (len > 0) {
        c = get_le16(pb);
        if ((q - buf) < buf_size - 1)
            *q++ = c;
        len--;
    }
    *q = '\0';
}
#endif
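
/* Read 'len' bytes of UTF-16LE text and store it as a UTF-8 string in buf.
 * Each 16-bit code unit is converted independently via PUT_UTF8 (surrogate
 * pairs are not recombined), output is truncated to fit buf_size, and any
 * trailing odd byte of the input is skipped. */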
static void get_str16_nolen(ByteIOContext *pb, int len, char *buf, int buf_size)
{
    char* q = buf;
    for (; len > 1; len -= 2) {
        uint8_t tmp;
        PUT_UTF8(get_le16(pb), tmp, if (q - buf < buf_size - 1) *q++ = tmp;)
    }
    if (len > 0)
        url_fskip(pb, len);
    *q = '\0';
}

static int asf_probe(AVProbeData *pd)
{
    /* check file header */
    if (!guidcmp(pd->buf, &ff_asf_header))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
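
/* Descriptor value types used by the Extended Content Description and
 * Metadata objects: 0 = UTF-16 string, 1 = byte array, 2 = BOOL, 3 = DWORD,
 * 4 = QWORD, 5 = WORD. get_value() handles only the numeric types; note that
 * in the Metadata object a BOOL is stored as a WORD, which is why the
 * metadata parser in asf_read_header() reads its values by hand (see the
 * comment there). */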
static int get_value(ByteIOContext *pb, int type){
    switch(type){
        case 2: return get_le32(pb);
        case 3: return get_le32(pb);
        case 4: return get_le64(pb);
        case 5: return get_le16(pb);
        default:return INT_MIN;
    }
}

static void get_tag(AVFormatContext *s, const char *key, int type, int len)
{
    char value[1024];

    if (type <= 1) { // unicode or byte
        get_str16_nolen(s->pb, len, value, sizeof(value));
    } else if (type <= 5) { // boolean or DWORD or QWORD or WORD
        uint64_t num = get_value(s->pb, type);
        snprintf(value, sizeof(value), "%"PRIu64, num);
    } else {
        url_fskip(s->pb, len);
        return;
    }
    if (!strncmp(key, "WM/", 3))
        key += 3;
    av_metadata_set(&s->metadata, key, value);
}

static int asf_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    ASFContext *asf = s->priv_data;
    ff_asf_guid g;
    ByteIOContext *pb = s->pb;
    AVStream *st;
    ASFStream *asf_st;
    int size, i;
    int64_t gsize;
    AVRational dar[128];
    uint32_t bitrate[128];

    memset(dar, 0, sizeof(dar));
    memset(bitrate, 0, sizeof(bitrate));

    get_guid(pb, &g);
    if (guidcmp(&g, &ff_asf_header))
        return -1;
    get_le64(pb);
    get_le32(pb);
    get_byte(pb);
    get_byte(pb);
    memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
    for(;;) {
        get_guid(pb, &g);
        gsize = get_le64(pb);
        dprintf(s, "%08"PRIx64": ", url_ftell(pb) - 24);
        print_guid(&g);
        dprintf(s, " size=0x%"PRIx64"\n", gsize);
        if (!guidcmp(&g, &ff_asf_data_header)) {
            asf->data_object_offset = url_ftell(pb);
            // if not streaming, gsize is not unlimited (how?), and there is enough space in the file..
            if (!(asf->hdr.flags & 0x01) && gsize >= 100) {
                asf->data_object_size = gsize - 24;
            } else {
                asf->data_object_size = (uint64_t)-1;
            }
            break;
        }
        if (gsize < 24)
            return -1;
        if (!guidcmp(&g, &ff_asf_file_header)) {
            get_guid(pb, &asf->hdr.guid);
            asf->hdr.file_size = get_le64(pb);
            asf->hdr.create_time = get_le64(pb);
            asf->nb_packets = get_le64(pb);
            asf->hdr.play_time = get_le64(pb);
            asf->hdr.send_time = get_le64(pb);
            asf->hdr.preroll = get_le32(pb);
            asf->hdr.ignore = get_le32(pb);
            asf->hdr.flags = get_le32(pb);
            asf->hdr.min_pktsize = get_le32(pb);
            asf->hdr.max_pktsize = get_le32(pb);
            asf->hdr.max_bitrate = get_le32(pb);
            s->packet_size = asf->hdr.max_pktsize;
        } else if (!guidcmp(&g, &ff_asf_stream_header)) {
            enum CodecType type;
            int type_specific_size, sizeX;
            uint64_t total_size;
            unsigned int tag1;
            int64_t pos1, pos2, start_time;
            int test_for_ext_stream_audio, is_dvr_ms_audio=0;

            pos1 = url_ftell(pb);

            st = av_new_stream(s, 0);
            if (!st)
                return AVERROR(ENOMEM);
            av_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
            asf_st = av_mallocz(sizeof(ASFStream));
            if (!asf_st)
                return AVERROR(ENOMEM);
            st->priv_data = asf_st;
            start_time = asf->hdr.preroll;

            asf_st->stream_language_index = 128; // invalid stream index means no language info

            if(!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
                st->duration = asf->hdr.send_time /
                    (10000000 / 1000) - start_time;
            }
            get_guid(pb, &g);

            test_for_ext_stream_audio = 0;
            if (!guidcmp(&g, &ff_asf_audio_stream)) {
                type = CODEC_TYPE_AUDIO;
            } else if (!guidcmp(&g, &ff_asf_video_stream)) {
                type = CODEC_TYPE_VIDEO;
            } else if (!guidcmp(&g, &ff_asf_command_stream)) {
                type = CODEC_TYPE_DATA;
            } else if (!guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
                test_for_ext_stream_audio = 1;
                type = CODEC_TYPE_UNKNOWN;
            } else {
                return -1;
            }
            get_guid(pb, &g);
            total_size = get_le64(pb);
            type_specific_size = get_le32(pb);
            get_le32(pb);
            st->id = get_le16(pb) & 0x7f; /* stream id */
            // mapping of asf ID to AV stream ID;
            asf->asfid2avid[st->id] = s->nb_streams - 1;

            get_le32(pb);

            if (test_for_ext_stream_audio) {
                get_guid(pb, &g);
                if (!guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
                    type = CODEC_TYPE_AUDIO;
                    is_dvr_ms_audio=1;
                    get_guid(pb, &g);
                    get_le32(pb);
                    get_le32(pb);
                    get_le32(pb);
                    get_guid(pb, &g);
                    get_le32(pb);
                }
            }

            st->codec->codec_type = type;
            if (type == CODEC_TYPE_AUDIO) {
                ff_get_wav_header(pb, st->codec, type_specific_size);
                if (is_dvr_ms_audio) {
                    // codec_id and codec_tag are unreliable in dvr_ms
                    // files. Set them later by probing stream.
                    st->codec->codec_id = CODEC_ID_PROBE;
                    st->codec->codec_tag = 0;
                }
                if (st->codec->codec_id == CODEC_ID_AAC) {
                    st->need_parsing = AVSTREAM_PARSE_NONE;
                } else {
                    st->need_parsing = AVSTREAM_PARSE_FULL;
                }
                /* We have to init the frame size at some point .... */
                pos2 = url_ftell(pb);
                if (gsize >= (pos2 + 8 - pos1 + 24)) {
                    asf_st->ds_span = get_byte(pb);
                    asf_st->ds_packet_size = get_le16(pb);
                    asf_st->ds_chunk_size = get_le16(pb);
                    get_le16(pb); //ds_data_size
                    get_byte(pb); //ds_silence_data
                }
                //printf("Descrambling: ps:%d cs:%d ds:%d s:%d sd:%d\n",
                // asf_st->ds_packet_size, asf_st->ds_chunk_size,
                // asf_st->ds_data_size, asf_st->ds_span, asf_st->ds_silence_data);
                if (asf_st->ds_span > 1) {
                    if (!asf_st->ds_chunk_size
                        || (asf_st->ds_packet_size/asf_st->ds_chunk_size <= 1)
                        || asf_st->ds_packet_size % asf_st->ds_chunk_size)
                        asf_st->ds_span = 0; // disable descrambling
                }
                switch (st->codec->codec_id) {
                case CODEC_ID_MP3:
                    st->codec->frame_size = MPA_FRAME_SIZE;
                    break;
                case CODEC_ID_PCM_S16LE:
                case CODEC_ID_PCM_S16BE:
                case CODEC_ID_PCM_U16LE:
                case CODEC_ID_PCM_U16BE:
                case CODEC_ID_PCM_S8:
                case CODEC_ID_PCM_U8:
                case CODEC_ID_PCM_ALAW:
                case CODEC_ID_PCM_MULAW:
                    st->codec->frame_size = 1;
                    break;
                default:
                    /* This is probably wrong, but it prevents a crash later */
                    st->codec->frame_size = 1;
                    break;
                }
            } else if (type == CODEC_TYPE_VIDEO) {
                get_le32(pb);
                get_le32(pb);
                get_byte(pb);
                size = get_le16(pb); /* size */
                sizeX= get_le32(pb); /* size */
                st->codec->width = get_le32(pb);
                st->codec->height = get_le32(pb);
                /* not available for asf */
                get_le16(pb); /* panes */
                st->codec->bits_per_coded_sample = get_le16(pb); /* depth */
                tag1 = get_le32(pb);
                url_fskip(pb, 20);
                // av_log(s, AV_LOG_DEBUG, "size:%d tsize:%d sizeX:%d\n", size, total_size, sizeX);
                size= sizeX;
                if (size > 40) {
                    st->codec->extradata_size = size - 40;
                    st->codec->extradata = av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                    get_buffer(pb, st->codec->extradata, st->codec->extradata_size);
                }

                /* Extract palette from extradata if bpp <= 8 */
                /* This code assumes that extradata contains only palette */
                /* This is true for all paletted codecs implemented in ffmpeg */
                if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
                    st->codec->palctrl = av_mallocz(sizeof(AVPaletteControl));
#if HAVE_BIGENDIAN
                    for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)/4; i++)
                        st->codec->palctrl->palette[i] = bswap_32(((uint32_t*)st->codec->extradata)[i]);
#else
                    memcpy(st->codec->palctrl->palette, st->codec->extradata,
                           FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
#endif
                    st->codec->palctrl->palette_changed = 1;
                }

                st->codec->codec_tag = tag1;
                st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1);
                if(tag1 == MKTAG('D', 'V', 'R', ' '))
                    st->need_parsing = AVSTREAM_PARSE_FULL;
            }
            pos2 = url_ftell(pb);
            url_fskip(pb, gsize - (pos2 - pos1 + 24));
        } else if (!guidcmp(&g, &ff_asf_comment_header)) {
            int len1, len2, len3, len4, len5;

            len1 = get_le16(pb);
            len2 = get_le16(pb);
            len3 = get_le16(pb);
            len4 = get_le16(pb);
            len5 = get_le16(pb);
            get_tag(s, "title" , 0, len1);
            get_tag(s, "author" , 0, len2);
            get_tag(s, "copyright", 0, len3);
            get_tag(s, "comment" , 0, len4);
            url_fskip(pb, len5);
        } else if (!guidcmp(&g, &stream_bitrate_guid)) {
            int stream_count = get_le16(pb);
            int j;

            // av_log(s, AV_LOG_ERROR, "stream bitrate properties\n");
            // av_log(s, AV_LOG_ERROR, "streams %d\n", streams);
            for(j = 0; j < stream_count; j++) {
                int flags, bitrate, stream_id;

                flags= get_le16(pb);
                bitrate= get_le32(pb);
                stream_id= (flags & 0x7f);
                // av_log(s, AV_LOG_ERROR, "flags: 0x%x stream id %d, bitrate %d\n", flags, stream_id, bitrate);
                asf->stream_bitrates[stream_id]= bitrate;
            }
        } else if (!guidcmp(&g, &ff_asf_language_guid)) {
            int j;
            int stream_count = get_le16(pb);
            for(j = 0; j < stream_count; j++) {
                char lang[6];
                unsigned int lang_len = get_byte(pb);
                get_str16_nolen(pb, lang_len, lang, sizeof(lang));
                if (j < 128)
                    av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
            }
        } else if (!guidcmp(&g, &ff_asf_extended_content_header)) {
            int desc_count, i;

            desc_count = get_le16(pb);
            for(i=0;i<desc_count;i++) {
                int name_len,value_type,value_len;
                char name[1024];

                name_len = get_le16(pb);
                get_str16_nolen(pb, name_len, name, sizeof(name));
                value_type = get_le16(pb);
                value_len = get_le16(pb);
                get_tag(s, name, value_type, value_len);
            }
        } else if (!guidcmp(&g, &ff_asf_metadata_header)) {
            int n, stream_num, name_len, value_len, value_type, value_num;
            n = get_le16(pb);

            for(i=0;i<n;i++) {
                char name[1024];

                get_le16(pb); //lang_list_index
                stream_num= get_le16(pb);
                name_len= get_le16(pb);
                value_type= get_le16(pb);
                value_len= get_le32(pb);

                get_str16_nolen(pb, name_len, name, sizeof(name));
                //av_log(s, AV_LOG_ERROR, "%d %d %d %d %d <%s>\n", i, stream_num, name_len, value_type, value_len, name);
                value_num= get_le16(pb);//we should use get_value() here but it does not work 2 is le16 here but le32 elsewhere
                url_fskip(pb, value_len - 2);

                if(stream_num<128){
                    if (!strcmp(name, "AspectRatioX")) dar[stream_num].num= value_num;
                    else if(!strcmp(name, "AspectRatioY")) dar[stream_num].den= value_num;
                }
            }
        } else if (!guidcmp(&g, &ff_asf_ext_stream_header)) {
            int ext_len, payload_ext_ct, stream_ct;
            uint32_t ext_d, leak_rate, stream_num;
            unsigned int stream_languageid_index;

            get_le64(pb); // starttime
            get_le64(pb); // endtime
            leak_rate = get_le32(pb); // leak-datarate
            get_le32(pb); // bucket-datasize
            get_le32(pb); // init-bucket-fullness
            get_le32(pb); // alt-leak-datarate
            get_le32(pb); // alt-bucket-datasize
            get_le32(pb); // alt-init-bucket-fullness
            get_le32(pb); // max-object-size
            get_le32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
            stream_num = get_le16(pb); // stream-num
            stream_languageid_index = get_le16(pb); // stream-language-id-index
            if (stream_num < 128)
                asf->streams[stream_num].stream_language_index = stream_languageid_index;

            get_le64(pb); // avg frametime in 100ns units
            stream_ct = get_le16(pb); //stream-name-count
            payload_ext_ct = get_le16(pb); //payload-extension-system-count

            if (stream_num < 128)
                bitrate[stream_num] = leak_rate;

            for (i=0; i<stream_ct; i++){
                get_le16(pb);
                ext_len = get_le16(pb);
                url_fseek(pb, ext_len, SEEK_CUR);
            }

            for (i=0; i<payload_ext_ct; i++){
                get_guid(pb, &g);
                ext_d=get_le16(pb);
                ext_len=get_le32(pb);
                url_fseek(pb, ext_len, SEEK_CUR);
            }

            // there could be a optional stream properties object to follow
            // if so the next iteration will pick it up
        } else if (!guidcmp(&g, &ff_asf_head1_guid)) {
            int v1, v2;
            get_guid(pb, &g);
            v1 = get_le32(pb);
            v2 = get_le16(pb);
#if 0
        } else if (!guidcmp(&g, &ff_asf_codec_comment_header)) {
            int len, v1, n, num;
            char str[256], *q;
            char tag[16];

            get_guid(pb, &g);
            print_guid(&g);

            n = get_le32(pb);
            for(i=0;i<n;i++) {
                num = get_le16(pb); /* stream number */
                get_str16(pb, str, sizeof(str));
                get_str16(pb, str, sizeof(str));
                len = get_le16(pb);
                q = tag;
                while (len > 0) {
                    v1 = get_byte(pb);
                    if ((q - tag) < sizeof(tag) - 1)
                        *q++ = v1;
                    len--;
                }
                *q = '\0';
            }
#endif
        } else if (url_feof(pb)) {
            return -1;
        } else {
            url_fseek(pb, gsize - 24, SEEK_CUR);
        }
    }
    get_guid(pb, &g);
    get_le64(pb);
    get_byte(pb);
    get_byte(pb);
    if (url_feof(pb))
        return -1;
    asf->data_offset = url_ftell(pb);
    asf->packet_size_left = 0;

    for(i=0; i<128; i++){
        int stream_num= asf->asfid2avid[i];
        if(stream_num>=0){
            AVStream *st = s->streams[stream_num];
            if (!st->codec->bit_rate)
                st->codec->bit_rate = bitrate[i];
            if (dar[i].num > 0 && dar[i].den > 0)
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          dar[i].num, dar[i].den, INT_MAX);
            //av_log(s, AV_LOG_ERROR, "dar %d:%d sar=%d:%d\n", dar[i].num, dar[i].den, st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);

            // copy and convert language codes to the frontend
            if (asf->streams[i].stream_language_index < 128) {
                const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
                if (rfc1766 && strlen(rfc1766) > 1) {
                    const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
                    const char *iso6392 = av_convert_lang_to(primary_tag, AV_LANG_ISO639_2_BIBL);
                    if (iso6392)
                        av_metadata_set(&st->metadata, "language", iso6392);
                }
            }
        }
    }
    return 0;
}
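
/*
 * Many fields of the ASF packet and payload headers are stored with a 2-bit
 * "length type" that says how the field itself is coded:
 *   0 -> field not present, use the supplied default (0 bytes read)
 *   1 -> BYTE                (1 byte read)
 *   2 -> WORD, little-endian (2 bytes read)
 *   3 -> DWORD, little-endian (4 bytes read)
 * DO_2BITS() below reads such a field and adds the number of bytes consumed
 * to the caller's local variable rsize. For example, in ff_asf_get_packet()
 * a flags byte of 0x62 means the packet length is coded as a DWORD
 * (bits 5-6 == 3), no padding-length field is present (bits 3-4 == 0) and
 * the sequence field is a single byte (bits 1-2 == 1); bit 0, when set,
 * flags a multi-payload packet (checked as packet_flags & 0x01 below).
 */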
#define DO_2BITS(bits, var, defval) \
    switch (bits & 3) \
    { \
    case 3: var = get_le32(pb); rsize += 4; break; \
    case 2: var = get_le16(pb); rsize += 2; break; \
    case 1: var = get_byte(pb); rsize++; break; \
    default: var = defval; break; \
    }

/**
 * Load a single ASF packet into the demuxer.
 * @param s demux context
 * @param pb context to read data from
 * @returns 0 on success, <0 on error
 */
static int ff_asf_get_packet(AVFormatContext *s, ByteIOContext *pb)
{
    ASFContext *asf = s->priv_data;
    uint32_t packet_length, padsize;
    int rsize = 8;
    int c, d, e, off;

    // if we do not know packet size, allow skipping up to 32 kB
    off= 32768;
    if (s->packet_size > 0)
        off= (url_ftell(pb) - s->data_offset) % s->packet_size + 3;

    c=d=e=-1;
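    /*
     * Resynchronize on a data packet boundary: a packet normally starts with
     * an error-correction flags byte of 0x82 (error-correction data present,
     * 2 bytes of it) followed by two zero bytes of error-correction data, so
     * scan for the sequence 0x82 0x00 0x00 within the window computed above.
     */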
    while(off-- > 0){
        c=d; d=e;
        e= get_byte(pb);
        if(c == 0x82 && !d && !e)
            break;
    }

    if (c != 0x82) {
        /**
         * This code allows handling of -EAGAIN at packet boundaries (i.e.
         * if the packet sync code above triggers -EAGAIN). This does not
         * imply complete -EAGAIN handling support at random positions in
         * the stream.
         */
        if (url_ferror(pb) == AVERROR(EAGAIN))
            return AVERROR(EAGAIN);
        if (!url_feof(pb))
            av_log(s, AV_LOG_ERROR, "ff asf bad header %x at:%"PRId64"\n", c, url_ftell(pb));
    }
    if ((c & 0x8f) == 0x82) {
        if (d || e) {
            if (!url_feof(pb))
                av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
            return -1;
        }
        c= get_byte(pb);
        d= get_byte(pb);
        rsize+=3;
    }else{
        url_fseek(pb, -1, SEEK_CUR); //FIXME
    }

    asf->packet_flags = c;
    asf->packet_property = d;

    DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
    DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
    DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length

    //the following checks prevent overflows and infinite loops
    if(!packet_length || packet_length >= (1U<<29)){
        av_log(s, AV_LOG_ERROR, "invalid packet_length %d at:%"PRId64"\n", packet_length, url_ftell(pb));
        return -1;
    }
    if(padsize >= packet_length){
        av_log(s, AV_LOG_ERROR, "invalid padsize %d at:%"PRId64"\n", padsize, url_ftell(pb));
        return -1;
    }

    asf->packet_timestamp = get_le32(pb);
    get_le16(pb); /* duration */
    // rsize has at least 11 bytes which have to be present

    if (asf->packet_flags & 0x01) {
        asf->packet_segsizetype = get_byte(pb); rsize++;
        asf->packet_segments = asf->packet_segsizetype & 0x3f;
    } else {
        asf->packet_segments = 1;
        asf->packet_segsizetype = 0x80;
    }
    asf->packet_size_left = packet_length - padsize - rsize;
    if (packet_length < asf->hdr.min_pktsize)
        padsize += asf->hdr.min_pktsize - packet_length;
    asf->packet_padsize = padsize;
    dprintf(s, "packet: size=%d padsize=%d left=%d\n", s->packet_size, asf->packet_padsize, asf->packet_size_left);
    return 0;
}

/**
 *
 * @return <0 if error
 */
static int asf_read_frame_header(AVFormatContext *s, ByteIOContext *pb){
    ASFContext *asf = s->priv_data;
    int rsize = 1;
    int num = get_byte(pb);
    int64_t ts0, ts1;

    asf->packet_segments--;
    asf->packet_key_frame = num >> 7;
    asf->stream_index = asf->asfid2avid[num & 0x7f];
    // sequence should be ignored!
    DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
    DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
    DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
    //printf("key:%d stream:%d seq:%d offset:%d replic_size:%d\n", asf->packet_key_frame, asf->stream_index, asf->packet_seq, //asf->packet_frag_offset, asf->packet_replic_size);
    if (asf->packet_replic_size >= 8) {
        asf->packet_obj_size = get_le32(pb);
        if(asf->packet_obj_size >= (1<<24) || asf->packet_obj_size <= 0){
            av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
            return -1;
        }
        asf->packet_frag_timestamp = get_le32(pb); // timestamp
        if(asf->packet_replic_size >= 8+38+4){
            // for(i=0; i<asf->packet_replic_size-8; i++)
            // av_log(s, AV_LOG_DEBUG, "%02X ",get_byte(pb));
            // av_log(s, AV_LOG_DEBUG, "\n");
            url_fskip(pb, 10);
            ts0= get_le64(pb);
            ts1= get_le64(pb);
            url_fskip(pb, 12);
            get_le32(pb);
            url_fskip(pb, asf->packet_replic_size - 8 - 38 - 4);
            if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000;
            else asf->packet_frag_timestamp= AV_NOPTS_VALUE;
        }else
            url_fskip(pb, asf->packet_replic_size - 8);
        rsize += asf->packet_replic_size; // FIXME - check validity
    } else if (asf->packet_replic_size==1){
        // multipacket - frag_offset is beginning timestamp
        asf->packet_time_start = asf->packet_frag_offset;
        asf->packet_frag_offset = 0;
        asf->packet_frag_timestamp = asf->packet_timestamp;

        asf->packet_time_delta = get_byte(pb);
        rsize++;
    }else if(asf->packet_replic_size!=0){
        av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", asf->packet_replic_size);
        return -1;
    }
    if (asf->packet_flags & 0x01) {
        DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
        if(asf->packet_frag_size > asf->packet_size_left - rsize){
            av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid\n");
            return -1;
        }
        //printf("Fragsize %d\n", asf->packet_frag_size);
    } else {
        asf->packet_frag_size = asf->packet_size_left - rsize;
        //printf("Using rest %d %d %d\n", asf->packet_frag_size, asf->packet_size_left, rsize);
    }
    if (asf->packet_replic_size == 1) {
        asf->packet_multi_size = asf->packet_frag_size;
        if (asf->packet_multi_size > asf->packet_size_left)
            return -1;
    }
    asf->packet_size_left -= rsize;
    //printf("___objsize____ %d %d rs:%d\n", asf->packet_obj_size, asf->packet_frag_offset, rsize);

    return 0;
}

/**
 * Parse data from individual ASF packets (which were previously loaded
 * with asf_get_packet()).
 * @param s demux context
 * @param pb context to read data from
 * @param pkt pointer to store packet data into
 * @returns 0 if data was stored in pkt, <0 on error or 1 if more ASF
 * packets need to be loaded (through asf_get_packet())
 */
static int ff_asf_parse_packet(AVFormatContext *s, ByteIOContext *pb, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asf_st = 0;
    for (;;) {
        if(url_feof(pb))
            return AVERROR_EOF;
        if (asf->packet_size_left < FRAME_HEADER_SIZE
            || asf->packet_segments < 1) {
            //asf->packet_size_left <= asf->packet_padsize) {
            int ret = asf->packet_size_left + asf->packet_padsize;
            //printf("PacketLeftSize:%d Pad:%d Pos:%"PRId64"\n", asf->packet_size_left, asf->packet_padsize, url_ftell(pb));
            assert(ret>=0);
            /* fail safe */
            url_fskip(pb, ret);

            asf->packet_pos= url_ftell(pb);
            if (asf->data_object_size != (uint64_t)-1 &&
                (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
                return AVERROR_EOF; /* Do not exceed the size of the data object */
            return 1;
        }
        if (asf->packet_time_start == 0) {
            if(asf_read_frame_header(s, pb) < 0){
                asf->packet_segments= 0;
                continue;
            }
            if (asf->stream_index < 0
                || s->streams[asf->stream_index]->discard >= AVDISCARD_ALL
                || (!asf->packet_key_frame && s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)
                ) {
                asf->packet_time_start = 0;
                /* unhandled packet (should not happen) */
                url_fskip(pb, asf->packet_frag_size);
                asf->packet_size_left -= asf->packet_frag_size;
                if(asf->stream_index < 0)
                    av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
                continue;
            }
            asf->asf_st = s->streams[asf->stream_index]->priv_data;
        }
        asf_st = asf->asf_st;

        if (asf->packet_replic_size == 1) {
            // frag_offset is here used as the beginning timestamp
            asf->packet_frag_timestamp = asf->packet_time_start;
            asf->packet_time_start += asf->packet_time_delta;
            asf->packet_obj_size = asf->packet_frag_size = get_byte(pb);
            asf->packet_size_left--;
            asf->packet_multi_size--;
            if (asf->packet_multi_size < asf->packet_obj_size)
            {
                asf->packet_time_start = 0;
                url_fskip(pb, asf->packet_multi_size);
                asf->packet_size_left -= asf->packet_multi_size;
                continue;
            }
            asf->packet_multi_size -= asf->packet_obj_size;
            //printf("COMPRESS size %d %d %d ms:%d\n", asf->packet_obj_size, asf->packet_frag_timestamp, asf->packet_size_left, asf->packet_multi_size);
        }
        if( /*asf->packet_frag_size == asf->packet_obj_size*/
            asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size
            && asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size){
            av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n",
                   asf_st->frag_offset, asf->packet_frag_size,
                   asf->packet_obj_size, asf_st->pkt.size);
            asf->packet_obj_size= asf_st->pkt.size;
        }
        if ( asf_st->pkt.size != asf->packet_obj_size
            || asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { //FIXME is this condition sufficient?
            if(asf_st->pkt.data){
                av_log(s, AV_LOG_INFO, "freeing incomplete packet size %d, new %d\n", asf_st->pkt.size, asf->packet_obj_size);
                asf_st->frag_offset = 0;
                av_free_packet(&asf_st->pkt);
            }
            /* new packet */
            av_new_packet(&asf_st->pkt, asf->packet_obj_size);
            asf_st->seq = asf->packet_seq;
            asf_st->pkt.dts = asf->packet_frag_timestamp;
            asf_st->pkt.stream_index = asf->stream_index;
            asf_st->pkt.pos =
            asf_st->packet_pos= asf->packet_pos;
            //printf("new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
            //asf->stream_index, asf->packet_key_frame, asf_st->pkt.flags & PKT_FLAG_KEY,
            //s->streams[asf->stream_index]->codec->codec_type == CODEC_TYPE_AUDIO, asf->packet_obj_size);
            if (s->streams[asf->stream_index]->codec->codec_type == CODEC_TYPE_AUDIO)
                asf->packet_key_frame = 1;
            if (asf->packet_key_frame)
                asf_st->pkt.flags |= PKT_FLAG_KEY;
        }

        /* read data */
        //printf("READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
        // s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
        // asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
        asf->packet_size_left -= asf->packet_frag_size;
        if (asf->packet_size_left < 0)
            continue;

        if( asf->packet_frag_offset >= asf_st->pkt.size
            || asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset){
            av_log(s, AV_LOG_ERROR, "packet fragment position invalid %u,%u not in %u\n",
                   asf->packet_frag_offset, asf->packet_frag_size, asf_st->pkt.size);
            continue;
        }

        get_buffer(pb, asf_st->pkt.data + asf->packet_frag_offset,
                   asf->packet_frag_size);
        if (s->key && s->keylen == 20)
            ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
                            asf->packet_frag_size);
        asf_st->frag_offset += asf->packet_frag_size;
        /* test if whole packet is read */
        if (asf_st->frag_offset == asf_st->pkt.size) {
            //workaround for macroshit radio DVR-MS files
            if( s->streams[asf->stream_index]->codec->codec_id == CODEC_ID_MPEG2VIDEO
                && asf_st->pkt.size > 100){
                int i;
                for(i=0; i<asf_st->pkt.size && !asf_st->pkt.data[i]; i++);
                if(i == asf_st->pkt.size){
                    av_log(s, AV_LOG_DEBUG, "discarding ms fart\n");
                    asf_st->frag_offset = 0;
                    av_free_packet(&asf_st->pkt);
                    continue;
                }
            }

            /* return packet */
            if (asf_st->ds_span > 1) {
                if(asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span){
                    av_log(s, AV_LOG_ERROR, "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", asf_st->pkt.size, asf_st->ds_packet_size, asf_st->ds_span);
                }else{
                    /* packet descrambling */
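                    /*
                     * The chunks of this packet were interleaved at mux time:
                     * consecutive ds_chunk_size-byte chunks of the original
                     * stream were distributed round-robin across ds_span
                     * consecutive transport packets. The loop below gathers
                     * them back into playback order, e.g. with ds_span = 3
                     * and ds_packet_size / ds_chunk_size = 2 it reads the
                     * stored chunks in the order 0, 2, 4, 1, 3, 5.
                     */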
                    uint8_t *newdata = av_malloc(asf_st->pkt.size);
                    if (newdata) {
                        int offset = 0;
                        while (offset < asf_st->pkt.size) {
                            int off = offset / asf_st->ds_chunk_size;
                            int row = off / asf_st->ds_span;
                            int col = off % asf_st->ds_span;
                            int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
                            //printf("off:%d row:%d col:%d idx:%d\n", off, row, col, idx);
                            assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
                            assert(idx+1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
                            memcpy(newdata + offset,
                                   asf_st->pkt.data + idx * asf_st->ds_chunk_size,
                                   asf_st->ds_chunk_size);
                            offset += asf_st->ds_chunk_size;
                        }
                        av_free(asf_st->pkt.data);
                        asf_st->pkt.data = newdata;
                    }
                }
            }
            asf_st->frag_offset = 0;
            *pkt= asf_st->pkt;
            //printf("packet %d %d\n", asf_st->pkt.size, asf->packet_frag_size);
            asf_st->pkt.size = 0;
            asf_st->pkt.data = 0;
            break; // packet completed
        }
    }
    return 0;
}

static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    ASFContext *asf = s->priv_data;

    for (;;) {
        int ret;

        /* parse cached packets, if any */
        if ((ret = ff_asf_parse_packet(s, s->pb, pkt)) <= 0)
            return ret;
        if ((ret = ff_asf_get_packet(s, s->pb)) < 0)
            assert(asf->packet_size_left < FRAME_HEADER_SIZE || asf->packet_segments < 1);
        asf->packet_time_start = 0;
    }

    return 0;
}

// Added to support seeking after packets have been read
// If information is not reset, read_packet fails due to
// leftover information from previous reads
static void asf_reset_header(AVFormatContext *s)
{
    ASFContext *asf = s->priv_data;
    ASFStream *asf_st;
    int i;

    asf->packet_nb_frames = 0;
    asf->packet_size_left = 0;
    asf->packet_segments = 0;
    asf->packet_flags = 0;
    asf->packet_property = 0;
    asf->packet_timestamp = 0;
    asf->packet_segsizetype = 0;
    asf->packet_segments = 0;
    asf->packet_seq = 0;
    asf->packet_replic_size = 0;
    asf->packet_key_frame = 0;
    asf->packet_padsize = 0;
    asf->packet_frag_offset = 0;
    asf->packet_frag_size = 0;
    asf->packet_frag_timestamp = 0;
    asf->packet_multi_size = 0;
    asf->packet_obj_size = 0;
    asf->packet_time_delta = 0;
    asf->packet_time_start = 0;

    for(i=0; i<s->nb_streams; i++){
        asf_st= s->streams[i]->priv_data;
        av_free_packet(&asf_st->pkt);
        asf_st->frag_offset=0;
        asf_st->seq=0;
    }
    asf->asf_st= NULL;
}

static int asf_read_close(AVFormatContext *s)
{
    int i;

    asf_reset_header(s);
    for(i=0;i<s->nb_streams;i++) {
        AVStream *st = s->streams[i];
        av_free(st->codec->palctrl);
    }
    return 0;
}
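
/**
 * Read frames starting at *ppos (rounded up to a packet boundary) until a
 * key frame for stream_index is found. Index entries are added for every
 * key frame encountered on the way; *ppos is updated to the position of the
 * packet containing the match and its pts is returned. Used as the
 * read_timestamp callback for the generic binary-search seek code.
 */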
static int64_t asf_read_pts(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit)
{
    AVPacket pkt1, *pkt = &pkt1;
    ASFStream *asf_st;
    int64_t pts;
    int64_t pos= *ppos;
    int i;
    int64_t start_pos[s->nb_streams];

    for(i=0; i<s->nb_streams; i++){
        start_pos[i]= pos;
    }

    if (s->packet_size > 0)
        pos= (pos+s->packet_size-1-s->data_offset)/s->packet_size*s->packet_size+ s->data_offset;
    *ppos= pos;
    url_fseek(s->pb, pos, SEEK_SET);

    //printf("asf_read_pts\n");
    asf_reset_header(s);
    for(;;){
        if (av_read_frame(s, pkt) < 0){
            av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
            return AV_NOPTS_VALUE;
        }

        pts= pkt->pts;

        av_free_packet(pkt);
        if(pkt->flags&PKT_FLAG_KEY){
            i= pkt->stream_index;

            asf_st= s->streams[i]->priv_data;

            // assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
            pos= asf_st->packet_pos;

            av_add_index_entry(s->streams[i], pos, pts, pkt->size, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
            start_pos[i]= asf_st->packet_pos + 1;

            if(pkt->stream_index == stream_index)
                break;
        }
    }

    *ppos= pos;
    //printf("found keyframe at %"PRId64" stream %d stamp:%"PRId64"\n", *ppos, stream_index, pts);

    return pts;
}
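
/**
 * Build a key-frame index from the Simple Index Object that may follow the
 * Data Object. Its layout (after the GUID and object size) is: file ID GUID,
 * time interval per entry in 100ns units, maximum packet count, entry count,
 * then one {packet number (DWORD), packet span (WORD)} pair per entry; entry
 * i is registered with a pts of i * interval converted to milliseconds.
 */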
static void asf_build_simple_index(AVFormatContext *s, int stream_index)
{
    ff_asf_guid g;
    ASFContext *asf = s->priv_data;
    int64_t current_pos= url_ftell(s->pb);
    int i;

    url_fseek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET);
    get_guid(s->pb, &g);
    if (!guidcmp(&g, &index_guid)) {
        int64_t itime;
        int pct, ict;
        int64_t av_unused gsize= get_le64(s->pb);
        get_guid(s->pb, &g);
        itime=get_le64(s->pb);
        pct=get_le32(s->pb);
        ict=get_le32(s->pb);
        av_log(s, AV_LOG_DEBUG, "itime:0x%"PRIx64", pct:%d, ict:%d\n",itime,pct,ict);

        for (i=0;i<ict;i++){
            int pktnum=get_le32(s->pb);
            int pktct =get_le16(s->pb);
            int64_t pos = s->data_offset + s->packet_size*(int64_t)pktnum;
            int64_t index_pts= av_rescale(itime, i, 10000);

            av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d\n", pktnum, pktct);
            av_add_index_entry(s->streams[stream_index], pos, index_pts, s->packet_size, 0, AVINDEX_KEYFRAME);
        }
        asf->index_read= 1;
    }
    url_fseek(s->pb, current_pos, SEEK_SET);
}
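
/**
 * Seeking: first offer the seek to the underlying protocol via
 * av_url_read_fseek() (a streaming protocol may handle it itself); otherwise
 * use the Simple Index Object if one was found, and fall back to
 * av_seek_frame_binary() driven by asf_read_pts() when there is no index.
 */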
static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags)
{
    ASFContext *asf = s->priv_data;
    AVStream *st = s->streams[stream_index];
    int64_t pos;
    int index;

    if (s->packet_size <= 0)
        return -1;

    /* Try using the protocol's read_seek if available */
    if(s->pb) {
        int ret = av_url_read_fseek(s->pb, stream_index, pts, flags);
        if(ret >= 0)
            asf_reset_header(s);
        if (ret != AVERROR(ENOSYS))
            return ret;
    }

    if (!asf->index_read)
        asf_build_simple_index(s, stream_index);

    if(!(asf->index_read && st->index_entries)){
        if(av_seek_frame_binary(s, stream_index, pts, flags)<0)
            return -1;
    }else{
        index= av_index_search_timestamp(st, pts, flags);
        if(index<0)
            return -1;

        /* find the position */
        pos = st->index_entries[index].pos;

        // various attempts to find key frame have failed so far
        //        asf_reset_header(s);
        //        url_fseek(s->pb, pos, SEEK_SET);
        //        key_pos = pos;
        //        for(i=0;i<16;i++){
        //            pos = url_ftell(s->pb);
        //            if (av_read_frame(s, &pkt) < 0){
        //                av_log(s, AV_LOG_INFO, "seek failed\n");
        //                return -1;
        //            }
        //            asf_st = s->streams[stream_index]->priv_data;
        //            pos += st->parser->frame_offset;
        //
        //            if (pkt.size > b) {
        //                b = pkt.size;
        //                key_pos = pos;
        //            }
        //
        //            av_free_packet(&pkt);
        //        }

        /* do the seek */
        av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
        url_fseek(s->pb, pos, SEEK_SET);
    }
    asf_reset_header(s);
    return 0;
}

AVInputFormat asf_demuxer = {
    "asf",
    NULL_IF_CONFIG_SMALL("ASF format"),
    sizeof(ASFContext),
    asf_probe,
    asf_read_header,
    asf_read_packet,
    asf_read_close,
    asf_read_seek,
    asf_read_pts,
    .metadata_conv = ff_asf_metadata_conv,
};