You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1294 lines
47KB

  1. /*
  2. * ASF compatible demuxer
  3. * Copyright (c) 2000, 2001 Fabrice Bellard
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. //#define DEBUG
  22. #include "libavutil/common.h"
  23. #include "libavutil/avstring.h"
  24. #include "libavcodec/mpegaudio.h"
  25. #include "avformat.h"
  26. #include "riff.h"
  27. #include "asf.h"
  28. #include "asfcrypt.h"
  29. #include "avlanguage.h"
  30. void ff_mms_set_stream_selection(URLContext *h, AVFormatContext *format);
/**
 * Demuxer private state. One instance per open ASF file.
 * All per-stream tables are indexed by the 7-bit ASF stream number (0..127).
 */
typedef struct {
    int asfid2avid[128];           ///< conversion table from asf ID 2 AVStream ID
    ASFStream streams[128];        ///< it's max number and it's not that big
    uint32_t stream_bitrates[128]; ///< max number of streams, bitrate for each (for streaming)
    AVRational dar[128];           ///< display aspect ratio per stream; index 0 carries the container-level value
    char stream_languages[128][6]; ///< max number of streams, language for each (RFC1766, e.g. en-US)
    /* non streamed additional info */
    /* packet filling */
    int packet_size_left;          ///< bytes of the current packet not yet consumed
    /* only for reading */
    uint64_t data_offset;          ///< beginning of the first data packet
    uint64_t data_object_offset;   ///< data object offset (excl. GUID & size)
    uint64_t data_object_size;     ///< size of the data object
    int index_read;
    ASFMainHeader hdr;
    /* state of the packet currently being parsed (see ff_asf_get_packet) */
    int packet_flags;
    int packet_property;
    int packet_timestamp;
    int packet_segsizetype;
    int packet_segments;
    int packet_seq;
    int packet_replic_size;
    int packet_key_frame;
    int packet_padsize;
    unsigned int packet_frag_offset;
    unsigned int packet_frag_size;
    int64_t packet_frag_timestamp;
    int packet_multi_size;
    int packet_obj_size;
    int packet_time_delta;
    int packet_time_start;
    int64_t packet_pos;
    int stream_index;
    ASFStream* asf_st;             ///< currently decoded stream
} ASFContext;
  66. #undef NDEBUG
  67. #include <assert.h>
  68. #define ASF_MAX_STREAMS 127
  69. #define FRAME_HEADER_SIZE 17
  70. // Fix Me! FRAME_HEADER_SIZE may be different.
/* GUID of the ASF Simple Index Object (used for seeking). */
static const ff_asf_guid index_guid = {
    0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb
};
/* GUID of the Stream Bitrate Properties Object. */
static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */
    0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2
};
  77. /**********************************/
  78. /* decoding */
#ifdef DEBUG
/* Expands to the head of an if/else-if chain; each use compares g against a
 * known GUID and logs its symbolic name on match. Debug builds only. */
#define PRINT_IF_GUID(g,cmp) \
if (!ff_guidcmp(g, &cmp)) \
    av_dlog(NULL, "(GUID: %s) ", #cmp)

/**
 * Log a human-readable name for a GUID (if known) followed by its raw bytes.
 * Compiled out entirely unless DEBUG is defined.
 */
static void print_guid(const ff_asf_guid *g)
{
    int i;
    PRINT_IF_GUID(g, ff_asf_header);
    else PRINT_IF_GUID(g, ff_asf_file_header);
    else PRINT_IF_GUID(g, ff_asf_stream_header);
    else PRINT_IF_GUID(g, ff_asf_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_audio_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_video_stream);
    else PRINT_IF_GUID(g, ff_asf_video_conceal_none);
    else PRINT_IF_GUID(g, ff_asf_command_stream);
    else PRINT_IF_GUID(g, ff_asf_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment_header);
    else PRINT_IF_GUID(g, ff_asf_codec_comment1_header);
    else PRINT_IF_GUID(g, ff_asf_data_header);
    else PRINT_IF_GUID(g, index_guid);
    else PRINT_IF_GUID(g, ff_asf_head1_guid);
    else PRINT_IF_GUID(g, ff_asf_head2_guid);
    else PRINT_IF_GUID(g, ff_asf_my_guid);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_header);
    else PRINT_IF_GUID(g, ff_asf_extended_content_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header);
    else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream);
    else PRINT_IF_GUID(g, ff_asf_metadata_header);
    else PRINT_IF_GUID(g, ff_asf_marker_header);
    else PRINT_IF_GUID(g, stream_bitrate_guid);
    else PRINT_IF_GUID(g, ff_asf_language_guid);
    else
        av_dlog(NULL, "(GUID: unknown) ");
    /* always dump the raw 16 bytes as well */
    for(i=0;i<16;i++)
        av_dlog(NULL, " 0x%02x,", (*g)[i]);
    av_dlog(NULL, "}\n");
}
#undef PRINT_IF_GUID
#else
#define print_guid(g)
#endif
/**
 * Read a 16-byte GUID from the stream into *g.
 * The assert documents that ff_asf_guid is exactly the on-wire size.
 */
void ff_get_guid(ByteIOContext *s, ff_asf_guid *g)
{
    assert(sizeof(*g) == 16);
    get_buffer(s, *g, sizeof(*g));
}
  125. static int asf_probe(AVProbeData *pd)
  126. {
  127. /* check file header */
  128. if (!ff_guidcmp(pd->buf, &ff_asf_header))
  129. return AVPROBE_SCORE_MAX;
  130. else
  131. return 0;
  132. }
  133. static int get_value(ByteIOContext *pb, int type){
  134. switch(type){
  135. case 2: return get_le32(pb);
  136. case 3: return get_le32(pb);
  137. case 4: return get_le64(pb);
  138. case 5: return get_le16(pb);
  139. default:return INT_MIN;
  140. }
  141. }
  142. static void get_tag(AVFormatContext *s, const char *key, int type, int len)
  143. {
  144. char *value;
  145. int64_t off = url_ftell(s->pb);
  146. if ((unsigned)len >= (UINT_MAX - 1)/2)
  147. return;
  148. value = av_malloc(2*len+1);
  149. if (!value)
  150. goto finish;
  151. if (type == 0) { // UTF16-LE
  152. avio_get_str16le(s->pb, len, value, 2*len + 1);
  153. } else if (type > 1 && type <= 5) { // boolean or DWORD or QWORD or WORD
  154. uint64_t num = get_value(s->pb, type);
  155. snprintf(value, len, "%"PRIu64, num);
  156. } else {
  157. av_log(s, AV_LOG_DEBUG, "Unsupported value type %d in tag %s.\n", type, key);
  158. goto finish;
  159. }
  160. av_metadata_set2(&s->metadata, key, value, 0);
  161. finish:
  162. av_freep(&value);
  163. url_fseek(s->pb, off + len, SEEK_SET);
  164. }
/**
 * Parse the File Properties Object into asf->hdr.
 * All fields are fixed-size little-endian values read in wire order.
 */
static int asf_read_file_properties(AVFormatContext *s, int64_t size)
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = s->pb;

    ff_get_guid(pb, &asf->hdr.guid);
    asf->hdr.file_size   = get_le64(pb);
    asf->hdr.create_time = get_le64(pb);
    get_le64(pb); /* number of packets */
    asf->hdr.play_time   = get_le64(pb);
    asf->hdr.send_time   = get_le64(pb);
    asf->hdr.preroll     = get_le32(pb);
    asf->hdr.ignore      = get_le32(pb);
    asf->hdr.flags       = get_le32(pb);
    asf->hdr.min_pktsize = get_le32(pb);
    asf->hdr.max_pktsize = get_le32(pb);
    asf->hdr.max_bitrate = get_le32(pb);
    /* ASF packets are fixed-size; expose the maximum as the demuxer packet size */
    s->packet_size = asf->hdr.max_pktsize;
    return 0;
}
/**
 * Parse an Extended Stream Properties Object: records the stream's leak rate
 * (bitrate) and language index, then skips the variable-length stream-name
 * and payload-extension tables.
 */
static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size)
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = s->pb;
    ff_asf_guid g;
    int ext_len, payload_ext_ct, stream_ct, i;
    uint32_t ext_d, leak_rate, stream_num;
    unsigned int stream_languageid_index;

    get_le64(pb); // starttime
    get_le64(pb); // endtime
    leak_rate = get_le32(pb); // leak-datarate
    get_le32(pb); // bucket-datasize
    get_le32(pb); // init-bucket-fullness
    get_le32(pb); // alt-leak-datarate
    get_le32(pb); // alt-bucket-datasize
    get_le32(pb); // alt-init-bucket-fullness
    get_le32(pb); // max-object-size
    get_le32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved)
    stream_num = get_le16(pb); // stream-num
    stream_languageid_index = get_le16(pb); // stream-language-id-index
    if (stream_num < 128)
        asf->streams[stream_num].stream_language_index = stream_languageid_index;
    get_le64(pb); // avg frametime in 100ns units
    stream_ct = get_le16(pb); //stream-name-count
    payload_ext_ct = get_le16(pb); //payload-extension-system-count
    if (stream_num < 128)
        asf->stream_bitrates[stream_num] = leak_rate;
    /* skip the stream-name entries (lang id + counted string each) */
    for (i=0; i<stream_ct; i++){
        get_le16(pb);
        ext_len = get_le16(pb);
        url_fseek(pb, ext_len, SEEK_CUR);
    }
    /* skip the payload extension systems (GUID + data-size + counted info) */
    for (i=0; i<payload_ext_ct; i++){
        ff_get_guid(pb, &g);
        ext_d=get_le16(pb);
        ext_len=get_le32(pb);
        url_fseek(pb, ext_len, SEEK_CUR);
    }
    return 0;
}
  224. static int asf_read_content_desc(AVFormatContext *s, int64_t size)
  225. {
  226. ByteIOContext *pb = s->pb;
  227. int len1, len2, len3, len4, len5;
  228. len1 = get_le16(pb);
  229. len2 = get_le16(pb);
  230. len3 = get_le16(pb);
  231. len4 = get_le16(pb);
  232. len5 = get_le16(pb);
  233. get_tag(s, "title" , 0, len1);
  234. get_tag(s, "author" , 0, len2);
  235. get_tag(s, "copyright", 0, len3);
  236. get_tag(s, "comment" , 0, len4);
  237. url_fskip(pb, len5);
  238. return 0;
  239. }
/**
 * Parse the Extended Content Description Object: a count followed by
 * name/type/value records. AspectRatioX/Y are captured into asf->dar[0]
 * (container-level value); everything else becomes a metadata tag.
 */
static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size)
{
    ByteIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int desc_count, i, ret;

    desc_count = get_le16(pb);
    for(i=0;i<desc_count;i++) {
        int name_len,value_type,value_len;
        char name[1024];

        name_len = get_le16(pb);
        if (name_len%2) // must be even, broken lavf versions wrote len-1
            name_len += 1;
        /* avio_get_str16le returns bytes consumed; skip any remainder so the
         * stream position stays aligned with the record */
        if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
            url_fskip(pb, name_len - ret);
        value_type = get_le16(pb);
        value_len = get_le16(pb);
        if (!value_type && value_len%2)
            value_len += 1;
        /**
         * My sample has that stream set to 0 maybe that mean the container.
         * Asf stream count start at 1. I am using 0 to the container value since it's unused
         */
        if (!strcmp(name, "AspectRatioX")){
            asf->dar[0].num= get_value(s->pb, value_type);
        } else if(!strcmp(name, "AspectRatioY")){
            asf->dar[0].den= get_value(s->pb, value_type);
        } else
            get_tag(s, name, value_type, value_len); /* get_tag re-seeks past value_len itself */
    }
    return 0;
}
  271. static int asf_read_language_list(AVFormatContext *s, int64_t size)
  272. {
  273. ByteIOContext *pb = s->pb;
  274. ASFContext *asf = s->priv_data;
  275. int j, ret;
  276. int stream_count = get_le16(pb);
  277. for(j = 0; j < stream_count; j++) {
  278. char lang[6];
  279. unsigned int lang_len = get_byte(pb);
  280. if ((ret = avio_get_str16le(pb, lang_len, lang, sizeof(lang))) < lang_len)
  281. url_fskip(pb, lang_len - ret);
  282. if (j < 128)
  283. av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
  284. }
  285. return 0;
  286. }
/**
 * Parse the Metadata Object: per-stream name/value records. Only
 * AspectRatioX/Y are extracted (into asf->dar[stream_num]); all other
 * values are skipped.
 */
static int asf_read_metadata(AVFormatContext *s, int64_t size)
{
    ByteIOContext *pb = s->pb;
    ASFContext *asf = s->priv_data;
    int n, stream_num, name_len, value_len, value_type, value_num;
    int ret, i;

    n = get_le16(pb);
    for(i=0;i<n;i++) {
        char name[1024];

        get_le16(pb); //lang_list_index
        stream_num= get_le16(pb);
        name_len= get_le16(pb);
        value_type= get_le16(pb);
        value_len= get_le32(pb);
        /* skip any part of the name the bounded buffer did not consume */
        if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
            url_fskip(pb, name_len - ret);
        //av_log(s, AV_LOG_ERROR, "%d %d %d %d %d <%s>\n", i, stream_num, name_len, value_type, value_len, name);
        value_num= get_le16(pb);//we should use get_value() here but it does not work 2 is le16 here but le32 elsewhere
        url_fskip(pb, value_len - 2); /* 2 bytes already consumed above */
        if(stream_num<128){
            if (!strcmp(name, "AspectRatioX")) asf->dar[stream_num].num= value_num;
            else if(!strcmp(name, "AspectRatioY")) asf->dar[stream_num].den= value_num;
        }
    }
    return 0;
}
/**
 * Parse the Marker Object and create one chapter per marker entry.
 * Presentation times are in 100ns units (timebase 1/10000000).
 */
static int asf_read_marker(AVFormatContext *s, int64_t size)
{
    ByteIOContext *pb = s->pb;
    int i, count, name_len, ret;
    char name[1024];

    get_le64(pb); // reserved 16 bytes
    get_le64(pb); // ...
    count = get_le32(pb); // markers count
    get_le16(pb); // reserved 2 bytes
    name_len = get_le16(pb); // name length
    for(i=0;i<name_len;i++){
        get_byte(pb); // skip the name
    }
    for(i=0;i<count;i++){
        int64_t pres_time;
        int name_len;

        get_le64(pb); // offset, 8 bytes
        pres_time = get_le64(pb); // presentation time
        get_le16(pb); // entry length
        get_le32(pb); // send time
        get_le32(pb); // flags
        name_len = get_le32(pb); // name length
        /* NOTE(review): up to name_len*2 bytes are consumed (name_len appears
         * to count UTF-16 chars) yet the shortfall comparison and skip use
         * plain name_len — the units look inconsistent; confirm against the
         * ASF spec whether Marker Description Length is chars or bytes. */
        if ((ret = avio_get_str16le(pb, name_len * 2, name, sizeof(name))) < name_len)
            url_fskip(pb, name_len - ret);
        ff_new_chapter(s, i, (AVRational){1, 10000000}, pres_time, AV_NOPTS_VALUE, name );
    }
    return 0;
}
/**
 * Main header parser: walks the top-level ASF header objects one GUID at a
 * time, dispatching to the per-object readers, until the Data Object is
 * reached. Then finalizes per-stream bitrate, aspect ratio and language
 * metadata collected along the way.
 * @return 0 on success, negative on malformed input
 */
static int asf_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    ASFContext *asf = s->priv_data;
    ff_asf_guid g;
    ByteIOContext *pb = s->pb;
    AVStream *st;
    ASFStream *asf_st;
    int i;
    int64_t gsize;

    ff_get_guid(pb, &g);
    if (ff_guidcmp(&g, &ff_asf_header))
        return -1;
    get_le64(pb); /* header object size */
    get_le32(pb); /* number of sub-objects */
    get_byte(pb); /* reserved1 */
    get_byte(pb); /* reserved2 */
    /* every byte of each int becomes 0xff => each entry becomes -1 (unmapped) */
    memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid));
    for(;;) {
        uint64_t gpos= url_ftell(pb);
        ff_get_guid(pb, &g);
        gsize = get_le64(pb);
        av_dlog(s, "%08"PRIx64": ", gpos);
        print_guid(&g);
        av_dlog(s, " size=0x%"PRIx64"\n", gsize);
        if (!ff_guidcmp(&g, &ff_asf_data_header)) {
            /* data object found: header parsing is complete */
            asf->data_object_offset = url_ftell(pb);
            // if not streaming, gsize is not unlimited (how?), and there is enough space in the file..
            if (!(asf->hdr.flags & 0x01) && gsize >= 100) {
                asf->data_object_size = gsize - 24; /* minus GUID + size field */
            } else {
                asf->data_object_size = (uint64_t)-1; /* unbounded */
            }
            break;
        }
        if (gsize < 24) /* every object is at least GUID (16) + size (8) */
            return -1;
        if (!ff_guidcmp(&g, &ff_asf_file_header)) {
            asf_read_file_properties(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) {
            /* Stream Properties Object: creates one AVStream */
            enum AVMediaType type;
            int type_specific_size, sizeX;
            uint64_t total_size;
            unsigned int tag1;
            int64_t pos1, pos2, start_time;
            int test_for_ext_stream_audio, is_dvr_ms_audio=0;

            if (s->nb_streams == ASF_MAX_STREAMS) {
                av_log(s, AV_LOG_ERROR, "too many streams\n");
                return AVERROR(EINVAL);
            }
            pos1 = url_ftell(pb);
            st = av_new_stream(s, 0);
            if (!st)
                return AVERROR(ENOMEM);
            av_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */
            asf_st = av_mallocz(sizeof(ASFStream));
            if (!asf_st)
                return AVERROR(ENOMEM);
            st->priv_data = asf_st;
            start_time = asf->hdr.preroll;
            asf_st->stream_language_index = 128; // invalid stream index means no language info
            if(!(asf->hdr.flags & 0x01)) { // if we aren't streaming...
                /* play_time is in 100ns units; convert to ms and subtract preroll */
                st->duration = asf->hdr.play_time /
                    (10000000 / 1000) - start_time;
            }
            ff_get_guid(pb, &g);
            test_for_ext_stream_audio = 0;
            /* the stream-type GUID decides the media type */
            if (!ff_guidcmp(&g, &ff_asf_audio_stream)) {
                type = AVMEDIA_TYPE_AUDIO;
            } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) {
                type = AVMEDIA_TYPE_VIDEO;
            } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) {
                type = AVMEDIA_TYPE_VIDEO;
                st->codec->codec_id = CODEC_ID_MJPEG;
            } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) {
                type = AVMEDIA_TYPE_DATA;
            } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) {
                /* dvr-ms embedded stream: real type resolved below */
                test_for_ext_stream_audio = 1;
                type = AVMEDIA_TYPE_UNKNOWN;
            } else {
                return -1;
            }
            ff_get_guid(pb, &g); /* error-correction type GUID */
            total_size = get_le64(pb);
            type_specific_size = get_le32(pb);
            get_le32(pb); /* error-correction data length */
            st->id = get_le16(pb) & 0x7f; /* stream id */
            // mapping of asf ID to AV stream ID;
            asf->asfid2avid[st->id] = s->nb_streams - 1;
            get_le32(pb); /* reserved */
            if (test_for_ext_stream_audio) {
                ff_get_guid(pb, &g);
                if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) {
                    type = AVMEDIA_TYPE_AUDIO;
                    is_dvr_ms_audio=1;
                    /* skip the embedded-stream wrapper fields */
                    ff_get_guid(pb, &g);
                    get_le32(pb);
                    get_le32(pb);
                    get_le32(pb);
                    ff_get_guid(pb, &g);
                    get_le32(pb);
                }
            }
            st->codec->codec_type = type;
            if (type == AVMEDIA_TYPE_AUDIO) {
                ff_get_wav_header(pb, st->codec, type_specific_size);
                if (is_dvr_ms_audio) {
                    // codec_id and codec_tag are unreliable in dvr_ms
                    // files. Set them later by probing stream.
                    st->codec->codec_id = CODEC_ID_PROBE;
                    st->codec->codec_tag = 0;
                }
                if (st->codec->codec_id == CODEC_ID_AAC) {
                    st->need_parsing = AVSTREAM_PARSE_NONE;
                } else {
                    st->need_parsing = AVSTREAM_PARSE_FULL;
                }
                /* We have to init the frame size at some point .... */
                pos2 = url_ftell(pb);
                if (gsize >= (pos2 + 8 - pos1 + 24)) {
                    /* descrambling (interleaving) parameters follow */
                    asf_st->ds_span = get_byte(pb);
                    asf_st->ds_packet_size = get_le16(pb);
                    asf_st->ds_chunk_size = get_le16(pb);
                    get_le16(pb); //ds_data_size
                    get_byte(pb); //ds_silence_data
                }
                //printf("Descrambling: ps:%d cs:%d ds:%d s:%d sd:%d\n",
                //       asf_st->ds_packet_size, asf_st->ds_chunk_size,
                //       asf_st->ds_data_size, asf_st->ds_span, asf_st->ds_silence_data);
                if (asf_st->ds_span > 1) {
                    /* reject inconsistent interleave geometry */
                    if (!asf_st->ds_chunk_size
                        || (asf_st->ds_packet_size/asf_st->ds_chunk_size <= 1)
                        || asf_st->ds_packet_size % asf_st->ds_chunk_size)
                        asf_st->ds_span = 0; // disable descrambling
                }
                switch (st->codec->codec_id) {
                case CODEC_ID_MP3:
                    st->codec->frame_size = MPA_FRAME_SIZE;
                    break;
                case CODEC_ID_PCM_S16LE:
                case CODEC_ID_PCM_S16BE:
                case CODEC_ID_PCM_U16LE:
                case CODEC_ID_PCM_U16BE:
                case CODEC_ID_PCM_S8:
                case CODEC_ID_PCM_U8:
                case CODEC_ID_PCM_ALAW:
                case CODEC_ID_PCM_MULAW:
                    st->codec->frame_size = 1;
                    break;
                default:
                    /* This is probably wrong, but it prevents a crash later */
                    st->codec->frame_size = 1;
                    break;
                }
            } else if (type == AVMEDIA_TYPE_VIDEO &&
                       gsize - (url_ftell(pb) - pos1 + 24) >= 51) {
                /* BITMAPINFOHEADER-style type-specific data */
                get_le32(pb); /* encoded image width */
                get_le32(pb); /* encoded image height */
                get_byte(pb); /* reserved flags */
                get_le16(pb); /* size */
                sizeX= get_le32(pb); /* size */
                st->codec->width = get_le32(pb);
                st->codec->height = get_le32(pb);
                /* not available for asf */
                get_le16(pb); /* panes */
                st->codec->bits_per_coded_sample = get_le16(pb); /* depth */
                tag1 = get_le32(pb);
                url_fskip(pb, 20); /* remaining BITMAPINFOHEADER fields */
                //        av_log(s, AV_LOG_DEBUG, "size:%d tsize:%d sizeX:%d\n", size, total_size, sizeX);
                if (sizeX > 40) {
                    /* bytes past the 40-byte BITMAPINFOHEADER are extradata */
                    st->codec->extradata_size = sizeX - 40;
                    st->codec->extradata = av_mallocz(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                    get_buffer(pb, st->codec->extradata, st->codec->extradata_size);
                }
                /* Extract palette from extradata if bpp <= 8 */
                /* This code assumes that extradata contains only palette */
                /* This is true for all paletted codecs implemented in ffmpeg */
                if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) {
                    st->codec->palctrl = av_mallocz(sizeof(AVPaletteControl));
#if HAVE_BIGENDIAN
                    for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)/4; i++)
                        st->codec->palctrl->palette[i] = av_bswap32(((uint32_t*)st->codec->extradata)[i]);
#else
                    memcpy(st->codec->palctrl->palette, st->codec->extradata,
                           FFMIN(st->codec->extradata_size, AVPALETTE_SIZE));
#endif
                    st->codec->palctrl->palette_changed = 1;
                }
                st->codec->codec_tag = tag1;
                st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1);
                if(tag1 == MKTAG('D', 'V', 'R', ' ')){
                    st->need_parsing = AVSTREAM_PARSE_FULL;
                    // issue658 containse wrong w/h and MS even puts a fake seq header with wrong w/h in extradata while a correct one is in te stream. maximum lameness
                    st->codec->width =
                    st->codec->height = 0;
                    av_freep(&st->codec->extradata);
                    st->codec->extradata_size=0;
                }
                if(st->codec->codec_id == CODEC_ID_H264)
                    st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
            }
            /* skip whatever remains of the stream header object */
            pos2 = url_ftell(pb);
            url_fskip(pb, gsize - (pos2 - pos1 + 24));
        } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) {
            asf_read_content_desc(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) {
            asf_read_language_list(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) {
            asf_read_ext_content_desc(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) {
            asf_read_metadata(s, gsize);
        } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
            asf_read_ext_stream_properties(s, gsize);
            // there could be a optional stream properties object to follow
            // if so the next iteration will pick it up
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
            /* Header Extension Object: descend into it rather than skipping */
            int v1, v2;
            ff_get_guid(pb, &g);
            v1 = get_le32(pb);
            v2 = get_le16(pb);
            continue;
        } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) {
            asf_read_marker(s, gsize);
        } else if (url_feof(pb)) {
            return -1;
        } else {
            /* unknown object: warn about DRM-related GUIDs, otherwise ignore */
            if (!s->keylen) {
                if (!ff_guidcmp(&g, &ff_asf_content_encryption)) {
                    av_log(s, AV_LOG_WARNING, "DRM protected stream detected, decoding will likely fail!\n");
                } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) {
                    av_log(s, AV_LOG_WARNING, "Ext DRM protected stream detected, decoding will likely fail!\n");
                } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) {
                    av_log(s, AV_LOG_WARNING, "Digital signature detected, decoding will likely fail!\n");
                }
            }
        }
        /* re-anchor at the declared end of the object regardless of how much
         * the handler actually consumed */
        if(url_ftell(pb) != gpos + gsize)
            av_log(s, AV_LOG_DEBUG, "gpos mismatch our pos=%"PRIu64", end=%"PRIu64"\n", url_ftell(pb)-gpos, gsize);
        url_fseek(pb, gpos + gsize, SEEK_SET);
    }
    /* consume the data object header remainder: GUID + size + 2 reserved bytes */
    ff_get_guid(pb, &g);
    get_le64(pb);
    get_byte(pb);
    get_byte(pb);
    if (url_feof(pb))
        return -1;
    asf->data_offset = url_ftell(pb);
    asf->packet_size_left = 0;

    /* propagate per-stream info gathered from header objects to the AVStreams */
    for(i=0; i<128; i++){
        int stream_num= asf->asfid2avid[i];
        if(stream_num>=0){
            AVStream *st = s->streams[stream_num];
            if (!st->codec->bit_rate)
                st->codec->bit_rate = asf->stream_bitrates[i];
            if (asf->dar[i].num > 0 && asf->dar[i].den > 0){
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          asf->dar[i].num, asf->dar[i].den, INT_MAX);
            } else if ((asf->dar[0].num > 0) && (asf->dar[0].den > 0) && (st->codec->codec_type==AVMEDIA_TYPE_VIDEO)) // Use ASF container value if the stream doesn't AR set.
                av_reduce(&st->sample_aspect_ratio.num,
                          &st->sample_aspect_ratio.den,
                          asf->dar[0].num, asf->dar[0].den, INT_MAX);
            //av_log(s, AV_LOG_INFO, "i=%d, st->codec->codec_type:%d, dar %d:%d sar=%d:%d\n", i, st->codec->codec_type, dar[i].num, dar[i].den, st->sample_aspect_ratio.num, st->sample_aspect_ratio.den);

            // copy and convert language codes to the frontend
            if (asf->streams[i].stream_language_index < 128) {
                const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index];
                if (rfc1766 && strlen(rfc1766) > 1) {
                    const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
                    const char *iso6392 = av_convert_lang_to(primary_tag, AV_LANG_ISO639_2_BIBL);
                    if (iso6392)
                        av_metadata_set2(&st->metadata, "language", iso6392, 0);
                }
            }
        }
    }
    ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv);
    return 0;
}
/* Read a value whose width is encoded in a 2-bit field of the packet flags:
 * 3 -> 32-bit, 2 -> 16-bit, 1 -> 8-bit, 0 -> use defval (nothing read).
 * Relies on locals `pb` and `rsize` (running byte count) in the caller. */
#define DO_2BITS(bits, var, defval) \
    switch (bits & 3) \
    { \
    case 3: var = get_le32(pb); rsize += 4; break; \
    case 2: var = get_le16(pb); rsize += 2; break; \
    case 1: var = get_byte(pb); rsize++; break; \
    default: var = defval; break; \
    }
/**
 * Load a single ASF packet into the demuxer.
 * Resynchronizes on the 0x82 0x00 0x00 packet marker, then parses the
 * error-correction/payload flags, packet length, padding and timestamp,
 * leaving per-packet state in the ASFContext.
 * @param s demux context
 * @param pb context to read data from
 * @return 0 on success, <0 on error
 */
static int ff_asf_get_packet(AVFormatContext *s, ByteIOContext *pb)
{
    ASFContext *asf = s->priv_data;
    uint32_t packet_length, padsize;
    int rsize = 8;
    int c, d, e, off;

    // if we do not know packet size, allow skipping up to 32 kB
    off= 32768;
    if (s->packet_size > 0)
        off= (url_ftell(pb) - s->data_offset) % s->packet_size + 3;

    /* scan for the 0x82 0x00 0x00 sync pattern within `off` bytes */
    c=d=e=-1;
    while(off-- > 0){
        c=d; d=e;
        e= get_byte(pb);
        if(c == 0x82 && !d && !e)
            break;
    }

    if (c != 0x82) {
        /**
         * This code allows handling of -EAGAIN at packet boundaries (i.e.
         * if the packet sync code above triggers -EAGAIN). This does not
         * imply complete -EAGAIN handling support at random positions in
         * the stream.
         */
        if (url_ferror(pb) == AVERROR(EAGAIN))
            return AVERROR(EAGAIN);
        if (!url_feof(pb))
            av_log(s, AV_LOG_ERROR, "ff asf bad header %x  at:%"PRId64"\n", c, url_ftell(pb));
    }
    if ((c & 0x8f) == 0x82) {
        /* error-correction data present: must be followed by the two flag bytes */
        if (d || e) {
            if (!url_feof(pb))
                av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n");
            return -1;
        }
        c= get_byte(pb);
        d= get_byte(pb);
        rsize+=3;
    }else{
        /* no EC data: the byte consumed as `e` is the property byte; back up */
        url_fseek(pb, -1, SEEK_CUR); //FIXME
    }

    asf->packet_flags    = c;
    asf->packet_property = d;

    /* variable-width fields selected by 2-bit codes inside the flags */
    DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size);
    DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored
    DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length

    //the following checks prevent overflows and infinite loops
    if(!packet_length || packet_length >= (1U<<29)){
        av_log(s, AV_LOG_ERROR, "invalid packet_length %d at:%"PRId64"\n", packet_length, url_ftell(pb));
        return -1;
    }
    if(padsize >= packet_length){
        av_log(s, AV_LOG_ERROR, "invalid padsize %d at:%"PRId64"\n", padsize, url_ftell(pb));
        return -1;
    }

    asf->packet_timestamp = get_le32(pb);
    get_le16(pb); /* duration */
    // rsize has at least 11 bytes which have to be present

    if (asf->packet_flags & 0x01) {
        asf->packet_segsizetype = get_byte(pb); rsize++;
        asf->packet_segments = asf->packet_segsizetype & 0x3f;
    } else {
        asf->packet_segments = 1;
        asf->packet_segsizetype = 0x80;
    }
    asf->packet_size_left = packet_length - padsize - rsize;
    if (packet_length < asf->hdr.min_pktsize)
        padsize += asf->hdr.min_pktsize - packet_length; /* pad short packets up to min size */
    asf->packet_padsize = padsize;
    av_dlog(s, "packet: size=%d padsize=%d  left=%d\n", s->packet_size, asf->packet_padsize, asf->packet_size_left);
    return 0;
}
/**
 * Parse one payload (frame fragment) header inside the current packet:
 * stream number, key-frame flag, fragment offset, replicated data and
 * fragment size. Updates packet_size_left by the header bytes consumed.
 * @return <0 if error
 */
static int asf_read_frame_header(AVFormatContext *s, ByteIOContext *pb){
    ASFContext *asf = s->priv_data;
    int rsize = 1;
    int num = get_byte(pb); /* top bit: key frame; low 7 bits: stream number */
    int64_t ts0, ts1;

    asf->packet_segments--;
    asf->packet_key_frame = num >> 7;
    asf->stream_index = asf->asfid2avid[num & 0x7f];
    // sequence should be ignored!
    DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0);
    DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0);
    DO_2BITS(asf->packet_property, asf->packet_replic_size, 0);
    //printf("key:%d stream:%d seq:%d offset:%d replic_size:%d\n", asf->packet_key_frame, asf->stream_index, asf->packet_seq, //asf->packet_frag_offset, asf->packet_replic_size);
    if (asf->packet_replic_size >= 8) {
        /* replicated data starts with object size + object timestamp */
        asf->packet_obj_size = get_le32(pb);
        if(asf->packet_obj_size >= (1<<24) || asf->packet_obj_size <= 0){
            av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n");
            return -1;
        }
        asf->packet_frag_timestamp = get_le32(pb); // timestamp
        if(asf->packet_replic_size >= 8+38+4){
            /* extended replicated data (dvr-ms) carries 64-bit timestamps */
//            for(i=0; i<asf->packet_replic_size-8; i++)
//                av_log(s, AV_LOG_DEBUG, "%02X ",get_byte(pb));
//            av_log(s, AV_LOG_DEBUG, "\n");
            url_fskip(pb, 10);
            ts0= get_le64(pb);
            ts1= get_le64(pb);
            url_fskip(pb, 12);
            get_le32(pb);
            url_fskip(pb, asf->packet_replic_size - 8 - 38 - 4);
            if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000; /* 100ns -> ms */
            else         asf->packet_frag_timestamp= AV_NOPTS_VALUE;
        }else
            url_fskip(pb, asf->packet_replic_size - 8);
        rsize += asf->packet_replic_size; // FIXME - check validity
    } else if (asf->packet_replic_size==1){
        // multipacket - frag_offset is beginning timestamp
        asf->packet_time_start = asf->packet_frag_offset;
        asf->packet_frag_offset = 0;
        asf->packet_frag_timestamp = asf->packet_timestamp;
        asf->packet_time_delta = get_byte(pb);
        rsize++;
    }else if(asf->packet_replic_size!=0){
        av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", asf->packet_replic_size);
        return -1;
    }
    if (asf->packet_flags & 0x01) {
        /* multiple payloads: each carries its own fragment size */
        DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal
        if(asf->packet_frag_size > asf->packet_size_left - rsize){
            av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid\n");
            return -1;
        }
        //printf("Fragsize %d\n", asf->packet_frag_size);
    } else {
        /* single payload: it fills the rest of the packet */
        asf->packet_frag_size = asf->packet_size_left - rsize;
        //printf("Using rest  %d %d %d\n", asf->packet_frag_size, asf->packet_size_left, rsize);
    }
    if (asf->packet_replic_size == 1) {
        asf->packet_multi_size = asf->packet_frag_size;
        if (asf->packet_multi_size > asf->packet_size_left)
            return -1;
    }
    asf->packet_size_left -= rsize;
    //printf("___objsize____  %d   %d   rs:%d\n", asf->packet_obj_size, asf->packet_frag_offset, rsize);
    return 0;
}
  775. /**
  776. * Parse data from individual ASF packets (which were previously loaded
  777. * with asf_get_packet()).
  778. * @param s demux context
  779. * @param pb context to read data from
  780. * @param pkt pointer to store packet data into
  781. * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF
  782. * packets need to be loaded (through asf_get_packet())
  783. */
  784. static int ff_asf_parse_packet(AVFormatContext *s, ByteIOContext *pb, AVPacket *pkt)
  785. {
  786. ASFContext *asf = s->priv_data;
  787. ASFStream *asf_st = 0;
  788. for (;;) {
  789. int ret;
  790. if(url_feof(pb))
  791. return AVERROR_EOF;
  792. if (asf->packet_size_left < FRAME_HEADER_SIZE
  793. || asf->packet_segments < 1) {
  794. //asf->packet_size_left <= asf->packet_padsize) {
  795. int ret = asf->packet_size_left + asf->packet_padsize;
  796. //printf("PacketLeftSize:%d Pad:%d Pos:%"PRId64"\n", asf->packet_size_left, asf->packet_padsize, url_ftell(pb));
  797. assert(ret>=0);
  798. /* fail safe */
  799. url_fskip(pb, ret);
  800. asf->packet_pos= url_ftell(pb);
  801. if (asf->data_object_size != (uint64_t)-1 &&
  802. (asf->packet_pos - asf->data_object_offset >= asf->data_object_size))
  803. return AVERROR_EOF; /* Do not exceed the size of the data object */
  804. return 1;
  805. }
  806. if (asf->packet_time_start == 0) {
  807. if(asf_read_frame_header(s, pb) < 0){
  808. asf->packet_segments= 0;
  809. continue;
  810. }
  811. if (asf->stream_index < 0
  812. || s->streams[asf->stream_index]->discard >= AVDISCARD_ALL
  813. || (!asf->packet_key_frame && s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)
  814. ) {
  815. asf->packet_time_start = 0;
  816. /* unhandled packet (should not happen) */
  817. url_fskip(pb, asf->packet_frag_size);
  818. asf->packet_size_left -= asf->packet_frag_size;
  819. if(asf->stream_index < 0)
  820. av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
  821. continue;
  822. }
  823. asf->asf_st = s->streams[asf->stream_index]->priv_data;
  824. }
  825. asf_st = asf->asf_st;
  826. if (asf->packet_replic_size == 1) {
  827. // frag_offset is here used as the beginning timestamp
  828. asf->packet_frag_timestamp = asf->packet_time_start;
  829. asf->packet_time_start += asf->packet_time_delta;
  830. asf->packet_obj_size = asf->packet_frag_size = get_byte(pb);
  831. asf->packet_size_left--;
  832. asf->packet_multi_size--;
  833. if (asf->packet_multi_size < asf->packet_obj_size)
  834. {
  835. asf->packet_time_start = 0;
  836. url_fskip(pb, asf->packet_multi_size);
  837. asf->packet_size_left -= asf->packet_multi_size;
  838. continue;
  839. }
  840. asf->packet_multi_size -= asf->packet_obj_size;
  841. //printf("COMPRESS size %d %d %d ms:%d\n", asf->packet_obj_size, asf->packet_frag_timestamp, asf->packet_size_left, asf->packet_multi_size);
  842. }
  843. if( /*asf->packet_frag_size == asf->packet_obj_size*/
  844. asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size
  845. && asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size){
  846. av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n",
  847. asf_st->frag_offset, asf->packet_frag_size,
  848. asf->packet_obj_size, asf_st->pkt.size);
  849. asf->packet_obj_size= asf_st->pkt.size;
  850. }
  851. if ( asf_st->pkt.size != asf->packet_obj_size
  852. || asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { //FIXME is this condition sufficient?
  853. if(asf_st->pkt.data){
  854. av_log(s, AV_LOG_INFO, "freeing incomplete packet size %d, new %d\n", asf_st->pkt.size, asf->packet_obj_size);
  855. asf_st->frag_offset = 0;
  856. av_free_packet(&asf_st->pkt);
  857. }
  858. /* new packet */
  859. av_new_packet(&asf_st->pkt, asf->packet_obj_size);
  860. asf_st->seq = asf->packet_seq;
  861. asf_st->pkt.dts = asf->packet_frag_timestamp;
  862. asf_st->pkt.stream_index = asf->stream_index;
  863. asf_st->pkt.pos =
  864. asf_st->packet_pos= asf->packet_pos;
  865. //printf("new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n",
  866. //asf->stream_index, asf->packet_key_frame, asf_st->pkt.flags & AV_PKT_FLAG_KEY,
  867. //s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO, asf->packet_obj_size);
  868. if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
  869. asf->packet_key_frame = 1;
  870. if (asf->packet_key_frame)
  871. asf_st->pkt.flags |= AV_PKT_FLAG_KEY;
  872. }
  873. /* read data */
  874. //printf("READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n",
  875. // s->packet_size, asf_st->pkt.size, asf->packet_frag_offset,
  876. // asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data);
  877. asf->packet_size_left -= asf->packet_frag_size;
  878. if (asf->packet_size_left < 0)
  879. continue;
  880. if( asf->packet_frag_offset >= asf_st->pkt.size
  881. || asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset){
  882. av_log(s, AV_LOG_ERROR, "packet fragment position invalid %u,%u not in %u\n",
  883. asf->packet_frag_offset, asf->packet_frag_size, asf_st->pkt.size);
  884. continue;
  885. }
  886. ret = get_buffer(pb, asf_st->pkt.data + asf->packet_frag_offset,
  887. asf->packet_frag_size);
  888. if (ret != asf->packet_frag_size) {
  889. if (ret < 0 || asf->packet_frag_offset + ret == 0)
  890. return ret < 0 ? ret : AVERROR_EOF;
  891. if (asf_st->ds_span > 1) {
  892. // scrambling, we can either drop it completely or fill the remainder
  893. // TODO: should we fill the whole packet instead of just the current
  894. // fragment?
  895. memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0,
  896. asf->packet_frag_size - ret);
  897. ret = asf->packet_frag_size;
  898. } else
  899. // no scrambling, so we can return partial packets
  900. av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret);
  901. }
  902. if (s->key && s->keylen == 20)
  903. ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset,
  904. ret);
  905. asf_st->frag_offset += ret;
  906. /* test if whole packet is read */
  907. if (asf_st->frag_offset == asf_st->pkt.size) {
  908. //workaround for macroshit radio DVR-MS files
  909. if( s->streams[asf->stream_index]->codec->codec_id == CODEC_ID_MPEG2VIDEO
  910. && asf_st->pkt.size > 100){
  911. int i;
  912. for(i=0; i<asf_st->pkt.size && !asf_st->pkt.data[i]; i++);
  913. if(i == asf_st->pkt.size){
  914. av_log(s, AV_LOG_DEBUG, "discarding ms fart\n");
  915. asf_st->frag_offset = 0;
  916. av_free_packet(&asf_st->pkt);
  917. continue;
  918. }
  919. }
  920. /* return packet */
  921. if (asf_st->ds_span > 1) {
  922. if(asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span){
  923. av_log(s, AV_LOG_ERROR, "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", asf_st->pkt.size, asf_st->ds_packet_size, asf_st->ds_span);
  924. }else{
  925. /* packet descrambling */
  926. uint8_t *newdata = av_malloc(asf_st->pkt.size + FF_INPUT_BUFFER_PADDING_SIZE);
  927. if (newdata) {
  928. int offset = 0;
  929. memset(newdata + asf_st->pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
  930. while (offset < asf_st->pkt.size) {
  931. int off = offset / asf_st->ds_chunk_size;
  932. int row = off / asf_st->ds_span;
  933. int col = off % asf_st->ds_span;
  934. int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size;
  935. //printf("off:%d row:%d col:%d idx:%d\n", off, row, col, idx);
  936. assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size);
  937. assert(idx+1 <= asf_st->pkt.size / asf_st->ds_chunk_size);
  938. memcpy(newdata + offset,
  939. asf_st->pkt.data + idx * asf_st->ds_chunk_size,
  940. asf_st->ds_chunk_size);
  941. offset += asf_st->ds_chunk_size;
  942. }
  943. av_free(asf_st->pkt.data);
  944. asf_st->pkt.data = newdata;
  945. }
  946. }
  947. }
  948. asf_st->frag_offset = 0;
  949. *pkt= asf_st->pkt;
  950. //printf("packet %d %d\n", asf_st->pkt.size, asf->packet_frag_size);
  951. asf_st->pkt.size = 0;
  952. asf_st->pkt.data = 0;
  953. break; // packet completed
  954. }
  955. }
  956. return 0;
  957. }
  958. static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
  959. {
  960. ASFContext *asf = s->priv_data;
  961. for (;;) {
  962. int ret;
  963. /* parse cached packets, if any */
  964. if ((ret = ff_asf_parse_packet(s, s->pb, pkt)) <= 0)
  965. return ret;
  966. if ((ret = ff_asf_get_packet(s, s->pb)) < 0)
  967. assert(asf->packet_size_left < FRAME_HEADER_SIZE || asf->packet_segments < 1);
  968. asf->packet_time_start = 0;
  969. }
  970. return 0;
  971. }
  972. // Added to support seeking after packets have been read
  973. // If information is not reset, read_packet fails due to
  974. // leftover information from previous reads
  975. static void asf_reset_header(AVFormatContext *s)
  976. {
  977. ASFContext *asf = s->priv_data;
  978. ASFStream *asf_st;
  979. int i;
  980. asf->packet_size_left = 0;
  981. asf->packet_segments = 0;
  982. asf->packet_flags = 0;
  983. asf->packet_property = 0;
  984. asf->packet_timestamp = 0;
  985. asf->packet_segsizetype = 0;
  986. asf->packet_segments = 0;
  987. asf->packet_seq = 0;
  988. asf->packet_replic_size = 0;
  989. asf->packet_key_frame = 0;
  990. asf->packet_padsize = 0;
  991. asf->packet_frag_offset = 0;
  992. asf->packet_frag_size = 0;
  993. asf->packet_frag_timestamp = 0;
  994. asf->packet_multi_size = 0;
  995. asf->packet_obj_size = 0;
  996. asf->packet_time_delta = 0;
  997. asf->packet_time_start = 0;
  998. for(i=0; i<s->nb_streams; i++){
  999. asf_st= s->streams[i]->priv_data;
  1000. av_free_packet(&asf_st->pkt);
  1001. asf_st->frag_offset=0;
  1002. asf_st->seq=0;
  1003. }
  1004. asf->asf_st= NULL;
  1005. }
  1006. static int asf_read_close(AVFormatContext *s)
  1007. {
  1008. int i;
  1009. asf_reset_header(s);
  1010. for(i=0;i<s->nb_streams;i++) {
  1011. AVStream *st = s->streams[i];
  1012. av_free(st->codec->palctrl);
  1013. }
  1014. return 0;
  1015. }
  1016. static int64_t asf_read_pts(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit)
  1017. {
  1018. AVPacket pkt1, *pkt = &pkt1;
  1019. ASFStream *asf_st;
  1020. int64_t pts;
  1021. int64_t pos= *ppos;
  1022. int i;
  1023. int64_t start_pos[ASF_MAX_STREAMS];
  1024. for(i=0; i<s->nb_streams; i++){
  1025. start_pos[i]= pos;
  1026. }
  1027. if (s->packet_size > 0)
  1028. pos= (pos+s->packet_size-1-s->data_offset)/s->packet_size*s->packet_size+ s->data_offset;
  1029. *ppos= pos;
  1030. url_fseek(s->pb, pos, SEEK_SET);
  1031. //printf("asf_read_pts\n");
  1032. asf_reset_header(s);
  1033. for(;;){
  1034. if (av_read_frame(s, pkt) < 0){
  1035. av_log(s, AV_LOG_INFO, "asf_read_pts failed\n");
  1036. return AV_NOPTS_VALUE;
  1037. }
  1038. pts= pkt->pts;
  1039. av_free_packet(pkt);
  1040. if(pkt->flags&AV_PKT_FLAG_KEY){
  1041. i= pkt->stream_index;
  1042. asf_st= s->streams[i]->priv_data;
  1043. // assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0);
  1044. pos= asf_st->packet_pos;
  1045. av_add_index_entry(s->streams[i], pos, pts, pkt->size, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
  1046. start_pos[i]= asf_st->packet_pos + 1;
  1047. if(pkt->stream_index == stream_index)
  1048. break;
  1049. }
  1050. }
  1051. *ppos= pos;
  1052. //printf("found keyframe at %"PRId64" stream %d stamp:%"PRId64"\n", *ppos, stream_index, pts);
  1053. return pts;
  1054. }
/* Locate the ASF "simple index" object that follows the data object and use
 * its entries to populate stream_index's AVIndexEntries with packet
 * positions, enabling fast seeking. The original file position is restored
 * on exit whether or not an index was found. */
static void asf_build_simple_index(AVFormatContext *s, int stream_index)
{
    ff_asf_guid g;
    ASFContext *asf = s->priv_data;
    int64_t current_pos = url_ftell(s->pb);
    int i;

    url_fseek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET);
    ff_get_guid(s->pb, &g);

    /* the data object can be followed by other top-level objects,
       skip them until the simple index object is reached */
    while (ff_guidcmp(&g, &index_guid)) {
        int64_t gsize = get_le64(s->pb);
        /* an object size includes its own 24-byte GUID+size header; anything
         * smaller (or EOF) means there is no usable index */
        if (gsize < 24 || url_feof(s->pb)) {
            url_fseek(s->pb, current_pos, SEEK_SET);
            return;
        }
        url_fseek(s->pb, gsize - 24, SEEK_CUR);
        ff_get_guid(s->pb, &g);
    }

    {
        int64_t itime, last_pos = -1;
        int pct, ict;
        int64_t av_unused gsize = get_le64(s->pb);
        /* simple index object layout (per ASF spec — verify): file ID GUID,
         * entry time interval, maximum packet count, entry count */
        ff_get_guid(s->pb, &g);
        itime = get_le64(s->pb);
        pct = get_le32(s->pb);   /* only logged, not otherwise used */
        ict = get_le32(s->pb);
        av_log(s, AV_LOG_DEBUG, "itime:0x%"PRIx64", pct:%d, ict:%d\n", itime, pct, ict);

        for (i = 0; i < ict; i++) {
            int pktnum = get_le32(s->pb);
            int pktct  = get_le16(s->pb);  /* only logged, not otherwise used */
            int64_t pos = s->data_offset + s->packet_size * (int64_t)pktnum;
            /* NOTE(review): assumes itime is in 100ns units so itime*i/10000
             * yields the entry timestamp in ms — confirm against spec */
            int64_t index_pts = av_rescale(itime, i, 10000);

            /* consecutive entries often point at the same packet; only add
             * an index entry when the position advances */
            if (pos != last_pos) {
                av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d\n", pktnum, pktct);
                av_add_index_entry(s->streams[stream_index], pos, index_pts, s->packet_size, 0, AVINDEX_KEYFRAME);
                last_pos = pos;
            }
        }
        asf->index_read = 1;
    }
    url_fseek(s->pb, current_pos, SEEK_SET);
}
/* Seek to the given timestamp. Tries, in order: the I/O protocol's own
 * read_seek (for streaming protocols such as MMS), the file's simple index
 * object, and finally a generic binary search over packets (which uses
 * asf_read_pts). Returns 0 on success, <0 on failure. */
static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int flags)
{
    ASFContext *asf = s->priv_data;
    AVStream *st = s->streams[stream_index];
    int64_t pos;
    int index;

    /* packet-position arithmetic below needs a known packet size */
    if (s->packet_size <= 0)
        return -1;

    /* Try using the protocol's read_seek if available */
    if (s->pb) {
        int ret = av_url_read_fseek(s->pb, stream_index, pts, flags);
        if (ret >= 0)
            asf_reset_header(s);
        if (ret != AVERROR(ENOSYS))
            return ret;
    }

    if (!asf->index_read)
        asf_build_simple_index(s, stream_index);

    if (!(asf->index_read && st->index_entries)) {
        /* no usable in-file index: fall back to binary search */
        if (av_seek_frame_binary(s, stream_index, pts, flags) < 0)
            return -1;
    } else {
        index = av_index_search_timestamp(st, pts, flags);
        if (index < 0)
            return -1;

        /* find the position */
        pos = st->index_entries[index].pos;

        /* NOTE: various past attempts to refine this position by scanning
         * forward for a better key frame have failed; seek directly to the
         * indexed packet position instead */
        /* do the seek */
        av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos);
        url_fseek(s->pb, pos, SEEK_SET);
    }
    asf_reset_header(s);
    return 0;
}
  1152. AVInputFormat ff_asf_demuxer = {
  1153. "asf",
  1154. NULL_IF_CONFIG_SMALL("ASF format"),
  1155. sizeof(ASFContext),
  1156. asf_probe,
  1157. asf_read_header,
  1158. asf_read_packet,
  1159. asf_read_close,
  1160. asf_read_seek,
  1161. asf_read_pts,
  1162. };