  1. /*
  2. * Microsoft Advanced Streaming Format demuxer
  3. * Copyright (c) 2014 Alexandra Hájková
  4. *
  5. * This file is part of Libav.
  6. *
  7. * Libav is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * Libav is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with Libav; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #include "libavutil/attributes.h"
  22. #include "libavutil/avstring.h"
  23. #include "libavutil/bswap.h"
  24. #include "libavutil/common.h"
  25. #include "libavutil/dict.h"
  26. #include "libavutil/internal.h"
  27. #include "libavutil/mathematics.h"
  28. #include "libavutil/opt.h"
  29. #include "libavutil/time_internal.h"
  30. #include "avformat.h"
  31. #include "avio_internal.h"
  32. #include "avlanguage.h"
  33. #include "id3v2.h"
  34. #include "internal.h"
  35. #include "riff.h"
  36. #include "asf.h"
  37. #include "asfcrypt.h"
  38. #define ASF_BOOL 0x2
  39. #define ASF_WORD 0x5
  40. #define ASF_GUID 0x6
  41. #define ASF_DWORD 0x3
  42. #define ASF_QWORD 0x4
  43. #define ASF_UNICODE 0x0
  44. #define ASF_FLAG_BROADCAST 0x1
  45. #define ASF_BYTE_ARRAY 0x1
  46. #define ASF_TYPE_AUDIO 0x2
  47. #define ASF_TYPE_VIDEO 0x1
  48. #define ASF_STREAM_NUM 0x7F
  49. #define ASF_MAX_STREAMS 128
  50. #define BMP_HEADER_SIZE 40
  51. #define ASF_NUM_OF_PAYLOADS 0x3F
  52. #define ASF_ERROR_CORRECTION_LENGTH_TYPE 0x60
  53. #define ASF_PACKET_ERROR_CORRECTION_DATA_SIZE 0x2
  54. typedef struct GUIDParseTable {
  55. const char *name;
  56. ff_asf_guid guid;
  57. int (*read_object)(AVFormatContext *, const struct GUIDParseTable *);
  58. int is_subobject;
  59. } GUIDParseTable;
  60. typedef struct ASFPacket {
  61. AVPacket avpkt;
  62. int64_t dts;
  63. uint32_t frame_num; // ASF payloads with the same number are parts of the same frame
  64. int flags;
  65. int data_size;
  66. int duration;
  67. int size_left;
  68. uint8_t stream_index;
  69. } ASFPacket;
  70. typedef struct ASFStream {
  71. uint8_t stream_index; // from packet header
  72. int index; // stream index in AVFormatContext, set in asf_read_stream_properties
  73. int type;
  74. int indexed; // added index entries from the Simple Index Object or not
  75. int8_t span; // for deinterleaving
  76. uint16_t virtual_pkt_len;
  77. uint16_t virtual_chunk_len;
  78. int16_t lang_idx;
  79. ASFPacket pkt;
  80. } ASFStream;
  81. typedef struct ASFStreamData{
  82. char langs[32];
  83. AVDictionary *asf_met; // for storing per-stream metadata
  84. AVRational aspect_ratio;
  85. } ASFStreamData;
  86. typedef struct ASFContext {
  87. int data_reached;
  88. int is_simple_index; // 1 if a Simple Index Object is present, 0 otherwise
  89. int is_header;
  90. uint64_t preroll;
  91. uint64_t nb_packets; // ASF packets
  92. uint32_t packet_size;
  93. int64_t send_time;
  94. int duration;
  95. uint32_t b_flags; // flags with broadcast flag
  96. uint32_t prop_flags; // file properties object flags
  97. uint64_t data_size; // data object size
  98. uint64_t unknown_size; // size of the unknown object
  99. int64_t offset; // offset of the current object
  100. int64_t data_offset;
  101. int64_t first_packet_offset; // offset of the first packet inside the Data Object
  102. int64_t unknown_offset; // for top level header objects or subobjects without specified behavior
  103. // ASF file must not contain more than 128 streams according to the specification
  104. ASFStream *asf_st[ASF_MAX_STREAMS];
  105. ASFStreamData asf_sd[ASF_MAX_STREAMS];
  106. int nb_streams;
  107. int stream_index; // from packet header, for the subpayload case
  108. // packet parameters
  109. uint64_t sub_header_offset; // offset of subpayload header
  110. int64_t sub_dts;
  111. uint8_t dts_delta; // for subpayloads
  112. uint32_t packet_size_internal; // packet size stored inside ASFPacket, can be 0
  113. int64_t packet_offset; // offset of the current packet inside Data Object
  114. uint32_t pad_len; // padding after payload
  115. uint32_t rep_data_len;
  116. // packet state
  117. uint64_t sub_left; // subpayloads left or not
  118. unsigned int nb_sub; // number of subpayloads read so far from the current ASF packet
  119. uint16_t mult_sub_len; // total length of subpayloads array inside multiple payload
  120. uint64_t nb_mult_left; // multiple payloads left
  121. int return_subpayload;
  122. enum {
  123. PARSE_PACKET_HEADER,
  124. READ_SINGLE,
  125. READ_MULTI,
  126. READ_MULTI_SUB
  127. } state;
  128. } ASFContext;
  129. static int detect_unknown_subobject(AVFormatContext *s, int64_t offset, int64_t size);
  130. static const GUIDParseTable *find_guid(ff_asf_guid guid);
  131. static int asf_probe(AVProbeData *pd)
  132. {
  133. /* check file header */
  134. if (!ff_guidcmp(pd->buf, &ff_asf_header))
  135. return AVPROBE_SCORE_MAX;
  136. else
  137. return 0;
  138. }
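/* GUIDs are stored in the file with the first three fields little-endian;
 * reorder those bytes so the GUID matches the byte order used in gdef[]. */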
  139. static void swap_guid(ff_asf_guid guid)
  140. {
  141. FFSWAP(unsigned char, guid[0], guid[3]);
  142. FFSWAP(unsigned char, guid[1], guid[2]);
  143. FFSWAP(unsigned char, guid[4], guid[5]);
  144. FFSWAP(unsigned char, guid[6], guid[7]);
  145. }
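/* Seek to the end of the current object (offset + size) unless parsing
 * already left the stream position exactly there. */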
  146. static void align_position(AVIOContext *pb, int64_t offset, uint64_t size)
  147. {
  148. if (avio_tell(pb) != offset + size)
  149. avio_seek(pb, offset + size, SEEK_SET);
  150. }
  151. static int asf_read_unknown(AVFormatContext *s, const GUIDParseTable *g)
  152. {
  153. ASFContext *asf = s->priv_data;
  154. AVIOContext *pb = s->pb;
  155. uint64_t size = avio_rl64(pb);
  156. int ret;
  157. if (size > INT64_MAX)
  158. return AVERROR_INVALIDDATA;
  159. if (asf->is_header)
  160. asf->unknown_size = size;
  161. asf->is_header = 0;
  162. if (!g->is_subobject) {
  163. if (!(ret = strcmp(g->name, "Header Extension")))
  164. avio_skip(pb, 22); // skip reserved fields and Data Size
  165. if ((ret = detect_unknown_subobject(s, asf->unknown_offset,
  166. asf->unknown_size)) < 0)
  167. return ret;
  168. } else {
  169. if (size < 24) {
  170. av_log(s, AV_LOG_ERROR, "Too small size %"PRIu64" (< 24).\n", size);
  171. return AVERROR_INVALIDDATA;
  172. }
  173. avio_skip(pb, size - 24);
  174. }
  175. return 0;
  176. }
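/* Read at most maxlen bytes of a UTF-16LE string and store it in buf as a
 * NUL-terminated UTF-8 string of at most buflen bytes; returns the number
 * of bytes consumed from the input. */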
  177. static int get_asf_string(AVIOContext *pb, int maxlen, char *buf, int buflen)
  178. {
  179. char *q = buf;
  180. int ret = 0;
  181. if (buflen <= 0)
  182. return AVERROR(EINVAL);
  183. while (ret + 1 < maxlen) {
  184. uint8_t tmp;
  185. uint32_t ch;
  186. GET_UTF16(ch, (ret += 2) <= maxlen ? avio_rl16(pb) : 0, break;);
  187. PUT_UTF8(ch, tmp, if (q - buf < buflen - 1) *q++ = tmp;)
  188. }
  189. *q = 0;
  190. return ret;
  191. }
  192. static int asf_read_marker(AVFormatContext *s, const GUIDParseTable *g)
  193. {
  194. ASFContext *asf = s->priv_data;
  195. AVIOContext *pb = s->pb;
  196. uint64_t size = avio_rl64(pb);
  197. int i, nb_markers, ret;
  198. size_t len;
  199. char name[1024];
  200. avio_skip(pb, 8);
  201. avio_skip(pb, 8); // skip reserved GUID
  202. nb_markers = avio_rl32(pb);
  203. avio_skip(pb, 2); // skip reserved field
  204. len = avio_rl16(pb);
  205. for (i = 0; i < len; i++)
  206. avio_skip(pb, 1);
  207. for (i = 0; i < nb_markers; i++) {
  208. int64_t pts;
  209. avio_skip(pb, 8);
  210. pts = avio_rl64(pb);
  211. pts -= asf->preroll * 10000;
  212. avio_skip(pb, 2); // entry length
  213. avio_skip(pb, 4); // send time
  214. avio_skip(pb, 4); // flags
  215. len = avio_rl32(pb);
  216. if ((ret = avio_get_str16le(pb, len, name,
  217. sizeof(name))) < len)
  218. avio_skip(pb, len - ret);
  219. avpriv_new_chapter(s, i, (AVRational) { 1, 10000000 }, pts,
  220. AV_NOPTS_VALUE, name);
  221. }
  222. align_position(pb, asf->offset, size);
  223. return 0;
  224. }
  225. static int asf_read_metadata(AVFormatContext *s, const char *title, uint16_t len,
  226. unsigned char *ch, uint16_t buflen)
  227. {
  228. AVIOContext *pb = s->pb;
  229. avio_get_str16le(pb, len, ch, buflen);
  230. if (ch[0]) {
  231. if (av_dict_set(&s->metadata, title, ch, 0) < 0)
  232. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  233. }
  234. return 0;
  235. }
  236. static int asf_read_value(AVFormatContext *s, const uint8_t *name,
  237. uint16_t val_len, int type, AVDictionary **met)
  238. {
  239. int ret;
  240. uint8_t *value;
  241. uint16_t buflen = 2 * val_len + 1;
  242. AVIOContext *pb = s->pb;
  243. value = av_malloc(buflen);
  244. if (!value)
  245. return AVERROR(ENOMEM);
  246. if (type == ASF_UNICODE) {
  247. // get_asf_string reads UTF-16 and converts it to UTF-8, which needs a larger buffer
  248. if ((ret = get_asf_string(pb, val_len, value, buflen)) < 0)
  249. goto failed;
  250. if (av_dict_set(met, name, value, 0) < 0)
  251. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  252. } else {
  253. char buf[256];
  254. if (val_len > sizeof(buf)) {
  255. ret = AVERROR_INVALIDDATA;
  256. goto failed;
  257. }
  258. if ((ret = avio_read(pb, value, val_len)) < 0)
  259. goto failed;
  260. if (ret < 2 * val_len)
  261. value[ret] = '\0';
  262. else
  263. value[2 * val_len - 1] = '\0';
  264. snprintf(buf, sizeof(buf), "%s", value);
  265. if (av_dict_set(met, name, buf, 0) < 0)
  266. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  267. }
  268. av_freep(&value);
  269. return 0;
  270. failed:
  271. av_freep(&value);
  272. return ret;
  273. }
  274. static int asf_read_generic_value(AVIOContext *pb, int type, uint64_t *value)
  275. {
  276. switch (type) {
  277. case ASF_BOOL:
  278. *value = avio_rl16(pb);
  279. break;
  280. case ASF_DWORD:
  281. *value = avio_rl32(pb);
  282. break;
  283. case ASF_QWORD:
  284. *value = avio_rl64(pb);
  285. break;
  286. case ASF_WORD:
  287. *value = avio_rl16(pb);
  288. break;
  289. default:
  290. return AVERROR_INVALIDDATA;
  291. }
  292. return 0;
  293. }
  294. static int asf_set_metadata(AVFormatContext *s, const uint8_t *name,
  295. int type, AVDictionary **met)
  296. {
  297. AVIOContext *pb = s->pb;
  298. uint64_t value;
  299. char buf[32];
  300. int ret;
  301. ret = asf_read_generic_value(pb, type, &value);
  302. if (ret < 0)
  303. return ret;
  304. snprintf(buf, sizeof(buf), "%"PRIu64, value);
  305. if (av_dict_set(met, name, buf, 0) < 0)
  306. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  307. return 0;
  308. }
  309. /* MSDN claims that this should be "compatible with the ID3 frame, APIC",
  310. * but in reality this is only loosely similar */
  311. static int asf_read_picture(AVFormatContext *s, int len)
  312. {
  313. ASFContext *asf = s->priv_data;
  314. AVPacket pkt = { 0 };
  315. const CodecMime *mime = ff_id3v2_mime_tags;
  316. enum AVCodecID id = AV_CODEC_ID_NONE;
  317. char mimetype[64];
  318. uint8_t *desc = NULL;
  319. AVStream *st = NULL;
  320. int ret, type, picsize, desc_len;
  321. ASFStream *asf_st;
  322. /* type + picsize + mime + desc */
  323. if (len < 1 + 4 + 2 + 2) {
  324. av_log(s, AV_LOG_ERROR, "Invalid attached picture size: %d.\n", len);
  325. return AVERROR_INVALIDDATA;
  326. }
  327. /* picture type */
  328. type = avio_r8(s->pb);
  329. len--;
  330. if (type >= FF_ARRAY_ELEMS(ff_id3v2_picture_types) || type < 0) {
  331. av_log(s, AV_LOG_WARNING, "Unknown attached picture type: %d.\n", type);
  332. type = 0;
  333. }
  334. /* picture data size */
  335. picsize = avio_rl32(s->pb);
  336. len -= 4;
  337. /* picture MIME type */
  338. len -= avio_get_str16le(s->pb, len, mimetype, sizeof(mimetype));
  339. while (mime->id != AV_CODEC_ID_NONE) {
  340. if (!strncmp(mime->str, mimetype, sizeof(mimetype))) {
  341. id = mime->id;
  342. break;
  343. }
  344. mime++;
  345. }
  346. if (id == AV_CODEC_ID_NONE) {
  347. av_log(s, AV_LOG_ERROR, "Unknown attached picture mimetype: %s.\n",
  348. mimetype);
  349. return 0;
  350. }
  351. if (picsize >= len) {
  352. av_log(s, AV_LOG_ERROR, "Invalid attached picture data size: %d >= %d.\n",
  353. picsize, len);
  354. return AVERROR_INVALIDDATA;
  355. }
  356. /* picture description */
  357. desc_len = (len - picsize) * 2 + 1;
  358. desc = av_malloc(desc_len);
  359. if (!desc)
  360. return AVERROR(ENOMEM);
  361. len -= avio_get_str16le(s->pb, len - picsize, desc, desc_len);
  362. ret = av_get_packet(s->pb, &pkt, picsize);
  363. if (ret < 0)
  364. goto fail;
  365. st = avformat_new_stream(s, NULL);
  366. if (!st) {
  367. ret = AVERROR(ENOMEM);
  368. goto fail;
  369. }
  370. asf->asf_st[asf->nb_streams] = av_mallocz(sizeof(*asf_st));
  371. asf_st = asf->asf_st[asf->nb_streams];
  372. if (!asf_st) {
  373. ret = AVERROR(ENOMEM);
  374. goto fail;
  375. }
  376. st->disposition |= AV_DISPOSITION_ATTACHED_PIC;
  377. st->codecpar->codec_type = asf_st->type = AVMEDIA_TYPE_VIDEO;
  378. st->codecpar->codec_id = id;
  379. st->attached_pic = pkt;
  380. st->attached_pic.stream_index = asf_st->index = st->index;
  381. st->attached_pic.flags |= AV_PKT_FLAG_KEY;
  382. asf->nb_streams++;
  383. if (*desc) {
  384. if (av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL) < 0)
  385. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  386. } else
  387. av_freep(&desc);
  388. if (av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0) < 0)
  389. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  390. return 0;
  391. fail:
  392. av_freep(&desc);
  393. av_packet_unref(&pkt);
  394. return ret;
  395. }
  396. static void get_id3_tag(AVFormatContext *s)
  397. {
  398. ID3v2ExtraMeta *id3v2_extra_meta = NULL;
  399. ff_id3v2_read(s, ID3v2_DEFAULT_MAGIC, &id3v2_extra_meta);
  400. if (id3v2_extra_meta)
  401. ff_id3v2_parse_apic(s, &id3v2_extra_meta);
  402. ff_id3v2_free_extra_meta(&id3v2_extra_meta);
  403. }
  404. static int process_metadata(AVFormatContext *s, const uint8_t *name, uint16_t name_len,
  405. uint16_t val_len, uint16_t type, AVDictionary **met)
  406. {
  407. int ret;
  408. ff_asf_guid guid;
  409. if (val_len) {
  410. switch (type) {
  411. case ASF_UNICODE:
  412. asf_read_value(s, name, val_len, type, met);
  413. break;
  414. case ASF_BYTE_ARRAY:
  415. if (!strcmp(name, "WM/Picture")) // handle cover art
  416. asf_read_picture(s, val_len);
  417. else if (!strcmp(name, "ID3")) // handle ID3 tag
  418. get_id3_tag(s);
  419. else
  420. asf_read_value(s, name, val_len, type, met);
  421. break;
  422. case ASF_GUID:
  423. ff_get_guid(s->pb, &guid);
  424. break;
  425. default:
  426. if ((ret = asf_set_metadata(s, name, type, met)) < 0)
  427. return ret;
  428. break;
  429. }
  430. }
  431. return 0;
  432. }
  433. static int asf_read_ext_content(AVFormatContext *s, const GUIDParseTable *g)
  434. {
  435. ASFContext *asf = s->priv_data;
  436. AVIOContext *pb = s->pb;
  437. uint64_t size = avio_rl64(pb);
  438. uint16_t nb_desc = avio_rl16(pb);
  439. int i, ret;
  440. for (i = 0; i < nb_desc; i++) {
  441. uint16_t name_len, type, val_len;
  442. uint8_t *name = NULL;
  443. name_len = avio_rl16(pb);
  444. if (!name_len)
  445. return AVERROR_INVALIDDATA;
  446. name = av_malloc(name_len);
  447. if (!name)
  448. return AVERROR(ENOMEM);
  449. avio_get_str16le(pb, name_len, name,
  450. name_len);
  451. type = avio_rl16(pb);
  452. // BOOL values are 16 bits long in the Metadata Object
  453. // but 32 bits long in the Extended Content Description Object
  454. if (type == ASF_BOOL)
  455. type = ASF_DWORD;
  456. val_len = avio_rl16(pb);
  457. ret = process_metadata(s, name, name_len, val_len, type, &s->metadata);
  458. av_freep(&name);
  459. if (ret < 0)
  460. return ret;
  461. }
  462. align_position(pb, asf->offset, size);
  463. return 0;
  464. }
  465. static AVStream *find_stream(AVFormatContext *s, uint16_t st_num)
  466. {
  467. AVStream *st = NULL;
  468. ASFContext *asf = s->priv_data;
  469. int i;
  470. for (i = 0; i < asf->nb_streams; i++) {
  471. if (asf->asf_st[i]->stream_index == st_num) {
  472. st = s->streams[asf->asf_st[i]->index];
  473. break;
  474. }
  475. }
  476. return st;
  477. }
  478. static int asf_store_aspect_ratio(AVFormatContext *s, uint8_t st_num, uint8_t *name, int type)
  479. {
  480. ASFContext *asf = s->priv_data;
  481. AVIOContext *pb = s->pb;
  482. uint64_t value = 0;
  483. int ret;
  484. ret = asf_read_generic_value(pb, type, &value);
  485. if (ret < 0)
  486. return ret;
  487. if (st_num < ASF_MAX_STREAMS) {
  488. if (!strcmp(name, "AspectRatioX"))
  489. asf->asf_sd[st_num].aspect_ratio.num = value;
  490. else
  491. asf->asf_sd[st_num].aspect_ratio.den = value;
  492. }
  493. return 0;
  494. }
  495. static int asf_read_metadata_obj(AVFormatContext *s, const GUIDParseTable *g)
  496. {
  497. ASFContext *asf = s->priv_data;
  498. AVIOContext *pb = s->pb;
  499. uint64_t size = avio_rl64(pb);
  500. uint16_t nb_recs = avio_rl16(pb); // number of records in the Description Records list
  501. int i, ret;
  502. for (i = 0; i < nb_recs; i++) {
  503. uint16_t name_len, buflen, type, val_len, st_num;
  504. uint8_t *name = NULL;
  505. avio_skip(pb, 2); // skip reserved field
  506. st_num = avio_rl16(pb);
  507. name_len = avio_rl16(pb);
  508. buflen = 2 * name_len + 1;
  509. if (!name_len)
  510. break;
  511. type = avio_rl16(pb);
  512. val_len = avio_rl32(pb);
  513. name = av_malloc(buflen);
  514. if (!name)
  515. return AVERROR(ENOMEM);
  516. avio_get_str16le(pb, name_len, name,
  517. buflen);
  518. if (!strcmp(name, "AspectRatioX") || !strcmp(name, "AspectRatioY")) {
  519. ret = asf_store_aspect_ratio(s, st_num, name, type);
  520. if (ret < 0) {
  521. av_freep(&name);
  522. break;
  523. }
  524. } else {
  525. if (st_num < ASF_MAX_STREAMS) {
  526. if ((ret = process_metadata(s, name, name_len, val_len, type,
  527. &asf->asf_sd[st_num].asf_met)) < 0) {
  528. av_freep(&name);
  529. break;
  530. }
  531. }
  532. }
  533. av_freep(&name);
  534. }
  535. align_position(pb, asf->offset, size);
  536. return 0;
  537. }
  538. static int asf_read_content_desc(AVFormatContext *s, const GUIDParseTable *g)
  539. {
  540. ASFContext *asf = s->priv_data;
  541. AVIOContext *pb = s->pb;
  542. int i;
  543. static const char *const titles[] =
  544. { "Title", "Author", "Copyright", "Description", "Rate" };
  545. uint16_t len[5], buflen[5] = { 0 };
  546. uint8_t *ch;
  547. uint64_t size = avio_rl64(pb);
  548. for (i = 0; i < 5; i++) {
  549. len[i] = avio_rl16(pb);
  550. // utf8 string should be <= 2 * utf16 string, extra byte for the terminator
  551. buflen[i] = 2 * len[i] + 1;
  552. }
  553. for (i = 0; i < 5; i++) {
  554. ch = av_malloc(buflen[i]);
  555. if (!ch)
  556. return(AVERROR(ENOMEM));
  557. asf_read_metadata(s, titles[i], len[i], ch, buflen[i]);
  558. av_freep(&ch);
  559. }
  560. align_position(pb, asf->offset, size);
  561. return 0;
  562. }
  563. static int asf_read_properties(AVFormatContext *s, const GUIDParseTable *g)
  564. {
  565. ASFContext *asf = s->priv_data;
  566. AVIOContext *pb = s->pb;
  567. time_t creation_time;
  568. avio_rl64(pb); // read object size
  569. avio_skip(pb, 16); // skip File ID
  570. avio_skip(pb, 8); // skip File size
  571. creation_time = avio_rl64(pb);
  572. if (!(asf->b_flags & ASF_FLAG_BROADCAST)) {
  573. struct tm tmbuf;
  574. struct tm *tm;
  575. char buf[64];
  576. // creation date is in 100 ns units from 1 Jan 1601, conversion to s
  577. creation_time /= 10000000;
  578. // there are 11644473600 seconds between 1 Jan 1601 and 1 Jan 1970
  579. creation_time -= 11644473600;
  580. tm = gmtime_r(&creation_time, &tmbuf);
  581. if (tm) {
  582. if (!strftime(buf, sizeof(buf), "%Y-%m-%d %H:%M:%S", tm))
  583. buf[0] = '\0';
  584. } else
  585. buf[0] = '\0';
  586. if (buf[0]) {
  587. if (av_dict_set(&s->metadata, "creation_time", buf, 0) < 0)
  588. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  589. }
  590. }
  591. asf->nb_packets = avio_rl64(pb);
  592. asf->duration = avio_rl64(pb) / 10000; // stream duration
  593. avio_skip(pb, 8); // skip send duration
  594. asf->preroll = avio_rl64(pb);
  595. asf->duration -= asf->preroll;
  596. asf->b_flags = avio_rl32(pb);
  597. avio_skip(pb, 4); // skip minimal packet size
  598. asf->packet_size = avio_rl32(pb);
  599. avio_skip(pb, 4); // skip max_bitrate
  600. return 0;
  601. }
  602. static int parse_video_info(AVIOContext *pb, AVStream *st)
  603. {
  604. uint16_t size;
  605. unsigned int tag;
  606. st->codecpar->width = avio_rl32(pb);
  607. st->codecpar->height = avio_rl32(pb);
  608. avio_skip(pb, 1); // skip reserved flags
  609. size = avio_rl16(pb); // size of the Format Data
  610. tag = ff_get_bmp_header(pb, st);
  611. st->codecpar->codec_tag = tag;
  612. st->codecpar->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag);
  613. if (size > BMP_HEADER_SIZE) {
  614. int ret;
  615. st->codecpar->extradata_size = size - BMP_HEADER_SIZE;
  616. if (!(st->codecpar->extradata = av_malloc(st->codecpar->extradata_size +
  617. AV_INPUT_BUFFER_PADDING_SIZE))) {
  618. st->codecpar->extradata_size = 0;
  619. return AVERROR(ENOMEM);
  620. }
  621. memset(st->codecpar->extradata + st->codecpar->extradata_size, 0,
  622. AV_INPUT_BUFFER_PADDING_SIZE);
  623. if ((ret = avio_read(pb, st->codecpar->extradata,
  624. st->codecpar->extradata_size)) < 0)
  625. return ret;
  626. }
  627. return 0;
  628. }
  629. static int asf_read_stream_properties(AVFormatContext *s, const GUIDParseTable *g)
  630. {
  631. ASFContext *asf = s->priv_data;
  632. AVIOContext *pb = s->pb;
  633. uint64_t size;
  634. uint32_t err_data_len, ts_data_len; // type specific data length
  635. uint16_t flags;
  636. ff_asf_guid stream_type;
  637. enum AVMediaType type;
  638. int i, ret;
  639. uint8_t stream_index;
  640. AVStream *st;
  641. ASFStream *asf_st;
  642. // ASF file must not contain more than 128 streams according to the specification
  643. if (asf->nb_streams >= ASF_MAX_STREAMS)
  644. return AVERROR_INVALIDDATA;
  645. size = avio_rl64(pb);
  646. ff_get_guid(pb, &stream_type);
  647. if (!ff_guidcmp(&stream_type, &ff_asf_audio_stream))
  648. type = AVMEDIA_TYPE_AUDIO;
  649. else if (!ff_guidcmp(&stream_type, &ff_asf_video_stream))
  650. type = AVMEDIA_TYPE_VIDEO;
  651. else if (!ff_guidcmp(&stream_type, &ff_asf_jfif_media))
  652. type = AVMEDIA_TYPE_VIDEO;
  653. else if (!ff_guidcmp(&stream_type, &ff_asf_command_stream))
  654. type = AVMEDIA_TYPE_DATA;
  655. else if (!ff_guidcmp(&stream_type,
  656. &ff_asf_ext_stream_embed_stream_header))
  657. type = AVMEDIA_TYPE_UNKNOWN;
  658. else
  659. return AVERROR_INVALIDDATA;
  660. ff_get_guid(pb, &stream_type); // error correction type
  661. avio_skip(pb, 8); // skip the time offset
  662. ts_data_len = avio_rl32(pb);
  663. err_data_len = avio_rl32(pb);
  664. flags = avio_rl16(pb); // bit 15 - Encrypted Content
  665. stream_index = flags & ASF_STREAM_NUM;
  666. for (i = 0; i < asf->nb_streams; i++)
  667. if (stream_index == asf->asf_st[i]->stream_index) {
  668. av_log(s, AV_LOG_WARNING,
  669. "Duplicate stream found, this stream will be ignored.\n");
  670. align_position(pb, asf->offset, size);
  671. return 0;
  672. }
  673. st = avformat_new_stream(s, NULL);
  674. if (!st)
  675. return AVERROR(ENOMEM);
  676. avpriv_set_pts_info(st, 32, 1, 1000); // pts should be dword, in milliseconds
  677. st->codecpar->codec_type = type;
  678. asf->asf_st[asf->nb_streams] = av_mallocz(sizeof(*asf_st));
  679. if (!asf->asf_st[asf->nb_streams])
  680. return AVERROR(ENOMEM);
  681. asf_st = asf->asf_st[asf->nb_streams];
  682. asf->nb_streams++;
  683. asf_st->stream_index = stream_index;
  684. asf_st->index = st->index;
  685. asf_st->indexed = 0;
  686. st->id = flags & ASF_STREAM_NUM;
  687. av_init_packet(&asf_st->pkt.avpkt);
  688. asf_st->pkt.data_size = 0;
  689. avio_skip(pb, 4); // skip reserved field
  690. switch (type) {
  691. case AVMEDIA_TYPE_AUDIO:
  692. asf_st->type = AVMEDIA_TYPE_AUDIO;
  693. if ((ret = ff_get_wav_header(s, pb, st->codecpar, ts_data_len)) < 0)
  694. return ret;
  695. break;
  696. case AVMEDIA_TYPE_VIDEO:
  697. asf_st->type = AVMEDIA_TYPE_VIDEO;
  698. if ((ret = parse_video_info(pb, st)) < 0)
  699. return ret;
  700. break;
  701. default:
  702. avio_skip(pb, ts_data_len);
  703. break;
  704. }
  705. if (err_data_len) {
  706. if (type == AVMEDIA_TYPE_AUDIO) {
  707. uint8_t span = avio_r8(pb);
  708. if (span > 1) {
  709. asf_st->span = span;
  710. asf_st->virtual_pkt_len = avio_rl16(pb);
  711. asf_st->virtual_chunk_len = avio_rl16(pb);
  712. if (!asf_st->virtual_chunk_len || !asf_st->virtual_pkt_len)
  713. return AVERROR_INVALIDDATA;
  714. avio_skip(pb, err_data_len - 5);
  715. } else
  716. avio_skip(pb, err_data_len - 1);
  717. } else
  718. avio_skip(pb, err_data_len);
  719. }
  720. align_position(pb, asf->offset, size);
  721. return 0;
  722. }
  723. static void set_language(AVFormatContext *s, const char *rfc1766, AVDictionary **met)
  724. {
  725. // language abbr should contain at least 2 chars
  726. if (rfc1766 && strlen(rfc1766) > 1) {
  727. const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any
  728. const char *iso6392 = av_convert_lang_to(primary_tag,
  729. AV_LANG_ISO639_2_BIBL);
  730. if (iso6392)
  731. if (av_dict_set(met, "language", iso6392, 0) < 0)
  732. av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n");
  733. }
  734. }
  735. static int asf_read_ext_stream_properties(AVFormatContext *s, const GUIDParseTable *g)
  736. {
  737. ASFContext *asf = s->priv_data;
  738. AVIOContext *pb = s->pb;
  739. AVStream *st = NULL;
  740. ff_asf_guid guid;
  741. uint16_t nb_st_name, nb_pay_exts, st_num, lang_idx;
  742. int i, ret;
  743. uint32_t bitrate;
  744. uint64_t start_time, end_time, time_per_frame;
  745. uint64_t size = avio_rl64(pb);
  746. start_time = avio_rl64(pb);
  747. end_time = avio_rl64(pb);
  748. bitrate = avio_rl32(pb);
  749. avio_skip(pb, 28); // skip some unused values
  750. st_num = avio_rl16(pb);
  751. st_num &= ASF_STREAM_NUM;
  752. lang_idx = avio_rl16(pb); // Stream Language ID Index
  753. for (i = 0; i < asf->nb_streams; i++) {
  754. if (st_num == asf->asf_st[i]->stream_index) {
  755. st = s->streams[asf->asf_st[i]->index];
  756. asf->asf_st[i]->lang_idx = lang_idx;
  757. break;
  758. }
  759. }
  760. time_per_frame = avio_rl64(pb); // average time per frame
  761. if (st) {
  762. st->start_time = start_time;
  763. st->duration = end_time - start_time;
  764. st->codecpar->bit_rate = bitrate;
  765. st->avg_frame_rate.num = 10000000;
  766. st->avg_frame_rate.den = time_per_frame;
  767. }
  768. nb_st_name = avio_rl16(pb);
  769. nb_pay_exts = avio_rl16(pb);
  770. for (i = 0; i < nb_st_name; i++) {
  771. uint16_t len;
  772. avio_rl16(pb); // Language ID Index
  773. len = avio_rl16(pb);
  774. avio_skip(pb, len);
  775. }
  776. for (i = 0; i < nb_pay_exts; i++) {
  777. uint32_t len;
  778. avio_skip(pb, 16); // Extension System ID
  779. avio_skip(pb, 2); // Extension Data Size
  780. len = avio_rl32(pb);
  781. avio_skip(pb, len);
  782. }
  783. if ((ret = ff_get_guid(pb, &guid)) < 0) {
  784. align_position(pb, asf->offset, size);
  785. return 0;
  786. }
  787. g = find_guid(guid);
  788. if (g && !(strcmp(g->name, "Stream Properties"))) {
  789. if ((ret = g->read_object(s, g)) < 0)
  790. return ret;
  791. }
  792. align_position(pb, asf->offset, size);
  793. return 0;
  794. }
  795. static int asf_read_language_list(AVFormatContext *s, const GUIDParseTable *g)
  796. {
  797. ASFContext *asf = s->priv_data;
  798. AVIOContext *pb = s->pb;
  799. int i, ret;
  800. uint64_t size = avio_rl64(pb);
  801. uint16_t nb_langs = avio_rl16(pb);
  802. if (nb_langs < ASF_MAX_STREAMS) {
  803. for (i = 0; i < nb_langs; i++) {
  804. size_t len;
  805. len = avio_r8(pb);
  806. if (!len)
  807. len = 6;
  808. if ((ret = get_asf_string(pb, len, asf->asf_sd[i].langs,
  809. sizeof(asf->asf_sd[i].langs))) < 0) {
  810. return ret;
  811. }
  812. }
  813. }
  814. align_position(pb, asf->offset, size);
  815. return 0;
  816. }
  817. // records the Data Object offset when this object is read for the first time
  818. static int asf_read_data(AVFormatContext *s, const GUIDParseTable *g)
  819. {
  820. ASFContext *asf = s->priv_data;
  821. AVIOContext *pb = s->pb;
  822. uint64_t size = asf->data_size = avio_rl64(pb);
  823. int i;
  824. if (!asf->data_reached) {
  825. asf->data_reached = 1;
  826. asf->data_offset = asf->offset;
  827. }
  828. for (i = 0; i < asf->nb_streams; i++) {
  829. if (!(asf->b_flags & ASF_FLAG_BROADCAST))
  830. s->streams[i]->duration = asf->duration;
  831. }
  832. asf->nb_mult_left = 0;
  833. asf->sub_left = 0;
  834. asf->state = PARSE_PACKET_HEADER;
  835. asf->return_subpayload = 0;
  836. asf->packet_size_internal = 0;
  837. avio_skip(pb, 16); // skip File ID
  838. size = avio_rl64(pb); // Total Data Packets
  839. if (size != asf->nb_packets)
  840. av_log(s, AV_LOG_WARNING,
  841. "Number of Packets from File Properties Object is not equal to Total"
  842. "Datapackets value! num of packets %"PRIu64" total num %"PRIu64".\n",
  843. size, asf->nb_packets);
  844. avio_skip(pb, 2); // skip reserved field
  845. asf->first_packet_offset = avio_tell(pb);
  846. if (pb->seekable && !(asf->b_flags & ASF_FLAG_BROADCAST))
  847. align_position(pb, asf->offset, asf->data_size);
  848. return 0;
  849. }
  850. static int asf_read_simple_index(AVFormatContext *s, const GUIDParseTable *g)
  851. {
  852. ASFContext *asf = s->priv_data;
  853. AVIOContext *pb = s->pb;
  854. AVStream *st = NULL;
  855. uint64_t interval; // index entry time interval in 100 ns units, usually it's 1s
  856. uint32_t pkt_num, nb_entries;
  857. int32_t prev_pkt_num = -1;
  858. int i, ret;
  859. uint64_t size = avio_rl64(pb);
  860. // simple index objects should be ordered by stream number, so this loop finds
  861. // the first video stream that has not been indexed yet
  862. for (i = 0; i < asf->nb_streams; i++) {
  863. if ((asf->asf_st[i]->type == AVMEDIA_TYPE_VIDEO) && !asf->asf_st[i]->indexed) {
  864. asf->asf_st[i]->indexed = 1;
  865. st = s->streams[asf->asf_st[i]->index];
  866. break;
  867. }
  868. }
  869. if (!st) {
  870. avio_skip(pb, size - 24); // if there's no video stream, skip index object
  871. return 0;
  872. }
  873. avio_skip(pb, 16); // skip File ID
  874. interval = avio_rl64(pb);
  875. avio_skip(pb, 4);
  876. nb_entries = avio_rl32(pb);
  877. for (i = 0; i < nb_entries; i++) {
  878. pkt_num = avio_rl32(pb);
  879. ret = avio_skip(pb, 2);
  880. if (ret < 0) {
  881. av_log(s, AV_LOG_ERROR, "Skipping failed in asf_read_simple_index.\n");
  882. return ret;
  883. }
  884. if (prev_pkt_num != pkt_num) {
  885. av_add_index_entry(st, asf->first_packet_offset + asf->packet_size *
  886. pkt_num, av_rescale(interval, i, 10000),
  887. asf->packet_size, 0, AVINDEX_KEYFRAME);
  888. prev_pkt_num = pkt_num;
  889. }
  890. }
  891. asf->is_simple_index = 1;
  892. align_position(pb, asf->offset, size);
  893. return 0;
  894. }
  895. static const GUIDParseTable gdef[] = {
  896. { "Data", { 0x75, 0xB2, 0x26, 0x36, 0x66, 0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_data, 1 },
  897. { "Simple Index", { 0x33, 0x00, 0x08, 0x90, 0xE5, 0xB1, 0x11, 0xCF, 0x89, 0xF4, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xCB }, asf_read_simple_index, 1 },
  898. { "Content Description", { 0x75, 0xB2, 0x26, 0x33, 0x66 ,0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_content_desc, 1 },
  899. { "Extended Content Description", { 0xD2, 0xD0, 0xA4, 0x40, 0xE3, 0x07, 0x11, 0xD2, 0x97, 0xF0, 0x00, 0xA0, 0xC9, 0x5e, 0xA8, 0x50 }, asf_read_ext_content, 1 },
  900. { "Stream Bitrate Properties", { 0x7B, 0xF8, 0x75, 0xCE, 0x46, 0x8D, 0x11, 0xD1, 0x8D, 0x82, 0x00, 0x60, 0x97, 0xC9, 0xA2, 0xB2 }, asf_read_unknown, 1 },
  901. { "File Properties", { 0x8C, 0xAB, 0xDC, 0xA1, 0xA9, 0x47, 0x11, 0xCF, 0x8E, 0xE4, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_properties, 1 },
  902. { "Header Extension", { 0x5F, 0xBF, 0x03, 0xB5, 0xA9, 0x2E, 0x11, 0xCF, 0x8E, 0xE3, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_unknown, 0 },
  903. { "Stream Properties", { 0xB7, 0xDC, 0x07, 0x91, 0xA9, 0xB7, 0x11, 0xCF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_stream_properties, 1 },
  904. { "Codec List", { 0x86, 0xD1, 0x52, 0x40, 0x31, 0x1D, 0x11, 0xD0, 0xA3, 0xA4, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6 }, asf_read_unknown, 1 },
  905. { "Marker", { 0xF4, 0x87, 0xCD, 0x01, 0xA9, 0x51, 0x11, 0xCF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_marker, 1 },
  906. { "Script Command", { 0x1E, 0xFB, 0x1A, 0x30, 0x0B, 0x62, 0x11, 0xD0, 0xA3, 0x9B, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6 }, asf_read_unknown, 1 },
  907. { "Language List", { 0x7C, 0x43, 0x46, 0xa9, 0xef, 0xe0, 0x4B, 0xFC, 0xB2, 0x29, 0x39, 0x3e, 0xde, 0x41, 0x5c, 0x85 }, asf_read_language_list, 1},
  908. { "Padding", { 0x18, 0x06, 0xD4, 0x74, 0xCA, 0xDF, 0x45, 0x09, 0xA4, 0xBA, 0x9A, 0xAB, 0xCB, 0x96, 0xAA, 0xE8 }, asf_read_unknown, 1 },
  909. { "DRMv1 Header", { 0x22, 0x11, 0xB3, 0xFB, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 },
  910. { "DRMv2 Header", { 0x29, 0x8A, 0xE6, 0x14, 0x26, 0x22, 0x4C, 0x17, 0xB9, 0x35, 0xDA, 0xE0, 0x7E, 0xE9, 0x28, 0x9c }, asf_read_unknown, 1 },
  911. { "Index", { 0xD6, 0xE2, 0x29, 0xD3, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  912. { "Media Object Index", { 0xFE, 0xB1, 0x03, 0xF8, 0x12, 0xAD, 0x4C, 0x64, 0x84, 0x0F, 0x2A, 0x1D, 0x2F, 0x7A, 0xD4, 0x8C }, asf_read_unknown, 1 },
  913. { "Timecode Index", { 0x3C, 0xB7, 0x3F, 0xD0, 0x0C, 0x4A, 0x48, 0x03, 0x95, 0x3D, 0xED, 0xF7, 0xB6, 0x22, 0x8F, 0x0C }, asf_read_unknown, 0 },
  914. { "Bitrate_Mutual_Exclusion", { 0xD6, 0xE2, 0x29, 0xDC, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  915. { "Error Correction", { 0x75, 0xB2, 0x26, 0x35, 0x66, 0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_unknown, 1 },
  916. { "Content Branding", { 0x22, 0x11, 0xB3, 0xFA, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 },
  917. { "Content Encryption", { 0x22, 0x11, 0xB3, 0xFB, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 },
  918. { "Extended Content Encryption", { 0x29, 0x8A, 0xE6, 0x14, 0x26, 0x22, 0x4C, 0x17, 0xB9, 0x35, 0xDA, 0xE0, 0x7E, 0xE9, 0x28, 0x9C }, asf_read_unknown, 1 },
  919. { "Digital Signature", { 0x22, 0x11, 0xB3, 0xFC, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 },
  920. { "Extended Stream Properties", { 0x14, 0xE6, 0xA5, 0xCB, 0xC6, 0x72, 0x43, 0x32, 0x83, 0x99, 0xA9, 0x69, 0x52, 0x06, 0x5B, 0x5A }, asf_read_ext_stream_properties, 1 },
  921. { "Advanced Mutual Exclusion", { 0xA0, 0x86, 0x49, 0xCF, 0x47, 0x75, 0x46, 0x70, 0x8A, 0x16, 0x6E, 0x35, 0x35, 0x75, 0x66, 0xCD }, asf_read_unknown, 1 },
  922. { "Group Mutual Exclusion", { 0xD1, 0x46, 0x5A, 0x40, 0x5A, 0x79, 0x43, 0x38, 0xB7, 0x1B, 0xE3, 0x6B, 0x8F, 0xD6, 0xC2, 0x49 }, asf_read_unknown, 1},
  923. { "Stream Prioritization", { 0xD4, 0xFE, 0xD1, 0x5B, 0x88, 0xD3, 0x45, 0x4F, 0x81, 0xF0, 0xED, 0x5C, 0x45, 0x99, 0x9E, 0x24 }, asf_read_unknown, 1 },
  924. { "Bandwidth Sharing Object", { 0xA6, 0x96, 0x09, 0xE6, 0x51, 0x7B, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 },
  925. { "Metadata", { 0xC5, 0xF8, 0xCB, 0xEA, 0x5B, 0xAF, 0x48, 0x77, 0x84, 0x67, 0xAA, 0x8C, 0x44, 0xFA, 0x4C, 0xCA }, asf_read_metadata_obj, 1 },
  926. { "Metadata Library", { 0x44, 0x23, 0x1C, 0x94, 0x94, 0x98, 0x49, 0xD1, 0xA1, 0x41, 0x1D, 0x13, 0x4E, 0x45, 0x70, 0x54 }, asf_read_metadata_obj, 1 },
  927. { "Audio Spread", { 0xBF, 0xC3, 0xCD, 0x50, 0x61, 0x8F, 0x11, 0xCF, 0x8B, 0xB2, 0x00, 0xAA, 0x00, 0xB4, 0xE2, 0x20 }, asf_read_unknown, 1 },
  928. { "Index Parameters", { 0xD6, 0xE2, 0x29, 0xDF, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  929. { "Content Encryption System Windows Media DRM Network Devices",
  930. { 0x7A, 0x07, 0x9B, 0xB6, 0xDA, 0XA4, 0x4e, 0x12, 0xA5, 0xCA, 0x91, 0xD3, 0x8D, 0xC1, 0x1A, 0x8D }, asf_read_unknown, 1 },
  931. { "Mutex Language", { 0xD6, 0xE2, 0x2A, 0x00, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  932. { "Mutex Bitrate", { 0xD6, 0xE2, 0x2A, 0x01, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  933. { "Mutex Unknown", { 0xD6, 0xE2, 0x2A, 0x02, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 },
  934. { "Bandwith Sharing Exclusive", { 0xAF, 0x60, 0x60, 0xAA, 0x51, 0x97, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 },
  935. { "Bandwith Sharing Partial", { 0xAF, 0x60, 0x60, 0xAB, 0x51, 0x97, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 },
  936. { "Payload Extension System Timecode", { 0x39, 0x95, 0x95, 0xEC, 0x86, 0x67, 0x4E, 0x2D, 0x8F, 0xDB, 0x98, 0x81, 0x4C, 0xE7, 0x6C, 0x1E }, asf_read_unknown, 1 },
  937. { "Payload Extension System File Name", { 0xE1, 0x65, 0xEC, 0x0E, 0x19, 0xED, 0x45, 0xD7, 0xB4, 0xA7, 0x25, 0xCB, 0xD1, 0xE2, 0x8E, 0x9B }, asf_read_unknown, 1 },
  938. { "Payload Extension System Content Type", { 0xD5, 0x90, 0xDC, 0x20, 0x07, 0xBC, 0x43, 0x6C, 0x9C, 0xF7, 0xF3, 0xBB, 0xFB, 0xF1, 0xA4, 0xDC }, asf_read_unknown, 1 },
  939. { "Payload Extension System Pixel Aspect Ratio", { 0x1, 0x1E, 0xE5, 0x54, 0xF9, 0xEA, 0x4B, 0xC8, 0x82, 0x1A, 0x37, 0x6B, 0x74, 0xE4, 0xC4, 0xB8 }, asf_read_unknown, 1 },
  940. { "Payload Extension System Sample Duration", { 0xC6, 0xBD, 0x94, 0x50, 0x86, 0x7F, 0x49, 0x07, 0x83, 0xA3, 0xC7, 0x79, 0x21, 0xB7, 0x33, 0xAD }, asf_read_unknown, 1 },
  941. { "Payload Extension System Encryption Sample ID", { 0x66, 0x98, 0xB8, 0x4E, 0x0A, 0xFA, 0x43, 0x30, 0xAE, 0xB2, 0x1C, 0x0A, 0x98, 0xD7, 0xA4, 0x4D }, asf_read_unknown, 1 },
  942. { "Payload Extension System Degradable JPEG", { 0x00, 0xE1, 0xAF, 0x06, 0x7B, 0xEC, 0x11, 0xD1, 0xA5, 0x82, 0x00, 0xC0, 0x4F, 0xC2, 0x9C, 0xFB }, asf_read_unknown, 1 },
  943. };
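/* Read a length field whose size (byte, word or dword) is selected by the
 * corresponding flag bits of the packet/payload parsing information. */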
  944. #define READ_LEN(flag, name, len) \
  945. do { \
  946. if ((flag) == name ## IS_BYTE) \
  947. len = avio_r8(pb); \
  948. else if ((flag) == name ## IS_WORD) \
  949. len = avio_rl16(pb); \
  950. else if ((flag) == name ## IS_DWORD) \
  951. len = avio_rl32(pb); \
  952. else \
  953. len = 0; \
  954. } while(0)
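/* Read one subpayload; when is_header is set, the subpayload header (time
 * delta and, for multiple payloads, the total subpayload length) is parsed
 * first. Each subpayload carries one complete frame. */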
  955. static int asf_read_subpayload(AVFormatContext *s, AVPacket *pkt, int is_header)
  956. {
  957. ASFContext *asf = s->priv_data;
  958. AVIOContext *pb = s->pb;
  959. uint8_t sub_len;
  960. int ret, i;
  961. if (is_header) {
  962. asf->dts_delta = avio_r8(pb);
  963. if (asf->nb_mult_left) {
  964. asf->mult_sub_len = avio_rl16(pb); // total
  965. }
  966. asf->sub_header_offset = avio_tell(pb);
  967. asf->nb_sub = 0;
  968. asf->sub_left = 1;
  969. }
  970. sub_len = avio_r8(pb);
  971. if ((ret = av_get_packet(pb, pkt, sub_len)) < 0) // each subpayload is entire frame
  972. return ret;
  973. for (i = 0; i < asf->nb_streams; i++) {
  974. if (asf->stream_index == asf->asf_st[i]->stream_index) {
  975. pkt->stream_index = asf->asf_st[i]->index;
  976. break;
  977. }
  978. }
  979. asf->return_subpayload = 1;
  980. if (!sub_len)
  981. asf->return_subpayload = 0;
  982. if (sub_len)
  983. asf->nb_sub++;
  984. pkt->dts = asf->sub_dts + (asf->nb_sub - 1) * asf->dts_delta - asf->preroll;
  985. if (asf->nb_mult_left && (avio_tell(pb) >=
  986. (asf->sub_header_offset + asf->mult_sub_len))) {
  987. asf->sub_left = 0;
  988. asf->nb_mult_left--;
  989. }
  990. if (avio_tell(pb) >= asf->packet_offset + asf->packet_size - asf->pad_len) {
  991. asf->sub_left = 0;
  992. if (!asf->nb_mult_left) {
  993. avio_skip(pb, asf->pad_len);
  994. if (avio_tell(pb) != asf->packet_offset + asf->packet_size) {
  995. if (!asf->packet_size)
  996. return AVERROR_INVALIDDATA;
  997. av_log(s, AV_LOG_WARNING,
  998. "Position %"PRId64" wrong, should be %"PRId64"\n",
  999. avio_tell(pb), asf->packet_offset + asf->packet_size);
  1000. avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET);
  1001. }
  1002. }
  1003. }
  1004. return 0;
  1005. }
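/* Discard any partially assembled frame and return the ASFPacket to its
 * initial state. */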
  1006. static void reset_packet(ASFPacket *asf_pkt)
  1007. {
  1008. asf_pkt->size_left = 0;
  1009. asf_pkt->data_size = 0;
  1010. asf_pkt->duration = 0;
  1011. asf_pkt->flags = 0;
  1012. asf_pkt->dts = 0;
  1013. asf_pkt->duration = 0;
  1014. av_packet_unref(&asf_pkt->avpkt);
  1015. av_init_packet(&asf_pkt->avpkt);
  1016. }
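/* Parse the replicated data preceding the payload: the first 8 bytes hold
 * the media object size and the presentation time, the rest is skipped. */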
  1017. static int asf_read_replicated_data(AVFormatContext *s, ASFPacket *asf_pkt)
  1018. {
  1019. ASFContext *asf = s->priv_data;
  1020. AVIOContext *pb = s->pb;
  1021. int ret, data_size;
  1022. if (!asf_pkt->data_size) {
  1023. data_size = avio_rl32(pb); // read media object size
  1024. if (data_size <= 0)
  1025. return AVERROR_INVALIDDATA;
  1026. if ((ret = av_new_packet(&asf_pkt->avpkt, data_size)) < 0)
  1027. return ret;
  1028. asf_pkt->data_size = asf_pkt->size_left = data_size;
  1029. } else
  1030. avio_skip(pb, 4); // reading of media object size is already done
  1031. asf_pkt->dts = avio_rl32(pb); // read presentation time
  1032. if (asf->rep_data_len && (asf->rep_data_len >= 8))
  1033. avio_skip(pb, asf->rep_data_len - 8); // skip replicated data
  1034. return 0;
  1035. }
  1036. static int asf_read_multiple_payload(AVFormatContext *s, AVPacket *pkt,
  1037. ASFPacket *asf_pkt)
  1038. {
  1039. ASFContext *asf = s->priv_data;
  1040. AVIOContext *pb = s->pb;
  1041. uint16_t pay_len;
  1042. unsigned char *p;
  1043. int ret;
  1044. int skip = 0;
  1045. // if replicated length is 1, subpayloads are present
  1046. if (asf->rep_data_len == 1) {
  1047. asf->sub_left = 1;
  1048. asf->state = READ_MULTI_SUB;
  1049. pkt->flags = asf_pkt->flags;
  1050. if ((ret = asf_read_subpayload(s, pkt, 1)) < 0)
  1051. return ret;
  1052. } else {
  1053. if (asf->rep_data_len)
  1054. if ((ret = asf_read_replicated_data(s, asf_pkt)) < 0)
  1055. return ret;
  1056. pay_len = avio_rl16(pb); // payload length should be WORD
  1057. if (pay_len > asf->packet_size) {
  1058. av_log(s, AV_LOG_ERROR,
  1059. "Error: invalid data packet size, pay_len %"PRIu16", "
  1060. "asf->packet_size %"PRIu32", offset %"PRId64".\n",
  1061. pay_len, asf->packet_size, avio_tell(pb));
  1062. return AVERROR_INVALIDDATA;
  1063. }
  1064. p = asf_pkt->avpkt.data + asf_pkt->data_size - asf_pkt->size_left;
  1065. if (pay_len > asf_pkt->size_left) {
  1066. av_log(s, AV_LOG_ERROR,
  1067. "Error: invalid buffer size, pay_len %d, data size left %d.\n",
  1068. pay_len, asf_pkt->size_left);
  1069. skip = pay_len - asf_pkt->size_left;
  1070. pay_len = asf_pkt->size_left;
  1071. }
  1072. if (asf_pkt->size_left <= 0)
  1073. return AVERROR_INVALIDDATA;
  1074. if ((ret = avio_read(pb, p, pay_len)) < 0)
  1075. return ret;
  1076. if (s->key && s->keylen == 20)
  1077. ff_asfcrypt_dec(s->key, p, ret);
  1078. avio_skip(pb, skip);
  1079. asf_pkt->size_left -= pay_len;
  1080. asf->nb_mult_left--;
  1081. }
  1082. return 0;
  1083. }
  1084. static int asf_read_single_payload(AVFormatContext *s, ASFPacket *asf_pkt)
  1085. {
  1086. ASFContext *asf = s->priv_data;
  1087. AVIOContext *pb = s->pb;
  1088. int64_t offset;
  1089. uint64_t size;
  1090. unsigned char *p;
  1091. int ret, data_size;
  1092. if (!asf_pkt->data_size) {
  1093. data_size = avio_rl32(pb); // read media object size
  1094. if (data_size <= 0)
  1095. return AVERROR_EOF;
  1096. if ((ret = av_new_packet(&asf_pkt->avpkt, data_size)) < 0)
  1097. return ret;
  1098. asf_pkt->data_size = asf_pkt->size_left = data_size;
  1099. } else
  1100. avio_skip(pb, 4); // skip media object size
  1101. asf_pkt->dts = avio_rl32(pb); // read presentation time
  1102. if (asf->rep_data_len >= 8)
  1103. avio_skip(pb, asf->rep_data_len - 8); // skip replicated data
  1104. offset = avio_tell(pb);
  1105. // size of the payload - size of the packet without header and padding
  1106. if (asf->packet_size_internal)
  1107. size = asf->packet_size_internal - offset + asf->packet_offset - asf->pad_len;
  1108. else
  1109. size = asf->packet_size - offset + asf->packet_offset - asf->pad_len;
  1110. if (size > asf->packet_size) {
  1111. av_log(s, AV_LOG_ERROR,
  1112. "Error: invalid data packet size, offset %"PRId64".\n",
  1113. avio_tell(pb));
  1114. return AVERROR_INVALIDDATA;
  1115. }
  1116. p = asf_pkt->avpkt.data + asf_pkt->data_size - asf_pkt->size_left;
  1117. if (size > asf_pkt->size_left || asf_pkt->size_left <= 0)
  1118. return AVERROR_INVALIDDATA;
  1119. if (asf_pkt->size_left > size)
  1120. asf_pkt->size_left -= size;
  1121. else
  1122. asf_pkt->size_left = 0;
  1123. if ((ret = avio_read(pb, p, size)) < 0)
  1124. return ret;
  1125. if (s->key && s->keylen == 20)
  1126. ff_asfcrypt_dec(s->key, p, ret);
  1127. if (asf->packet_size_internal)
  1128. avio_skip(pb, asf->packet_size - asf->packet_size_internal);
  1129. avio_skip(pb, asf->pad_len); // skip padding
  1130. return 0;
  1131. }
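/* Parse one payload from the current ASF packet: either return a subpayload
 * directly in pkt or accumulate payload data into the per-stream ASFPacket
 * until the frame is complete. */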
  1132. static int asf_read_payload(AVFormatContext *s, AVPacket *pkt)
  1133. {
  1134. ASFContext *asf = s->priv_data;
  1135. AVIOContext *pb = s->pb;
  1136. int ret, i;
  1137. ASFPacket *asf_pkt = NULL;
  1138. if (!asf->sub_left) {
  1139. uint32_t off_len, media_len;
  1140. uint8_t stream_num;
  1141. stream_num = avio_r8(pb);
  1142. asf->stream_index = stream_num & ASF_STREAM_NUM;
  1143. for (i = 0; i < asf->nb_streams; i++) {
  1144. if (asf->stream_index == asf->asf_st[i]->stream_index) {
  1145. asf_pkt = &asf->asf_st[i]->pkt;
  1146. asf_pkt->stream_index = asf->asf_st[i]->index;
  1147. break;
  1148. }
  1149. }
  1150. if (!asf_pkt) {
  1151. if (asf->packet_offset + asf->packet_size <= asf->data_offset + asf->data_size) {
  1152. if (!asf->packet_size) {
  1153. av_log(s, AV_LOG_ERROR, "Invalid packet size 0.\n");
  1154. return AVERROR_INVALIDDATA;
  1155. }
  1156. avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET);
  1157. av_log(s, AV_LOG_WARNING, "Skipping the stream with the invalid stream index %d.\n",
  1158. asf->stream_index);
  1159. return AVERROR(EAGAIN);
  1160. } else
  1161. return AVERROR_INVALIDDATA;
  1162. }
  1163. if (stream_num >> 7)
  1164. asf_pkt->flags |= AV_PKT_FLAG_KEY;
  1165. READ_LEN(asf->prop_flags & ASF_PL_MASK_MEDIA_OBJECT_NUMBER_LENGTH_FIELD_SIZE,
  1166. ASF_PL_FLAG_MEDIA_OBJECT_NUMBER_LENGTH_FIELD_, media_len);
  1167. READ_LEN(asf->prop_flags & ASF_PL_MASK_OFFSET_INTO_MEDIA_OBJECT_LENGTH_FIELD_SIZE,
  1168. ASF_PL_FLAG_OFFSET_INTO_MEDIA_OBJECT_LENGTH_FIELD_, off_len);
  1169. READ_LEN(asf->prop_flags & ASF_PL_MASK_REPLICATED_DATA_LENGTH_FIELD_SIZE,
  1170. ASF_PL_FLAG_REPLICATED_DATA_LENGTH_FIELD_, asf->rep_data_len);
  1171. if (asf_pkt->size_left && (asf_pkt->frame_num != media_len)) {
  1172. av_log(s, AV_LOG_WARNING, "Unfinished frame will be ignored\n");
  1173. reset_packet(asf_pkt);
  1174. }
  1175. asf_pkt->frame_num = media_len;
  1176. asf->sub_dts = off_len;
  1177. if (asf->nb_mult_left) {
  1178. if ((ret = asf_read_multiple_payload(s, pkt, asf_pkt)) < 0)
  1179. return ret;
  1180. } else if (asf->rep_data_len == 1) {
  1181. asf->sub_left = 1;
  1182. asf->state = READ_SINGLE;
  1183. pkt->flags = asf_pkt->flags;
  1184. if ((ret = asf_read_subpayload(s, pkt, 1)) < 0)
  1185. return ret;
  1186. } else {
  1187. if ((ret = asf_read_single_payload(s, asf_pkt)) < 0)
  1188. return ret;
  1189. }
  1190. } else {
  1191. for (i = 0; i <= asf->nb_streams; i++) {
  1192. if (asf->stream_index == asf->asf_st[i]->stream_index) {
  1193. asf_pkt = &asf->asf_st[i]->pkt;
  1194. break;
  1195. }
  1196. }
  1197. if (!asf_pkt)
  1198. return AVERROR_INVALIDDATA;
  1199. pkt->flags = asf_pkt->flags;
  1200. pkt->dts = asf_pkt->dts;
  1201. pkt->stream_index = asf->asf_st[i]->index;
  1202. if ((ret = asf_read_subpayload(s, pkt, 0)) < 0) // read subpayload without its header
  1203. return ret;
  1204. }
  1205. return 0;
  1206. }
  1207. static int asf_read_packet_header(AVFormatContext *s)
  1208. {
  1209. ASFContext *asf = s->priv_data;
  1210. AVIOContext *pb = s->pb;
  1211. uint64_t size;
  1212. uint32_t av_unused seq;
  1213. unsigned char error_flags, len_flags, pay_flags;
  1214. asf->packet_offset = avio_tell(pb);
  1215. error_flags = avio_r8(pb); // read Error Correction Flags
  1216. if (error_flags & ASF_PACKET_FLAG_ERROR_CORRECTION_PRESENT) {
  1217. if (!(error_flags & ASF_ERROR_CORRECTION_LENGTH_TYPE)) {
  1218. size = error_flags & ASF_PACKET_ERROR_CORRECTION_DATA_SIZE;
  1219. avio_skip(pb, size);
  1220. }
  1221. len_flags = avio_r8(pb);
  1222. } else
  1223. len_flags = error_flags;
  1224. asf->prop_flags = avio_r8(pb);
  1225. READ_LEN(len_flags & ASF_PPI_MASK_PACKET_LENGTH_FIELD_SIZE,
  1226. ASF_PPI_FLAG_PACKET_LENGTH_FIELD_, asf->packet_size_internal);
  1227. READ_LEN(len_flags & ASF_PPI_MASK_SEQUENCE_FIELD_SIZE,
  1228. ASF_PPI_FLAG_SEQUENCE_FIELD_, seq);
  1229. READ_LEN(len_flags & ASF_PPI_MASK_PADDING_LENGTH_FIELD_SIZE,
  1230. ASF_PPI_FLAG_PADDING_LENGTH_FIELD_, asf->pad_len );
  1231. asf->send_time = avio_rl32(pb); // send time
  1232. avio_skip(pb, 2); // skip duration
  1233. if (len_flags & ASF_PPI_FLAG_MULTIPLE_PAYLOADS_PRESENT) { // Multiple Payloads present
  1234. pay_flags = avio_r8(pb);
  1235. asf->nb_mult_left = (pay_flags & ASF_NUM_OF_PAYLOADS);
  1236. }
  1237. return 0;
  1238. }
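/* Audio streams with span > 1 are interleaved across virtual packets of
 * virtual_pkt_len bytes built from virtual_chunk_len-sized chunks; copy the
 * chunks back into their original order. */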
  1239. static int asf_deinterleave(AVFormatContext *s, ASFPacket *asf_pkt, int st_num)
  1240. {
  1241. ASFContext *asf = s->priv_data;
  1242. ASFStream *asf_st = asf->asf_st[st_num];
  1243. unsigned char *p = asf_pkt->avpkt.data;
  1244. uint16_t pkt_len = asf->asf_st[st_num]->virtual_pkt_len;
  1245. uint16_t chunk_len = asf->asf_st[st_num]->virtual_chunk_len;
  1246. int nchunks = pkt_len / chunk_len;
  1247. AVPacket pkt;
  1248. int pos = 0, j, l, ret;
  1249. if ((ret = av_new_packet(&pkt, asf_pkt->data_size)) < 0)
  1250. return ret;
  1251. while (asf_pkt->data_size >= asf_st->span * pkt_len + pos) {
  1252. if (pos >= asf_pkt->data_size) {
  1253. break;
  1254. }
  1255. for (l = 0; l < pkt_len; l++) {
  1256. if (pos >= asf_pkt->data_size) {
  1257. break;
  1258. }
  1259. for (j = 0; j < asf_st->span; j++) {
  1260. if ((pos + chunk_len) >= asf_pkt->data_size)
  1261. break;
  1262. memcpy(pkt.data + pos,
  1263. p + (j * nchunks + l) * chunk_len,
  1264. chunk_len);
  1265. pos += chunk_len;
  1266. }
  1267. }
  1268. p += asf_st->span * pkt_len;
  1269. if (p > asf_pkt->avpkt.data + asf_pkt->data_size)
  1270. break;
  1271. }
  1272. av_packet_unref(&asf_pkt->avpkt);
  1273. asf_pkt->avpkt = pkt;
  1274. return 0;
  1275. }
  1276. static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
  1277. {
  1278. ASFContext *asf = s->priv_data;
  1279. AVIOContext *pb = s->pb;
  1280. int ret, i;
  1281. if ((avio_tell(pb) >= asf->data_offset + asf->data_size) &&
  1282. !(asf->b_flags & ASF_FLAG_BROADCAST))
  1283. return AVERROR_EOF;
  1284. while (!pb->eof_reached) {
  1285. if (asf->state == PARSE_PACKET_HEADER) {
  1286. asf_read_packet_header(s);
  1287. if (pb->eof_reached)
  1288. break;
  1289. if (!asf->nb_mult_left)
  1290. asf->state = READ_SINGLE;
  1291. else
  1292. asf->state = READ_MULTI;
  1293. }
  1294. ret = asf_read_payload(s, pkt);
  1295. if (ret == AVERROR(EAGAIN)) {
  1296. asf->state = PARSE_PACKET_HEADER;
  1297. continue;
  1298. }
  1299. else if (ret < 0)
  1300. return ret;
  1301. switch (asf->state) {
  1302. case READ_SINGLE:
  1303. if (!asf->sub_left)
  1304. asf->state = PARSE_PACKET_HEADER;
  1305. break;
  1306. case READ_MULTI_SUB:
  1307. if (!asf->sub_left && !asf->nb_mult_left) {
  1308. asf->state = PARSE_PACKET_HEADER;
  1309. if (!asf->return_subpayload &&
  1310. (avio_tell(pb) <= asf->packet_offset +
  1311. asf->packet_size - asf->pad_len))
  1312. avio_skip(pb, asf->pad_len); // skip padding
  1313. if (asf->packet_offset + asf->packet_size > avio_tell(pb))
  1314. avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET);
  1315. } else if (!asf->sub_left)
  1316. asf->state = READ_MULTI;
  1317. break;
  1318. case READ_MULTI:
  1319. if (!asf->nb_mult_left) {
  1320. asf->state = PARSE_PACKET_HEADER;
  1321. if (!asf->return_subpayload &&
  1322. (avio_tell(pb) <= asf->packet_offset +
  1323. asf->packet_size - asf->pad_len))
  1324. avio_skip(pb, asf->pad_len); // skip padding
  1325. if (asf->packet_offset + asf->packet_size > avio_tell(pb))
  1326. avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET);
  1327. }
  1328. break;
  1329. }
  1330. if (asf->return_subpayload) {
  1331. asf->return_subpayload = 0;
  1332. return 0;
  1333. }
  1334. for (i = 0; i < s->nb_streams; i++) {
  1335. ASFPacket *asf_pkt = &asf->asf_st[i]->pkt;
  1336. if (asf_pkt && !asf_pkt->size_left && asf_pkt->data_size) {
  1337. if (asf->asf_st[i]->span > 1 &&
  1338. asf->asf_st[i]->type == AVMEDIA_TYPE_AUDIO)
  1339. if ((ret = asf_deinterleave(s, asf_pkt, i)) < 0)
  1340. return ret;
  1341. av_packet_move_ref(pkt, &asf_pkt->avpkt);
  1342. pkt->stream_index = asf->asf_st[i]->index;
  1343. pkt->flags = asf_pkt->flags;
  1344. pkt->dts = asf_pkt->dts - asf->preroll;
  1345. asf_pkt->data_size = 0;
  1346. asf_pkt->frame_num = 0;
  1347. return 0;
  1348. }
  1349. }
  1350. }
  1351. if (pb->eof_reached)
  1352. return AVERROR_EOF;
  1353. return 0;
  1354. }
  1355. static int asf_read_close(AVFormatContext *s)
  1356. {
  1357. ASFContext *asf = s->priv_data;
  1358. int i;
  1359. for (i = 0; i < ASF_MAX_STREAMS; i++) {
  1360. av_dict_free(&asf->asf_sd[i].asf_met);
  1361. if (i < asf->nb_streams) {
  1362. av_packet_unref(&asf->asf_st[i]->pkt.avpkt);
  1363. av_freep(&asf->asf_st[i]);
  1364. }
  1365. }
  1366. asf->nb_streams = 0;
  1367. return 0;
  1368. }
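/* Reset the packet parsing state machine and drop any partially read
 * payloads; used before parsing packets from a new position after a seek. */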
  1369. static void reset_packet_state(AVFormatContext *s)
  1370. {
  1371. ASFContext *asf = s->priv_data;
  1372. int i;
  1373. asf->state = PARSE_PACKET_HEADER;
  1374. asf->offset = 0;
  1375. asf->return_subpayload = 0;
  1376. asf->sub_left = 0;
  1377. asf->sub_header_offset = 0;
  1378. asf->packet_offset = asf->first_packet_offset;
  1379. asf->pad_len = 0;
  1380. asf->rep_data_len = 0;
  1381. asf->dts_delta = 0;
  1382. asf->mult_sub_len = 0;
  1383. asf->nb_mult_left = 0;
  1384. asf->nb_sub = 0;
  1385. asf->prop_flags = 0;
  1386. asf->sub_dts = 0;
  1387. for (i = 0; i < asf->nb_streams; i++) {
  1388. ASFPacket *pkt = &asf->asf_st[i]->pkt;
  1389. pkt->size_left = 0;
  1390. pkt->data_size = 0;
  1391. pkt->duration = 0;
  1392. pkt->flags = 0;
  1393. pkt->dts = 0;
  1394. pkt->duration = 0;
  1395. av_packet_unref(&pkt->avpkt);
  1396. av_init_packet(&pkt->avpkt);
  1397. }
  1398. }
  1399. /*
  1400. * Find a timestamp for the requested position within the payload,
  1401. * where pos is the offset inside the Data Object.
  1402. * When the position is not on a packet boundary, asf_read_timestamp tries
  1403. * to find the closest packet offset after this position. If that packet
  1404. * is a key frame, its timestamp is read and an index entry is created
  1405. * for the packet. If the packet belongs to the requested stream,
  1406. * asf_read_timestamp updates pos to the offset of the packet's beginning
  1407. * and returns the packet's dts. The returned dts is thus the dts of the
  1408. * first key frame with a matching stream number after the given position.
  1409. */
  1410. static int64_t asf_read_timestamp(AVFormatContext *s, int stream_index,
  1411. int64_t *pos, int64_t pos_limit)
  1412. {
  1413. ASFContext *asf = s->priv_data;
  1414. int64_t pkt_pos = *pos, pkt_offset, dts = AV_NOPTS_VALUE, data_end;
  1415. AVPacket pkt;
  1416. int n;
  1417. data_end = asf->data_offset + asf->data_size;
  1418. n = (pkt_pos - asf->first_packet_offset + asf->packet_size - 1) /
  1419. asf->packet_size;
  1420. n = av_clip(n, 0, ((data_end - asf->first_packet_offset) / asf->packet_size - 1));
  1421. pkt_pos = asf->first_packet_offset + n * asf->packet_size;
  1422. avio_seek(s->pb, pkt_pos, SEEK_SET);
  1423. pkt_offset = pkt_pos;
  1424. reset_packet_state(s);
  1425. while (avio_tell(s->pb) < data_end) {
  1426. int i, ret, st_found;
  1427. av_init_packet(&pkt);
  1428. pkt_offset = avio_tell(s->pb);
  1429. if ((ret = asf_read_packet(s, &pkt)) < 0) {
  1430. dts = AV_NOPTS_VALUE;
  1431. return ret;
  1432. }
  1433. // ASFPacket may contain fragments of packets belonging to different streams,
  1434. // pkt_offset is the offset of the first fragment within it.
  1435. if ((pkt_offset >= (pkt_pos + asf->packet_size)))
  1436. pkt_pos += asf->packet_size;
  1437. for (i = 0; i < asf->nb_streams; i++) {
  1438. ASFStream *st = asf->asf_st[i];
  1439. st_found = 0;
  1440. if (pkt.flags & AV_PKT_FLAG_KEY) {
  1441. dts = pkt.dts;
  1442. if (dts) {
  1443. av_add_index_entry(s->streams[pkt.stream_index], pkt_pos,
  1444. dts, pkt.size, 0, AVINDEX_KEYFRAME);
  1445. if (stream_index == st->index) {
  1446. st_found = 1;
  1447. break;
  1448. }
  1449. }
  1450. }
  1451. }
  1452. if (st_found)
  1453. break;
  1454. av_packet_unref(&pkt);
  1455. }
  1456. *pos = pkt_pos;
  1457. av_packet_unref(&pkt);
  1458. return dts;
  1459. }
  1460. static int asf_read_seek(AVFormatContext *s, int stream_index,
  1461. int64_t timestamp, int flags)
  1462. {
  1463. ASFContext *asf = s->priv_data;
  1464. int idx, ret;
  1465. if (s->streams[stream_index]->nb_index_entries && asf->is_simple_index) {
  1466. idx = av_index_search_timestamp(s->streams[stream_index], timestamp, flags);
  1467. if (idx < 0 || idx >= s->streams[stream_index]->nb_index_entries)
  1468. return AVERROR_INVALIDDATA;
  1469. avio_seek(s->pb, s->streams[stream_index]->index_entries[idx].pos, SEEK_SET);
  1470. } else {
  1471. if ((ret = ff_seek_frame_binary(s, stream_index, timestamp, flags)) < 0)
  1472. return ret;
  1473. }
  1474. reset_packet_state(s);
  1475. return 0;
  1476. }
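/* Look up a GUID read from the file in gdef[]; the GUID is byte-swapped
 * first so it can be compared directly against the table entries. */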
  1477. static const GUIDParseTable *find_guid(ff_asf_guid guid)
  1478. {
  1479. int j, ret;
  1480. const GUIDParseTable *g;
  1481. swap_guid(guid);
  1482. g = gdef;
  1483. for (j = 0; j < FF_ARRAY_ELEMS(gdef); j++) {
  1484. if (!(ret = memcmp(guid, g->guid, sizeof(g->guid))))
  1485. return g;
  1486. g++;
  1487. }
  1488. return NULL;
  1489. }
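/* Walk the sub-objects contained in an object without an explicit handler
 * and dispatch those whose GUIDs are known. */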
  1490. static int detect_unknown_subobject(AVFormatContext *s, int64_t offset, int64_t size)
  1491. {
  1492. ASFContext *asf = s->priv_data;
  1493. AVIOContext *pb = s->pb;
  1494. const GUIDParseTable *g = NULL;
  1495. ff_asf_guid guid;
  1496. int ret;
  1497. while (avio_tell(pb) <= offset + size) {
  1498. if (avio_tell(pb) == asf->offset)
  1499. break;
  1500. asf->offset = avio_tell(pb);
  1501. if ((ret = ff_get_guid(pb, &guid)) < 0)
  1502. return ret;
  1503. g = find_guid(guid);
  1504. if (g) {
  1505. if ((ret = g->read_object(s, g)) < 0)
  1506. return ret;
  1507. } else {
  1508. GUIDParseTable g2;
  1509. g2.name = "Unknown";
  1510. g2.is_subobject = 1;
  1511. asf_read_unknown(s, &g2);
  1512. }
  1513. }
  1514. return 0;
  1515. }
  1516. static int asf_read_header(AVFormatContext *s)
  1517. {
  1518. ASFContext *asf = s->priv_data;
  1519. AVIOContext *pb = s->pb;
  1520. const GUIDParseTable *g = NULL;
  1521. ff_asf_guid guid;
  1522. int i, ret;
  1523. uint64_t size;
  1524. asf->preroll = 0;
  1525. asf->is_simple_index = 0;
  1526. ff_get_guid(pb, &guid);
  1527. if (ff_guidcmp(&guid, &ff_asf_header))
  1528. return AVERROR_INVALIDDATA;
  1529. avio_skip(pb, 8); // skip header object size
  1530. avio_skip(pb, 6); // skip number of header objects and 2 reserved bytes
  1531. asf->data_reached = 0;
  1532. /* Loop on 1 instead of pb->eof_reached because, when the input is seekable,
  1533. * the Data Object is skipped on the first pass, the Index Object is then
  1534. * parsed until EOF is reached, and only afterwards does the demuxer seek
  1535. * back to the Data Object to read packets. */
  1536. while (1) {
  1537. // for the cases when object size is invalid
  1538. if (avio_tell(pb) == asf->offset)
  1539. break;
  1540. asf->offset = avio_tell(pb);
  1541. if ((ret = ff_get_guid(pb, &guid)) < 0) {
  1542. if (ret == AVERROR_EOF && asf->data_reached)
  1543. break;
  1544. else
  1545. goto failed;
  1546. }
  1547. g = find_guid(guid);
  1548. if (g) {
  1549. asf->unknown_offset = asf->offset;
  1550. asf->is_header = 1;
  1551. if ((ret = g->read_object(s, g)) < 0)
  1552. goto failed;
  1553. } else {
  1554. size = avio_rl64(pb);
  1555. align_position(pb, asf->offset, size);
  1556. }
  1557. if (asf->data_reached && (!pb->seekable || (asf->b_flags & ASF_FLAG_BROADCAST)))
  1558. break;
  1559. }
  1560. if (!asf->data_reached) {
  1561. av_log(s, AV_LOG_ERROR, "Data Object was not found.\n");
  1562. ret = AVERROR_INVALIDDATA;
  1563. goto failed;
  1564. }
  1565. if (pb->seekable)
  1566. avio_seek(pb, asf->first_packet_offset, SEEK_SET);
  1567. for (i = 0; i < asf->nb_streams; i++) {
  1568. const char *rfc1766 = asf->asf_sd[asf->asf_st[i]->lang_idx].langs;
  1569. AVStream *st = s->streams[asf->asf_st[i]->index];
  1570. set_language(s, rfc1766, &st->metadata);
  1571. }
  1572. for (i = 0; i < ASF_MAX_STREAMS; i++) {
  1573. AVStream *st = NULL;
  1574. st = find_stream(s, i);
  1575. if (st) {
  1576. av_dict_copy(&st->metadata, asf->asf_sd[i].asf_met, AV_DICT_IGNORE_SUFFIX);
  1577. if (asf->asf_sd[i].aspect_ratio.num > 0 && asf->asf_sd[i].aspect_ratio.den > 0) {
  1578. st->sample_aspect_ratio.num = asf->asf_sd[i].aspect_ratio.num;
  1579. st->sample_aspect_ratio.den = asf->asf_sd[i].aspect_ratio.den;
  1580. }
  1581. }
  1582. }
  1583. return 0;
  1584. failed:
  1585. asf_read_close(s);
  1586. return ret;
  1587. }
  1588. AVInputFormat ff_asf_demuxer = {
  1589. .name = "asf",
  1590. .long_name = NULL_IF_CONFIG_SMALL("ASF (Advanced / Active Streaming Format)"),
  1591. .priv_data_size = sizeof(ASFContext),
  1592. .read_probe = asf_probe,
  1593. .read_header = asf_read_header,
  1594. .read_packet = asf_read_packet,
  1595. .read_close = asf_read_close,
  1596. .read_timestamp = asf_read_timestamp,
  1597. .read_seek = asf_read_seek,
  1598. .flags = AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH,
  1599. };