/*
 * AVI muxer
 * Copyright (c) 2000 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avformat.h"
#include "avi.h"
#include "riff.h"

/*
 * TODO:
 *  - fill all fields if non streamed (nb_frames for example)
 */
typedef struct AVIIentry {
    unsigned int flags, pos, len;
} AVIIentry;

#define AVI_INDEX_CLUSTER_SIZE 16384
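
/* Per-stream index entries are kept in clusters of AVI_INDEX_CLUSTER_SIZE
 * entries each, so the index can grow without reallocating one huge array. */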
typedef struct AVIIndex {
    int64_t     indx_start;
    int         entry;
    int         ents_allocated;
    AVIIentry** cluster;
} AVIIndex;
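
/* Muxer state: offsets of header fields that are patched later (frame
 * counts, RIFF/LIST sizes) plus the per-stream index data. riff_id counts
 * the RIFF chunks written so far; once the current RIFF grows past
 * AVI_MAX_RIFF_SIZE the file is continued in OpenDML "AVIX" RIFF chunks. */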
typedef struct {
    int64_t riff_start, movi_list, odml_list;
    int64_t frames_hdr_all, frames_hdr_strm[MAX_STREAMS];
    int audio_strm_length[MAX_STREAMS];
    int riff_id;
    int packet_count[MAX_STREAMS];

    AVIIndex indexes[MAX_STREAMS];
} AVIContext;
static inline AVIIentry* avi_get_ientry(AVIIndex* idx, int ent_id)
{
    int cl = ent_id / AVI_INDEX_CLUSTER_SIZE;
    int id = ent_id % AVI_INDEX_CLUSTER_SIZE;
    return &idx->cluster[cl][id];
}
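
/* Open a new RIFF chunk ("RIFF"/"AVI " for the first one, "AVIX" for OpenDML
 * continuation chunks) followed by a LIST, and reset the per-stream index
 * entry counters for this RIFF. Returns the offset of the LIST size field so
 * the caller can close it with ff_end_tag(). */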
static int64_t avi_start_new_riff(AVIContext *avi, ByteIOContext *pb,
                                  const char* riff_tag, const char* list_tag)
{
    int64_t loff;
    int i;

    avi->riff_id++;
    for (i=0; i<MAX_STREAMS; i++)
        avi->indexes[i].entry = 0;

    avi->riff_start = ff_start_tag(pb, "RIFF");
    put_tag(pb, riff_tag);
    loff = ff_start_tag(pb, "LIST");
    put_tag(pb, list_tag);
    return loff;
}
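
/* Build the four-character code used for a stream's data chunks and index
 * entries: two digits for the stream number followed by "dc" (video),
 * "wb" (audio) or the unofficial "sb" (subtitles, XSUB). */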
static char* avi_stream2fourcc(char* tag, int index, enum CodecType type)
{
    tag[0] = '0';
    tag[1] = '0' + index;
    if (type == CODEC_TYPE_VIDEO) {
        tag[2] = 'd';
        tag[3] = 'c';
    } else if (type == CODEC_TYPE_SUBTITLE) {
        // note: this is not an official code
        tag[2] = 's';
        tag[3] = 'b';
    } else {
        tag[2] = 'w';
        tag[3] = 'b';
    }
    tag[4] = '\0';
    return tag;
}
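
/* Write one string entry of the INFO LIST; entries are padded to an even
 * size as RIFF requires. */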
static void avi_write_info_tag(ByteIOContext *pb, const char *tag, const char *str)
{
    int len = strlen(str);

    if (len > 0) {
        len++;
        put_tag(pb, tag);
        put_le32(pb, len);
        put_strz(pb, str);
        if (len & 1)
            put_byte(pb, 0);
    }
}
static void avi_write_info_tag2(AVFormatContext *s, const char *fourcc, const char *key1, const char *key2)
{
    AVMetadataTag *tag= av_metadata_get(s->metadata, key1, NULL, 0);
    if(!tag && key2)
        tag= av_metadata_get(s->metadata, key2, NULL, 0);
    if(tag)
        avi_write_info_tag(s->pb, fourcc, tag->value);
}
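
/* Seek back and patch the per-stream length fields in the "strh" headers
 * (packet count for video, sample count for audio) and, for the first RIFF,
 * the total frame count in the "avih" header, then restore the file position. */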
static int avi_write_counters(AVFormatContext* s, int riff_id)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    int n, au_byterate, au_ssize, au_scale, nb_frames = 0;
    int64_t file_size;
    AVCodecContext* stream;

    file_size = url_ftell(pb);
    for(n = 0; n < s->nb_streams; n++) {
        assert(avi->frames_hdr_strm[n]);
        stream = s->streams[n]->codec;
        url_fseek(pb, avi->frames_hdr_strm[n], SEEK_SET);
        ff_parse_specific_params(stream, &au_byterate, &au_ssize, &au_scale);
        if(au_ssize == 0) {
            put_le32(pb, avi->packet_count[n]);
        } else {
            put_le32(pb, avi->audio_strm_length[n] / au_ssize);
        }
        if(stream->codec_type == CODEC_TYPE_VIDEO)
            nb_frames = FFMAX(nb_frames, avi->packet_count[n]);
    }
    if(riff_id == 1) {
        assert(avi->frames_hdr_all);
        url_fseek(pb, avi->frames_hdr_all, SEEK_SET);
        put_le32(pb, nb_frames);
    }
    url_fseek(pb, file_size, SEEK_SET);

    return 0;
}
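
/* Write the "hdrl" LIST: the main "avih" header, one "strl" LIST per stream
 * ("strh" + "strf", a JUNK placeholder for the OpenDML master index and an
 * optional "vprp" aspect-ratio chunk), the ODML "dmlh" placeholder, the INFO
 * LIST and some JUNK padding, and finally open the "movi" LIST for the data. */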
static int avi_write_header(AVFormatContext *s)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = s->pb;
    int bitrate, n, i, nb_frames, au_byterate, au_ssize, au_scale;
    AVCodecContext *stream, *video_enc;
    int64_t list1, list2, strh, strf;

    /* header list */
    avi->riff_id = 0;
    list1 = avi_start_new_riff(avi, pb, "AVI ", "hdrl");

    /* avi header */
    put_tag(pb, "avih");
    put_le32(pb, 14 * 4);
    bitrate = 0;

    video_enc = NULL;
    for(n=0;n<s->nb_streams;n++) {
        stream = s->streams[n]->codec;
        bitrate += stream->bit_rate;
        if (stream->codec_type == CODEC_TYPE_VIDEO)
            video_enc = stream;
    }

    nb_frames = 0;

    if(video_enc){
        put_le32(pb, (uint32_t)(INT64_C(1000000) * video_enc->time_base.num / video_enc->time_base.den));
    } else {
        put_le32(pb, 0);
    }
    put_le32(pb, bitrate / 8); /* XXX: not quite exact */
    put_le32(pb, 0); /* padding */
    if (url_is_streamed(pb))
        put_le32(pb, AVIF_TRUSTCKTYPE | AVIF_ISINTERLEAVED); /* flags */
    else
        put_le32(pb, AVIF_TRUSTCKTYPE | AVIF_HASINDEX | AVIF_ISINTERLEAVED); /* flags */
    avi->frames_hdr_all = url_ftell(pb); /* remember this offset to fill later */
    put_le32(pb, nb_frames); /* nb frames, filled later */
    put_le32(pb, 0); /* initial frame */
    put_le32(pb, s->nb_streams); /* nb streams */
    put_le32(pb, 1024 * 1024); /* suggested buffer size */
    if(video_enc){
        put_le32(pb, video_enc->width);
        put_le32(pb, video_enc->height);
    } else {
        put_le32(pb, 0);
        put_le32(pb, 0);
    }
    put_le32(pb, 0); /* reserved */
    put_le32(pb, 0); /* reserved */
    put_le32(pb, 0); /* reserved */
    put_le32(pb, 0); /* reserved */

    /* stream list */
    for(i=0;i<n;i++) {
        list2 = ff_start_tag(pb, "LIST");
        put_tag(pb, "strl");

        stream = s->streams[i]->codec;

        /* stream generic header */
        strh = ff_start_tag(pb, "strh");
        switch(stream->codec_type) {
        case CODEC_TYPE_SUBTITLE:
            // XSUB subtitles behave like video tracks, other subtitles
            // are not (yet) supported.
            if (stream->codec_id != CODEC_ID_XSUB) break;
        case CODEC_TYPE_VIDEO: put_tag(pb, "vids"); break;
        case CODEC_TYPE_AUDIO: put_tag(pb, "auds"); break;
//      case CODEC_TYPE_TEXT : put_tag(pb, "txts"); break;
        case CODEC_TYPE_DATA : put_tag(pb, "dats"); break;
        }
        if(stream->codec_type == CODEC_TYPE_VIDEO ||
           stream->codec_id == CODEC_ID_XSUB)
            put_le32(pb, stream->codec_tag);
        else
            put_le32(pb, 1);
        put_le32(pb, 0); /* flags */
        put_le16(pb, 0); /* priority */
        put_le16(pb, 0); /* language */
        put_le32(pb, 0); /* initial frame */

        ff_parse_specific_params(stream, &au_byterate, &au_ssize, &au_scale);

        put_le32(pb, au_scale); /* scale */
        put_le32(pb, au_byterate); /* rate */
        av_set_pts_info(s->streams[i], 64, au_scale, au_byterate);

        put_le32(pb, 0); /* start */
        avi->frames_hdr_strm[i] = url_ftell(pb); /* remember this offset to fill later */
        if (url_is_streamed(pb))
            put_le32(pb, AVI_MAX_RIFF_SIZE); /* FIXME: this may be broken, but who cares */
        else
            put_le32(pb, 0); /* length, XXX: filled later */

        /* suggested buffer size */ //FIXME set at the end to largest chunk
        if(stream->codec_type == CODEC_TYPE_VIDEO)
            put_le32(pb, 1024 * 1024);
        else if(stream->codec_type == CODEC_TYPE_AUDIO)
            put_le32(pb, 12 * 1024);
        else
            put_le32(pb, 0);
        put_le32(pb, -1); /* quality */
        put_le32(pb, au_ssize); /* sample size */
        put_le32(pb, 0);
        put_le16(pb, stream->width);
        put_le16(pb, stream->height);
        ff_end_tag(pb, strh);

        if(stream->codec_type != CODEC_TYPE_DATA){
            strf = ff_start_tag(pb, "strf");

            switch(stream->codec_type) {
            case CODEC_TYPE_SUBTITLE:
                // XSUB subtitles behave like video tracks, other subtitles
                // are not (yet) supported.
                if (stream->codec_id != CODEC_ID_XSUB) break;
            case CODEC_TYPE_VIDEO:
                ff_put_bmp_header(pb, stream, ff_codec_bmp_tags, 0);
                break;
            case CODEC_TYPE_AUDIO:
                if (ff_put_wav_header(pb, stream) < 0) {
                    return -1;
                }
                break;
            default:
                return -1;
            }
            ff_end_tag(pb, strf);
        }

        if (!url_is_streamed(pb)) {
            unsigned char tag[5];
            int j;

            /* Starting to lay out AVI OpenDML master index.
             * We want to make it JUNK entry for now, since we'd
             * like to get away without making AVI an OpenDML one
             * for compatibility reasons.
             */
            avi->indexes[i].entry = avi->indexes[i].ents_allocated = 0;
            avi->indexes[i].indx_start = ff_start_tag(pb, "JUNK");
            put_le16(pb, 4);        /* wLongsPerEntry */
            put_byte(pb, 0);        /* bIndexSubType (0 == frame index) */
            put_byte(pb, 0);        /* bIndexType (0 == AVI_INDEX_OF_INDEXES) */
            put_le32(pb, 0);        /* nEntriesInUse (will fill out later on) */
            put_tag(pb, avi_stream2fourcc(&tag[0], i, stream->codec_type));
                                    /* dwChunkId */
            put_le64(pb, 0);        /* dwReserved[3]
            put_le32(pb, 0);           Must be 0.    */
            for (j=0; j < AVI_MASTER_INDEX_SIZE * 2; j++)
                put_le64(pb, 0);
            ff_end_tag(pb, avi->indexes[i].indx_start);
        }

        if(   stream->codec_type == CODEC_TYPE_VIDEO
           && s->streams[i]->sample_aspect_ratio.num>0
           && s->streams[i]->sample_aspect_ratio.den>0){
            int vprp= ff_start_tag(pb, "vprp");
            AVRational dar = av_mul_q(s->streams[i]->sample_aspect_ratio,
                                      (AVRational){stream->width, stream->height});
            int num, den;
            av_reduce(&num, &den, dar.num, dar.den, 0xFFFF);

            put_le32(pb, 0); //video format  = unknown
            put_le32(pb, 0); //video standard= unknown
            put_le32(pb, lrintf(1.0/av_q2d(stream->time_base)));
            put_le32(pb, stream->width );
            put_le32(pb, stream->height);
            put_le16(pb, den);
            put_le16(pb, num);
            put_le32(pb, stream->width );
            put_le32(pb, stream->height);
            put_le32(pb, 1); //progressive FIXME

            put_le32(pb, stream->height);
            put_le32(pb, stream->width );
            put_le32(pb, stream->height);
            put_le32(pb, stream->width );
            put_le32(pb, 0);
            put_le32(pb, 0);

            put_le32(pb, 0);
            put_le32(pb, 0);
            ff_end_tag(pb, vprp);
        }

        ff_end_tag(pb, list2);
    }

    if (!url_is_streamed(pb)) {
        /* AVI could become an OpenDML one, if it grows beyond 2Gb range */
        avi->odml_list = ff_start_tag(pb, "JUNK");
        put_tag(pb, "odml");
        put_tag(pb, "dmlh");
        put_le32(pb, 248);
        for (i = 0; i < 248; i+= 4)
            put_le32(pb, 0);
        ff_end_tag(pb, avi->odml_list);
    }

    ff_end_tag(pb, list1);

    list2 = ff_start_tag(pb, "LIST");
    put_tag(pb, "INFO");
    avi_write_info_tag2(s, "INAM", "Title", NULL);
    avi_write_info_tag2(s, "IART", "Artist", "Author");
    avi_write_info_tag2(s, "ICOP", "Copyright", NULL);
    avi_write_info_tag2(s, "ICMT", "Comment", NULL);
    avi_write_info_tag2(s, "IPRD", "Album", NULL);
    avi_write_info_tag2(s, "IGNR", "Genre", NULL);
    avi_write_info_tag2(s, "IPRT", "Track", NULL);
    if(!(s->streams[0]->codec->flags & CODEC_FLAG_BITEXACT))
        avi_write_info_tag(pb, "ISFT", LIBAVFORMAT_IDENT);
    ff_end_tag(pb, list2);

    /* some padding for easier tag editing */
    list2 = ff_start_tag(pb, "JUNK");
    for (i = 0; i < 1016; i += 4)
        put_le32(pb, 0);
    ff_end_tag(pb, list2);

    avi->movi_list = ff_start_tag(pb, "LIST");
    put_tag(pb, "movi");

    put_flush_packet(pb);

    return 0;
}
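
/* Write one OpenDML "ix##" leaf index per stream for the current RIFF and
 * update the corresponding entry of each stream's master index, turning its
 * JUNK placeholder into a real "indx" chunk. */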
static int avi_write_ix(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    char tag[5];
    char ix_tag[] = "ix00";
    int i, j;

    assert(!url_is_streamed(pb));

    if (avi->riff_id > AVI_MASTER_INDEX_SIZE)
        return -1;

    for (i=0;i<s->nb_streams;i++) {
        int64_t ix, pos;

        avi_stream2fourcc(&tag[0], i, s->streams[i]->codec->codec_type);
        ix_tag[3] = '0' + i;

        /* Writing AVI OpenDML leaf index chunk */
        ix = url_ftell(pb);
        put_tag(pb, &ix_tag[0]);     /* ix?? */
        put_le32(pb, avi->indexes[i].entry * 8 + 24);
                                     /* chunk size */
        put_le16(pb, 2);             /* wLongsPerEntry */
        put_byte(pb, 0);             /* bIndexSubType (0 == frame index) */
        put_byte(pb, 1);             /* bIndexType (1 == AVI_INDEX_OF_CHUNKS) */
        put_le32(pb, avi->indexes[i].entry);
                                     /* nEntriesInUse */
        put_tag(pb, &tag[0]);        /* dwChunkId */
        put_le64(pb, avi->movi_list);/* qwBaseOffset */
        put_le32(pb, 0);             /* dwReserved_3 (must be 0) */

        for (j=0; j<avi->indexes[i].entry; j++) {
            AVIIentry* ie = avi_get_ientry(&avi->indexes[i], j);
            put_le32(pb, ie->pos + 8);
            put_le32(pb, ((uint32_t)ie->len & ~0x80000000) |
                         (ie->flags & 0x10 ? 0 : 0x80000000));
        }
        put_flush_packet(pb);
        pos = url_ftell(pb);

        /* Updating one entry in the AVI OpenDML master index */
        url_fseek(pb, avi->indexes[i].indx_start - 8, SEEK_SET);
        put_tag(pb, "indx");                 /* enabling this entry */
        url_fskip(pb, 8);
        put_le32(pb, avi->riff_id);          /* nEntriesInUse */
        url_fskip(pb, 16*avi->riff_id);
        put_le64(pb, ix);                    /* qwOffset */
        put_le32(pb, pos - ix);              /* dwSize */
        put_le32(pb, avi->indexes[i].entry); /* dwDuration */

        url_fseek(pb, pos, SEEK_SET);
    }
    return 0;
}
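
/* Write the legacy "idx1" index for the first RIFF chunk: entries from all
 * streams are merged in file-position order so pre-OpenDML players can seek. */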
static int avi_write_idx1(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    int64_t idx_chunk;
    int i;
    char tag[5];

    if (!url_is_streamed(pb)) {
        AVIIentry* ie = 0, *tie;
        int entry[MAX_STREAMS];
        int empty, stream_id = -1;

        idx_chunk = ff_start_tag(pb, "idx1");
        memset(&entry[0], 0, sizeof(entry));
        do {
            empty = 1;
            for (i=0; i<s->nb_streams; i++) {
                if (avi->indexes[i].entry <= entry[i])
                    continue;

                tie = avi_get_ientry(&avi->indexes[i], entry[i]);
                if (empty || tie->pos < ie->pos) {
                    ie = tie;
                    stream_id = i;
                }
                empty = 0;
            }
            if (!empty) {
                avi_stream2fourcc(&tag[0], stream_id,
                                  s->streams[stream_id]->codec->codec_type);
                put_tag(pb, &tag[0]);
                put_le32(pb, ie->flags);
                put_le32(pb, ie->pos);
                put_le32(pb, ie->len);
                entry[stream_id]++;
            }
        } while (!empty);
        ff_end_tag(pb, idx_chunk);

        avi_write_counters(s, avi->riff_id);
    }
    return 0;
}
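
/* Write one data chunk. For audio streams without a fixed block size, empty
 * chunks are inserted first so the packet count keeps up with the DTS; when
 * the current RIFF would exceed AVI_MAX_RIFF_SIZE the indexes are written and
 * a new OpenDML "AVIX" RIFF is started. The chunk is also recorded in the
 * in-memory index when the output is seekable. */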
static int avi_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = s->pb;
    unsigned char tag[5];
    unsigned int flags=0;
    const int stream_index= pkt->stream_index;
    AVCodecContext *enc= s->streams[stream_index]->codec;
    int size= pkt->size;

//    av_log(s, AV_LOG_DEBUG, "%"PRId64" %d %d\n", pkt->dts, avi->packet_count[stream_index], stream_index);
    while(enc->block_align==0 && pkt->dts != AV_NOPTS_VALUE && pkt->dts > avi->packet_count[stream_index]){
        AVPacket empty_packet;

        av_init_packet(&empty_packet);
        empty_packet.size= 0;
        empty_packet.data= NULL;
        empty_packet.stream_index= stream_index;
        avi_write_packet(s, &empty_packet);
//        av_log(s, AV_LOG_DEBUG, "dup %"PRId64" %d\n", pkt->dts, avi->packet_count[stream_index]);
    }
    avi->packet_count[stream_index]++;

    // Make sure to put an OpenDML chunk when the file size exceeds the limits
    if (!url_is_streamed(pb) &&
        (url_ftell(pb) - avi->riff_start > AVI_MAX_RIFF_SIZE)) {

        avi_write_ix(s);
        ff_end_tag(pb, avi->movi_list);

        if (avi->riff_id == 1)
            avi_write_idx1(s);

        ff_end_tag(pb, avi->riff_start);
        avi->movi_list = avi_start_new_riff(avi, pb, "AVIX", "movi");
    }

    avi_stream2fourcc(&tag[0], stream_index, enc->codec_type);
    if(pkt->flags&PKT_FLAG_KEY)
        flags = 0x10;
    if (enc->codec_type == CODEC_TYPE_AUDIO) {
        avi->audio_strm_length[stream_index] += size;
    }

    if (!url_is_streamed(s->pb)) {
        AVIIndex* idx = &avi->indexes[stream_index];
        int cl = idx->entry / AVI_INDEX_CLUSTER_SIZE;
        int id = idx->entry % AVI_INDEX_CLUSTER_SIZE;
        if (idx->ents_allocated <= idx->entry) {
            idx->cluster = av_realloc(idx->cluster, (cl+1)*sizeof(void*));
            if (!idx->cluster)
                return -1;
            idx->cluster[cl] = av_malloc(AVI_INDEX_CLUSTER_SIZE*sizeof(AVIIentry));
            if (!idx->cluster[cl])
                return -1;
            idx->ents_allocated += AVI_INDEX_CLUSTER_SIZE;
        }

        idx->cluster[cl][id].flags = flags;
        idx->cluster[cl][id].pos = url_ftell(pb) - avi->movi_list;
        idx->cluster[cl][id].len = size;
        idx->entry++;
    }

    put_buffer(pb, tag, 4);
    put_le32(pb, size);
    put_buffer(pb, pkt->data, size);
    if (size & 1)
        put_byte(pb, 0);

    put_flush_packet(pb);
    return 0;
}
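
/* Close the "movi" LIST and the current RIFF, write the remaining indexes,
 * patch the ODML "dmlh" total frame count and the per-stream counters, and
 * free the in-memory index clusters. */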
static int avi_write_trailer(AVFormatContext *s)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = s->pb;
    int res = 0;
    int i, j, n, nb_frames;
    int64_t file_size;

    if (!url_is_streamed(pb)){
        if (avi->riff_id == 1) {
            ff_end_tag(pb, avi->movi_list);
            res = avi_write_idx1(s);
            ff_end_tag(pb, avi->riff_start);
        } else {
            avi_write_ix(s);
            ff_end_tag(pb, avi->movi_list);
            ff_end_tag(pb, avi->riff_start);

            file_size = url_ftell(pb);
            url_fseek(pb, avi->odml_list - 8, SEEK_SET);
            put_tag(pb, "LIST"); /* Making this AVI OpenDML one */
            url_fskip(pb, 16);

            for (n=nb_frames=0;n<s->nb_streams;n++) {
                AVCodecContext *stream = s->streams[n]->codec;
                if (stream->codec_type == CODEC_TYPE_VIDEO) {
                    if (nb_frames < avi->packet_count[n])
                        nb_frames = avi->packet_count[n];
                } else {
                    if (stream->codec_id == CODEC_ID_MP2 || stream->codec_id == CODEC_ID_MP3) {
                        nb_frames += avi->packet_count[n];
                    }
                }
            }
            put_le32(pb, nb_frames);
            url_fseek(pb, file_size, SEEK_SET);

            avi_write_counters(s, avi->riff_id);
        }
    }
    put_flush_packet(pb);

    for (i=0; i<MAX_STREAMS; i++) {
        for (j=0; j<avi->indexes[i].ents_allocated/AVI_INDEX_CLUSTER_SIZE; j++)
            av_free(avi->indexes[i].cluster[j]);
        av_freep(&avi->indexes[i].cluster);
        avi->indexes[i].ents_allocated = avi->indexes[i].entry = 0;
    }

    return res;
}
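
/* Muxer registration. The positional initializers below are, in order, the
 * AVOutputFormat fields of this libavformat version: name, long name, MIME
 * type, extension, private data size, default audio and video codecs, and
 * the write_header/write_packet/write_trailer callbacks. */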
AVOutputFormat avi_muxer = {
    "avi",
    NULL_IF_CONFIG_SMALL("AVI format"),
    "video/x-msvideo",
    "avi",
    sizeof(AVIContext),
    CODEC_ID_MP2,
    CODEC_ID_MPEG4,
    avi_write_header,
    avi_write_packet,
    avi_write_trailer,
    .codec_tag= (const AVCodecTag* const []){ff_codec_bmp_tags, ff_codec_wav_tags, 0},
    .flags= AVFMT_VARIABLE_FPS,
};