/*
 * Libavformat API example: Output a media file in any supported
 * libavformat format. The default codecs are used.
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
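/*
 * Build note (not part of the original example, exact paths are an
 * assumption): against an FFmpeg tree of this vintage this file was
 * typically compiled from inside the source tree with something like
 *     gcc output_example.c -o output_example -I. -lavformat -lavcodec -lm
 * where the include and library paths depend on how FFmpeg was built.
 */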
#include <stdlib.h>
#include <stdio.h>
#include <math.h>

#include "avformat.h"

/* 5 seconds stream duration */
#define STREAM_DURATION 5.0

/**************************************************************/
/* audio output */
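/* Shared audio state: t, tincr and tincr2 hold the sine generator's phase and
   phase increments, 'samples' is the raw interleaved int16 input buffer,
   'audio_outbuf' receives the encoded data, and 'audio_input_frame_size' is
   the number of samples per channel fed to the encoder on each call. */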
float t, tincr, tincr2;
int16_t *samples;
uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;

/*
 * add an audio output stream
 */
AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 1);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = &st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_AUDIO;

    /* put sample parameters */
    c->bit_rate = 64000;
    c->sample_rate = 44100;
    c->channels = 2;
    return st;
}
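/*
 * open_audio() finds the encoder selected in add_audio_stream(), opens it,
 * initializes the sine generator state (t, tincr, tincr2) and allocates the
 * raw-sample and encoded-output buffers used by write_audio_frame().
 */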
void open_audio(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVCodec *codec;

    c = &st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open it */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    /* init signal generator */
    t = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf = malloc(audio_outbuf_size);

    /* ugly hack for PCM codecs (will be removed ASAP with new PCM
       support) to compute the input frame size in samples */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch(st->codec.codec_id) {
        case CODEC_ID_PCM_S16LE:
        case CODEC_ID_PCM_S16BE:
        case CODEC_ID_PCM_U16LE:
        case CODEC_ID_PCM_U16BE:
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }
    samples = malloc(audio_input_frame_size * 2 * c->channels);
}
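/*
 * write_audio_frame() fills 'samples' with one frame of a stereo sine tone
 * whose frequency starts at 110 Hz and rises by 110 Hz per second, encodes
 * it with the stream's codec and writes the resulting packet to the file.
 */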
void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
    int j, out_size;
    AVCodecContext *c;

    c = &st->codec;

    for(j=0;j<audio_input_frame_size;j++) {
        samples[2*j] = (int)(sin(t) * 10000);
        samples[2*j+1] = samples[2*j];
        t += tincr;
        tincr += tincr2;
    }

    out_size = avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);

    /* write the compressed frame in the media file */
    if (av_write_frame(oc, st->index, audio_outbuf, out_size) != 0) {
        fprintf(stderr, "Error while writing audio frame\n");
        exit(1);
    }
}

void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(&st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}

/**************************************************************/
/* video output */
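/* Shared video state: 'picture' is the frame handed to the encoder,
   'tmp_picture' is a temporary YUV420P frame used when the encoder wants a
   different pixel format, 'video_outbuf' receives the encoded data and
   'frame_count' is the index of the next frame to generate. */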
AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;

/* add a video output stream */
AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = &st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = 352;
    c->height = 288;
    /* frames per second */
    c->frame_rate = 25;
    c->frame_rate_base = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames */
    return st;
}
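/*
 * alloc_picture() allocates an AVFrame together with a raw image buffer for
 * the given pixel format and size, and wires the frame's data pointers and
 * line sizes to that buffer via avpicture_fill().
 */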
AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
        return NULL;
    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   pix_fmt, width, height);
    return picture;
}
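/*
 * open_video() opens the video encoder, allocates the encoded-output buffer
 * (skipped for formats that take raw pictures directly) and the frame the
 * encoder reads from, plus a temporary YUV420P frame if conversion is needed.
 */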
void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = &st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        video_outbuf_size = 200000;
        video_outbuf = malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}
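/*
 * fill_yuv_image() draws a simple animated test pattern: the full-resolution
 * luma plane varies with x, y and the frame index, while the two
 * half-resolution chroma planes (YUV420P) also shift with the frame index.
 */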
/* prepare a dummy image */
void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
    int x, y, i;

    i = frame_index;

    /* Y */
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
        }
    }

    /* Cb and Cr */
    for(y=0;y<height/2;y++) {
        for(x=0;x<width/2;x++) {
            pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
            pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
        }
    }
}
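/*
 * write_video_frame() generates the next dummy YUV420P image, converts it to
 * the encoder's pixel format if necessary, then either hands the raw picture
 * straight to the muxer (AVFMT_RAWPICTURE formats) or encodes it and writes
 * the compressed frame.
 */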
void write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;

    c = &st->codec;

    if (c->pix_fmt != PIX_FMT_YUV420P) {
        /* as we only generate a YUV420P picture, we must convert it
           to the codec pixel format if needed */
        fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
        img_convert((AVPicture *)picture, c->pix_fmt,
                    (AVPicture *)tmp_picture, PIX_FMT_YUV420P,
                    c->width, c->height);
    } else {
        fill_yuv_image(picture, frame_count, c->width, c->height);
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
           future for that */
        ret = av_write_frame(oc, st->index,
                             (uint8_t *)picture, sizeof(AVPicture));
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* write the compressed frame in the media file */
        ret = av_write_frame(oc, st->index, video_outbuf, out_size);
    }
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
        exit(1);
    }
    frame_count++;
}

void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(&st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}
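/*
 * main() ties everything together: guess the container format from the file
 * name, create the default audio and video streams, open their codecs, open
 * the output file, write the header, interleave audio and video frames until
 * STREAM_DURATION is reached, then write the trailer and free everything.
 */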
/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        exit(1);
    }
    filename = argv[1];

    /* auto detect the output format from the name. default is
       mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    /* allocate the output media context */
    oc = av_mallocz(sizeof(AVFormatContext));
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    /* set the output parameters (must be done even if no
       parameters). */
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);
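    /* Encoding loop: feed whichever stream is currently behind (smaller pts)
       so that audio and video packets come out interleaved, and stop once
       both streams have passed STREAM_DURATION seconds. */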
    for(;;) {
        /* compute current audio and video time */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * oc->pts_num / oc->pts_den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * oc->pts_num / oc->pts_den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
        }
    }

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(&oc->pb);
    }

    /* free the format context */
    av_free(oc);
    return 0;
}