You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1139 lines
42KB

  1. /*
  2. * MPEG1/2 encoder
  3. * Copyright (c) 2000,2001 Fabrice Bellard
  4. * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
  5. *
  6. * This file is part of FFmpeg.
  7. *
  8. * FFmpeg is free software; you can redistribute it and/or
  9. * modify it under the terms of the GNU Lesser General Public
  10. * License as published by the Free Software Foundation; either
  11. * version 2.1 of the License, or (at your option) any later version.
  12. *
  13. * FFmpeg is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  16. * Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FFmpeg; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  21. */
  22. /**
  23. * @file
  24. * MPEG1/2 encoder
  25. */
  26. #include <stdint.h>
  27. #include "libavutil/attributes.h"
  28. #include "libavutil/avassert.h"
  29. #include "libavutil/log.h"
  30. #include "libavutil/opt.h"
  31. #include "libavutil/timecode.h"
  32. #include "libavutil/stereo3d.h"
  33. #include "avcodec.h"
  34. #include "bytestream.h"
  35. #include "mathops.h"
  36. #include "mpeg12.h"
  37. #include "mpeg12data.h"
  38. #include "mpegutils.h"
  39. #include "mpegvideo.h"
/* Maps qscale (1..12) back to the 5-bit quantiser_scale_code written to
 * the bitstream when the non-linear q_scale_type is in use; index 0 is
 * unused (put_qscale() asserts qscale >= 1). */
static const uint8_t inv_non_linear_qscale[] = {
    0, 2, 4, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16,
};
/* SVCD scan-offset user data emitted verbatim after the picture header
 * when s->scan_offset is set; the trailing 0xff bytes are placeholders —
 * presumably patched with real offsets elsewhere (TODO: confirm against
 * the muxer/rate-control code, not visible in this file chunk). */
static const uint8_t svcd_scan_offset_placeholder[] = {
    0x10, 0x0E, 0x00, 0x80, 0x81, 0x00, 0x80,
    0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
};
/* Motion-vector bit-cost table per f_code, installed into s->me.mv_penalty. */
static uint8_t mv_penalty[MAX_FCODE + 1][MAX_MV * 2 + 1];
/* Smallest f_code able to represent a given MV (filled largest-to-smallest
 * so narrower ranges overwrite with the smaller f_code). */
static uint8_t fcode_tab[MAX_MV * 2 + 1];
/* Unified AC VLC length tables, filled by init_uni_ac_vlc(). */
static uint8_t uni_mpeg1_ac_vlc_len[64 * 64 * 2];
static uint8_t uni_mpeg2_ac_vlc_len[64 * 64 * 2];
/* simple include everything table for dc, first byte is bits
 * number next 3 are code */
static uint32_t mpeg1_lum_dc_uni[512];
static uint32_t mpeg1_chr_dc_uni[512];
/* Copies of the MPEG-1 RL table fields, cached for mpeg1_encode_block(). */
static uint8_t mpeg1_index_run[2][64];
static int8_t mpeg1_max_level[2][64];
  57. static av_cold void init_uni_ac_vlc(RLTable *rl, uint8_t *uni_ac_vlc_len)
  58. {
  59. int i;
  60. for (i = 0; i < 128; i++) {
  61. int level = i - 64;
  62. int run;
  63. if (!level)
  64. continue;
  65. for (run = 0; run < 64; run++) {
  66. int len, code;
  67. int alevel = FFABS(level);
  68. if (alevel > rl->max_level[0][run])
  69. code = 111; /* rl->n */
  70. else
  71. code = rl->index_run[0][run] + alevel - 1;
  72. if (code < 111) { /* rl->n */
  73. /* length of VLC and sign */
  74. len = rl->table_vlc[code][1] + 1;
  75. } else {
  76. len = rl->table_vlc[111 /* rl->n */][1] + 6;
  77. if (alevel < 128)
  78. len += 8;
  79. else
  80. len += 16;
  81. }
  82. uni_ac_vlc_len[UNI_AC_ENC_INDEX(run, i)] = len;
  83. }
  84. }
  85. }
  86. static int find_frame_rate_index(MpegEncContext *s)
  87. {
  88. int i;
  89. AVRational bestq = (AVRational) {0, 0};
  90. AVRational ext;
  91. AVRational target = av_inv_q(s->avctx->time_base);
  92. for (i = 1; i < 14; i++) {
  93. if (s->avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL &&
  94. i >= 9)
  95. break;
  96. for (ext.num=1; ext.num <= 4; ext.num++) {
  97. for (ext.den=1; ext.den <= 32; ext.den++) {
  98. AVRational q = av_mul_q(ext, ff_mpeg12_frame_rate_tab[i]);
  99. if (s->codec_id != AV_CODEC_ID_MPEG2VIDEO && (ext.den!=1 || ext.num!=1))
  100. continue;
  101. if (av_gcd(ext.den, ext.num) != 1)
  102. continue;
  103. if ( bestq.num==0
  104. || av_nearer_q(target, bestq, q) < 0
  105. || ext.num==1 && ext.den==1 && av_nearer_q(target, bestq, q) == 0) {
  106. bestq = q;
  107. s->frame_rate_index = i;
  108. s->mpeg2_frame_rate_ext.num = ext.num;
  109. s->mpeg2_frame_rate_ext.den = ext.den;
  110. }
  111. }
  112. }
  113. }
  114. if (av_cmp_q(target, bestq))
  115. return -1;
  116. else
  117. return 0;
  118. }
  119. static av_cold int encode_init(AVCodecContext *avctx)
  120. {
  121. MpegEncContext *s = avctx->priv_data;
  122. if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && avctx->height > 2800)
  123. avctx->thread_count = 1;
  124. if (ff_MPV_encode_init(avctx) < 0)
  125. return -1;
  126. if (find_frame_rate_index(s) < 0) {
  127. if (s->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
  128. av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n",
  129. avctx->time_base.den, avctx->time_base.num);
  130. return -1;
  131. } else {
  132. av_log(avctx, AV_LOG_INFO,
  133. "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n",
  134. avctx->time_base.den, avctx->time_base.num);
  135. }
  136. }
  137. if (avctx->profile == FF_PROFILE_UNKNOWN) {
  138. if (avctx->level != FF_LEVEL_UNKNOWN) {
  139. av_log(avctx, AV_LOG_ERROR, "Set profile and level\n");
  140. return -1;
  141. }
  142. /* Main or 4:2:2 */
  143. avctx->profile = s->chroma_format == CHROMA_420 ? 4 : 0;
  144. }
  145. if (avctx->level == FF_LEVEL_UNKNOWN) {
  146. if (avctx->profile == 0) { /* 4:2:2 */
  147. if (avctx->width <= 720 && avctx->height <= 608)
  148. avctx->level = 5; /* Main */
  149. else
  150. avctx->level = 2; /* High */
  151. } else {
  152. if (avctx->profile != 1 && s->chroma_format != CHROMA_420) {
  153. av_log(avctx, AV_LOG_ERROR,
  154. "Only High(1) and 4:2:2(0) profiles support 4:2:2 color sampling\n");
  155. return -1;
  156. }
  157. if (avctx->width <= 720 && avctx->height <= 576)
  158. avctx->level = 8; /* Main */
  159. else if (avctx->width <= 1440)
  160. avctx->level = 6; /* High 1440 */
  161. else
  162. avctx->level = 4; /* High */
  163. }
  164. }
  165. if ((avctx->width & 0xFFF) == 0 && (avctx->height & 0xFFF) == 1) {
  166. av_log(avctx, AV_LOG_ERROR, "Width / Height is invalid for MPEG2\n");
  167. return AVERROR(EINVAL);
  168. }
  169. if (s->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
  170. if ((avctx->width & 0xFFF) == 0 || (avctx->height & 0xFFF) == 0) {
  171. av_log(avctx, AV_LOG_ERROR, "Width or Height are not allowed to be multiplies of 4096\n"
  172. "add '-strict %d' if you want to use them anyway.\n", FF_COMPLIANCE_UNOFFICIAL);
  173. return AVERROR(EINVAL);
  174. }
  175. }
  176. s->drop_frame_timecode = s->drop_frame_timecode || !!(avctx->flags2 & CODEC_FLAG2_DROP_FRAME_TIMECODE);
  177. if (s->drop_frame_timecode)
  178. s->tc.flags |= AV_TIMECODE_FLAG_DROPFRAME;
  179. if (s->drop_frame_timecode && s->frame_rate_index != 4) {
  180. av_log(avctx, AV_LOG_ERROR,
  181. "Drop frame time code only allowed with 1001/30000 fps\n");
  182. return -1;
  183. }
  184. if (s->tc_opt_str) {
  185. AVRational rate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];
  186. int ret = av_timecode_init_from_string(&s->tc, rate, s->tc_opt_str, s);
  187. if (ret < 0)
  188. return ret;
  189. s->drop_frame_timecode = !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME);
  190. s->avctx->timecode_frame_start = s->tc.start;
  191. } else {
  192. s->avctx->timecode_frame_start = 0; // default is -1
  193. }
  194. return 0;
  195. }
  196. static void put_header(MpegEncContext *s, int header)
  197. {
  198. avpriv_align_put_bits(&s->pb);
  199. put_bits(&s->pb, 16, header >> 16);
  200. put_sbits(&s->pb, 16, header);
  201. }
/* Put the sequence header, MPEG-2 sequence extension and GOP header if
 * needed — emitted only on key frames, i.e. repeated every GOP. */
static void mpeg1_encode_sequence_header(MpegEncContext *s)
{
    unsigned int vbv_buffer_size, fps, v;
    int i, constraint_parameter_flag;
    uint64_t time_code;
    float best_aspect_error = 1E10;
    float aspect_ratio      = av_q2d(s->avctx->sample_aspect_ratio);

    if (aspect_ratio == 0.0)
        aspect_ratio = 1.0;             // pixel aspect 1:1 (VGA)

    if (s->current_picture.f.key_frame) {
        AVRational framerate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];

        /* mpeg1 header repeated every gop */
        put_header(s, SEQ_START_CODE);

        put_sbits(&s->pb, 12, s->width  & 0xFFF);
        put_sbits(&s->pb, 12, s->height & 0xFFF);

        /* pick the aspect_ratio_information with the smallest error;
         * MPEG-1 tables store pixel aspect, MPEG-2 display aspect */
        for (i = 1; i < 15; i++) {
            float error = aspect_ratio;
            if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO || i <= 1)
                error -= 1.0 / ff_mpeg1_aspect[i];
            else
                error -= av_q2d(ff_mpeg2_aspect[i]) * s->height / s->width;

            error = FFABS(error);

            if (error < best_aspect_error) {
                best_aspect_error    = error;
                s->aspect_ratio_info = i;
            }
        }

        put_bits(&s->pb, 4, s->aspect_ratio_info);
        put_bits(&s->pb, 4, s->frame_rate_index);

        /* bit_rate in units of 400 bit/s; 0x3FFFF also signals "free"
         * and is the MPEG-1 field ceiling */
        if (s->avctx->rc_max_rate) {
            v = (s->avctx->rc_max_rate + 399) / 400;
            if (v > 0x3ffff && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
                v = 0x3ffff;
        } else {
            v = 0x3FFFF;
        }

        if (s->avctx->rc_buffer_size)
            vbv_buffer_size = s->avctx->rc_buffer_size;
        else
            /* VBV calculation: Scaled so that a VCD has the proper
             * VBV size of 40 kilobytes */
            vbv_buffer_size = ((20 * s->bit_rate) / (1151929 / 2)) * 8 * 1024;
        vbv_buffer_size = (vbv_buffer_size + 16383) / 16384; /* 16 kbit units */

        put_sbits(&s->pb, 18, v);
        put_bits(&s->pb, 1, 1);         // marker
        put_sbits(&s->pb, 10, vbv_buffer_size);

        /* constrained_parameters_flag — MPEG-1 only */
        constraint_parameter_flag =
            s->width  <= 768                                            &&
            s->height <= 576                                            &&
            s->mb_width * s->mb_height                 <= 396           &&
            s->mb_width * s->mb_height * framerate.num <= 396 * 25 * framerate.den &&
            framerate.num <= framerate.den * 30                         &&
            s->avctx->me_range                                          &&
            s->avctx->me_range < 128                                    &&
            vbv_buffer_size <= 20                                       &&
            v <= 1856000 / 400                                          &&
            s->codec_id == AV_CODEC_ID_MPEG1VIDEO;

        put_bits(&s->pb, 1, constraint_parameter_flag);

        ff_write_quant_matrix(&s->pb, s->avctx->intra_matrix);
        ff_write_quant_matrix(&s->pb, s->avctx->inter_matrix);

        if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
            put_header(s, EXT_START_CODE);
            put_bits(&s->pb, 4, 1);                 // seq ext

            put_bits(&s->pb, 1, s->avctx->profile == 0); // escx 1 for 4:2:2 profile

            put_bits(&s->pb, 3, s->avctx->profile); // profile
            put_bits(&s->pb, 4, s->avctx->level);   // level

            put_bits(&s->pb, 1, s->progressive_sequence);
            put_bits(&s->pb, 2, s->chroma_format);
            put_bits(&s->pb, 2, s->width  >> 12);   // horizontal size ext
            put_bits(&s->pb, 2, s->height >> 12);   // vertical size ext
            put_bits(&s->pb, 12, v >> 18);          // bitrate ext
            put_bits(&s->pb, 1, 1);                 // marker
            put_bits(&s->pb, 8, vbv_buffer_size >> 10); // vbv buffer ext
            put_bits(&s->pb, 1, s->low_delay);
            put_bits(&s->pb, 2, s->mpeg2_frame_rate_ext.num - 1); // frame_rate_ext_n
            put_bits(&s->pb, 5, s->mpeg2_frame_rate_ext.den - 1); // frame_rate_ext_d
        }

        put_header(s, GOP_START_CODE);
        put_bits(&s->pb, 1, s->drop_frame_timecode);    // drop frame flag
        /* time code: we must convert from the real frame rate to a
         * fake MPEG frame rate in case of low frame rate */
        fps       = (framerate.num + framerate.den / 2) / framerate.den;
        time_code = s->current_picture_ptr->f.coded_picture_number +
                    s->avctx->timecode_frame_start;

        s->gop_picture_number = s->current_picture_ptr->f.coded_picture_number;

        av_assert0(s->drop_frame_timecode == !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME));
        if (s->drop_frame_timecode)
            time_code = av_timecode_adjust_ntsc_framenum2(time_code, fps);

        put_bits(&s->pb, 5, (uint32_t)((time_code / (fps * 3600)) % 24));
        put_bits(&s->pb, 6, (uint32_t)((time_code / (fps * 60)) % 60));
        put_bits(&s->pb, 1, 1);                     // marker
        put_bits(&s->pb, 6, (uint32_t)((time_code / fps) % 60));
        put_bits(&s->pb, 6, (uint32_t)((time_code % fps)));
        put_bits(&s->pb, 1, !!(s->flags & CODEC_FLAG_CLOSED_GOP) || s->intra_only || !s->gop_picture_number);
        put_bits(&s->pb, 1, 0);                     // broken link
    }
}
  300. static inline void encode_mb_skip_run(MpegEncContext *s, int run)
  301. {
  302. while (run >= 33) {
  303. put_bits(&s->pb, 11, 0x008);
  304. run -= 33;
  305. }
  306. put_bits(&s->pb, ff_mpeg12_mbAddrIncrTable[run][1],
  307. ff_mpeg12_mbAddrIncrTable[run][0]);
  308. }
  309. static av_always_inline void put_qscale(MpegEncContext *s)
  310. {
  311. if (s->q_scale_type) {
  312. av_assert2(s->qscale >= 1 && s->qscale <= 12);
  313. put_bits(&s->pb, 5, inv_non_linear_qscale[s->qscale]);
  314. } else {
  315. put_bits(&s->pb, 5, s->qscale);
  316. }
  317. }
  318. void ff_mpeg1_encode_slice_header(MpegEncContext *s)
  319. {
  320. if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO && s->height > 2800) {
  321. put_header(s, SLICE_MIN_START_CODE + (s->mb_y & 127));
  322. /* slice_vertical_position_extension */
  323. put_bits(&s->pb, 3, s->mb_y >> 7);
  324. } else {
  325. put_header(s, SLICE_MIN_START_CODE + s->mb_y);
  326. }
  327. put_qscale(s);
  328. /* slice extra information */
  329. put_bits(&s->pb, 1, 0);
  330. }
/* Write the picture header, the MPEG-2 picture coding extension, the
 * optional SVCD scan-offset user data, the optional stereo-3D user
 * data, and the first slice header of the picture. */
void ff_mpeg1_encode_picture_header(MpegEncContext *s, int picture_number)
{
    AVFrameSideData *side_data;
    mpeg1_encode_sequence_header(s);

    /* mpeg1 picture header */
    put_header(s, PICTURE_START_CODE);
    /* temporal reference */

    // RAL: s->picture_number instead of s->fake_picture_number
    put_bits(&s->pb, 10,
             (s->picture_number - s->gop_picture_number) & 0x3ff);
    put_bits(&s->pb, 3, s->pict_type);

    /* remember where vbv_delay lands so it can be patched in later */
    s->vbv_delay_ptr = s->pb.buf + put_bits_count(&s->pb) / 8;
    put_bits(&s->pb, 16, 0xFFFF);               /* vbv_delay */

    // RAL: Forward f_code also needed for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_P ||
        s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->f_code);     /* forward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* forward_f_code: MPEG-2 ignores it */
    }

    // RAL: Backward f_code necessary for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->b_code);     /* backward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* backward_f_code */
    }

    put_bits(&s->pb, 1, 0);                     /* extra bit picture */

    s->frame_pred_frame_dct = 1;
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        put_header(s, EXT_START_CODE);
        put_bits(&s->pb, 4, 8);                 /* pic ext */
        if (s->pict_type == AV_PICTURE_TYPE_P ||
            s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->f_code);
            put_bits(&s->pb, 4, s->f_code);
        } else {
            put_bits(&s->pb, 8, 255);           /* f_codes unused: all ones */
        }
        if (s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->b_code);
            put_bits(&s->pb, 4, s->b_code);
        } else {
            put_bits(&s->pb, 8, 255);
        }
        put_bits(&s->pb, 2, s->intra_dc_precision);

        av_assert0(s->picture_structure == PICT_FRAME);
        put_bits(&s->pb, 2, s->picture_structure);
        if (s->progressive_sequence)
            put_bits(&s->pb, 1, 0);             /* no repeat */
        else
            put_bits(&s->pb, 1, s->current_picture_ptr->f.top_field_first);
        /* XXX: optimize the generation of this flag with entropy measures */
        s->frame_pred_frame_dct = s->progressive_sequence;

        put_bits(&s->pb, 1, s->frame_pred_frame_dct);
        put_bits(&s->pb, 1, s->concealment_motion_vectors);
        put_bits(&s->pb, 1, s->q_scale_type);
        put_bits(&s->pb, 1, s->intra_vlc_format);
        put_bits(&s->pb, 1, s->alternate_scan);
        put_bits(&s->pb, 1, s->repeat_first_field);
        s->progressive_frame = s->progressive_sequence;
        /* chroma_420_type */
        put_bits(&s->pb, 1, s->chroma_format ==
                 CHROMA_420 ? s->progressive_frame : 0);
        put_bits(&s->pb, 1, s->progressive_frame);
        put_bits(&s->pb, 1, 0);                 /* composite_display_flag */
    }

    if (s->scan_offset) {
        int i;

        put_header(s, USER_START_CODE);
        for (i = 0; i < sizeof(svcd_scan_offset_placeholder); i++)
            put_bits(&s->pb, 8, svcd_scan_offset_placeholder[i]);
    }

    side_data = av_frame_get_side_data(&s->current_picture_ptr->f,
                                       AV_FRAME_DATA_STEREO3D);
    if (side_data) {
        AVStereo3D *stereo = (AVStereo3D *)side_data->data;
        uint8_t fpa_type;

        /* map the AVStereo3D packing to an S3D_video_format_type code;
         * 0 means "no signaling emitted" */
        switch (stereo->type) {
        case AV_STEREO3D_SIDEBYSIDE:
            fpa_type = 0x03;
            break;
        case AV_STEREO3D_TOPBOTTOM:
            fpa_type = 0x04;
            break;
        case AV_STEREO3D_2D:
            fpa_type = 0x08;
            break;
        case AV_STEREO3D_SIDEBYSIDE_QUINCUNX:
            fpa_type = 0x23;
            break;
        default:
            fpa_type = 0;
            break;
        }

        if (fpa_type != 0) {
            put_header(s, USER_START_CODE);
            put_bits(&s->pb, 8, 'J');   // S3D_video_format_signaling_identifier
            put_bits(&s->pb, 8, 'P');
            put_bits(&s->pb, 8, '3');
            put_bits(&s->pb, 8, 'D');
            put_bits(&s->pb, 8, 0x03);  // S3D_video_format_length

            put_bits(&s->pb, 1, 1);     // reserved_bit
            put_bits(&s->pb, 7, fpa_type); // S3D_video_format_type
            put_bits(&s->pb, 8, 0x04);  // reserved_data[0]
            put_bits(&s->pb, 8, 0xFF);  // reserved_data[1]
        }
    }

    s->mb_y = 0;
    ff_mpeg1_encode_slice_header(s);
}
  445. static inline void put_mb_modes(MpegEncContext *s, int n, int bits,
  446. int has_mv, int field_motion)
  447. {
  448. put_bits(&s->pb, n, bits);
  449. if (!s->frame_pred_frame_dct) {
  450. if (has_mv)
  451. /* motion_type: frame/field */
  452. put_bits(&s->pb, 2, 2 - field_motion);
  453. put_bits(&s->pb, 1, s->interlaced_dct);
  454. }
  455. }
// RAL: Parameter added: f_or_b_code
/* Write one motion-vector difference using the MPEG MV VLC with modulo
 * encoding; f_or_b_code selects the f_code/b_code residual precision. */
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code)
{
    if (val == 0) {
        /* zero vector: dedicated short code */
        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[0][1],
                 ff_mpeg12_mbMotionVectorTable[0][0]);
    } else {
        int code, sign, bits;
        int bit_size = f_or_b_code - 1;
        int range    = 1 << bit_size;
        /* modulo encoding: wrap val into the representable window */
        val = sign_extend(val, 5 + bit_size);

        if (val >= 0) {
            val--;
            code = (val >> bit_size) + 1; /* motion_code (VLC index) */
            bits = val & (range - 1);     /* motion_residual */
            sign = 0;
        } else {
            val = -val;
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 1;
        }

        av_assert2(code > 0 && code <= 16);

        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[code][1],
                 ff_mpeg12_mbMotionVectorTable[code][0]);

        put_bits(&s->pb, 1, sign);
        if (bit_size > 0)
            put_bits(&s->pb, bit_size, bits);
    }
}
/* Write an intra DC difference; component 0 = luma, 1/2 = chroma.
 * Diffs in [-255, 255] use the precomputed unified tables
 * (low byte = bit count, upper bits = code); larger diffs are built
 * directly from the VLC tables. */
static inline void encode_dc(MpegEncContext *s, int diff, int component)
{
    if (((unsigned) (diff + 255)) >= 511) {
        /* |diff| > 255: outside the precomputed table range */
        int index;

        if (diff < 0) {
            index = av_log2_16bit(-2 * diff);
            diff--; /* negative values are stored offset by one */
        } else {
            index = av_log2_16bit(2 * diff);
        }
        if (component == 0)
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_lum_bits[index] + index,
                     (ff_mpeg12_vlc_dc_lum_code[index] << index) +
                     (diff & ((1 << index) - 1)));
        else
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_chroma_bits[index] + index,
                     (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
                     (diff & ((1 << index) - 1)));
    } else {
        if (component == 0)
            put_bits(&s->pb,
                     mpeg1_lum_dc_uni[diff + 255] & 0xFF,
                     mpeg1_lum_dc_uni[diff + 255] >> 8);
        else
            put_bits(&s->pb,
                     mpeg1_chr_dc_uni[diff + 255] & 0xFF,
                     mpeg1_chr_dc_uni[diff + 255] >> 8);
    }
}
/* Encode one 8x8 coefficient block (index n) with the MPEG run/level
 * VLC: DC handling for intra blocks, special first-coefficient code for
 * inter blocks, escape coding for out-of-table levels, then EOB.
 * Table indices 111 and 112 are the ESC and EOB entries (rl->n, rl->n+1). */
static void mpeg1_encode_block(MpegEncContext *s, int16_t *block, int n)
{
    int alevel, level, last_non_zero, dc, diff, i, j, run, last_index, sign;
    int code, component;
    const uint16_t (*table_vlc)[2] = ff_rl_mpeg1.table_vlc;

    last_index = s->block_last_index[n];

    /* DC coef */
    if (s->mb_intra) {
        /* blocks 0-3 are luma, then chroma alternates Cb/Cr */
        component = (n <= 3 ? 0 : (n & 1) + 1);
        dc        = block[0]; /* overflow is impossible */
        diff      = dc - s->last_dc[component];
        encode_dc(s, diff, component);
        s->last_dc[component] = dc;
        i = 1;
        if (s->intra_vlc_format)
            table_vlc = ff_rl_mpeg2.table_vlc;
    } else {
        /* encode the first coefficient: needs to be done here because
         * it is handled slightly differently */
        level = block[0];
        if (abs(level) == 1) {
            code = ((uint32_t)level >> 31); /* the sign bit */
            put_bits(&s->pb, 2, code | 0x02);
            i = 1;
        } else {
            i             = 0;
            last_non_zero = -1;
            goto next_coef;
        }
    }

    /* now quantify & encode AC coefs */
    last_non_zero = i - 1;
    for (; i <= last_index; i++) {
        j     = s->intra_scantable.permutated[i];
        level = block[j];

next_coef:
        /* encode using VLC */
        if (level != 0) {
            run = i - last_non_zero - 1;

            alevel = level;
            MASK_ABS(sign, alevel);
            sign &= 1;

            if (alevel <= mpeg1_max_level[0][run]) {
                code = mpeg1_index_run[0][run] + alevel - 1;
                /* store the VLC & sign at once */
                put_bits(&s->pb, table_vlc[code][1] + 1,
                         (table_vlc[code][0] << 1) + sign);
            } else {
                /* escape seems to be pretty rare <5% so I do not optimize it;
                 * index 111 is the ESC entry */
                put_bits(&s->pb, table_vlc[111][1], table_vlc[111][0]);
                /* escape: only clip in this case */
                put_bits(&s->pb, 6, run);
                if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
                    /* MPEG-1: 8-bit level, or 16-bit two-byte form */
                    if (alevel < 128) {
                        put_sbits(&s->pb, 8, level);
                    } else {
                        if (level < 0)
                            put_bits(&s->pb, 16, 0x8001 + level + 255);
                        else
                            put_sbits(&s->pb, 16, level);
                    }
                } else {
                    /* MPEG-2: single 12-bit signed level */
                    put_sbits(&s->pb, 12, level);
                }
            }
            last_non_zero = i;
        }
    }
    /* end of block: index 112 is the EOB entry */
    put_bits(&s->pb, table_vlc[112][1], table_vlc[112][0]);
}
/* Encode one macroblock: decide whether it can be skipped, otherwise
 * write macroblock_address_increment, macroblock_type, motion vectors,
 * coded_block_pattern and the coefficient blocks. mb_block_count is 6
 * for 4:2:0 and 8 for 4:2:2. */
static av_always_inline void mpeg1_encode_mb_internal(MpegEncContext *s,
                                                      int16_t block[6][64],
                                                      int motion_x, int motion_y,
                                                      int mb_block_count)
{
    int i, cbp;
    const int mb_x     = s->mb_x;
    const int mb_y     = s->mb_y;
    const int first_mb = mb_x == s->resync_mb_x && mb_y == s->resync_mb_y;

    /* compute cbp */
    cbp = 0;
    for (i = 0; i < mb_block_count; i++)
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (mb_block_count - 1 - i);

    /* skip the MB when: no coefficients, not the first MB of a slice,
     * 16x16 motion, not the last MB of the row/picture (MPEG-1), and the
     * motion matches the prediction (zero MV in P pictures; unchanged
     * direction and MVs in B pictures). */
    if (cbp == 0 && !first_mb && s->mv_type == MV_TYPE_16X16 &&
        (mb_x != s->mb_width - 1 ||
         (mb_y != s->end_mb_y - 1 && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)) &&
        ((s->pict_type == AV_PICTURE_TYPE_P && (motion_x | motion_y) == 0) ||
         (s->pict_type == AV_PICTURE_TYPE_B && s->mv_dir == s->last_mv_dir &&
          (((s->mv_dir & MV_DIR_FORWARD)
            ? ((s->mv[0][0][0] - s->last_mv[0][0][0]) |
               (s->mv[0][0][1] - s->last_mv[0][0][1])) : 0) |
           ((s->mv_dir & MV_DIR_BACKWARD)
            ? ((s->mv[1][0][0] - s->last_mv[1][0][0]) |
               (s->mv[1][0][1] - s->last_mv[1][0][1])) : 0)) == 0))) {
        s->mb_skip_run++;
        s->qscale -= s->dquant;
        s->skip_count++;
        s->misc_bits++;
        s->last_bits++;
        if (s->pict_type == AV_PICTURE_TYPE_P) {
            /* skipped P MBs reset the MV predictor */
            s->last_mv[0][0][0] =
            s->last_mv[0][0][1] =
            s->last_mv[0][1][0] =
            s->last_mv[0][1][1] = 0;
        }
    } else {
        if (first_mb) {
            av_assert0(s->mb_skip_run == 0);
            encode_mb_skip_run(s, s->mb_x);
        } else {
            encode_mb_skip_run(s, s->mb_skip_run);
        }

        if (s->pict_type == AV_PICTURE_TYPE_I) {
            if (s->dquant && cbp) {
                /* macroblock_type: macroblock_quant = 1 */
                put_mb_modes(s, 2, 1, 0, 0);
                put_qscale(s);
            } else {
                /* macroblock_type: macroblock_quant = 0 */
                put_mb_modes(s, 1, 1, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
        } else if (s->mb_intra) {
            /* intra MB inside a P/B picture */
            if (s->dquant && cbp) {
                put_mb_modes(s, 6, 0x01, 0, 0);
                put_qscale(s);
            } else {
                put_mb_modes(s, 5, 0x03, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
            memset(s->last_mv, 0, sizeof(s->last_mv));
        } else if (s->pict_type == AV_PICTURE_TYPE_P) {
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp != 0) {
                    if ((motion_x | motion_y) == 0) {
                        if (s->dquant) {
                            /* macroblock_pattern & quant */
                            put_mb_modes(s, 5, 1, 0, 0);
                            put_qscale(s);
                        } else {
                            /* macroblock_pattern only */
                            put_mb_modes(s, 2, 1, 0, 0);
                        }
                        s->misc_bits += get_bits_diff(s);
                    } else {
                        if (s->dquant) {
                            put_mb_modes(s, 5, 2, 1, 0); /* motion + cbp */
                            put_qscale(s);
                        } else {
                            put_mb_modes(s, 1, 1, 1, 0); /* motion + cbp */
                        }
                        s->misc_bits += get_bits_diff(s);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_x - s->last_mv[0][0][0],
                                            s->f_code);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_y - s->last_mv[0][0][1],
                                            s->f_code);
                        s->mv_bits += get_bits_diff(s);
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->misc_bits += get_bits_diff(s);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_x - s->last_mv[0][0][0],
                                        s->f_code);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_y - s->last_mv[0][0][1],
                                        s->f_code);
                    s->qscale -= s->dquant;
                    s->mv_bits += get_bits_diff(s);
                }
                s->last_mv[0][1][0] = s->last_mv[0][0][0] = motion_x;
                s->last_mv[0][1][1] = s->last_mv[0][0][1] = motion_y;
            } else {
                av_assert2(!s->frame_pred_frame_dct && s->mv_type == MV_TYPE_FIELD);

                if (cbp) {
                    if (s->dquant) {
                        put_mb_modes(s, 5, 2, 1, 1); /* motion + cbp */
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 1, 1, 1, 1); /* motion + cbp */
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                for (i = 0; i < 2; i++) {
                    put_bits(&s->pb, 1, s->field_select[0][i]);
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][0] - s->last_mv[0][i][0],
                                        s->f_code);
                    /* last_mv stores vertical field MVs doubled, so the
                     * predictor is halved before differencing */
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                        s->f_code);
                    s->last_mv[0][i][0] = s->mv[0][i][0];
                    s->last_mv[0][i][1] = 2 * s->mv[0][i][1];
                }
                s->mv_bits += get_bits_diff(s);
            }
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    /* 4:2:2: two extra cbp bits for the added chroma blocks */
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
            s->f_count++;
        } else {
            /* B picture */
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp) { // With coded bloc pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 0);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 0);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 0);
                    }
                } else { // No coded bloc pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][0] - s->last_mv[0][0][0],
                                        s->f_code);
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][1] - s->last_mv[0][0][1],
                                        s->f_code);
                    s->last_mv[0][0][0] =
                    s->last_mv[0][1][0] = s->mv[0][0][0];
                    s->last_mv[0][0][1] =
                    s->last_mv[0][1][1] = s->mv[0][0][1];
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][0] - s->last_mv[1][0][0],
                                        s->b_code);
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][1] - s->last_mv[1][0][1],
                                        s->b_code);
                    s->last_mv[1][0][0] =
                    s->last_mv[1][1][0] = s->mv[1][0][0];
                    s->last_mv[1][0][1] =
                    s->last_mv[1][1][1] = s->mv[1][0][1];
                    s->b_count++;
                }
            } else {
                av_assert2(s->mv_type == MV_TYPE_FIELD);
                av_assert2(!s->frame_pred_frame_dct);
                if (cbp) { // With coded bloc pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 1);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 1);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 1);
                    }
                } else { // No coded bloc pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[0][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][0] - s->last_mv[0][i][0],
                                            s->f_code);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                            s->f_code);
                        s->last_mv[0][i][0] = s->mv[0][i][0];
                        s->last_mv[0][i][1] = s->mv[0][i][1] * 2;
                    }
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[1][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][0] - s->last_mv[1][i][0],
                                            s->b_code);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][1] - (s->last_mv[1][i][1] >> 1),
                                            s->b_code);
                        s->last_mv[1][i][0] = s->mv[1][i][0];
                        s->last_mv[1][i][1] = s->mv[1][i][1] * 2;
                    }
                    s->b_count++;
                }
            }
            s->mv_bits += get_bits_diff(s);
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
        }
        /* finally the coefficient blocks flagged in cbp */
        for (i = 0; i < mb_block_count; i++)
            if (cbp & (1 << (mb_block_count - 1 - i)))
                mpeg1_encode_block(s, block[i], i);
        s->mb_skip_run = 0;
        if (s->mb_intra)
            s->i_tex_bits += get_bits_diff(s);
        else
            s->p_tex_bits += get_bits_diff(s);
    }
}
  866. void ff_mpeg1_encode_mb(MpegEncContext *s, int16_t block[6][64],
  867. int motion_x, int motion_y)
  868. {
  869. if (s->chroma_format == CHROMA_420)
  870. mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 6);
  871. else
  872. mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 8);
  873. }
  874. av_cold void ff_mpeg1_encode_init(MpegEncContext *s)
  875. {
  876. static int done = 0;
  877. ff_mpeg12_common_init(s);
  878. if (!done) {
  879. int f_code;
  880. int mv;
  881. int i;
  882. done = 1;
  883. ff_init_rl(&ff_rl_mpeg1, ff_mpeg12_static_rl_table_store[0]);
  884. ff_init_rl(&ff_rl_mpeg2, ff_mpeg12_static_rl_table_store[1]);
  885. for (i = 0; i < 64; i++) {
  886. mpeg1_max_level[0][i] = ff_rl_mpeg1.max_level[0][i];
  887. mpeg1_index_run[0][i] = ff_rl_mpeg1.index_run[0][i];
  888. }
  889. init_uni_ac_vlc(&ff_rl_mpeg1, uni_mpeg1_ac_vlc_len);
  890. if (s->intra_vlc_format)
  891. init_uni_ac_vlc(&ff_rl_mpeg2, uni_mpeg2_ac_vlc_len);
  892. /* build unified dc encoding tables */
  893. for (i = -255; i < 256; i++) {
  894. int adiff, index;
  895. int bits, code;
  896. int diff = i;
  897. adiff = FFABS(diff);
  898. if (diff < 0)
  899. diff--;
  900. index = av_log2(2 * adiff);
  901. bits = ff_mpeg12_vlc_dc_lum_bits[index] + index;
  902. code = (ff_mpeg12_vlc_dc_lum_code[index] << index) +
  903. (diff & ((1 << index) - 1));
  904. mpeg1_lum_dc_uni[i + 255] = bits + (code << 8);
  905. bits = ff_mpeg12_vlc_dc_chroma_bits[index] + index;
  906. code = (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
  907. (diff & ((1 << index) - 1));
  908. mpeg1_chr_dc_uni[i + 255] = bits + (code << 8);
  909. }
  910. for (f_code = 1; f_code <= MAX_FCODE; f_code++)
  911. for (mv = -MAX_MV; mv <= MAX_MV; mv++) {
  912. int len;
  913. if (mv == 0) {
  914. len = ff_mpeg12_mbMotionVectorTable[0][1];
  915. } else {
  916. int val, bit_size, code;
  917. bit_size = f_code - 1;
  918. val = mv;
  919. if (val < 0)
  920. val = -val;
  921. val--;
  922. code = (val >> bit_size) + 1;
  923. if (code < 17)
  924. len = ff_mpeg12_mbMotionVectorTable[code][1] +
  925. 1 + bit_size;
  926. else
  927. len = ff_mpeg12_mbMotionVectorTable[16][1] +
  928. 2 + bit_size;
  929. }
  930. mv_penalty[f_code][mv + MAX_MV] = len;
  931. }
  932. for (f_code = MAX_FCODE; f_code > 0; f_code--)
  933. for (mv = -(8 << f_code); mv < (8 << f_code); mv++)
  934. fcode_tab[mv + MAX_MV] = f_code;
  935. }
  936. s->me.mv_penalty = mv_penalty;
  937. s->fcode_tab = fcode_tab;
  938. if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
  939. s->min_qcoeff = -255;
  940. s->max_qcoeff = 255;
  941. } else {
  942. s->min_qcoeff = -2047;
  943. s->max_qcoeff = 2047;
  944. }
  945. if (s->intra_vlc_format) {
  946. s->intra_ac_vlc_length =
  947. s->intra_ac_vlc_last_length = uni_mpeg2_ac_vlc_len;
  948. } else {
  949. s->intra_ac_vlc_length =
  950. s->intra_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
  951. }
  952. s->inter_ac_vlc_length =
  953. s->inter_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
  954. }
/* Offset of a field inside MpegEncContext, for AVOption tables. */
#define OFFSET(x) offsetof(MpegEncContext, x)
/* Flags shared by all options below: video encoding parameters. */
#define VE AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
/* Options common to the MPEG-1 and MPEG-2 encoders. */
#define COMMON_OPTS \
    { "gop_timecode",        "MPEG GOP Timecode in hh:mm:ss[:;.]ff format", \
      OFFSET(tc_opt_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, VE },\
    { "intra_vlc",           "Use MPEG-2 intra VLC table.", \
      OFFSET(intra_vlc_format),    AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "drop_frame_timecode", "Timecode is in drop frame format.", \
      OFFSET(drop_frame_timecode), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "scan_offset",         "Reserve space for SVCD scan offset user data.", \
      OFFSET(scan_offset),         AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
/* AVOption table for the MPEG-1 encoder: shared options plus the
 * generic mpegvideo options; no MPEG-2-only entries. */
static const AVOption mpeg1_options[] = {
    COMMON_OPTS
    FF_MPV_COMMON_OPTS
    { NULL },
};
/* AVOption table for the MPEG-2 encoder: shared options plus the
 * MPEG-2-only quantizer/scan options and the generic mpegvideo options. */
static const AVOption mpeg2_options[] = {
    COMMON_OPTS
    { "non_linear_quant", "Use nonlinear quantizer.",    OFFSET(q_scale_type),   AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "alternate_scan",   "Enable alternate scantable.", OFFSET(alternate_scan), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    FF_MPV_COMMON_OPTS
    { NULL },
};
/* Stamps out the AVClass for encoder 'x' (1 or 2), binding its name to
 * the matching mpeg<x>_options table defined above. */
#define mpeg12_class(x)                                 \
static const AVClass mpeg ## x ## _class = {            \
    .class_name = "mpeg" # x "video encoder",           \
    .item_name  = av_default_item_name,                 \
    .option     = mpeg ## x ## _options,                \
    .version    = LIBAVUTIL_VERSION_INT,                \
};

mpeg12_class(1)
mpeg12_class(2)
/* Public codec descriptor for the MPEG-1 video encoder.
 * MPEG-1 only supports 4:2:0 input and a fixed frame-rate table
 * (offset by 1 to skip the reserved 0 entry). */
AVCodec ff_mpeg1video_encoder = {
    .name                 = "mpeg1video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-1 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG1VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg12_frame_rate_tab + 1,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg1_class,
};
/* Public codec descriptor for the MPEG-2 video encoder.
 * Unlike MPEG-1, MPEG-2 additionally accepts 4:2:2 input and uses its
 * own frame-rate table. */
AVCodec ff_mpeg2video_encoder = {
    .name                 = "mpeg2video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-2 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG2VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg2_frame_rate_tab,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_YUV422P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg2_class,
};