/*
 * MPEG1/2 encoder
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MPEG1/2 encoder
 */

#include <stdint.h>

#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/timecode.h"
#include "libavutil/stereo3d.h"

#include "avcodec.h"
#include "bytestream.h"
#include "mathops.h"
#include "mpeg12.h"
#include "mpeg12data.h"
#include "mpegutils.h"
#include "mpegvideo.h"

static const int8_t inv_non_linear_qscale[] = {
     0,  2,  4,  6,  8,  9, 10, 11, 12, 13, 14, 15, 16,
    -1, 17, -1, 18, -1, 19, -1, 20, -1, 21, -1, 22, -1,
    23, -1, 24, -1, -1, -1
};
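
/* Note: this is the inverse of the MPEG-2 non-linear quantiser mapping; for a
 * given linear quantiser value it gives the 5-bit quantiser_scale_code to
 * write when q_scale_type is set, with -1 marking values that have no exact
 * non-linear code (see put_qscale() below). */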
static const uint8_t svcd_scan_offset_placeholder[] = {
    0x10, 0x0E, 0x00, 0x80, 0x81, 0x00, 0x80,
    0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
};

static uint8_t mv_penalty[MAX_FCODE + 1][MAX_MV * 2 + 1];
static uint8_t fcode_tab[MAX_MV * 2 + 1];

static uint8_t uni_mpeg1_ac_vlc_len[64 * 64 * 2];
static uint8_t uni_mpeg2_ac_vlc_len[64 * 64 * 2];

/* simple include everything table for dc, first byte is bits
 * number next 3 are code */
static uint32_t mpeg1_lum_dc_uni[512];
static uint32_t mpeg1_chr_dc_uni[512];

static uint8_t mpeg1_index_run[2][64];
static int8_t  mpeg1_max_level[2][64];

static av_cold void init_uni_ac_vlc(RLTable *rl, uint8_t *uni_ac_vlc_len)
{
    int i;

    for (i = 0; i < 128; i++) {
        int level = i - 64;
        int run;
        if (!level)
            continue;
        for (run = 0; run < 64; run++) {
            int len, code;
            int alevel = FFABS(level);

            if (alevel > rl->max_level[0][run])
                code = 111;                         /* rl->n */
            else
                code = rl->index_run[0][run] + alevel - 1;

            if (code < 111) {                       /* rl->n */
                /* length of VLC and sign */
                len = rl->table_vlc[code][1] + 1;
            } else {
                len = rl->table_vlc[111 /* rl->n */][1] + 6;
                if (alevel < 128)
                    len += 8;
                else
                    len += 16;
            }

            uni_ac_vlc_len[UNI_AC_ENC_INDEX(run, i)] = len;
        }
    }
}
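
/* uni_ac_vlc_len[] ends up holding, for each (run, level) pair, the number of
 * bits that AC coefficient would cost: the VLC plus sign bit, or the escape
 * code plus the 6-bit run plus the 8/16-bit (MPEG-1) fixed-length level.
 * These per-coefficient lengths are what the rate-distortion code in
 * mpegvideo_enc uses to estimate coding cost without actually writing bits. */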
static int find_frame_rate_index(MpegEncContext *s)
{
    int i;
    AVRational bestq = (AVRational) {0, 0};
    AVRational ext;
    AVRational target = av_inv_q(s->avctx->time_base);

    for (i = 1; i < 14; i++) {
        if (s->avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL &&
            i >= 9)
            break;

        for (ext.num=1; ext.num <= 4; ext.num++) {
            for (ext.den=1; ext.den <= 32; ext.den++) {
                AVRational q = av_mul_q(ext, ff_mpeg12_frame_rate_tab[i]);

                if (s->codec_id != AV_CODEC_ID_MPEG2VIDEO && (ext.den!=1 || ext.num!=1))
                    continue;
                if (av_gcd(ext.den, ext.num) != 1)
                    continue;

                if (    bestq.num==0
                    || av_nearer_q(target, bestq, q) < 0
                    || ext.num==1 && ext.den==1 && av_nearer_q(target, bestq, q) == 0) {
                    bestq               = q;
                    s->frame_rate_index = i;
                    s->mpeg2_frame_rate_ext.num = ext.num;
                    s->mpeg2_frame_rate_ext.den = ext.den;
                }
            }
        }
    }

    if (av_cmp_q(target, bestq))
        return -1;
    else
        return 0;
}
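
/* Example of what the search above produces: indices 1..8 of
 * ff_mpeg12_frame_rate_tab are the standard MPEG rates (24000/1001, 24, 25,
 * 30000/1001, 30, 50, 60000/1001, 60); higher indices carry unofficial rates
 * and are only considered when strict_std_compliance is at or below
 * FF_COMPLIANCE_UNOFFICIAL.  A 23.976 fps input matches index 1 with an
 * extension of 1/1, while MPEG-2 can also express e.g. 48 fps as 24 * 2/1
 * via frame_rate_extension_n/d. */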
static av_cold int encode_init(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;

    if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && avctx->height > 2800)
        avctx->thread_count = 1;

    if (ff_mpv_encode_init(avctx) < 0)
        return -1;

    if (find_frame_rate_index(s) < 0) {
        if (s->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n",
                   avctx->time_base.den, avctx->time_base.num);
            return -1;
        } else {
            av_log(avctx, AV_LOG_INFO,
                   "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n",
                   avctx->time_base.den, avctx->time_base.num);
        }
    }

    if (avctx->profile == FF_PROFILE_UNKNOWN) {
        if (avctx->level != FF_LEVEL_UNKNOWN) {
            av_log(avctx, AV_LOG_ERROR, "Set profile and level\n");
            return -1;
        }
        /* Main or 4:2:2 */
        avctx->profile = s->chroma_format == CHROMA_420 ? 4 : 0;
    }

    if (avctx->level == FF_LEVEL_UNKNOWN) {
        if (avctx->profile == 0) {                  /* 4:2:2 */
            if (avctx->width <= 720 && avctx->height <= 608)
                avctx->level = 5;                   /* Main */
            else
                avctx->level = 2;                   /* High */
        } else {
            if (avctx->profile != 1 && s->chroma_format != CHROMA_420) {
                av_log(avctx, AV_LOG_ERROR,
                       "Only High(1) and 4:2:2(0) profiles support 4:2:2 color sampling\n");
                return -1;
            }
            if (avctx->width <= 720 && avctx->height <= 576)
                avctx->level = 8;                   /* Main */
            else if (avctx->width <= 1440)
                avctx->level = 6;                   /* High 1440 */
            else
                avctx->level = 4;                   /* High */
        }
    }

    if ((avctx->width & 0xFFF) == 0 && (avctx->height & 0xFFF) == 1) {
        av_log(avctx, AV_LOG_ERROR, "Width / Height is invalid for MPEG2\n");
        return AVERROR(EINVAL);
    }

    if (s->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
        if ((avctx->width & 0xFFF) == 0 || (avctx->height & 0xFFF) == 0) {
            av_log(avctx, AV_LOG_ERROR, "Width or Height are not allowed to be multiples of 4096\n"
                                        "add '-strict %d' if you want to use them anyway.\n", FF_COMPLIANCE_UNOFFICIAL);
            return AVERROR(EINVAL);
        }
    }

    s->drop_frame_timecode = s->drop_frame_timecode || !!(avctx->flags2 & AV_CODEC_FLAG2_DROP_FRAME_TIMECODE);
    if (s->drop_frame_timecode)
        s->tc.flags |= AV_TIMECODE_FLAG_DROPFRAME;
    if (s->drop_frame_timecode && s->frame_rate_index != 4) {
        av_log(avctx, AV_LOG_ERROR,
               "Drop frame time code only allowed with 1001/30000 fps\n");
        return -1;
    }

    if (s->tc_opt_str) {
        AVRational rate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];
        int ret = av_timecode_init_from_string(&s->tc, rate, s->tc_opt_str, s);
        if (ret < 0)
            return ret;
        s->drop_frame_timecode = !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME);
        s->avctx->timecode_frame_start = s->tc.start;
    } else {
        s->avctx->timecode_frame_start = 0; // default is -1
    }
    return 0;
}
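
/* Profile/level codes used above: profile 0 = 4:2:2 (signalled with the
 * escape bit in the sequence extension), 1 = High, 4 = Main.  For the 4:2:2
 * profile the level codes are 5 (Main) and 2 (High); for the other profiles
 * 8 (Main), 6 (High 1440) and 4 (High).  So, for example, an unset
 * profile/level on a 720x576 4:2:0 stream ends up as Main profile @ Main
 * level. */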
static void put_header(MpegEncContext *s, int header)
{
    avpriv_align_put_bits(&s->pb);
    put_bits(&s->pb, 16, header >> 16);
    put_sbits(&s->pb, 16, header);
}

/* put sequence header if needed */
static void mpeg1_encode_sequence_header(MpegEncContext *s)
{
    unsigned int vbv_buffer_size, fps, v;
    int i, constraint_parameter_flag;
    uint64_t time_code;
    int64_t best_aspect_error = INT64_MAX;
    AVRational aspect_ratio = s->avctx->sample_aspect_ratio;

    if (aspect_ratio.num == 0 || aspect_ratio.den == 0)
        aspect_ratio = (AVRational){1,1};           // pixel aspect 1:1 (VGA)

    if (s->current_picture.f->key_frame) {
        AVRational framerate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];

        /* mpeg1 header repeated every gop */
        put_header(s, SEQ_START_CODE);

        put_sbits(&s->pb, 12, s->width  & 0xFFF);
        put_sbits(&s->pb, 12, s->height & 0xFFF);

        for (i = 1; i < 15; i++) {
            int64_t error = aspect_ratio.num * (1LL<<32) / aspect_ratio.den;
            if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO || i <= 1)
                error -= (1LL<<32) / ff_mpeg1_aspect[i];
            else
                error -= (1LL<<32)*ff_mpeg2_aspect[i].num * s->height / s->width / ff_mpeg2_aspect[i].den;

            error = FFABS(error);

            if (error - 2 <= best_aspect_error) {
                best_aspect_error    = error;
                s->aspect_ratio_info = i;
            }
        }

        put_bits(&s->pb, 4, s->aspect_ratio_info);
        put_bits(&s->pb, 4, s->frame_rate_index);

        if (s->avctx->rc_max_rate) {
            v = (s->avctx->rc_max_rate + 399) / 400;
            if (v > 0x3ffff && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
                v = 0x3ffff;
        } else {
            v = 0x3FFFF;
        }

        if (s->avctx->rc_buffer_size)
            vbv_buffer_size = s->avctx->rc_buffer_size;
        else
            /* VBV calculation: Scaled so that a VCD has the proper
             * VBV size of 40 kilobytes */
            vbv_buffer_size = ((20 * s->bit_rate) / (1151929 / 2)) * 8 * 1024;
        vbv_buffer_size = (vbv_buffer_size + 16383) / 16384;

        put_sbits(&s->pb, 18, v);
        put_bits(&s->pb, 1, 1);                     // marker
        put_sbits(&s->pb, 10, vbv_buffer_size);

        constraint_parameter_flag =
            s->width  <= 768                                    &&
            s->height <= 576                                    &&
            s->mb_width * s->mb_height                 <= 396   &&
            s->mb_width * s->mb_height * framerate.num <= 396 * 25 * framerate.den &&
            framerate.num <= framerate.den * 30                 &&
            s->avctx->me_range                                  &&
            s->avctx->me_range < 128                            &&
            vbv_buffer_size <= 20                               &&
            v <= 1856000 / 400                                  &&
            s->codec_id == AV_CODEC_ID_MPEG1VIDEO;

        put_bits(&s->pb, 1, constraint_parameter_flag);

        ff_write_quant_matrix(&s->pb, s->avctx->intra_matrix);
        ff_write_quant_matrix(&s->pb, s->avctx->inter_matrix);

        if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
            AVFrameSideData *side_data;
            int width  = s->width;
            int height = s->height;
            int use_seq_disp_ext;

            put_header(s, EXT_START_CODE);
            put_bits(&s->pb, 4, 1);                 // seq ext

            put_bits(&s->pb, 1, s->avctx->profile == 0); // escx 1 for 4:2:2 profile

            put_bits(&s->pb, 3, s->avctx->profile); // profile
            put_bits(&s->pb, 4, s->avctx->level);   // level

            put_bits(&s->pb, 1, s->progressive_sequence);
            put_bits(&s->pb, 2, s->chroma_format);
            put_bits(&s->pb, 2, s->width  >> 12);
            put_bits(&s->pb, 2, s->height >> 12);
            put_bits(&s->pb, 12, v >> 18);          // bitrate ext
            put_bits(&s->pb, 1, 1);                 // marker
            put_bits(&s->pb, 8, vbv_buffer_size >> 10); // vbv buffer ext
            put_bits(&s->pb, 1, s->low_delay);
            put_bits(&s->pb, 2, s->mpeg2_frame_rate_ext.num-1); // frame_rate_ext_n
            put_bits(&s->pb, 5, s->mpeg2_frame_rate_ext.den-1); // frame_rate_ext_d

            side_data = av_frame_get_side_data(s->current_picture_ptr->f, AV_FRAME_DATA_PANSCAN);
            if (side_data) {
                AVPanScan *pan_scan = (AVPanScan *)side_data->data;
                if (pan_scan->width && pan_scan->height) {
                    width  = pan_scan->width  >> 4;
                    height = pan_scan->height >> 4;
                }
            }

            use_seq_disp_ext = (width != s->width ||
                                height != s->height ||
                                s->avctx->color_primaries != AVCOL_PRI_UNSPECIFIED ||
                                s->avctx->color_trc != AVCOL_TRC_UNSPECIFIED ||
                                s->avctx->colorspace != AVCOL_SPC_UNSPECIFIED);

            if (s->seq_disp_ext == 1 || (s->seq_disp_ext == -1 && use_seq_disp_ext)) {
                put_header(s, EXT_START_CODE);
                put_bits(&s->pb, 4, 2);                         // sequence display extension
                put_bits(&s->pb, 3, 0);                         // video_format: 0 is components
                put_bits(&s->pb, 1, 1);                         // colour_description
                put_bits(&s->pb, 8, s->avctx->color_primaries); // colour_primaries
                put_bits(&s->pb, 8, s->avctx->color_trc);       // transfer_characteristics
                put_bits(&s->pb, 8, s->avctx->colorspace);      // matrix_coefficients
                put_bits(&s->pb, 14, width);                    // display_horizontal_size
                put_bits(&s->pb, 1, 1);                         // marker_bit
                put_bits(&s->pb, 14, height);                   // display_vertical_size
                put_bits(&s->pb, 3, 0);                         // remaining 3 bits are zero padding
            }
        }

        put_header(s, GOP_START_CODE);
        put_bits(&s->pb, 1, s->drop_frame_timecode);    // drop frame flag
        /* time code: we must convert from the real frame rate to a
         * fake MPEG frame rate in case of low frame rate */
        fps       = (framerate.num + framerate.den / 2) / framerate.den;
        time_code = s->current_picture_ptr->f->coded_picture_number +
                    s->avctx->timecode_frame_start;

        s->gop_picture_number = s->current_picture_ptr->f->coded_picture_number;

        av_assert0(s->drop_frame_timecode == !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME));
        if (s->drop_frame_timecode)
            time_code = av_timecode_adjust_ntsc_framenum2(time_code, fps);

        put_bits(&s->pb, 5, (uint32_t)((time_code / (fps * 3600)) % 24));
        put_bits(&s->pb, 6, (uint32_t)((time_code / (fps * 60)) % 60));
        put_bits(&s->pb, 1, 1);
        put_bits(&s->pb, 6, (uint32_t)((time_code / fps) % 60));
        put_bits(&s->pb, 6, (uint32_t)((time_code % fps)));
        put_bits(&s->pb, 1, !!(s->avctx->flags & AV_CODEC_FLAG_CLOSED_GOP) || s->intra_only || !s->gop_picture_number);
        put_bits(&s->pb, 1, 0);                     // broken link
    }
}
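
/* The GOP header written at the end of mpeg1_encode_sequence_header() lays
 * the time code out as: 1-bit drop_frame flag, 5 bits of hours, 6 bits of
 * minutes, a marker bit, 6 bits of seconds and 6 bits of frames, followed by
 * the closed_gop and broken_link flags, matching the put_bits() calls
 * above. */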
static inline void encode_mb_skip_run(MpegEncContext *s, int run)
{
    while (run >= 33) {
        put_bits(&s->pb, 11, 0x008);
        run -= 33;
    }
    put_bits(&s->pb, ff_mpeg12_mbAddrIncrTable[run][1],
             ff_mpeg12_mbAddrIncrTable[run][0]);
}
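
/* 0x008 on 11 bits is the macroblock_escape code; each escape adds 33 to the
 * macroblock_address_increment, and the remaining run is then coded with the
 * increment VLC (index 0 of ff_mpeg12_mbAddrIncrTable corresponds to an
 * increment of 1). */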
static av_always_inline void put_qscale(MpegEncContext *s)
{
    if (s->q_scale_type) {
        int qp = inv_non_linear_qscale[s->qscale];
        av_assert2(s->qscale >= 1 && qp > 0);
        put_bits(&s->pb, 5, qp);
    } else {
        put_bits(&s->pb, 5, s->qscale);
    }
}

void ff_mpeg1_encode_slice_header(MpegEncContext *s)
{
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO && s->height > 2800) {
        put_header(s, SLICE_MIN_START_CODE + (s->mb_y & 127));
        /* slice_vertical_position_extension */
        put_bits(&s->pb, 3, s->mb_y >> 7);
    } else {
        put_header(s, SLICE_MIN_START_CODE + s->mb_y);
    }
    put_qscale(s);
    /* slice extra information */
    put_bits(&s->pb, 1, 0);
}

void ff_mpeg1_encode_picture_header(MpegEncContext *s, int picture_number)
{
    AVFrameSideData *side_data;
    mpeg1_encode_sequence_header(s);

    /* mpeg1 picture header */
    put_header(s, PICTURE_START_CODE);
    /* temporal reference */

    // RAL: s->picture_number instead of s->fake_picture_number
    put_bits(&s->pb, 10,
             (s->picture_number - s->gop_picture_number) & 0x3ff);
    put_bits(&s->pb, 3, s->pict_type);

    s->vbv_delay_ptr = s->pb.buf + put_bits_count(&s->pb) / 8;
    put_bits(&s->pb, 16, 0xFFFF);               /* vbv_delay */

    // RAL: Forward f_code also needed for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_P ||
        s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->f_code);     /* forward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* forward_f_code */
    }

    // RAL: Backward f_code necessary for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->b_code);     /* backward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* backward_f_code */
    }

    put_bits(&s->pb, 1, 0);                     /* extra bit picture */

    s->frame_pred_frame_dct = 1;
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        put_header(s, EXT_START_CODE);
        put_bits(&s->pb, 4, 8);                 /* pic ext */
        if (s->pict_type == AV_PICTURE_TYPE_P ||
            s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->f_code);
            put_bits(&s->pb, 4, s->f_code);
        } else {
            put_bits(&s->pb, 8, 255);
        }
        if (s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->b_code);
            put_bits(&s->pb, 4, s->b_code);
        } else {
            put_bits(&s->pb, 8, 255);
        }
        put_bits(&s->pb, 2, s->intra_dc_precision);

        av_assert0(s->picture_structure == PICT_FRAME);
        put_bits(&s->pb, 2, s->picture_structure);
        if (s->progressive_sequence)
            put_bits(&s->pb, 1, 0);             /* no repeat */
        else
            put_bits(&s->pb, 1, s->current_picture_ptr->f->top_field_first);
        /* XXX: optimize the generation of this flag with entropy measures */
        s->frame_pred_frame_dct = s->progressive_sequence;

        put_bits(&s->pb, 1, s->frame_pred_frame_dct);
        put_bits(&s->pb, 1, s->concealment_motion_vectors);
        put_bits(&s->pb, 1, s->q_scale_type);
        put_bits(&s->pb, 1, s->intra_vlc_format);
        put_bits(&s->pb, 1, s->alternate_scan);
        put_bits(&s->pb, 1, s->repeat_first_field);
        s->progressive_frame = s->progressive_sequence;
        /* chroma_420_type */
        put_bits(&s->pb, 1, s->chroma_format ==
                            CHROMA_420 ? s->progressive_frame : 0);
        put_bits(&s->pb, 1, s->progressive_frame);
        put_bits(&s->pb, 1, 0);                 /* composite_display_flag */
    }
    if (s->scan_offset) {
        int i;

        put_header(s, USER_START_CODE);
        for (i = 0; i < sizeof(svcd_scan_offset_placeholder); i++)
            put_bits(&s->pb, 8, svcd_scan_offset_placeholder[i]);
    }
    side_data = av_frame_get_side_data(s->current_picture_ptr->f,
                                       AV_FRAME_DATA_STEREO3D);
    if (side_data) {
        AVStereo3D *stereo = (AVStereo3D *)side_data->data;
        uint8_t fpa_type;

        switch (stereo->type) {
        case AV_STEREO3D_SIDEBYSIDE:
            fpa_type = 0x03;
            break;
        case AV_STEREO3D_TOPBOTTOM:
            fpa_type = 0x04;
            break;
        case AV_STEREO3D_2D:
            fpa_type = 0x08;
            break;
        case AV_STEREO3D_SIDEBYSIDE_QUINCUNX:
            fpa_type = 0x23;
            break;
        default:
            fpa_type = 0;
            break;
        }

        if (fpa_type != 0) {
            put_header(s, USER_START_CODE);
            put_bits(&s->pb, 8, 'J');       // S3D_video_format_signaling_identifier
            put_bits(&s->pb, 8, 'P');
            put_bits(&s->pb, 8, '3');
            put_bits(&s->pb, 8, 'D');
            put_bits(&s->pb, 8, 0x03);      // S3D_video_format_length

            put_bits(&s->pb, 1, 1);         // reserved_bit
            put_bits(&s->pb, 7, fpa_type);  // S3D_video_format_type
            put_bits(&s->pb, 8, 0x04);      // reserved_data[0]
            put_bits(&s->pb, 8, 0xFF);      // reserved_data[1]
        }
    }

    s->mb_y = 0;
    ff_mpeg1_encode_slice_header(s);
}
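
/* vbv_delay is written above as the 0xFFFF placeholder; s->vbv_delay_ptr
 * records its byte position so that the rate control in mpegvideo_enc can
 * patch in the real value once the final frame size is known. */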
static inline void put_mb_modes(MpegEncContext *s, int n, int bits,
                                int has_mv, int field_motion)
{
    put_bits(&s->pb, n, bits);
    if (!s->frame_pred_frame_dct) {
        if (has_mv)
            /* motion_type: frame/field */
            put_bits(&s->pb, 2, 2 - field_motion);
        put_bits(&s->pb, 1, s->interlaced_dct);
    }
}

// RAL: Parameter added: f_or_b_code
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code)
{
    if (val == 0) {
        /* zero vector */
        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[0][1],
                 ff_mpeg12_mbMotionVectorTable[0][0]);
    } else {
        int code, sign, bits;
        int bit_size = f_or_b_code - 1;
        int range    = 1 << bit_size;
        /* modulo encoding */
        val = sign_extend(val, 5 + bit_size);

        if (val >= 0) {
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 0;
        } else {
            val = -val;
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 1;
        }

        av_assert2(code > 0 && code <= 16);

        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[code][1],
                 ff_mpeg12_mbMotionVectorTable[code][0]);

        put_bits(&s->pb, 1, sign);
        if (bit_size > 0)
            put_bits(&s->pb, bit_size, bits);
    }
}
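
/* Worked example of the modulo MV coding above: with f_or_b_code == 2,
 * bit_size is 1 and range is 2, and sign_extend(val, 6) first wraps the
 * differential into [-32, 31].  A differential of +3 then becomes val = 2,
 * code = 2, sign = 0 and one residual bit of 0, i.e. the VLC for
 * motion_code 2, a sign bit and a single residual bit. */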
static inline void encode_dc(MpegEncContext *s, int diff, int component)
{
    unsigned int diff_u = diff + 255;
    if (diff_u >= 511) {
        int index;

        if (diff < 0) {
            index = av_log2_16bit(-2 * diff);
            diff--;
        } else {
            index = av_log2_16bit(2 * diff);
        }
        if (component == 0)
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_lum_bits[index] + index,
                     (ff_mpeg12_vlc_dc_lum_code[index] << index) +
                     av_mod_uintp2(diff, index));
        else
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_chroma_bits[index] + index,
                     (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
                     av_mod_uintp2(diff, index));
    } else {
        if (component == 0)
            put_bits(&s->pb,
                     mpeg1_lum_dc_uni[diff + 255] & 0xFF,
                     mpeg1_lum_dc_uni[diff + 255] >> 8);
        else
            put_bits(&s->pb,
                     mpeg1_chr_dc_uni[diff + 255] & 0xFF,
                     mpeg1_chr_dc_uni[diff + 255] >> 8);
    }
}
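
/* mpeg1_lum_dc_uni[] / mpeg1_chr_dc_uni[] are built in ff_mpeg1_encode_init()
 * for DC differences in [-255, 255]; each entry packs the length in bits into
 * the low 8 bits and the code itself into the upper bits, which is why the
 * fast path above writes (entry & 0xFF) bits taken from (entry >> 8). */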
static void mpeg1_encode_block(MpegEncContext *s, int16_t *block, int n)
{
    int alevel, level, last_non_zero, dc, diff, i, j, run, last_index, sign;
    int code, component;
    const uint16_t (*table_vlc)[2] = ff_rl_mpeg1.table_vlc;

    last_index = s->block_last_index[n];

    /* DC coef */
    if (s->mb_intra) {
        component = (n <= 3 ? 0 : (n & 1) + 1);
        dc        = block[0];                   /* overflow is impossible */
        diff      = dc - s->last_dc[component];
        encode_dc(s, diff, component);
        s->last_dc[component] = dc;
        i = 1;
        if (s->intra_vlc_format)
            table_vlc = ff_rl_mpeg2.table_vlc;
    } else {
        /* encode the first coefficient: needs to be done here because
         * it is handled slightly differently */
        level = block[0];
        if (abs(level) == 1) {
            code = ((uint32_t)level >> 31);     /* the sign bit */
            put_bits(&s->pb, 2, code | 0x02);
            i = 1;
        } else {
            i = 0;
            last_non_zero = -1;
            goto next_coef;
        }
    }

    /* now quantify & encode AC coefs */
    last_non_zero = i - 1;
    for (; i <= last_index; i++) {
        j     = s->intra_scantable.permutated[i];
        level = block[j];

next_coef:
        /* encode using VLC */
        if (level != 0) {
            run = i - last_non_zero - 1;

            alevel = level;
            MASK_ABS(sign, alevel);
            sign &= 1;

            if (alevel <= mpeg1_max_level[0][run]) {
                code = mpeg1_index_run[0][run] + alevel - 1;
                /* store the VLC & sign at once */
                put_bits(&s->pb, table_vlc[code][1] + 1,
                         (table_vlc[code][0] << 1) + sign);
            } else {
                /* escape seems to be pretty rare <5% so I do not optimize it */
                put_bits(&s->pb, table_vlc[111][1], table_vlc[111][0]);

                /* escape: only clip in this case */
                put_bits(&s->pb, 6, run);
                if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
                    if (alevel < 128) {
                        put_sbits(&s->pb, 8, level);
                    } else {
                        if (level < 0)
                            put_bits(&s->pb, 16, 0x8001 + level + 255);
                        else
                            put_sbits(&s->pb, 16, level);
                    }
                } else {
                    put_sbits(&s->pb, 12, level);
                }
            }
            last_non_zero = i;
        }
    }
    /* end of block */
    put_bits(&s->pb, table_vlc[112][1], table_vlc[112][0]);
}
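
/* In ff_rl_mpeg1/ff_rl_mpeg2, table_vlc[111] is the escape code and
 * table_vlc[112] is the end-of-block code (rl->n == 111); those are the
 * hard-coded indices used above and in init_uni_ac_vlc(). */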
static av_always_inline void mpeg1_encode_mb_internal(MpegEncContext *s,
                                                      int16_t block[8][64],
                                                      int motion_x, int motion_y,
                                                      int mb_block_count)
{
    int i, cbp;
    const int mb_x     = s->mb_x;
    const int mb_y     = s->mb_y;
    const int first_mb = mb_x == s->resync_mb_x && mb_y == s->resync_mb_y;

    /* compute cbp */
    cbp = 0;
    for (i = 0; i < mb_block_count; i++)
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (mb_block_count - 1 - i);

    if (cbp == 0 && !first_mb && s->mv_type == MV_TYPE_16X16 &&
        (mb_x != s->mb_width - 1 ||
         (mb_y != s->end_mb_y - 1 && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)) &&
        ((s->pict_type == AV_PICTURE_TYPE_P && (motion_x | motion_y) == 0) ||
         (s->pict_type == AV_PICTURE_TYPE_B && s->mv_dir == s->last_mv_dir &&
          (((s->mv_dir & MV_DIR_FORWARD)
            ? ((s->mv[0][0][0] - s->last_mv[0][0][0]) |
               (s->mv[0][0][1] - s->last_mv[0][0][1])) : 0) |
           ((s->mv_dir & MV_DIR_BACKWARD)
            ? ((s->mv[1][0][0] - s->last_mv[1][0][0]) |
               (s->mv[1][0][1] - s->last_mv[1][0][1])) : 0)) == 0))) {
        s->mb_skip_run++;
        s->qscale -= s->dquant;
        s->skip_count++;
        s->misc_bits++;
        s->last_bits++;
        if (s->pict_type == AV_PICTURE_TYPE_P) {
            s->last_mv[0][0][0] =
            s->last_mv[0][0][1] =
            s->last_mv[0][1][0] =
            s->last_mv[0][1][1] = 0;
        }
    } else {
        if (first_mb) {
            av_assert0(s->mb_skip_run == 0);
            encode_mb_skip_run(s, s->mb_x);
        } else {
            encode_mb_skip_run(s, s->mb_skip_run);
        }

        if (s->pict_type == AV_PICTURE_TYPE_I) {
            if (s->dquant && cbp) {
                /* macroblock_type: macroblock_quant = 1 */
                put_mb_modes(s, 2, 1, 0, 0);
                put_qscale(s);
            } else {
                /* macroblock_type: macroblock_quant = 0 */
                put_mb_modes(s, 1, 1, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
        } else if (s->mb_intra) {
            if (s->dquant && cbp) {
                put_mb_modes(s, 6, 0x01, 0, 0);
                put_qscale(s);
            } else {
                put_mb_modes(s, 5, 0x03, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
            memset(s->last_mv, 0, sizeof(s->last_mv));
        } else if (s->pict_type == AV_PICTURE_TYPE_P) {
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp != 0) {
                    if ((motion_x | motion_y) == 0) {
                        if (s->dquant) {
                            /* macroblock_pattern & quant */
                            put_mb_modes(s, 5, 1, 0, 0);
                            put_qscale(s);
                        } else {
                            /* macroblock_pattern only */
                            put_mb_modes(s, 2, 1, 0, 0);
                        }
                        s->misc_bits += get_bits_diff(s);
                    } else {
                        if (s->dquant) {
                            put_mb_modes(s, 5, 2, 1, 0);    /* motion + cbp */
                            put_qscale(s);
                        } else {
                            put_mb_modes(s, 1, 1, 1, 0);    /* motion + cbp */
                        }
                        s->misc_bits += get_bits_diff(s);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_x - s->last_mv[0][0][0],
                                            s->f_code);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_y - s->last_mv[0][0][1],
                                            s->f_code);
                        s->mv_bits += get_bits_diff(s);
                    }
                } else {
                    put_bits(&s->pb, 3, 1);         /* motion only */
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2);     /* motion_type: frame */
                    s->misc_bits += get_bits_diff(s);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_x - s->last_mv[0][0][0],
                                        s->f_code);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_y - s->last_mv[0][0][1],
                                        s->f_code);
                    s->qscale  -= s->dquant;
                    s->mv_bits += get_bits_diff(s);
                }
                s->last_mv[0][1][0] = s->last_mv[0][0][0] = motion_x;
                s->last_mv[0][1][1] = s->last_mv[0][0][1] = motion_y;
            } else {
                av_assert2(!s->frame_pred_frame_dct && s->mv_type == MV_TYPE_FIELD);

                if (cbp) {
                    if (s->dquant) {
                        put_mb_modes(s, 5, 2, 1, 1);    /* motion + cbp */
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 1, 1, 1, 1);    /* motion + cbp */
                    }
                } else {
                    put_bits(&s->pb, 3, 1);             /* motion only */
                    put_bits(&s->pb, 2, 1);             /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                for (i = 0; i < 2; i++) {
                    put_bits(&s->pb, 1, s->field_select[0][i]);
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][0] - s->last_mv[0][i][0],
                                        s->f_code);
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                        s->f_code);
                    s->last_mv[0][i][0] = s->mv[0][i][0];
                    s->last_mv[0][i][1] = 2 * s->mv[0][i][1];
                }
                s->mv_bits += get_bits_diff(s);
            }
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
            s->f_count++;
        } else {
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp) {                      // With coded block pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 0);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 0);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 0);
                    }
                } else {                        // No coded block pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][0] - s->last_mv[0][0][0],
                                        s->f_code);
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][1] - s->last_mv[0][0][1],
                                        s->f_code);
                    s->last_mv[0][0][0] =
                    s->last_mv[0][1][0] = s->mv[0][0][0];
                    s->last_mv[0][0][1] =
                    s->last_mv[0][1][1] = s->mv[0][0][1];
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][0] - s->last_mv[1][0][0],
                                        s->b_code);
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][1] - s->last_mv[1][0][1],
                                        s->b_code);
                    s->last_mv[1][0][0] =
                    s->last_mv[1][1][0] = s->mv[1][0][0];
                    s->last_mv[1][0][1] =
                    s->last_mv[1][1][1] = s->mv[1][0][1];
                    s->b_count++;
                }
            } else {
                av_assert2(s->mv_type == MV_TYPE_FIELD);
                av_assert2(!s->frame_pred_frame_dct);
                if (cbp) {                      // With coded block pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 1);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 1);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 1);
                    }
                } else {                        // No coded block pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    put_bits(&s->pb, 2, 1);     /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[0][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][0] - s->last_mv[0][i][0],
                                            s->f_code);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                            s->f_code);
                        s->last_mv[0][i][0] = s->mv[0][i][0];
                        s->last_mv[0][i][1] = s->mv[0][i][1] * 2;
                    }
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[1][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][0] - s->last_mv[1][i][0],
                                            s->b_code);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][1] - (s->last_mv[1][i][1] >> 1),
                                            s->b_code);
                        s->last_mv[1][i][0] = s->mv[1][i][0];
                        s->last_mv[1][i][1] = s->mv[1][i][1] * 2;
                    }
                    s->b_count++;
                }
            }
            s->mv_bits += get_bits_diff(s);
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
        }
        for (i = 0; i < mb_block_count; i++)
            if (cbp & (1 << (mb_block_count - 1 - i)))
                mpeg1_encode_block(s, block[i], i);
        s->mb_skip_run = 0;
        if (s->mb_intra)
            s->i_tex_bits += get_bits_diff(s);
        else
            s->p_tex_bits += get_bits_diff(s);
    }
}

void ff_mpeg1_encode_mb(MpegEncContext *s, int16_t block[8][64],
                        int motion_x, int motion_y)
{
    if (s->chroma_format == CHROMA_420)
        mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 6);
    else
        mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 8);
}

av_cold void ff_mpeg1_encode_init(MpegEncContext *s)
{
    static int done = 0;

    ff_mpeg12_common_init(s);

    if (!done) {
        int f_code;
        int mv;
        int i;

        done = 1;
        ff_rl_init(&ff_rl_mpeg1, ff_mpeg12_static_rl_table_store[0]);
        ff_rl_init(&ff_rl_mpeg2, ff_mpeg12_static_rl_table_store[1]);

        for (i = 0; i < 64; i++) {
            mpeg1_max_level[0][i] = ff_rl_mpeg1.max_level[0][i];
            mpeg1_index_run[0][i] = ff_rl_mpeg1.index_run[0][i];
        }

        init_uni_ac_vlc(&ff_rl_mpeg1, uni_mpeg1_ac_vlc_len);
        if (s->intra_vlc_format)
            init_uni_ac_vlc(&ff_rl_mpeg2, uni_mpeg2_ac_vlc_len);

        /* build unified dc encoding tables */
        for (i = -255; i < 256; i++) {
            int adiff, index;
            int bits, code;
            int diff = i;

            adiff = FFABS(diff);
            if (diff < 0)
                diff--;
            index = av_log2(2 * adiff);

            bits = ff_mpeg12_vlc_dc_lum_bits[index] + index;
            code = (ff_mpeg12_vlc_dc_lum_code[index] << index) +
                   av_mod_uintp2(diff, index);
            mpeg1_lum_dc_uni[i + 255] = bits + (code << 8);

            bits = ff_mpeg12_vlc_dc_chroma_bits[index] + index;
            code = (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
                   av_mod_uintp2(diff, index);
            mpeg1_chr_dc_uni[i + 255] = bits + (code << 8);
        }

        for (f_code = 1; f_code <= MAX_FCODE; f_code++)
            for (mv = -MAX_MV; mv <= MAX_MV; mv++) {
                int len;

                if (mv == 0) {
                    len = ff_mpeg12_mbMotionVectorTable[0][1];
                } else {
                    int val, bit_size, code;

                    bit_size = f_code - 1;

                    val = mv;
                    if (val < 0)
                        val = -val;
                    val--;
                    code = (val >> bit_size) + 1;
                    if (code < 17)
                        len = ff_mpeg12_mbMotionVectorTable[code][1] +
                              1 + bit_size;
                    else
                        len = ff_mpeg12_mbMotionVectorTable[16][1] +
                              2 + bit_size;
                }

                mv_penalty[f_code][mv + MAX_MV] = len;
            }

        for (f_code = MAX_FCODE; f_code > 0; f_code--)
            for (mv = -(8 << f_code); mv < (8 << f_code); mv++)
                fcode_tab[mv + MAX_MV] = f_code;
    }
    s->me.mv_penalty = mv_penalty;
    s->fcode_tab     = fcode_tab;
    if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
        s->min_qcoeff = -255;
        s->max_qcoeff = 255;
    } else {
        s->min_qcoeff = -2047;
        s->max_qcoeff = 2047;
    }
    if (s->intra_vlc_format) {
        s->intra_ac_vlc_length      =
        s->intra_ac_vlc_last_length = uni_mpeg2_ac_vlc_len;
    } else {
        s->intra_ac_vlc_length      =
        s->intra_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
    }
    s->inter_ac_vlc_length      =
    s->inter_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
}

#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
#define COMMON_OPTS                                                           \
    { "gop_timecode",        "MPEG GOP Timecode in hh:mm:ss[:;.]ff format",   \
      OFFSET(tc_opt_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, VE },\
    { "intra_vlc",           "Use MPEG-2 intra VLC table.",                   \
      OFFSET(intra_vlc_format),    AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "drop_frame_timecode", "Timecode is in drop frame format.",             \
      OFFSET(drop_frame_timecode), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "scan_offset",         "Reserve space for SVCD scan offset user data.", \
      OFFSET(scan_offset),         AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },

static const AVOption mpeg1_options[] = {
    COMMON_OPTS
    FF_MPV_COMMON_OPTS
    { NULL },
};

static const AVOption mpeg2_options[] = {
    COMMON_OPTS
    { "non_linear_quant", "Use nonlinear quantizer.",    OFFSET(q_scale_type),   AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "alternate_scan",   "Enable alternate scantable.", OFFSET(alternate_scan), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "seq_disp_ext",     "Write sequence_display_extension blocks.", OFFSET(seq_disp_ext), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 1, VE, "seq_disp_ext" },
    { "auto",   NULL, 0, AV_OPT_TYPE_CONST, {.i64 = -1}, 0, 0, VE, "seq_disp_ext" },
    { "never",  NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, VE, "seq_disp_ext" },
    { "always", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, VE, "seq_disp_ext" },
    FF_MPV_COMMON_OPTS
    { NULL },
};

#define mpeg12_class(x)                                 \
static const AVClass mpeg ## x ## _class = {            \
    .class_name = "mpeg" # x "video encoder",           \
    .item_name  = av_default_item_name,                 \
    .option     = mpeg ## x ## _options,                \
    .version    = LIBAVUTIL_VERSION_INT,                \
};

mpeg12_class(1)
mpeg12_class(2)

AVCodec ff_mpeg1video_encoder = {
    .name                 = "mpeg1video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-1 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG1VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_mpv_encode_picture,
    .close                = ff_mpv_encode_end,
    .supported_framerates = ff_mpeg12_frame_rate_tab + 1,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg1_class,
};

AVCodec ff_mpeg2video_encoder = {
    .name                 = "mpeg2video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-2 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG2VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_mpv_encode_picture,
    .close                = ff_mpv_encode_end,
    .supported_framerates = ff_mpeg2_frame_rate_tab,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_YUV422P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg2_class,
};
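
/* Rough usage sketch from the public libavcodec API side (error handling
 * omitted):
 *
 *     AVCodec *codec = avcodec_find_encoder_by_name("mpeg2video");
 *     AVCodecContext *ctx = avcodec_alloc_context3(codec);
 *     ctx->width     = 720;
 *     ctx->height    = 576;
 *     ctx->time_base = (AVRational){ 1, 25 };
 *     ctx->pix_fmt   = AV_PIX_FMT_YUV420P;
 *     av_opt_set_int(ctx->priv_data, "intra_vlc", 1, 0);  // private option defined above
 *     avcodec_open2(ctx, codec, NULL);                    // runs encode_init()
 */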