/*
 * MPEG1/2 encoder
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MPEG1/2 encoder
 */

#include <stdint.h>

#include "libavutil/attributes.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/stereo3d.h"

#include "avcodec.h"
#include "bytestream.h"
#include "mathops.h"
#include "mpeg12.h"
#include "mpeg12data.h"
#include "mpegvideo.h"
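
/* Note: maps the encoder's qscale (restricted to 1..12 when the non-linear
 * quantizer is selected, see the assert in put_qscale()) to the 5-bit
 * quantiser_scale_code written into the bitstream. */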
static const uint8_t inv_non_linear_qscale[] = {
    0, 2, 4, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16,
};
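
/* Note: placeholder user data written when the scan_offset option is enabled;
 * it only reserves space for SVCD scan offset information, the actual
 * offsets are not computed here. */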
static const uint8_t svcd_scan_offset_placeholder[] = {
    0x10, 0x0E, 0x00, 0x80, 0x81, 0x00, 0x80,
    0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
};

static uint8_t mv_penalty[MAX_FCODE + 1][MAX_MV * 2 + 1];
static uint8_t fcode_tab[MAX_MV * 2 + 1];

static uint8_t uni_mpeg1_ac_vlc_len[64 * 64 * 2];
static uint8_t uni_mpeg2_ac_vlc_len[64 * 64 * 2];

/* Simple "include everything" tables for the DC coefficients:
 * the low byte is the number of bits, the upper three bytes are the code. */
static uint32_t mpeg1_lum_dc_uni[512];
static uint32_t mpeg1_chr_dc_uni[512];

static uint8_t mpeg1_index_run[2][64];
static int8_t  mpeg1_max_level[2][64];
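
/* Note: builds a table of VLC lengths (sign bit included) for every
 * (run, level) pair, falling back to the cost of the escape code for levels
 * outside the RL table; the result is used for bit-count estimation via
 * s->intra_ac_vlc_length / s->inter_ac_vlc_length set up further below in
 * ff_mpeg1_encode_init(). */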
static av_cold void init_uni_ac_vlc(RLTable *rl, uint8_t *uni_ac_vlc_len)
{
    int i;

    for (i = 0; i < 128; i++) {
        int level = i - 64;
        int run;

        if (!level)
            continue;

        for (run = 0; run < 64; run++) {
            int len, code;
            int alevel = FFABS(level);

            if (alevel > rl->max_level[0][run])
                code = 111;                         /* rl->n */
            else
                code = rl->index_run[0][run] + alevel - 1;

            if (code < 111) {                       /* rl->n */
                /* length of VLC and sign */
                len = rl->table_vlc[code][1] + 1;
            } else {
                len = rl->table_vlc[111][1] + 6;    /* rl->n */

                if (alevel < 128)
                    len += 8;
                else
                    len += 16;
            }

            uni_ac_vlc_len[UNI_AC_ENC_INDEX(run, i)] = len;
        }
    }
}
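
/* Example: for NTSC material with avctx->time_base = 1001/30000 the loop
 * matches ff_mpeg12_frame_rate_tab[4] (30000/1001 fps) exactly, so dmin ends
 * up 0 and the function succeeds; a frame rate not present in the table
 * leaves dmin non-zero and the function returns -1. */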
static int find_frame_rate_index(MpegEncContext *s)
{
    int i;
    int64_t dmin = INT64_MAX;
    int64_t d;

    for (i = 1; i < 14; i++) {
        int64_t n0 = 1001LL / ff_mpeg12_frame_rate_tab[i].den *
                     ff_mpeg12_frame_rate_tab[i].num * s->avctx->time_base.num;
        int64_t n1 = 1001LL * s->avctx->time_base.den;
        if (s->avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL &&
            i >= 9)
            break;

        d = FFABS(n0 - n1);
        if (d < dmin) {
            dmin                = d;
            s->frame_rate_index = i;
        }
    }

    if (dmin)
        return -1;
    else
        return 0;
}
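
/* Note: on top of the generic ff_MPV_encode_init() this checks that the
 * requested frame rate exists in the MPEG-1/2 table, fills in default
 * profile/level values when the caller left them unset, and rejects
 * drop-frame timecode for anything but 30000/1001 fps. */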
static av_cold int encode_init(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;

    if (ff_MPV_encode_init(avctx) < 0)
        return -1;

    if (find_frame_rate_index(s) < 0) {
        if (s->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n",
                   avctx->time_base.den, avctx->time_base.num);
            return -1;
        } else {
            av_log(avctx, AV_LOG_INFO,
                   "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n",
                   avctx->time_base.den, avctx->time_base.num);
        }
    }

    if (avctx->profile == FF_PROFILE_UNKNOWN) {
        if (avctx->level != FF_LEVEL_UNKNOWN) {
            av_log(avctx, AV_LOG_ERROR, "Set profile and level\n");
            return -1;
        }
        /* Main or 4:2:2 */
        avctx->profile = s->chroma_format == CHROMA_420 ? 4 : 0;
    }

    if (avctx->level == FF_LEVEL_UNKNOWN) {
        if (avctx->profile == 0) { /* 4:2:2 */
            if (avctx->width <= 720 && avctx->height <= 608)
                avctx->level = 5; /* Main */
            else
                avctx->level = 2; /* High */
        } else {
            if (avctx->profile != 1 && s->chroma_format != CHROMA_420) {
                av_log(avctx, AV_LOG_ERROR,
                       "Only High(1) and 4:2:2(0) profiles support 4:2:2 color sampling\n");
                return -1;
            }
            if (avctx->width <= 720 && avctx->height <= 576)
                avctx->level = 8; /* Main */
            else if (avctx->width <= 1440)
                avctx->level = 6; /* High 1440 */
            else
                avctx->level = 4; /* High */
        }
    }

    if (s->drop_frame_timecode && s->frame_rate_index != 4) {
        av_log(avctx, AV_LOG_ERROR,
               "Drop frame time code only allowed with 1001/30000 fps\n");
        return -1;
    }

    return 0;
}

static void put_header(MpegEncContext *s, int header)
{
    avpriv_align_put_bits(&s->pb);
    put_bits(&s->pb, 16, header >> 16);
    put_sbits(&s->pb, 16, header);
}

/* put sequence header if needed */
static void mpeg1_encode_sequence_header(MpegEncContext *s)
{
    unsigned int vbv_buffer_size, fps, v;
    int i, constraint_parameter_flag;
    uint64_t time_code;
    float best_aspect_error = 1E10;
    float aspect_ratio      = av_q2d(s->avctx->sample_aspect_ratio);

    if (aspect_ratio == 0.0)
        aspect_ratio = 1.0;             // pixel aspect 1:1 (VGA)

    if (s->current_picture.f.key_frame) {
        AVRational framerate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];

        /* mpeg1 header repeated every gop */
        put_header(s, SEQ_START_CODE);

        put_sbits(&s->pb, 12, s->width);
        put_sbits(&s->pb, 12, s->height);

        for (i = 1; i < 15; i++) {
            float error = aspect_ratio;
            if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO || i <= 1)
                error -= 1.0 / ff_mpeg1_aspect[i];
            else
                error -= av_q2d(ff_mpeg2_aspect[i]) * s->height / s->width;

            error = FFABS(error);

            if (error < best_aspect_error) {
                best_aspect_error    = error;
                s->aspect_ratio_info = i;
            }
        }

        put_bits(&s->pb, 4, s->aspect_ratio_info);
        put_bits(&s->pb, 4, s->frame_rate_index);

        if (s->avctx->rc_max_rate) {
            v = (s->avctx->rc_max_rate + 399) / 400;
            if (v > 0x3ffff && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
                v = 0x3ffff;
        } else {
            v = 0x3FFFF;
        }

        if (s->avctx->rc_buffer_size)
            vbv_buffer_size = s->avctx->rc_buffer_size;
        else
            /* VBV calculation: Scaled so that a VCD has the proper
             * VBV size of 40 kilobytes */
            vbv_buffer_size = ((20 * s->bit_rate) / (1151929 / 2)) * 8 * 1024;
        vbv_buffer_size = (vbv_buffer_size + 16383) / 16384;
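        /* e.g. at the nominal VCD rate of 1151929 bit/s the two lines above
         * yield 20 units of 16384 bits, i.e. the 40 KiB VCD VBV. */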

        put_sbits(&s->pb, 18, v);
        put_bits(&s->pb, 1, 1);                 // marker
        put_sbits(&s->pb, 10, vbv_buffer_size);

        constraint_parameter_flag =
            s->width  <= 768                                                    &&
            s->height <= 576                                                    &&
            s->mb_width * s->mb_height                 <= 396                   &&
            s->mb_width * s->mb_height * framerate.num <= 396 * 25 * framerate.den &&
            framerate.num <= framerate.den * 30                                 &&
            s->avctx->me_range                                                  &&
            s->avctx->me_range < 128                                            &&
            vbv_buffer_size <= 20                                               &&
            v <= 1856000 / 400                                                  &&
            s->codec_id == AV_CODEC_ID_MPEG1VIDEO;

        put_bits(&s->pb, 1, constraint_parameter_flag);

        ff_write_quant_matrix(&s->pb, s->avctx->intra_matrix);
        ff_write_quant_matrix(&s->pb, s->avctx->inter_matrix);

        if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
            put_header(s, EXT_START_CODE);
            put_bits(&s->pb, 4, 1);                 // seq ext

            put_bits(&s->pb, 1, s->avctx->profile == 0); // escape bit: 1 for the 4:2:2 profile

            put_bits(&s->pb, 3, s->avctx->profile); // profile
            put_bits(&s->pb, 4, s->avctx->level);   // level

            put_bits(&s->pb, 1, s->progressive_sequence);
            put_bits(&s->pb, 2, s->chroma_format);
            put_bits(&s->pb, 2, s->width  >> 12);
            put_bits(&s->pb, 2, s->height >> 12);
            put_bits(&s->pb, 12, v >> 18);          // bitrate ext
            put_bits(&s->pb, 1, 1);                 // marker
            put_bits(&s->pb, 8, vbv_buffer_size >> 10); // vbv buffer ext
            put_bits(&s->pb, 1, s->low_delay);
            put_bits(&s->pb, 2, 0);                 // frame_rate_ext_n
            put_bits(&s->pb, 5, 0);                 // frame_rate_ext_d
        }

        put_header(s, GOP_START_CODE);
        put_bits(&s->pb, 1, s->drop_frame_timecode);    // drop frame flag
        /* time code: we must convert from the real frame rate to a
         * fake MPEG frame rate in case of low frame rate */
        fps       = (framerate.num + framerate.den / 2) / framerate.den;
        time_code = s->current_picture_ptr->f.coded_picture_number +
                    s->avctx->timecode_frame_start;

        s->gop_picture_number = s->current_picture_ptr->f.coded_picture_number;
        if (s->drop_frame_timecode) {
            /* only works for NTSC 29.97 */
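            /* Drop-frame counting skips frame numbers 0 and 1 at the start of
             * every minute except every tenth minute: 17982 frames per ten
             * minutes (10 * 60 * 30 - 9 * 2) and 1798 frames in each minute
             * where two numbers are dropped (60 * 30 - 2). */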
            int d = time_code / 17982;
            int m = time_code % 17982;
            /* not needed since -2,-1 / 1798 in C returns 0 */
            // if (m < 2)
            //     m += 2;
            time_code += 18 * d + 2 * ((m - 2) / 1798);
        }
        put_bits(&s->pb, 5, (uint32_t)((time_code / (fps * 3600)) % 24));
        put_bits(&s->pb, 6, (uint32_t)((time_code / (fps * 60)) % 60));
        put_bits(&s->pb, 1, 1);
        put_bits(&s->pb, 6, (uint32_t)((time_code / fps) % 60));
        put_bits(&s->pb, 6, (uint32_t)((time_code % fps)));
        put_bits(&s->pb, 1, !!(s->flags & CODEC_FLAG_CLOSED_GOP));
        put_bits(&s->pb, 1, 0);                     // broken link
    }
}

static inline void encode_mb_skip_run(MpegEncContext *s, int run)
{
    while (run >= 33) {
        put_bits(&s->pb, 11, 0x008);
        run -= 33;
    }
    put_bits(&s->pb, ff_mpeg12_mbAddrIncrTable[run][1],
             ff_mpeg12_mbAddrIncrTable[run][0]);
}

static av_always_inline void put_qscale(MpegEncContext *s)
{
    if (s->q_scale_type) {
        assert(s->qscale >= 1 && s->qscale <= 12);
        put_bits(&s->pb, 5, inv_non_linear_qscale[s->qscale]);
    } else {
        put_bits(&s->pb, 5, s->qscale);
    }
}

void ff_mpeg1_encode_slice_header(MpegEncContext *s)
{
    if (s->height > 2800) {
        put_header(s, SLICE_MIN_START_CODE + (s->mb_y & 127));
        /* slice_vertical_position_extension */
        put_bits(&s->pb, 3, s->mb_y >> 7);
    } else {
        put_header(s, SLICE_MIN_START_CODE + s->mb_y);
    }
    put_qscale(s);
    /* slice extra information */
    put_bits(&s->pb, 1, 0);
}

void ff_mpeg1_encode_picture_header(MpegEncContext *s, int picture_number)
{
    AVFrameSideData *side_data;
    mpeg1_encode_sequence_header(s);

    /* mpeg1 picture header */
    put_header(s, PICTURE_START_CODE);
    /* temporal reference */

    // RAL: s->picture_number instead of s->fake_picture_number
    put_bits(&s->pb, 10,
             (s->picture_number - s->gop_picture_number) & 0x3ff);
    put_bits(&s->pb, 3, s->pict_type);

    s->vbv_delay_ptr = s->pb.buf + put_bits_count(&s->pb) / 8;
    put_bits(&s->pb, 16, 0xFFFF);               /* vbv_delay */

    // RAL: Forward f_code also needed for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_P ||
        s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->f_code);     /* forward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* forward_f_code */
    }

    // RAL: Backward f_code necessary for B-frames
    if (s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0);                 /* half pel coordinates */
        if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->b_code);     /* backward_f_code */
        else
            put_bits(&s->pb, 3, 7);             /* backward_f_code */
    }

    put_bits(&s->pb, 1, 0);                     /* extra bit picture */

    s->frame_pred_frame_dct = 1;
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        put_header(s, EXT_START_CODE);
        put_bits(&s->pb, 4, 8);                 /* pic ext */
        if (s->pict_type == AV_PICTURE_TYPE_P ||
            s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->f_code);
            put_bits(&s->pb, 4, s->f_code);
        } else {
            put_bits(&s->pb, 8, 255);
        }
        if (s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->b_code);
            put_bits(&s->pb, 4, s->b_code);
        } else {
            put_bits(&s->pb, 8, 255);
        }
        put_bits(&s->pb, 2, s->intra_dc_precision);

        assert(s->picture_structure == PICT_FRAME);
        put_bits(&s->pb, 2, s->picture_structure);
        if (s->progressive_sequence)
            put_bits(&s->pb, 1, 0);             /* no repeat */
        else
            put_bits(&s->pb, 1, s->current_picture_ptr->f.top_field_first);
        /* XXX: optimize the generation of this flag with entropy measures */
        s->frame_pred_frame_dct = s->progressive_sequence;

        put_bits(&s->pb, 1, s->frame_pred_frame_dct);
        put_bits(&s->pb, 1, s->concealment_motion_vectors);
        put_bits(&s->pb, 1, s->q_scale_type);
        put_bits(&s->pb, 1, s->intra_vlc_format);
        put_bits(&s->pb, 1, s->alternate_scan);
        put_bits(&s->pb, 1, s->repeat_first_field);
        s->progressive_frame = s->progressive_sequence;
        /* chroma_420_type */
        put_bits(&s->pb, 1, s->chroma_format ==
                            CHROMA_420 ? s->progressive_frame : 0);
        put_bits(&s->pb, 1, s->progressive_frame);
        put_bits(&s->pb, 1, 0);                 /* composite_display_flag */
    }
    if (s->scan_offset) {
        int i;

        put_header(s, USER_START_CODE);
        for (i = 0; i < sizeof(svcd_scan_offset_placeholder); i++)
            put_bits(&s->pb, 8, svcd_scan_offset_placeholder[i]);
    }
    side_data = av_frame_get_side_data(&s->current_picture_ptr->f,
                                       AV_FRAME_DATA_STEREO3D);
    if (side_data) {
        AVStereo3D *stereo = (AVStereo3D *)side_data->data;
        uint8_t fpa_type;

        switch (stereo->type) {
        case AV_STEREO3D_SIDEBYSIDE:
            fpa_type = 0x03;
            break;
        case AV_STEREO3D_TOPBOTTOM:
            fpa_type = 0x04;
            break;
        case AV_STEREO3D_2D:
            fpa_type = 0x08;
            break;
        case AV_STEREO3D_SIDEBYSIDE_QUINCUNX:
            fpa_type = 0x23;
            break;
        default:
            fpa_type = 0;
            break;
        }

        if (fpa_type != 0) {
            put_header(s, USER_START_CODE);
            put_bits(&s->pb, 8, 'J');       // S3D_video_format_signaling_identifier
            put_bits(&s->pb, 8, 'P');
            put_bits(&s->pb, 8, '3');
            put_bits(&s->pb, 8, 'D');
            put_bits(&s->pb, 8, 0x03);      // S3D_video_format_length

            put_bits(&s->pb, 1, 1);         // reserved_bit
            put_bits(&s->pb, 7, fpa_type);  // S3D_video_format_type
            put_bits(&s->pb, 8, 0x04);      // reserved_data[0]
            put_bits(&s->pb, 8, 0xFF);      // reserved_data[1]
        }
    }

    s->mb_y = 0;
    ff_mpeg1_encode_slice_header(s);
}

static inline void put_mb_modes(MpegEncContext *s, int n, int bits,
                                int has_mv, int field_motion)
{
    put_bits(&s->pb, n, bits);
    if (!s->frame_pred_frame_dct) {
        if (has_mv)
            /* motion_type: frame/field */
            put_bits(&s->pb, 2, 2 - field_motion);
        put_bits(&s->pb, 1, s->interlaced_dct);
    }
}
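
/* Note: motion vector differences are coded modulo the f_code range.
 * sign_extend(val, 5 + bit_size) wraps the difference into
 * [-(16 << bit_size), (16 << bit_size) - 1]; the wrapped value is then split
 * into a VLC-coded magnitude class (code 1..16), a sign bit and bit_size raw
 * residual bits. */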
// RAL: Parameter added: f_or_b_code
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code)
{
    if (val == 0) {
        /* zero vector */
        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[0][1],
                 ff_mpeg12_mbMotionVectorTable[0][0]);
    } else {
        int code, sign, bits;
        int bit_size = f_or_b_code - 1;
        int range    = 1 << bit_size;
        /* modulo encoding */
        val = sign_extend(val, 5 + bit_size);

        if (val >= 0) {
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 0;
        } else {
            val = -val;
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 1;
        }

        assert(code > 0 && code <= 16);

        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[code][1],
                 ff_mpeg12_mbMotionVectorTable[code][0]);

        put_bits(&s->pb, 1, sign);
        if (bit_size > 0)
            put_bits(&s->pb, bit_size, bits);
    }
}
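
/* Note: DC differences within [-255, 255] go through the precomputed
 * mpeg1_lum_dc_uni / mpeg1_chr_dc_uni tables (low byte = length, upper
 * bytes = code); larger differences, which can occur with MPEG-2's higher
 * intra DC precision, are assembled on the fly from the size category and
 * the residual bits. */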
static inline void encode_dc(MpegEncContext *s, int diff, int component)
{
    if (((unsigned) (diff + 255)) >= 511) {
        int index;

        if (diff < 0) {
            index = av_log2_16bit(-2 * diff);
            diff--;
        } else {
            index = av_log2_16bit(2 * diff);
        }
        if (component == 0)
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_lum_bits[index] + index,
                     (ff_mpeg12_vlc_dc_lum_code[index] << index) +
                     (diff & ((1 << index) - 1)));
        else
            put_bits(&s->pb,
                     ff_mpeg12_vlc_dc_chroma_bits[index] + index,
                     (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
                     (diff & ((1 << index) - 1)));
    } else {
        if (component == 0)
            put_bits(&s->pb,
                     mpeg1_lum_dc_uni[diff + 255] & 0xFF,
                     mpeg1_lum_dc_uni[diff + 255] >> 8);
        else
            put_bits(&s->pb,
                     mpeg1_chr_dc_uni[diff + 255] & 0xFF,
                     mpeg1_chr_dc_uni[diff + 255] >> 8);
    }
}
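
/* Note: writes one 8x8 block: the DC coefficient (predicted from last_dc for
 * intra blocks), then run/level pairs with the MPEG-1 AC table (or the MPEG-2
 * intra table when intra_vlc_format is set), using the escape code for levels
 * the table cannot represent; indices 111 and 112 of the VLC table are the
 * escape and end-of-block entries. */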
static void mpeg1_encode_block(MpegEncContext *s, int16_t *block, int n)
{
    int alevel, level, last_non_zero, dc, diff, i, j, run, last_index, sign;
    int code, component;
    const uint16_t (*table_vlc)[2] = ff_rl_mpeg1.table_vlc;

    last_index = s->block_last_index[n];

    /* DC coef */
    if (s->mb_intra) {
        component = (n <= 3 ? 0 : (n & 1) + 1);
        dc        = block[0];               /* overflow is impossible */
        diff      = dc - s->last_dc[component];
        encode_dc(s, diff, component);
        s->last_dc[component] = dc;
        i = 1;
        if (s->intra_vlc_format)
            table_vlc = ff_rl_mpeg2.table_vlc;
    } else {
        /* encode the first coefficient: needs to be done here because
         * it is handled slightly differently */
        level = block[0];
        if (abs(level) == 1) {
            code = ((uint32_t)level >> 31);     /* the sign bit */
            put_bits(&s->pb, 2, code | 0x02);
            i = 1;
        } else {
            i             = 0;
            last_non_zero = -1;
            goto next_coef;
        }
    }

    /* now quantify & encode AC coefs */
    last_non_zero = i - 1;
    for (; i <= last_index; i++) {
        j     = s->intra_scantable.permutated[i];
        level = block[j];

next_coef:
        /* encode using VLC */
        if (level != 0) {
            run = i - last_non_zero - 1;

            alevel = level;
            MASK_ABS(sign, alevel);
            sign &= 1;

            if (alevel <= mpeg1_max_level[0][run]) {
                code = mpeg1_index_run[0][run] + alevel - 1;
                /* store the VLC & sign at once */
                put_bits(&s->pb, table_vlc[code][1] + 1,
                         (table_vlc[code][0] << 1) + sign);
            } else {
                /* escape seems to be pretty rare <5% so I do not optimize it */
                put_bits(&s->pb, table_vlc[111][1], table_vlc[111][0]);
                /* escape: only clip in this case */
                put_bits(&s->pb, 6, run);
                if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
                    if (alevel < 128) {
                        put_sbits(&s->pb, 8, level);
                    } else {
                        if (level < 0)
                            put_bits(&s->pb, 16, 0x8001 + level + 255);
                        else
                            put_sbits(&s->pb, 16, level);
                    }
                } else {
                    put_sbits(&s->pb, 12, level);
                }
            }
            last_non_zero = i;
        }
    }
    /* end of block */
    put_bits(&s->pb, table_vlc[112][1], table_vlc[112][0]);
}
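
/* Note: encodes one macroblock: computes the coded block pattern, decides
 * whether the macroblock can be skipped (skipping is restricted to 16x16
 * motion and is not allowed at slice boundaries), and otherwise writes the
 * skip run, macroblock_type, motion vectors, CBP and the coefficient blocks
 * for the current picture type. */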
static av_always_inline void mpeg1_encode_mb_internal(MpegEncContext *s,
                                                      int16_t block[6][64],
                                                      int motion_x, int motion_y,
                                                      int mb_block_count)
{
    int i, cbp;
    const int mb_x     = s->mb_x;
    const int mb_y     = s->mb_y;
    const int first_mb = mb_x == s->resync_mb_x && mb_y == s->resync_mb_y;

    /* compute cbp */
    cbp = 0;
    for (i = 0; i < mb_block_count; i++)
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (mb_block_count - 1 - i);

    if (cbp == 0 && !first_mb && s->mv_type == MV_TYPE_16X16 &&
        (mb_x != s->mb_width - 1 ||
         (mb_y != s->mb_height - 1 && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)) &&
        ((s->pict_type == AV_PICTURE_TYPE_P && (motion_x | motion_y) == 0) ||
         (s->pict_type == AV_PICTURE_TYPE_B && s->mv_dir == s->last_mv_dir &&
          (((s->mv_dir & MV_DIR_FORWARD)
            ? ((s->mv[0][0][0] - s->last_mv[0][0][0]) |
               (s->mv[0][0][1] - s->last_mv[0][0][1])) : 0) |
           ((s->mv_dir & MV_DIR_BACKWARD)
            ? ((s->mv[1][0][0] - s->last_mv[1][0][0]) |
               (s->mv[1][0][1] - s->last_mv[1][0][1])) : 0)) == 0))) {
        s->mb_skip_run++;
        s->qscale -= s->dquant;
        s->skip_count++;
        s->misc_bits++;
        s->last_bits++;
        if (s->pict_type == AV_PICTURE_TYPE_P) {
            s->last_mv[0][0][0] =
            s->last_mv[0][0][1] =
            s->last_mv[0][1][0] =
            s->last_mv[0][1][1] = 0;
        }
    } else {
        if (first_mb) {
            assert(s->mb_skip_run == 0);
            encode_mb_skip_run(s, s->mb_x);
        } else {
            encode_mb_skip_run(s, s->mb_skip_run);
        }

        if (s->pict_type == AV_PICTURE_TYPE_I) {
            if (s->dquant && cbp) {
                /* macroblock_type: macroblock_quant = 1 */
                put_mb_modes(s, 2, 1, 0, 0);
                put_qscale(s);
            } else {
                /* macroblock_type: macroblock_quant = 0 */
                put_mb_modes(s, 1, 1, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
        } else if (s->mb_intra) {
            if (s->dquant && cbp) {
                put_mb_modes(s, 6, 0x01, 0, 0);
                put_qscale(s);
            } else {
                put_mb_modes(s, 5, 0x03, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits += get_bits_diff(s);
            s->i_count++;
            memset(s->last_mv, 0, sizeof(s->last_mv));
        } else if (s->pict_type == AV_PICTURE_TYPE_P) {
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp != 0) {
                    if ((motion_x | motion_y) == 0) {
                        if (s->dquant) {
                            /* macroblock_pattern & quant */
                            put_mb_modes(s, 5, 1, 0, 0);
                            put_qscale(s);
                        } else {
                            /* macroblock_pattern only */
                            put_mb_modes(s, 2, 1, 0, 0);
                        }
                        s->misc_bits += get_bits_diff(s);
                    } else {
                        if (s->dquant) {
                            put_mb_modes(s, 5, 2, 1, 0);    /* motion + cbp */
                            put_qscale(s);
                        } else {
                            put_mb_modes(s, 1, 1, 1, 0);    /* motion + cbp */
                        }
                        s->misc_bits += get_bits_diff(s);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_x - s->last_mv[0][0][0],
                                            s->f_code);
                        // RAL: f_code parameter added
                        mpeg1_encode_motion(s,
                                            motion_y - s->last_mv[0][0][1],
                                            s->f_code);
                        s->mv_bits += get_bits_diff(s);
                    }
                } else {
                    put_bits(&s->pb, 3, 1);         /* motion only */
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2);     /* motion_type: frame */
                    s->misc_bits += get_bits_diff(s);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_x - s->last_mv[0][0][0],
                                        s->f_code);
                    // RAL: f_code parameter added
                    mpeg1_encode_motion(s,
                                        motion_y - s->last_mv[0][0][1],
                                        s->f_code);
                    s->qscale  -= s->dquant;
                    s->mv_bits += get_bits_diff(s);
                }
                s->last_mv[0][1][0] = s->last_mv[0][0][0] = motion_x;
                s->last_mv[0][1][1] = s->last_mv[0][0][1] = motion_y;
            } else {
                assert(!s->frame_pred_frame_dct && s->mv_type == MV_TYPE_FIELD);

                if (cbp) {
                    if (s->dquant) {
                        put_mb_modes(s, 5, 2, 1, 1);    /* motion + cbp */
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 1, 1, 1, 1);    /* motion + cbp */
                    }
                } else {
                    put_bits(&s->pb, 3, 1);             /* motion only */
                    put_bits(&s->pb, 2, 1);             /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                for (i = 0; i < 2; i++) {
                    put_bits(&s->pb, 1, s->field_select[0][i]);
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][0] - s->last_mv[0][i][0],
                                        s->f_code);
                    mpeg1_encode_motion(s,
                                        s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                        s->f_code);
                    s->last_mv[0][i][0] = s->mv[0][i][0];
                    s->last_mv[0][i][1] = 2 * s->mv[0][i][1];
                }
                s->mv_bits += get_bits_diff(s);
            }
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
            s->f_count++;
        } else {
            if (s->mv_type == MV_TYPE_16X16) {
                if (cbp) {                      // With coded block pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 0);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 0);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 0);
                    }
                } else {                        // No coded block pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][0] - s->last_mv[0][0][0],
                                        s->f_code);
                    mpeg1_encode_motion(s,
                                        s->mv[0][0][1] - s->last_mv[0][0][1],
                                        s->f_code);
                    s->last_mv[0][0][0] =
                    s->last_mv[0][1][0] = s->mv[0][0][0];
                    s->last_mv[0][0][1] =
                    s->last_mv[0][1][1] = s->mv[0][0][1];
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][0] - s->last_mv[1][0][0],
                                        s->b_code);
                    mpeg1_encode_motion(s,
                                        s->mv[1][0][1] - s->last_mv[1][0][1],
                                        s->b_code);
                    s->last_mv[1][0][0] =
                    s->last_mv[1][1][0] = s->mv[1][0][0];
                    s->last_mv[1][0][1] =
                    s->last_mv[1][1][1] = s->mv[1][0][1];
                    s->b_count++;
                }
            } else {
                assert(s->mv_type == MV_TYPE_FIELD);
                assert(!s->frame_pred_frame_dct);
                if (cbp) {                      // With coded block pattern
                    if (s->dquant) {
                        if (s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 1);
                        else
                            put_mb_modes(s, 8 - s->mv_dir, 2, 1, 1);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5 - s->mv_dir, 3, 1, 1);
                    }
                } else {                        // No coded block pattern
                    put_bits(&s->pb, 5 - s->mv_dir, 2);
                    put_bits(&s->pb, 2, 1);     /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir & MV_DIR_FORWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[0][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][0] - s->last_mv[0][i][0],
                                            s->f_code);
                        mpeg1_encode_motion(s,
                                            s->mv[0][i][1] - (s->last_mv[0][i][1] >> 1),
                                            s->f_code);
                        s->last_mv[0][i][0] = s->mv[0][i][0];
                        s->last_mv[0][i][1] = s->mv[0][i][1] * 2;
                    }
                    s->f_count++;
                }
                if (s->mv_dir & MV_DIR_BACKWARD) {
                    for (i = 0; i < 2; i++) {
                        put_bits(&s->pb, 1, s->field_select[1][i]);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][0] - s->last_mv[1][i][0],
                                            s->b_code);
                        mpeg1_encode_motion(s,
                                            s->mv[1][i][1] - (s->last_mv[1][i][1] >> 1),
                                            s->b_code);
                        s->last_mv[1][i][0] = s->mv[1][i][0];
                        s->last_mv[1][i][1] = s->mv[1][i][1] * 2;
                    }
                    s->b_count++;
                }
            }
            s->mv_bits += get_bits_diff(s);
            if (cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp][1],
                             ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb,
                             ff_mpeg12_mbPatTable[cbp >> 2][1],
                             ff_mpeg12_mbPatTable[cbp >> 2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
        }
        for (i = 0; i < mb_block_count; i++)
            if (cbp & (1 << (mb_block_count - 1 - i)))
                mpeg1_encode_block(s, block[i], i);
        s->mb_skip_run = 0;
        if (s->mb_intra)
            s->i_tex_bits += get_bits_diff(s);
        else
            s->p_tex_bits += get_bits_diff(s);
    }
}

void ff_mpeg1_encode_mb(MpegEncContext *s, int16_t block[6][64],
                        int motion_x, int motion_y)
{
    if (s->chroma_format == CHROMA_420)
        mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 6);
    else
        mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 8);
}
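
/* Note: one-time initialization of the static tables above (RL tables,
 * unified DC tables, mv_penalty for motion estimation, fcode_tab), followed
 * by per-context setup: qcoeff limits (+-255 for MPEG-1, +-2047 for MPEG-2)
 * and the AC VLC length tables used for rate estimation. */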
av_cold void ff_mpeg1_encode_init(MpegEncContext *s)
{
    static int done = 0;

    ff_mpeg12_common_init(s);

    if (!done) {
        int f_code;
        int mv;
        int i;

        done = 1;
        ff_init_rl(&ff_rl_mpeg1, ff_mpeg12_static_rl_table_store[0]);
        ff_init_rl(&ff_rl_mpeg2, ff_mpeg12_static_rl_table_store[1]);

        for (i = 0; i < 64; i++) {
            mpeg1_max_level[0][i] = ff_rl_mpeg1.max_level[0][i];
            mpeg1_index_run[0][i] = ff_rl_mpeg1.index_run[0][i];
        }

        init_uni_ac_vlc(&ff_rl_mpeg1, uni_mpeg1_ac_vlc_len);
        if (s->intra_vlc_format)
            init_uni_ac_vlc(&ff_rl_mpeg2, uni_mpeg2_ac_vlc_len);

        /* build unified dc encoding tables */
        for (i = -255; i < 256; i++) {
            int adiff, index;
            int bits, code;
            int diff = i;

            adiff = FFABS(diff);
            if (diff < 0)
                diff--;
            index = av_log2(2 * adiff);

            bits = ff_mpeg12_vlc_dc_lum_bits[index] + index;
            code = (ff_mpeg12_vlc_dc_lum_code[index] << index) +
                   (diff & ((1 << index) - 1));
            mpeg1_lum_dc_uni[i + 255] = bits + (code << 8);

            bits = ff_mpeg12_vlc_dc_chroma_bits[index] + index;
            code = (ff_mpeg12_vlc_dc_chroma_code[index] << index) +
                   (diff & ((1 << index) - 1));
            mpeg1_chr_dc_uni[i + 255] = bits + (code << 8);
        }

        for (f_code = 1; f_code <= MAX_FCODE; f_code++)
            for (mv = -MAX_MV; mv <= MAX_MV; mv++) {
                int len;

                if (mv == 0) {
                    len = ff_mpeg12_mbMotionVectorTable[0][1];
                } else {
                    int val, bit_size, code;

                    bit_size = f_code - 1;

                    val = mv;
                    if (val < 0)
                        val = -val;
                    val--;
                    code = (val >> bit_size) + 1;
                    if (code < 17)
                        len = ff_mpeg12_mbMotionVectorTable[code][1] +
                              1 + bit_size;
                    else
                        len = ff_mpeg12_mbMotionVectorTable[16][1] +
                              2 + bit_size;
                }

                mv_penalty[f_code][mv + MAX_MV] = len;
            }

        for (f_code = MAX_FCODE; f_code > 0; f_code--)
            for (mv = -(8 << f_code); mv < (8 << f_code); mv++)
                fcode_tab[mv + MAX_MV] = f_code;
    }
    s->me.mv_penalty = mv_penalty;
    s->fcode_tab     = fcode_tab;
    if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
        s->min_qcoeff = -255;
        s->max_qcoeff = 255;
    } else {
        s->min_qcoeff = -2047;
        s->max_qcoeff = 2047;
    }
    if (s->intra_vlc_format) {
        s->intra_ac_vlc_length      =
        s->intra_ac_vlc_last_length = uni_mpeg2_ac_vlc_len;
    } else {
        s->intra_ac_vlc_length      =
        s->intra_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
    }
    s->inter_ac_vlc_length      =
    s->inter_ac_vlc_last_length = uni_mpeg1_ac_vlc_len;
}

#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
#define COMMON_OPTS                                                           \
    { "intra_vlc",           "Use MPEG-2 intra VLC table.",                   \
      OFFSET(intra_vlc_format),    AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "drop_frame_timecode", "Timecode is in drop frame format.",             \
      OFFSET(drop_frame_timecode), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE }, \
    { "scan_offset",         "Reserve space for SVCD scan offset user data.", \
      OFFSET(scan_offset),         AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },

static const AVOption mpeg1_options[] = {
    COMMON_OPTS
    FF_MPV_COMMON_OPTS
    { NULL },
};

static const AVOption mpeg2_options[] = {
    COMMON_OPTS
    { "non_linear_quant", "Use nonlinear quantizer.",    OFFSET(q_scale_type),   AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "alternate_scan",   "Enable alternate scantable.", OFFSET(alternate_scan), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    FF_MPV_COMMON_OPTS
    { NULL },
};

#define mpeg12_class(x)                                 \
static const AVClass mpeg ## x ## _class = {            \
    .class_name = "mpeg" # x "video encoder",           \
    .item_name  = av_default_item_name,                 \
    .option     = mpeg ## x ## _options,                \
    .version    = LIBAVUTIL_VERSION_INT,                \
};

mpeg12_class(1)
mpeg12_class(2)

AVCodec ff_mpeg1video_encoder = {
    .name                 = "mpeg1video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-1 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG1VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg12_frame_rate_tab + 1,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg1_class,
};

AVCodec ff_mpeg2video_encoder = {
    .name                 = "mpeg2video",
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-2 video"),
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG2VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg12_frame_rate_tab + 1,
    .pix_fmts             = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                           AV_PIX_FMT_YUV422P,
                                                           AV_PIX_FMT_NONE },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .priv_class           = &mpeg2_class,
};