/*
 * MPEG1/2 encoder
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MPEG1/2 encoder
 */

#include "avcodec.h"
#include "dsputil.h"
#include "mathops.h"
#include "mpegvideo.h"

#include "mpeg12.h"
#include "mpeg12data.h"
#include "bytestream.h"

#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/avassert.h"
#include "libavutil/timecode.h"
static const uint8_t inv_non_linear_qscale[13] = {
    0, 2, 4, 6, 8,
    9, 10, 11, 12, 13, 14, 15, 16,
};

static const uint8_t svcd_scan_offset_placeholder[14] = {
    0x10, 0x0E,
    0x00, 0x80, 0x81,
    0x00, 0x80, 0x81,
    0xff, 0xff, 0xff,
    0xff, 0xff, 0xff,
};
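
/* The placeholder above appears to reserve space for the SVCD "scan information"
 * user-data field; it is only written into the picture header when the
 * scan_offset option is enabled (see ff_mpeg1_encode_picture_header below),
 * with the 0xff bytes presumably rewritten later by authoring tools. */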

static void mpeg1_encode_block(MpegEncContext *s,
                               int16_t *block,
                               int component);
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code); // RAL: f_code parameter added

static uint8_t mv_penalty[MAX_FCODE+1][MAX_MV*2+1];

static uint8_t fcode_tab[MAX_MV*2+1];

static uint8_t uni_mpeg1_ac_vlc_len [64*64*2];
static uint8_t uni_mpeg2_ac_vlc_len [64*64*2];

/* simple "include everything" table for DC: the low byte is the number of bits,
   the upper 3 bytes are the code */
static uint32_t mpeg1_lum_dc_uni[512];
static uint32_t mpeg1_chr_dc_uni[512];

static uint8_t mpeg1_index_run[2][64];
static int8_t  mpeg1_max_level[2][64];

static void init_uni_ac_vlc(RLTable *rl, uint8_t *uni_ac_vlc_len){
    int i;

    for(i=0; i<128; i++){
        int level= i-64;
        int run;
        if (!level)
            continue;
        for(run=0; run<64; run++){
            int len, code;
            int alevel= FFABS(level);

            if (alevel > rl->max_level[0][run])
                code= 111; /*rl->n*/
            else
                code= rl->index_run[0][run] + alevel - 1;

            if (code < 111 /* rl->n */) {
                /* length of vlc and sign */
                len= rl->table_vlc[code][1]+1;
            } else {
                len= rl->table_vlc[111/*rl->n*/][1]+6;

                if (alevel < 128) {
                    len += 8;
                } else {
                    len += 16;
                }
            }

            uni_ac_vlc_len [UNI_AC_ENC_INDEX(run, i)]= len;
        }
    }
}
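
/* A reading of the table built above: for every (run, level) pair,
 * uni_ac_vlc_len caches the number of bits needed to code it, either as a
 * regular VLC plus sign bit or as an escape sequence. These lengths are later
 * exposed through s->intra_ac_vlc_length / s->inter_ac_vlc_length (see
 * ff_mpeg1_encode_init), so the generic mpegvideo rate-distortion code can
 * estimate bit costs without actually writing any bits. */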

static int find_frame_rate_index(MpegEncContext *s){
    int i;
    AVRational bestq= (AVRational){0, 0};
    AVRational ext;
    AVRational target = av_inv_q(s->avctx->time_base);

    for(i=1;i<14;i++) {
        if(s->avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL && i>=9) break;

        for (ext.num=1; ext.num <= 4; ext.num++) {
            for (ext.den=1; ext.den <= 32; ext.den++) {
                AVRational q = av_mul_q(ext, ff_mpeg12_frame_rate_tab[i]);

                if(s->codec_id != AV_CODEC_ID_MPEG2VIDEO && (ext.den!=1 || ext.num!=1))
                    continue;
                if(av_gcd(ext.den, ext.num) != 1)
                    continue;

                if(    bestq.num==0
                    || av_nearer_q(target, bestq, q) < 0
                    || ext.num==1 && ext.den==1 && av_nearer_q(target, bestq, q) == 0){
                    bestq = q;
                    s->frame_rate_index= i;
                    s->mpeg2_frame_rate_ext.num = ext.num;
                    s->mpeg2_frame_rate_ext.den = ext.den;
                }
            }
        }
    }
    if(av_cmp_q(target, bestq))
        return -1;
    else
        return 0;
}
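
/* How the search above maps to the bitstream (a sketch, not normative):
 * MPEG-1 can only signal the fixed frame_rate_code values, so ext must stay
 * 1/1; MPEG-2 additionally writes frame_rate_extension_n/_d in the sequence
 * extension and the effective rate becomes frame_rate_value * ext.num / ext.den
 * (both fields are stored minus one when written). For example, 48 fps can be
 * represented as frame_rate_code 2 (24 fps) with ext = 2/1. */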

static av_cold int encode_init(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;

    if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && avctx->height > 2800)
        avctx->thread_count = 1;

    if(ff_MPV_encode_init(avctx) < 0)
        return -1;

    if(find_frame_rate_index(s) < 0){
        if(s->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL){
            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n", avctx->time_base.den, avctx->time_base.num);
            return -1;
        }else{
            av_log(avctx, AV_LOG_INFO, "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n", avctx->time_base.den, avctx->time_base.num);
        }
    }

    if(avctx->profile == FF_PROFILE_UNKNOWN){
        if(avctx->level != FF_LEVEL_UNKNOWN){
            av_log(avctx, AV_LOG_ERROR, "Set profile and level\n");
            return -1;
        }
        avctx->profile = s->chroma_format == CHROMA_420 ? 4 : 0; /* Main or 4:2:2 */
    }

    if(avctx->level == FF_LEVEL_UNKNOWN){
        if(avctx->profile == 0){ /* 4:2:2 */
            if(avctx->width <= 720 && avctx->height <= 608) avctx->level = 5; /* Main */
            else                                            avctx->level = 2; /* High */
        }else{
            if(avctx->profile != 1 && s->chroma_format != CHROMA_420){
                av_log(avctx, AV_LOG_ERROR, "Only High(1) and 4:2:2(0) profiles support 4:2:2 color sampling\n");
                return -1;
            }
            if(avctx->width <= 720 && avctx->height <= 576) avctx->level = 8; /* Main */
            else if(avctx->width <= 1440)                   avctx->level = 6; /* High 1440 */
            else                                            avctx->level = 4; /* High */
        }
    }
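
    /* As far as I can tell, the numeric codes above follow the MPEG-2
     * profile_and_level_indication values: profile 1 = High, 4 = Main, and 0
     * with the escape bit set = 4:2:2; level 4 = High, 6 = High 1440, 8 = Main,
     * while 5 and 2 are the 4:2:2 profile's Main and High levels. */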

    if ((avctx->width & 0xFFF) == 0 && (avctx->height & 0xFFF) == 0) {
        av_log(avctx, AV_LOG_ERROR, "Width / Height is invalid for MPEG2\n");
        return AVERROR(EINVAL);
    }

    if (s->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
        if ((avctx->width & 0xFFF) == 0 || (avctx->height & 0xFFF) == 0) {
            av_log(avctx, AV_LOG_ERROR, "Width or Height are not allowed to be multiples of 4096\n"
                   "add '-strict %d' if you want to use them anyway.\n", FF_COMPLIANCE_UNOFFICIAL);
            return AVERROR(EINVAL);
        }
    }

    s->drop_frame_timecode = s->drop_frame_timecode || !!(avctx->flags2 & CODEC_FLAG2_DROP_FRAME_TIMECODE);
    if (s->drop_frame_timecode)
        s->tc.flags |= AV_TIMECODE_FLAG_DROPFRAME;
    if (s->drop_frame_timecode && s->frame_rate_index != 4) {
        av_log(avctx, AV_LOG_ERROR, "Drop frame time code only allowed with 1001/30000 fps\n");
        return -1;
    }

    if (s->tc_opt_str) {
        AVRational rate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];
        int ret = av_timecode_init_from_string(&s->tc, rate, s->tc_opt_str, s);
        if (ret < 0)
            return ret;
        s->drop_frame_timecode = !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME);
        s->avctx->timecode_frame_start = s->tc.start;
    } else {
        s->avctx->timecode_frame_start = 0; // default is -1
    }
    return 0;
}

static void put_header(MpegEncContext *s, int header)
{
    avpriv_align_put_bits(&s->pb);
    put_bits(&s->pb, 16, header>>16);
    put_sbits(&s->pb, 16, header);
}
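
/* For reference, the 32-bit start codes written via put_header throughout this
 * file (values per the MPEG-1/2 specifications): PICTURE_START_CODE 0x00000100,
 * slice start codes 0x00000101..0x000001AF (SLICE_MIN_START_CODE),
 * USER_START_CODE 0x000001B2, SEQ_START_CODE 0x000001B3, EXT_START_CODE
 * 0x000001B5, GOP_START_CODE 0x000001B8. */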

/* put sequence header if needed */
static void mpeg1_encode_sequence_header(MpegEncContext *s)
{
    unsigned int vbv_buffer_size;
    unsigned int fps, v;
    int i;
    uint64_t time_code;
    float best_aspect_error= 1E10;
    float aspect_ratio= av_q2d(s->avctx->sample_aspect_ratio);
    int constraint_parameter_flag;

    if(aspect_ratio==0.0) aspect_ratio= 1.0; //pixel aspect 1:1 (VGA)

    if (s->current_picture.f.key_frame) {
        AVRational framerate = ff_mpeg12_frame_rate_tab[s->frame_rate_index];

        /* mpeg1 header repeated every gop */
        put_header(s, SEQ_START_CODE);

        put_sbits(&s->pb, 12, s->width  & 0xFFF);
        put_sbits(&s->pb, 12, s->height & 0xFFF);

        for(i=1; i<15; i++){
            float error= aspect_ratio;
            if(s->codec_id == AV_CODEC_ID_MPEG1VIDEO || i <=1)
                error-= 1.0/ff_mpeg1_aspect[i];
            else
                error-= av_q2d(ff_mpeg2_aspect[i])*s->height/s->width;

            error= FFABS(error);

            if(error < best_aspect_error){
                best_aspect_error= error;
                s->aspect_ratio_info= i;
            }
        }

        put_bits(&s->pb, 4, s->aspect_ratio_info);
        put_bits(&s->pb, 4, s->frame_rate_index);

        if(s->avctx->rc_max_rate){
            v = (s->avctx->rc_max_rate + 399) / 400;
            if (v > 0x3ffff && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
                v = 0x3ffff;
        }else{
            v= 0x3FFFF;
        }

        if(s->avctx->rc_buffer_size)
            vbv_buffer_size = s->avctx->rc_buffer_size;
        else
            /* VBV calculation: Scaled so that a VCD has the proper VBV size of 40 kilobytes */
            vbv_buffer_size = (( 20 * s->bit_rate) / (1151929 / 2)) * 8 * 1024;
        vbv_buffer_size= (vbv_buffer_size + 16383) / 16384;

        put_sbits(&s->pb, 18, v);
        put_bits(&s->pb, 1, 1); /* marker */
        put_sbits(&s->pb, 10, vbv_buffer_size);

        constraint_parameter_flag=
            s->width <= 768 && s->height <= 576 &&
            s->mb_width * s->mb_height <= 396 &&
            s->mb_width * s->mb_height * framerate.num <= framerate.den*396*25 &&
            framerate.num <= framerate.den*30 &&
            s->avctx->me_range && s->avctx->me_range < 128 &&
            vbv_buffer_size <= 20 &&
            v <= 1856000/400 &&
            s->codec_id == AV_CODEC_ID_MPEG1VIDEO;

        put_bits(&s->pb, 1, constraint_parameter_flag);

        ff_write_quant_matrix(&s->pb, s->avctx->intra_matrix);
        ff_write_quant_matrix(&s->pb, s->avctx->inter_matrix);

        if(s->codec_id == AV_CODEC_ID_MPEG2VIDEO){
            put_header(s, EXT_START_CODE);
            put_bits(&s->pb, 4, 1); //seq ext

            put_bits(&s->pb, 1, s->avctx->profile == 0); //escx: 1 for 4:2:2 profile
            put_bits(&s->pb, 3, s->avctx->profile); //profile
            put_bits(&s->pb, 4, s->avctx->level); //level

            put_bits(&s->pb, 1, s->progressive_sequence);
            put_bits(&s->pb, 2, s->chroma_format);
            put_bits(&s->pb, 2, s->width >>12);
            put_bits(&s->pb, 2, s->height>>12);
            put_bits(&s->pb, 12, v>>18); //bitrate ext
            put_bits(&s->pb, 1, 1); //marker
            put_bits(&s->pb, 8, vbv_buffer_size >>10); //vbv buffer ext
            put_bits(&s->pb, 1, s->low_delay);
            put_bits(&s->pb, 2, s->mpeg2_frame_rate_ext.num-1); // frame_rate_ext_n
            put_bits(&s->pb, 5, s->mpeg2_frame_rate_ext.den-1); // frame_rate_ext_d
        }

        put_header(s, GOP_START_CODE);
        put_bits(&s->pb, 1, s->drop_frame_timecode); /* drop frame flag */
        /* time code: we must convert from the real frame rate to a
           fake mpeg frame rate in case of low frame rate */
        fps = (framerate.num + framerate.den/2)/ framerate.den;
        time_code = s->current_picture_ptr->f.coded_picture_number + s->avctx->timecode_frame_start;

        s->gop_picture_number = s->current_picture_ptr->f.coded_picture_number;

        av_assert0(s->drop_frame_timecode == !!(s->tc.flags & AV_TIMECODE_FLAG_DROPFRAME));
        if (s->drop_frame_timecode)
            time_code = av_timecode_adjust_ntsc_framenum2(time_code, fps);

        put_bits(&s->pb, 5, (uint32_t)((time_code / (fps * 3600)) % 24));
        put_bits(&s->pb, 6, (uint32_t)((time_code / (fps *   60)) % 60));
        put_bits(&s->pb, 1, 1);
        put_bits(&s->pb, 6, (uint32_t)((time_code / fps) % 60));
        put_bits(&s->pb, 6, (uint32_t)((time_code % fps)));
        put_bits(&s->pb, 1, !!(s->flags & CODEC_FLAG_CLOSED_GOP));
        put_bits(&s->pb, 1, 0); /* broken link */
    }
}

static inline void encode_mb_skip_run(MpegEncContext *s, int run){
    while (run >= 33) {
        put_bits(&s->pb, 11, 0x008);
        run -= 33;
    }
    put_bits(&s->pb, ff_mpeg12_mbAddrIncrTable[run][1],
             ff_mpeg12_mbAddrIncrTable[run][0]);
}
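
/* The 11-bit code 0x008 written above is macroblock_escape; each occurrence
 * adds 33 to macroblock_address_increment, and the remaining run (< 33) is
 * then coded with the regular increment VLC from ff_mpeg12_mbAddrIncrTable. */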

static av_always_inline void put_qscale(MpegEncContext *s)
{
    if(s->q_scale_type){
        av_assert2(s->qscale>=1 && s->qscale <=12);
        put_bits(&s->pb, 5, inv_non_linear_qscale[s->qscale]);
    }else{
        put_bits(&s->pb, 5, s->qscale);
    }
}
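
/* q_scale_type selects MPEG-2's non-linear quantiser_scale mapping; in that
 * mode the encoder restricts its internal qscale to 1..12 (see the assert) and
 * inv_non_linear_qscale translates it to the 5-bit quantiser_scale_code. With
 * the linear mapping the qscale value is written directly. */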

void ff_mpeg1_encode_slice_header(MpegEncContext *s){
    if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO && s->height > 2800) {
        put_header(s, SLICE_MIN_START_CODE + (s->mb_y & 127));
        put_bits(&s->pb, 3, s->mb_y >> 7); /* slice_vertical_position_extension */
    } else {
        put_header(s, SLICE_MIN_START_CODE + s->mb_y);
    }
    put_qscale(s);
    put_bits(&s->pb, 1, 0); /* slice extra information */
}
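
/* Per MPEG-2, pictures taller than 2800 lines carry a 3-bit
 * slice_vertical_position_extension; the slice start code itself then only
 * encodes the low 7 bits of the macroblock row, which is what the masking in
 * the function above implements. */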

void ff_mpeg1_encode_picture_header(MpegEncContext *s, int picture_number)
{
    mpeg1_encode_sequence_header(s);

    /* mpeg1 picture header */
    put_header(s, PICTURE_START_CODE);
    /* temporal reference */

    // RAL: s->picture_number instead of s->fake_picture_number
    put_bits(&s->pb, 10, (s->picture_number -
                          s->gop_picture_number) & 0x3ff);
    put_bits(&s->pb, 3, s->pict_type);

    s->vbv_delay_ptr= s->pb.buf + put_bits_count(&s->pb)/8;
    put_bits(&s->pb, 16, 0xFFFF); /* vbv_delay */

    // RAL: Forward f_code also needed for B frames
    if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0); /* half pel coordinates */
        if(s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->f_code); /* forward_f_code */
        else
            put_bits(&s->pb, 3, 7); /* forward_f_code */
    }

    // RAL: Backward f_code necessary for B frames
    if (s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0); /* half pel coordinates */
        if(s->codec_id == AV_CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->b_code); /* backward_f_code */
        else
            put_bits(&s->pb, 3, 7); /* backward_f_code */
    }

    put_bits(&s->pb, 1, 0); /* extra bit picture */

    s->frame_pred_frame_dct = 1;
    if(s->codec_id == AV_CODEC_ID_MPEG2VIDEO){
        put_header(s, EXT_START_CODE);
        put_bits(&s->pb, 4, 8); //pic ext
        if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->f_code);
            put_bits(&s->pb, 4, s->f_code);
        }else{
            put_bits(&s->pb, 8, 255);
        }
        if (s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->b_code);
            put_bits(&s->pb, 4, s->b_code);
        }else{
            put_bits(&s->pb, 8, 255);
        }
        put_bits(&s->pb, 2, s->intra_dc_precision);

        av_assert0(s->picture_structure == PICT_FRAME);
        put_bits(&s->pb, 2, s->picture_structure);
        if (s->progressive_sequence) {
            put_bits(&s->pb, 1, 0); /* no repeat */
        } else {
            put_bits(&s->pb, 1, s->current_picture_ptr->f.top_field_first);
        }
        /* XXX: optimize the generation of this flag with entropy
           measures */
        s->frame_pred_frame_dct = s->progressive_sequence;

        put_bits(&s->pb, 1, s->frame_pred_frame_dct);
        put_bits(&s->pb, 1, s->concealment_motion_vectors);
        put_bits(&s->pb, 1, s->q_scale_type);
        put_bits(&s->pb, 1, s->intra_vlc_format);
        put_bits(&s->pb, 1, s->alternate_scan);
        put_bits(&s->pb, 1, s->repeat_first_field);
        s->progressive_frame = s->progressive_sequence;
        put_bits(&s->pb, 1, s->chroma_format == CHROMA_420 ? s->progressive_frame : 0); /* chroma_420_type */
        put_bits(&s->pb, 1, s->progressive_frame);
        put_bits(&s->pb, 1, 0); //composite_display_flag
    }
    if (s->scan_offset) {
        int i;

        put_header(s, USER_START_CODE);
        for(i=0; i<sizeof(svcd_scan_offset_placeholder); i++){
            put_bits(&s->pb, 8, svcd_scan_offset_placeholder[i]);
        }
    }

    s->mb_y=0;
    ff_mpeg1_encode_slice_header(s);
}

static inline void put_mb_modes(MpegEncContext *s, int n, int bits,
                                int has_mv, int field_motion)
{
    put_bits(&s->pb, n, bits);
    if (!s->frame_pred_frame_dct) {
        if (has_mv)
            put_bits(&s->pb, 2, 2 - field_motion); /* motion_type: frame/field */
        put_bits(&s->pb, 1, s->interlaced_dct);
    }
}
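
/* When frame_pred_frame_dct is set in the picture coding extension, every
 * macroblock implicitly uses frame prediction and frame DCT, so neither
 * frame_motion_type nor dct_type is present in the macroblock layer; that is
 * why the two extra fields in the function above are only written when the
 * flag is 0. */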

static av_always_inline void mpeg1_encode_mb_internal(MpegEncContext *s,
                                                      int16_t block[6][64],
                                                      int motion_x, int motion_y,
                                                      int mb_block_count)
{
    int i, cbp;
    const int mb_x = s->mb_x;
    const int mb_y = s->mb_y;
    const int first_mb= mb_x == s->resync_mb_x && mb_y == s->resync_mb_y;

    /* compute cbp */
    cbp = 0;
    for(i=0;i<mb_block_count;i++) {
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (mb_block_count - 1 - i);
    }

    if (cbp == 0 && !first_mb && s->mv_type == MV_TYPE_16X16 &&
        (mb_x != s->mb_width - 1 || (mb_y != s->end_mb_y - 1 && s->codec_id == AV_CODEC_ID_MPEG1VIDEO)) &&
        ((s->pict_type == AV_PICTURE_TYPE_P && (motion_x | motion_y) == 0) ||
         (s->pict_type == AV_PICTURE_TYPE_B && s->mv_dir == s->last_mv_dir && (((s->mv_dir & MV_DIR_FORWARD) ? ((s->mv[0][0][0] - s->last_mv[0][0][0])|(s->mv[0][0][1] - s->last_mv[0][0][1])) : 0) |
          ((s->mv_dir & MV_DIR_BACKWARD) ? ((s->mv[1][0][0] - s->last_mv[1][0][0])|(s->mv[1][0][1] - s->last_mv[1][0][1])) : 0)) == 0))) {
        s->mb_skip_run++;
        s->qscale -= s->dquant;
        s->skip_count++;
        s->misc_bits++;
        s->last_bits++;
        if(s->pict_type == AV_PICTURE_TYPE_P){
            s->last_mv[0][1][0]= s->last_mv[0][0][0]=
            s->last_mv[0][1][1]= s->last_mv[0][0][1]= 0;
        }
    } else {
        if(first_mb){
            av_assert0(s->mb_skip_run == 0);
            encode_mb_skip_run(s, s->mb_x);
        }else{
            encode_mb_skip_run(s, s->mb_skip_run);
        }

        if (s->pict_type == AV_PICTURE_TYPE_I) {
            if(s->dquant && cbp){
                put_mb_modes(s, 2, 1, 0, 0); /* macroblock_type : macroblock_quant = 1 */
                put_qscale(s);
            }else{
                put_mb_modes(s, 1, 1, 0, 0); /* macroblock_type : macroblock_quant = 0 */
                s->qscale -= s->dquant;
            }
            s->misc_bits+= get_bits_diff(s);
            s->i_count++;
        } else if (s->mb_intra) {
            if(s->dquant && cbp){
                put_mb_modes(s, 6, 0x01, 0, 0);
                put_qscale(s);
            }else{
                put_mb_modes(s, 5, 0x03, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits+= get_bits_diff(s);
            s->i_count++;
            memset(s->last_mv, 0, sizeof(s->last_mv));
        } else if (s->pict_type == AV_PICTURE_TYPE_P) {
            if(s->mv_type == MV_TYPE_16X16){
                if (cbp != 0) {
                    if ((motion_x|motion_y) == 0) {
                        if(s->dquant){
                            put_mb_modes(s, 5, 1, 0, 0); /* macroblock_pattern & quant */
                            put_qscale(s);
                        }else{
                            put_mb_modes(s, 2, 1, 0, 0); /* macroblock_pattern only */
                        }
                        s->misc_bits+= get_bits_diff(s);
                    } else {
                        if(s->dquant){
                            put_mb_modes(s, 5, 2, 1, 0); /* motion + cbp */
                            put_qscale(s);
                        }else{
                            put_mb_modes(s, 1, 1, 1, 0); /* motion + cbp */
                        }
                        s->misc_bits+= get_bits_diff(s);
                        mpeg1_encode_motion(s, motion_x - s->last_mv[0][0][0], s->f_code); // RAL: f_code parameter added
                        mpeg1_encode_motion(s, motion_y - s->last_mv[0][0][1], s->f_code); // RAL: f_code parameter added
                        s->mv_bits+= get_bits_diff(s);
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->misc_bits+= get_bits_diff(s);
                    mpeg1_encode_motion(s, motion_x - s->last_mv[0][0][0], s->f_code); // RAL: f_code parameter added
                    mpeg1_encode_motion(s, motion_y - s->last_mv[0][0][1], s->f_code); // RAL: f_code parameter added
                    s->qscale -= s->dquant;
                    s->mv_bits+= get_bits_diff(s);
                }
                s->last_mv[0][1][0]= s->last_mv[0][0][0]= motion_x;
                s->last_mv[0][1][1]= s->last_mv[0][0][1]= motion_y;
            }else{
                av_assert2(!s->frame_pred_frame_dct && s->mv_type == MV_TYPE_FIELD);

                if (cbp) {
                    if(s->dquant){
                        put_mb_modes(s, 5, 2, 1, 1); /* motion + cbp */
                        put_qscale(s);
                    }else{
                        put_mb_modes(s, 1, 1, 1, 1); /* motion + cbp */
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits+= get_bits_diff(s);
                for(i=0; i<2; i++){
                    put_bits(&s->pb, 1, s->field_select[0][i]);
                    mpeg1_encode_motion(s, s->mv[0][i][0] -  s->last_mv[0][i][0]    , s->f_code);
                    mpeg1_encode_motion(s, s->mv[0][i][1] - (s->last_mv[0][i][1]>>1), s->f_code);
                    s->last_mv[0][i][0]=   s->mv[0][i][0];
                    s->last_mv[0][i][1]= 2*s->mv[0][i][1];
                }
                s->mv_bits+= get_bits_diff(s);
            }
            if(cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp][1], ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp>>2][1], ff_mpeg12_mbPatTable[cbp>>2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
            s->f_count++;
        } else{
            if(s->mv_type == MV_TYPE_16X16){
                if (cbp){ // With coded block pattern
                    if (s->dquant) {
                        if(s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 0);
                        else
                            put_mb_modes(s, 8-s->mv_dir, 2, 1, 0);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5-s->mv_dir, 3, 1, 0);
                    }
                }else{ // No coded block pattern
                    put_bits(&s->pb, 5-s->mv_dir, 2);
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir&MV_DIR_FORWARD){
                    mpeg1_encode_motion(s, s->mv[0][0][0] - s->last_mv[0][0][0], s->f_code);
                    mpeg1_encode_motion(s, s->mv[0][0][1] - s->last_mv[0][0][1], s->f_code);
                    s->last_mv[0][0][0]=s->last_mv[0][1][0]= s->mv[0][0][0];
                    s->last_mv[0][0][1]=s->last_mv[0][1][1]= s->mv[0][0][1];
                    s->f_count++;
                }
                if (s->mv_dir&MV_DIR_BACKWARD){
                    mpeg1_encode_motion(s, s->mv[1][0][0] - s->last_mv[1][0][0], s->b_code);
                    mpeg1_encode_motion(s, s->mv[1][0][1] - s->last_mv[1][0][1], s->b_code);
                    s->last_mv[1][0][0]=s->last_mv[1][1][0]= s->mv[1][0][0];
                    s->last_mv[1][0][1]=s->last_mv[1][1][1]= s->mv[1][0][1];
                    s->b_count++;
                }
            }else{
                av_assert2(s->mv_type == MV_TYPE_FIELD);
                av_assert2(!s->frame_pred_frame_dct);
                if (cbp){ // With coded block pattern
                    if (s->dquant) {
                        if(s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 1);
                        else
                            put_mb_modes(s, 8-s->mv_dir, 2, 1, 1);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5-s->mv_dir, 3, 1, 1);
                    }
                }else{ // No coded block pattern
                    put_bits(&s->pb, 5-s->mv_dir, 2);
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir&MV_DIR_FORWARD){
                    for(i=0; i<2; i++){
                        put_bits(&s->pb, 1, s->field_select[0][i]);
                        mpeg1_encode_motion(s, s->mv[0][i][0] -  s->last_mv[0][i][0]    , s->f_code);
                        mpeg1_encode_motion(s, s->mv[0][i][1] - (s->last_mv[0][i][1]>>1), s->f_code);
                        s->last_mv[0][i][0]=   s->mv[0][i][0];
                        s->last_mv[0][i][1]= 2*s->mv[0][i][1];
                    }
                    s->f_count++;
                }
                if (s->mv_dir&MV_DIR_BACKWARD){
                    for(i=0; i<2; i++){
                        put_bits(&s->pb, 1, s->field_select[1][i]);
                        mpeg1_encode_motion(s, s->mv[1][i][0] -  s->last_mv[1][i][0]    , s->b_code);
                        mpeg1_encode_motion(s, s->mv[1][i][1] - (s->last_mv[1][i][1]>>1), s->b_code);
                        s->last_mv[1][i][0]=   s->mv[1][i][0];
                        s->last_mv[1][i][1]= 2*s->mv[1][i][1];
                    }
                    s->b_count++;
                }
            }
            s->mv_bits += get_bits_diff(s);
            if(cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp][1], ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp>>2][1], ff_mpeg12_mbPatTable[cbp>>2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
        }
        for(i=0;i<mb_block_count;i++) {
            if (cbp & (1 << (mb_block_count - 1 - i))) {
                mpeg1_encode_block(s, block[i], i);
            }
        }
        s->mb_skip_run = 0;
        if(s->mb_intra)
            s->i_tex_bits+= get_bits_diff(s);
        else
            s->p_tex_bits+= get_bits_diff(s);
    }
}

void ff_mpeg1_encode_mb(MpegEncContext *s, int16_t block[6][64], int motion_x, int motion_y)
{
    if (s->chroma_format == CHROMA_420) mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 6);
    else                                mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 8);
}

// RAL: Parameter added: f_or_b_code
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code)
{
    if (val == 0) {
        /* zero vector */
        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[0][1],
                 ff_mpeg12_mbMotionVectorTable[0][0]);
    } else {
        int code, sign, bits;
        int bit_size = f_or_b_code - 1;
        int range = 1 << bit_size;
        /* modulo encoding */
        val = sign_extend(val, 5 + bit_size);

        if (val >= 0) {
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 0;
        } else {
            val = -val;
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 1;
        }

        av_assert2(code > 0 && code <= 16);

        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[code][1],
                 ff_mpeg12_mbMotionVectorTable[code][0]);

        put_bits(&s->pb, 1, sign);
        if (bit_size > 0) {
            put_bits(&s->pb, bit_size, bits);
        }
    }
}
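
/* A worked example of the encoding above (illustrative only): with
 * f_or_b_code = 2 we get bit_size = 1 and range = 2. A differential of +5
 * becomes val = 4 after the decrement, so code = (4 >> 1) + 1 = 3,
 * bits = 4 & 1 = 0, sign = 0; the stream carries the VLC for motion_code 3,
 * the sign bit and one residual bit, and the decoder rebuilds
 * ((3 - 1) << 1) + 0 + 1 = 5. */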

void ff_mpeg1_encode_init(MpegEncContext *s)
{
    static int done=0;

    ff_mpeg12_common_init(s);

    if(!done){
        int f_code;
        int mv;
        int i;

        done=1;
        ff_init_rl(&ff_rl_mpeg1, ff_mpeg12_static_rl_table_store[0]);
        ff_init_rl(&ff_rl_mpeg2, ff_mpeg12_static_rl_table_store[1]);

        for(i=0; i<64; i++)
        {
            mpeg1_max_level[0][i]= ff_rl_mpeg1.max_level[0][i];
            mpeg1_index_run[0][i]= ff_rl_mpeg1.index_run[0][i];
        }

        init_uni_ac_vlc(&ff_rl_mpeg1, uni_mpeg1_ac_vlc_len);
        if(s->intra_vlc_format)
            init_uni_ac_vlc(&ff_rl_mpeg2, uni_mpeg2_ac_vlc_len);

        /* build unified dc encoding tables */
        for(i=-255; i<256; i++)
        {
            int adiff, index;
            int bits, code;
            int diff=i;

            adiff = FFABS(diff);
            if(diff<0) diff--;
            index = av_log2(2*adiff);

            bits= ff_mpeg12_vlc_dc_lum_bits[index] + index;
            code= (ff_mpeg12_vlc_dc_lum_code[index]<<index) + (diff & ((1 << index) - 1));
            mpeg1_lum_dc_uni[i+255]= bits + (code<<8);

            bits= ff_mpeg12_vlc_dc_chroma_bits[index] + index;
            code= (ff_mpeg12_vlc_dc_chroma_code[index]<<index) + (diff & ((1 << index) - 1));
            mpeg1_chr_dc_uni[i+255]= bits + (code<<8);
        }

        for(f_code=1; f_code<=MAX_FCODE; f_code++){
            for(mv=-MAX_MV; mv<=MAX_MV; mv++){
                int len;

                if(mv==0) len= ff_mpeg12_mbMotionVectorTable[0][1];
                else{
                    int val, bit_size, code;

                    bit_size = f_code - 1;

                    val=mv;
                    if (val < 0)
                        val = -val;
                    val--;
                    code = (val >> bit_size) + 1;
                    if(code<17){
                        len= ff_mpeg12_mbMotionVectorTable[code][1] + 1 + bit_size;
                    }else{
                        len= ff_mpeg12_mbMotionVectorTable[16][1] + 2 + bit_size;
                    }
                }

                mv_penalty[f_code][mv+MAX_MV]= len;
            }
        }

        for(f_code=MAX_FCODE; f_code>0; f_code--){
            for(mv=-(8<<f_code); mv<(8<<f_code); mv++){
                fcode_tab[mv+MAX_MV]= f_code;
            }
        }
    }
    s->me.mv_penalty= mv_penalty;
    s->fcode_tab= fcode_tab;
    if(s->codec_id == AV_CODEC_ID_MPEG1VIDEO){
        s->min_qcoeff=-255;
        s->max_qcoeff= 255;
    }else{
        s->min_qcoeff=-2047;
        s->max_qcoeff= 2047;
    }
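
    /* These limits appear to match what the escape codes can represent:
     * MPEG-1 escapes carry the level in 8 bits, or in a 16-bit "double escape"
     * for magnitudes up to 255, while MPEG-2 uses a single signed 12-bit level
     * field, hence the +/-2047 range (see mpeg1_encode_block). */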
    if (s->intra_vlc_format) {
        s->intra_ac_vlc_length=
        s->intra_ac_vlc_last_length= uni_mpeg2_ac_vlc_len;
    } else {
        s->intra_ac_vlc_length=
        s->intra_ac_vlc_last_length= uni_mpeg1_ac_vlc_len;
    }
    s->inter_ac_vlc_length=
    s->inter_ac_vlc_last_length= uni_mpeg1_ac_vlc_len;
}

static inline void encode_dc(MpegEncContext *s, int diff, int component)
{
    if(((unsigned) (diff+255)) >= 511){
        int index;

        if(diff<0){
            index= av_log2_16bit(-2*diff);
            diff--;
        }else{
            index= av_log2_16bit(2*diff);
        }
        if (component == 0) {
            put_bits(
                &s->pb,
                ff_mpeg12_vlc_dc_lum_bits[index] + index,
                (ff_mpeg12_vlc_dc_lum_code[index]<<index) + (diff & ((1 << index) - 1)));
        }else{
            put_bits(
                &s->pb,
                ff_mpeg12_vlc_dc_chroma_bits[index] + index,
                (ff_mpeg12_vlc_dc_chroma_code[index]<<index) + (diff & ((1 << index) - 1)));
        }
    }else{
        if (component == 0) {
            put_bits(
                &s->pb,
                mpeg1_lum_dc_uni[diff+255]&0xFF,
                mpeg1_lum_dc_uni[diff+255]>>8);
        } else {
            put_bits(
                &s->pb,
                mpeg1_chr_dc_uni[diff+255]&0xFF,
                mpeg1_chr_dc_uni[diff+255]>>8);
        }
    }
}
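
/* Sketch of the intra DC coding implemented above: the differential is sent as
 * a dct_dc_size VLC (index = number of bits in |diff|, i.e. av_log2(2*|diff|))
 * followed by index additional bits of the offset; the mpeg1_*_dc_uni tables
 * simply cache the pre-assembled size+bits pairs for |diff| <= 255. */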

static void mpeg1_encode_block(MpegEncContext *s,
                               int16_t *block,
                               int n)
{
    int alevel, level, last_non_zero, dc, diff, i, j, run, last_index, sign;
    int code, component;
    const uint16_t (*table_vlc)[2] = ff_rl_mpeg1.table_vlc;

    last_index = s->block_last_index[n];

    /* DC coef */
    if (s->mb_intra) {
        component = (n <= 3 ? 0 : (n&1) + 1);
        dc = block[0]; /* overflow is impossible */
        diff = dc - s->last_dc[component];
        encode_dc(s, diff, component);
        s->last_dc[component] = dc;
        i = 1;
        if (s->intra_vlc_format)
            table_vlc = ff_rl_mpeg2.table_vlc;
    } else {
        /* encode the first coefficient: needs to be done here because
           it is handled slightly differently */
        level = block[0];
        if (abs(level) == 1) {
            code = ((uint32_t)level >> 31); /* the sign bit */
            put_bits(&s->pb, 2, code | 0x02);
            i = 1;
        } else {
            i = 0;
            last_non_zero = -1;
            goto next_coef;
        }
    }

    /* now quantize & encode AC coefficients */
    last_non_zero = i - 1;
    for(;i<=last_index;i++) {
        j = s->intra_scantable.permutated[i];
        level = block[j];
next_coef:
        /* encode using VLC */
        if (level != 0) {
            run = i - last_non_zero - 1;

            alevel= level;
            MASK_ABS(sign, alevel);
            sign&=1;

            if (alevel <= mpeg1_max_level[0][run]){
                code= mpeg1_index_run[0][run] + alevel - 1;
                /* store the vlc & sign at once */
                put_bits(&s->pb, table_vlc[code][1]+1, (table_vlc[code][0]<<1) + sign);
            } else {
                /* escape seems to be pretty rare <5% so I do not optimize it */
                put_bits(&s->pb, table_vlc[111][1], table_vlc[111][0]);
                /* escape: only clip in this case */
                put_bits(&s->pb, 6, run);
                if(s->codec_id == AV_CODEC_ID_MPEG1VIDEO){
                    if (alevel < 128) {
                        put_sbits(&s->pb, 8, level);
                    } else {
                        if (level < 0) {
                            put_bits(&s->pb, 16, 0x8001 + level + 255);
                        } else {
                            put_sbits(&s->pb, 16, level);
                        }
                    }
                }else{
                    put_sbits(&s->pb, 12, level);
                }
            }
            last_non_zero = i;
        }
    }
    /* end of block */
    put_bits(&s->pb, table_vlc[112][1], table_vlc[112][0]);
}

#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
#define COMMON_OPTS\
    { "gop_timecode", "MPEG GOP Timecode in hh:mm:ss[:;.]ff format", OFFSET(tc_opt_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, VE },\
    { "intra_vlc", "Use MPEG-2 intra VLC table.", OFFSET(intra_vlc_format), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },\
    { "drop_frame_timecode", "Timecode is in drop frame format.", OFFSET(drop_frame_timecode), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE}, \
    { "scan_offset", "Reserve space for SVCD scan offset user data.", OFFSET(scan_offset), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },

static const AVOption mpeg1_options[] = {
    COMMON_OPTS
    FF_MPV_COMMON_OPTS
    { NULL },
};

static const AVOption mpeg2_options[] = {
    COMMON_OPTS
    { "non_linear_quant", "Use nonlinear quantizer.", OFFSET(q_scale_type), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "alternate_scan", "Enable alternate scantable.", OFFSET(alternate_scan), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    FF_MPV_COMMON_OPTS
    { NULL },
};

#define mpeg12_class(x)\
static const AVClass mpeg## x ##_class = {\
    .class_name = "mpeg" #x "video encoder",\
    .item_name  = av_default_item_name,\
    .option     = mpeg## x ##_options,\
    .version    = LIBAVUTIL_VERSION_INT,\
};

mpeg12_class(1)
mpeg12_class(2)

AVCodec ff_mpeg1video_encoder = {
    .name                 = "mpeg1video",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG1VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg12_frame_rate_tab + 1,
    .pix_fmts             = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P,
                                                          AV_PIX_FMT_NONE },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-1 video"),
    .priv_class           = &mpeg1_class,
};

AVCodec ff_mpeg2video_encoder = {
    .name                 = "mpeg2video",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG2VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode2              = ff_MPV_encode_picture,
    .close                = ff_MPV_encode_end,
    .supported_framerates = ff_mpeg2_frame_rate_tab,
    .pix_fmts             = (const enum AVPixelFormat[]){
                                AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE
                            },
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-2 video"),
    .priv_class           = &mpeg2_class,
};