/*
 * MPEG1/2 encoder
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MPEG1/2 encoder
 */

#include "avcodec.h"
#include "dsputil.h"
#include "mathops.h"
#include "mpegvideo.h"

#include "mpeg12.h"
#include "mpeg12data.h"
#include "bytestream.h"
#include "timecode.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/avassert.h"


static const uint8_t inv_non_linear_qscale[13] = {
    0, 2, 4, 6, 8,
    9, 10, 11, 12, 13, 14, 15, 16,
};
static const uint8_t svcd_scan_offset_placeholder[14] = {
    0x10, 0x0E,
    0x00, 0x80, 0x81,
    0x00, 0x80, 0x81,
    0xff, 0xff, 0xff,
    0xff, 0xff, 0xff,
};

static void mpeg1_encode_block(MpegEncContext *s,
                               DCTELEM *block,
                               int component);
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code); // RAL: f_code parameter added

static uint8_t mv_penalty[MAX_FCODE+1][MAX_MV*2+1];

static uint8_t fcode_tab[MAX_MV*2+1];

static uint8_t uni_mpeg1_ac_vlc_len [64*64*2];
static uint8_t uni_mpeg2_ac_vlc_len [64*64*2];

/* simple include-everything tables for DC: the low byte holds the bit count, the upper bytes hold the code */
static uint32_t mpeg1_lum_dc_uni[512];
static uint32_t mpeg1_chr_dc_uni[512];

static uint8_t mpeg1_index_run[2][64];
static int8_t  mpeg1_max_level[2][64];
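
/* Precompute the coded length (VLC + sign bit, or escape) of every (run, level)
 * pair; the rate-distortion macroblock decisions use these length tables to
 * estimate AC coefficient cost without actually writing bits. */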
static void init_uni_ac_vlc(RLTable *rl, uint8_t *uni_ac_vlc_len){
    int i;

    for(i=0; i<128; i++){
        int level= i-64;
        int run;
        for(run=0; run<64; run++){
            int len, bits, code;

            int alevel= FFABS(level);
            int sign= (level>>31)&1;

            if (alevel > rl->max_level[0][run])
                code= 111; /*rl->n*/
            else
                code= rl->index_run[0][run] + alevel - 1;

            if (code < 111 /* rl->n */) {
                /* store the vlc & sign at once */
                len=  rl->table_vlc[code][1]+1;
                bits= (rl->table_vlc[code][0]<<1) + sign;
            } else {
                len=  rl->table_vlc[111/*rl->n*/][1]+6;
                bits= rl->table_vlc[111/*rl->n*/][0]<<6;

                bits|= run;
                if (alevel < 128) {
                    bits<<=8; len+=8;
                    bits|= level & 0xff;
                } else {
                    bits<<=16; len+=16;
                    bits|= level & 0xff;

                    if (level < 0) {
                        bits|= 0x8001 + level + 255;
                    } else {
                        bits|= level & 0xffff;
                    }
                }
            }
            uni_ac_vlc_len [UNI_AC_ENC_INDEX(run, i)]= len;
        }
    }
}
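
/* Search ff_frame_rate_tab for an entry that matches the requested time base
 * exactly; returns 0 on an exact match, -1 if no MPEG-1/2 frame rate code fits. */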
static int find_frame_rate_index(MpegEncContext *s){
    int i;
    int64_t dmin= INT64_MAX;
    int64_t d;

    for(i=1;i<14;i++) {
        int64_t n0= 1001LL/ff_frame_rate_tab[i].den*ff_frame_rate_tab[i].num*s->avctx->time_base.num;
        int64_t n1= 1001LL*s->avctx->time_base.den;
        if(s->avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL && i>=9) break;

        d = FFABS(n0 - n1);
        if(d < dmin){
            dmin=d;
            s->frame_rate_index= i;
        }
    }
    if(dmin)
        return -1;
    else
        return 0;
}
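
/* Encoder init: run the generic MPV init, then derive the frame rate code,
 * default profile/level and timecode state needed for the MPEG-1/2 headers. */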
static av_cold int encode_init(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;

    if(MPV_encode_init(avctx) < 0)
        return -1;

#if FF_API_MPEGVIDEO_GLOBAL_OPTS
    if (avctx->flags2 & CODEC_FLAG2_DROP_FRAME_TIMECODE)
        s->drop_frame_timecode = 1;
    if (avctx->flags & CODEC_FLAG_SVCD_SCAN_OFFSET)
        s->scan_offset = 1;
#endif

    if(find_frame_rate_index(s) < 0){
        if(s->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL){
            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n", avctx->time_base.den, avctx->time_base.num);
            return -1;
        }else{
            av_log(avctx, AV_LOG_INFO, "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n", avctx->time_base.den, avctx->time_base.num);
        }
    }

    if(avctx->profile == FF_PROFILE_UNKNOWN){
        if(avctx->level != FF_LEVEL_UNKNOWN){
            av_log(avctx, AV_LOG_ERROR, "Set profile and level\n");
            return -1;
        }
        avctx->profile = s->chroma_format == CHROMA_420 ? 4 : 0; /* Main or 4:2:2 */
    }

    if(avctx->level == FF_LEVEL_UNKNOWN){
        if(avctx->profile == 0){ /* 4:2:2 */
            if(avctx->width <= 720 && avctx->height <= 608) avctx->level = 5; /* Main */
            else                                            avctx->level = 2; /* High */
        }else{
            if(avctx->profile != 1 && s->chroma_format != CHROMA_420){
                av_log(avctx, AV_LOG_ERROR, "Only High(1) and 4:2:2(0) profiles support 4:2:2 color sampling\n");
                return -1;
            }
            if(avctx->width <= 720 && avctx->height <= 576) avctx->level = 8; /* Main */
            else if(avctx->width <= 1440)                   avctx->level = 6; /* High 1440 */
            else                                            avctx->level = 4; /* High */
        }
    }

    s->drop_frame_timecode = s->tc.drop = s->drop_frame_timecode || !!(avctx->flags2 & CODEC_FLAG2_DROP_FRAME_TIMECODE);
    if (s->drop_frame_timecode && s->frame_rate_index != 4) {
        av_log(avctx, AV_LOG_ERROR, "Drop frame time code only allowed with 1001/30000 fps\n");
        return -1;
    }

    if (s->tc.str) {
        s->tc.rate = ff_frame_rate_tab[s->frame_rate_index];
        if (ff_init_smtpe_timecode(s, &s->tc) < 0)
            return -1;
        s->avctx->timecode_frame_start = s->tc.start;
    }
    return 0;
}
static void put_header(MpegEncContext *s, int header)
{
    align_put_bits(&s->pb);
    put_bits(&s->pb, 16, header>>16);
    put_sbits(&s->pb, 16, header);
}
/* put sequence header if needed */
static void mpeg1_encode_sequence_header(MpegEncContext *s)
{
    unsigned int vbv_buffer_size;
    unsigned int fps, v;
    int i;
    uint64_t time_code;
    float best_aspect_error= 1E10;
    float aspect_ratio= av_q2d(s->avctx->sample_aspect_ratio);
    int constraint_parameter_flag;

    if(aspect_ratio==0.0) aspect_ratio= 1.0; //pixel aspect 1:1 (VGA)

    if (s->current_picture.f.key_frame) {
        AVRational framerate= ff_frame_rate_tab[s->frame_rate_index];

        /* mpeg1 header repeated every gop */
        put_header(s, SEQ_START_CODE);

        put_sbits(&s->pb, 12, s->width );
        put_sbits(&s->pb, 12, s->height);

        for(i=1; i<15; i++){
            float error= aspect_ratio;
            if(s->codec_id == CODEC_ID_MPEG1VIDEO || i <=1)
                error-= 1.0/ff_mpeg1_aspect[i];
            else
                error-= av_q2d(ff_mpeg2_aspect[i])*s->height/s->width;

            error= FFABS(error);

            if(error < best_aspect_error){
                best_aspect_error= error;
                s->aspect_ratio_info= i;
            }
        }

        put_bits(&s->pb, 4, s->aspect_ratio_info);
        put_bits(&s->pb, 4, s->frame_rate_index);

        if(s->avctx->rc_max_rate){
            v = (s->avctx->rc_max_rate + 399) / 400;
            if (v > 0x3ffff && s->codec_id == CODEC_ID_MPEG1VIDEO)
                v = 0x3ffff;
        }else{
            v= 0x3FFFF;
        }

        if(s->avctx->rc_buffer_size)
            vbv_buffer_size = s->avctx->rc_buffer_size;
        else
            /* VBV calculation: scaled so that a VCD has the proper VBV size of 40 kilobytes */
            vbv_buffer_size = (( 20 * s->bit_rate) / (1151929 / 2)) * 8 * 1024;
        vbv_buffer_size= (vbv_buffer_size + 16383) / 16384;

        put_sbits(&s->pb, 18, v);
        put_bits(&s->pb, 1, 1); /* marker */
        put_sbits(&s->pb, 10, vbv_buffer_size);

        constraint_parameter_flag=
            s->width <= 768 && s->height <= 576 &&
            s->mb_width * s->mb_height <= 396 &&
            s->mb_width * s->mb_height * framerate.num <= framerate.den*396*25 &&
            framerate.num <= framerate.den*30 &&
            s->avctx->me_range && s->avctx->me_range < 128 &&
            vbv_buffer_size <= 20 &&
            v <= 1856000/400 &&
            s->codec_id == CODEC_ID_MPEG1VIDEO;

        put_bits(&s->pb, 1, constraint_parameter_flag);

        ff_write_quant_matrix(&s->pb, s->avctx->intra_matrix);
        ff_write_quant_matrix(&s->pb, s->avctx->inter_matrix);

        if(s->codec_id == CODEC_ID_MPEG2VIDEO){
            put_header(s, EXT_START_CODE);
            put_bits(&s->pb, 4, 1); //seq ext

            put_bits(&s->pb, 1, s->avctx->profile == 0); // escape bit, 1 for 4:2:2 profile
            put_bits(&s->pb, 3, s->avctx->profile); //profile
            put_bits(&s->pb, 4, s->avctx->level); //level

            put_bits(&s->pb, 1, s->progressive_sequence);
            put_bits(&s->pb, 2, s->chroma_format);
            put_bits(&s->pb, 2, s->width >>12);
            put_bits(&s->pb, 2, s->height>>12);
            put_bits(&s->pb, 12, v>>18); //bitrate ext
            put_bits(&s->pb, 1, 1); //marker
            put_bits(&s->pb, 8, vbv_buffer_size >>10); //vbv buffer ext
            put_bits(&s->pb, 1, s->low_delay);
            put_bits(&s->pb, 2, 0); // frame_rate_ext_n
            put_bits(&s->pb, 5, 0); // frame_rate_ext_d
        }

        put_header(s, GOP_START_CODE);
        put_bits(&s->pb, 1, s->drop_frame_timecode); /* drop frame flag */
        /* time code: we must convert from the real frame rate to a
           fake mpeg frame rate in case of low frame rate */
        fps = (framerate.num + framerate.den/2)/ framerate.den;
        time_code = s->current_picture_ptr->f.coded_picture_number + s->avctx->timecode_frame_start;

        s->gop_picture_number = s->current_picture_ptr->f.coded_picture_number;
        av_assert0(s->drop_frame_timecode == s->tc.drop);
        if (s->tc.drop)
            time_code = ff_framenum_to_drop_timecode(time_code);
        put_bits(&s->pb, 5, (uint32_t)((time_code / (fps * 3600)) % 24));
        put_bits(&s->pb, 6, (uint32_t)((time_code / (fps * 60)) % 60));
        put_bits(&s->pb, 1, 1);
        put_bits(&s->pb, 6, (uint32_t)((time_code / fps) % 60));
        put_bits(&s->pb, 6, (uint32_t)((time_code % fps)));
        put_bits(&s->pb, 1, !!(s->flags & CODEC_FLAG_CLOSED_GOP));
        put_bits(&s->pb, 1, 0); /* broken link */
    }
}
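
/* Write macroblock_address_increment; runs of 33 or more skipped macroblocks
 * are reduced with the 11-bit escape code (0x008) before the final VLC. */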
static inline void encode_mb_skip_run(MpegEncContext *s, int run){
    while (run >= 33) {
        put_bits(&s->pb, 11, 0x008);
        run -= 33;
    }
    put_bits(&s->pb, ff_mpeg12_mbAddrIncrTable[run][1],
             ff_mpeg12_mbAddrIncrTable[run][0]);
}

static av_always_inline void put_qscale(MpegEncContext *s)
{
    if(s->q_scale_type){
        assert(s->qscale>=1 && s->qscale <=12);
        put_bits(&s->pb, 5, inv_non_linear_qscale[s->qscale]);
    }else{
        put_bits(&s->pb, 5, s->qscale);
    }
}

void ff_mpeg1_encode_slice_header(MpegEncContext *s){
    if (s->height > 2800) {
        put_header(s, SLICE_MIN_START_CODE + (s->mb_y & 127));
        put_bits(&s->pb, 3, s->mb_y >> 7); /* slice_vertical_position_extension */
    } else {
        put_header(s, SLICE_MIN_START_CODE + s->mb_y);
    }
    put_qscale(s);
    put_bits(&s->pb, 1, 0); /* slice extra information */
}
void mpeg1_encode_picture_header(MpegEncContext *s, int picture_number)
{
    mpeg1_encode_sequence_header(s);

    /* mpeg1 picture header */
    put_header(s, PICTURE_START_CODE);
    /* temporal reference */

    // RAL: s->picture_number instead of s->fake_picture_number
    put_bits(&s->pb, 10, (s->picture_number -
                          s->gop_picture_number) & 0x3ff);
    put_bits(&s->pb, 3, s->pict_type);

    s->vbv_delay_ptr= s->pb.buf + put_bits_count(&s->pb)/8;
    put_bits(&s->pb, 16, 0xFFFF); /* vbv_delay */

    // RAL: Forward f_code also needed for B frames
    if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0); /* half pel coordinates */
        if(s->codec_id == CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->f_code); /* forward_f_code */
        else
            put_bits(&s->pb, 3, 7); /* forward_f_code */
    }

    // RAL: Backward f_code necessary for B frames
    if (s->pict_type == AV_PICTURE_TYPE_B) {
        put_bits(&s->pb, 1, 0); /* half pel coordinates */
        if(s->codec_id == CODEC_ID_MPEG1VIDEO)
            put_bits(&s->pb, 3, s->b_code); /* backward_f_code */
        else
            put_bits(&s->pb, 3, 7); /* backward_f_code */
    }

    put_bits(&s->pb, 1, 0); /* extra bit picture */

    s->frame_pred_frame_dct = 1;
    if(s->codec_id == CODEC_ID_MPEG2VIDEO){
        put_header(s, EXT_START_CODE);
        put_bits(&s->pb, 4, 8); //pic ext
        if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->f_code);
            put_bits(&s->pb, 4, s->f_code);
        }else{
            put_bits(&s->pb, 8, 255);
        }
        if (s->pict_type == AV_PICTURE_TYPE_B) {
            put_bits(&s->pb, 4, s->b_code);
            put_bits(&s->pb, 4, s->b_code);
        }else{
            put_bits(&s->pb, 8, 255);
        }
        put_bits(&s->pb, 2, s->intra_dc_precision);

        assert(s->picture_structure == PICT_FRAME);
        put_bits(&s->pb, 2, s->picture_structure);
        if (s->progressive_sequence) {
            put_bits(&s->pb, 1, 0); /* no repeat */
        } else {
            put_bits(&s->pb, 1, s->current_picture_ptr->f.top_field_first);
        }
        /* XXX: optimize the generation of this flag with entropy
           measures */
        s->frame_pred_frame_dct = s->progressive_sequence;

        put_bits(&s->pb, 1, s->frame_pred_frame_dct);
        put_bits(&s->pb, 1, s->concealment_motion_vectors);
        put_bits(&s->pb, 1, s->q_scale_type);
        put_bits(&s->pb, 1, s->intra_vlc_format);
        put_bits(&s->pb, 1, s->alternate_scan);
        put_bits(&s->pb, 1, s->repeat_first_field);
        s->progressive_frame = s->progressive_sequence;
        put_bits(&s->pb, 1, s->chroma_format == CHROMA_420 ? s->progressive_frame : 0); /* chroma_420_type */
        put_bits(&s->pb, 1, s->progressive_frame);
        put_bits(&s->pb, 1, 0); //composite_display_flag
    }
    if (s->scan_offset) {
        int i;

        put_header(s, USER_START_CODE);
        for(i=0; i<sizeof(svcd_scan_offset_placeholder); i++){
            put_bits(&s->pb, 8, svcd_scan_offset_placeholder[i]);
        }
    }

    s->mb_y=0;
    ff_mpeg1_encode_slice_header(s);
}
static inline void put_mb_modes(MpegEncContext *s, int n, int bits,
                                int has_mv, int field_motion)
{
    put_bits(&s->pb, n, bits);
    if (!s->frame_pred_frame_dct) {
        if (has_mv)
            put_bits(&s->pb, 2, 2 - field_motion); /* motion_type: frame/field */
        put_bits(&s->pb, 1, s->interlaced_dct);
    }
}
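
/* Encode one macroblock: either extend the current skip run, or write the MB
 * header (type, optional quantizer), motion vectors, coded block pattern and
 * the coefficients of each coded block. */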
static av_always_inline void mpeg1_encode_mb_internal(MpegEncContext *s,
                                                      DCTELEM block[6][64],
                                                      int motion_x, int motion_y,
                                                      int mb_block_count)
{
    int i, cbp;
    const int mb_x = s->mb_x;
    const int mb_y = s->mb_y;
    const int first_mb= mb_x == s->resync_mb_x && mb_y == s->resync_mb_y;

    /* compute cbp */
    cbp = 0;
    for(i=0;i<mb_block_count;i++) {
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (mb_block_count - 1 - i);
    }

    if (cbp == 0 && !first_mb && s->mv_type == MV_TYPE_16X16 &&
        (mb_x != s->mb_width - 1 || (mb_y != s->mb_height - 1 && s->codec_id == CODEC_ID_MPEG1VIDEO)) &&
        ((s->pict_type == AV_PICTURE_TYPE_P && (motion_x | motion_y) == 0) ||
         (s->pict_type == AV_PICTURE_TYPE_B && s->mv_dir == s->last_mv_dir &&
          (((s->mv_dir & MV_DIR_FORWARD)  ? ((s->mv[0][0][0] - s->last_mv[0][0][0])|(s->mv[0][0][1] - s->last_mv[0][0][1])) : 0) |
           ((s->mv_dir & MV_DIR_BACKWARD) ? ((s->mv[1][0][0] - s->last_mv[1][0][0])|(s->mv[1][0][1] - s->last_mv[1][0][1])) : 0)) == 0))) {
        s->mb_skip_run++;
        s->qscale -= s->dquant;
        s->skip_count++;
        s->misc_bits++;
        s->last_bits++;
        if(s->pict_type == AV_PICTURE_TYPE_P){
            s->last_mv[0][1][0]= s->last_mv[0][0][0]=
            s->last_mv[0][1][1]= s->last_mv[0][0][1]= 0;
        }
    } else {
        if(first_mb){
            assert(s->mb_skip_run == 0);
            encode_mb_skip_run(s, s->mb_x);
        }else{
            encode_mb_skip_run(s, s->mb_skip_run);
        }

        if (s->pict_type == AV_PICTURE_TYPE_I) {
            if(s->dquant && cbp){
                put_mb_modes(s, 2, 1, 0, 0); /* macroblock_type : macroblock_quant = 1 */
                put_qscale(s);
            }else{
                put_mb_modes(s, 1, 1, 0, 0); /* macroblock_type : macroblock_quant = 0 */
                s->qscale -= s->dquant;
            }
            s->misc_bits+= get_bits_diff(s);
            s->i_count++;
        } else if (s->mb_intra) {
            if(s->dquant && cbp){
                put_mb_modes(s, 6, 0x01, 0, 0);
                put_qscale(s);
            }else{
                put_mb_modes(s, 5, 0x03, 0, 0);
                s->qscale -= s->dquant;
            }
            s->misc_bits+= get_bits_diff(s);
            s->i_count++;
            memset(s->last_mv, 0, sizeof(s->last_mv));
        } else if (s->pict_type == AV_PICTURE_TYPE_P) {
            if(s->mv_type == MV_TYPE_16X16){
                if (cbp != 0) {
                    if ((motion_x|motion_y) == 0) {
                        if(s->dquant){
                            put_mb_modes(s, 5, 1, 0, 0); /* macroblock_pattern & quant */
                            put_qscale(s);
                        }else{
                            put_mb_modes(s, 2, 1, 0, 0); /* macroblock_pattern only */
                        }
                        s->misc_bits+= get_bits_diff(s);
                    } else {
                        if(s->dquant){
                            put_mb_modes(s, 5, 2, 1, 0); /* motion + cbp */
                            put_qscale(s);
                        }else{
                            put_mb_modes(s, 1, 1, 1, 0); /* motion + cbp */
                        }
                        s->misc_bits+= get_bits_diff(s);
                        mpeg1_encode_motion(s, motion_x - s->last_mv[0][0][0], s->f_code); // RAL: f_code parameter added
                        mpeg1_encode_motion(s, motion_y - s->last_mv[0][0][1], s->f_code); // RAL: f_code parameter added
                        s->mv_bits+= get_bits_diff(s);
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->misc_bits+= get_bits_diff(s);
                    mpeg1_encode_motion(s, motion_x - s->last_mv[0][0][0], s->f_code); // RAL: f_code parameter added
                    mpeg1_encode_motion(s, motion_y - s->last_mv[0][0][1], s->f_code); // RAL: f_code parameter added
                    s->qscale -= s->dquant;
                    s->mv_bits+= get_bits_diff(s);
                }
                s->last_mv[0][1][0]= s->last_mv[0][0][0]= motion_x;
                s->last_mv[0][1][1]= s->last_mv[0][0][1]= motion_y;
            }else{
                assert(!s->frame_pred_frame_dct && s->mv_type == MV_TYPE_FIELD);

                if (cbp) {
                    if(s->dquant){
                        put_mb_modes(s, 5, 2, 1, 1); /* motion + cbp */
                        put_qscale(s);
                    }else{
                        put_mb_modes(s, 1, 1, 1, 1); /* motion + cbp */
                    }
                } else {
                    put_bits(&s->pb, 3, 1); /* motion only */
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits+= get_bits_diff(s);
                for(i=0; i<2; i++){
                    put_bits(&s->pb, 1, s->field_select[0][i]);
                    mpeg1_encode_motion(s, s->mv[0][i][0] -  s->last_mv[0][i][0]    , s->f_code);
                    mpeg1_encode_motion(s, s->mv[0][i][1] - (s->last_mv[0][i][1]>>1), s->f_code);
                    s->last_mv[0][i][0]=   s->mv[0][i][0];
                    s->last_mv[0][i][1]= 2*s->mv[0][i][1];
                }
                s->mv_bits+= get_bits_diff(s);
            }
            if(cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp][1], ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp>>2][1], ff_mpeg12_mbPatTable[cbp>>2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
            s->f_count++;
        } else{
            if(s->mv_type == MV_TYPE_16X16){
                if (cbp){ // With coded block pattern
                    if (s->dquant) {
                        if(s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 0);
                        else
                            put_mb_modes(s, 8-s->mv_dir, 2, 1, 0);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5-s->mv_dir, 3, 1, 0);
                    }
                }else{ // No coded block pattern
                    put_bits(&s->pb, 5-s->mv_dir, 2);
                    if (!s->frame_pred_frame_dct)
                        put_bits(&s->pb, 2, 2); /* motion_type: frame */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir&MV_DIR_FORWARD){
                    mpeg1_encode_motion(s, s->mv[0][0][0] - s->last_mv[0][0][0], s->f_code);
                    mpeg1_encode_motion(s, s->mv[0][0][1] - s->last_mv[0][0][1], s->f_code);
                    s->last_mv[0][0][0]=s->last_mv[0][1][0]= s->mv[0][0][0];
                    s->last_mv[0][0][1]=s->last_mv[0][1][1]= s->mv[0][0][1];
                    s->f_count++;
                }
                if (s->mv_dir&MV_DIR_BACKWARD){
                    mpeg1_encode_motion(s, s->mv[1][0][0] - s->last_mv[1][0][0], s->b_code);
                    mpeg1_encode_motion(s, s->mv[1][0][1] - s->last_mv[1][0][1], s->b_code);
                    s->last_mv[1][0][0]=s->last_mv[1][1][0]= s->mv[1][0][0];
                    s->last_mv[1][0][1]=s->last_mv[1][1][1]= s->mv[1][0][1];
                    s->b_count++;
                }
            }else{
                assert(s->mv_type == MV_TYPE_FIELD);
                assert(!s->frame_pred_frame_dct);
                if (cbp){ // With coded block pattern
                    if (s->dquant) {
                        if(s->mv_dir == MV_DIR_FORWARD)
                            put_mb_modes(s, 6, 3, 1, 1);
                        else
                            put_mb_modes(s, 8-s->mv_dir, 2, 1, 1);
                        put_qscale(s);
                    } else {
                        put_mb_modes(s, 5-s->mv_dir, 3, 1, 1);
                    }
                }else{ // No coded block pattern
                    put_bits(&s->pb, 5-s->mv_dir, 2);
                    put_bits(&s->pb, 2, 1); /* motion_type: field */
                    s->qscale -= s->dquant;
                }
                s->misc_bits += get_bits_diff(s);
                if (s->mv_dir&MV_DIR_FORWARD){
                    for(i=0; i<2; i++){
                        put_bits(&s->pb, 1, s->field_select[0][i]);
                        mpeg1_encode_motion(s, s->mv[0][i][0] -  s->last_mv[0][i][0]    , s->f_code);
                        mpeg1_encode_motion(s, s->mv[0][i][1] - (s->last_mv[0][i][1]>>1), s->f_code);
                        s->last_mv[0][i][0]=   s->mv[0][i][0];
                        s->last_mv[0][i][1]= 2*s->mv[0][i][1];
                    }
                    s->f_count++;
                }
                if (s->mv_dir&MV_DIR_BACKWARD){
                    for(i=0; i<2; i++){
                        put_bits(&s->pb, 1, s->field_select[1][i]);
                        mpeg1_encode_motion(s, s->mv[1][i][0] -  s->last_mv[1][i][0]    , s->b_code);
                        mpeg1_encode_motion(s, s->mv[1][i][1] - (s->last_mv[1][i][1]>>1), s->b_code);
                        s->last_mv[1][i][0]=   s->mv[1][i][0];
                        s->last_mv[1][i][1]= 2*s->mv[1][i][1];
                    }
                    s->b_count++;
                }
            }
            s->mv_bits += get_bits_diff(s);
            if(cbp) {
                if (s->chroma_y_shift) {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp][1], ff_mpeg12_mbPatTable[cbp][0]);
                } else {
                    put_bits(&s->pb, ff_mpeg12_mbPatTable[cbp>>2][1], ff_mpeg12_mbPatTable[cbp>>2][0]);
                    put_sbits(&s->pb, 2, cbp);
                }
            }
        }
        for(i=0;i<mb_block_count;i++) {
            if (cbp & (1 << (mb_block_count - 1 - i))) {
                mpeg1_encode_block(s, block[i], i);
            }
        }
        s->mb_skip_run = 0;
        if(s->mb_intra)
            s->i_tex_bits+= get_bits_diff(s);
        else
            s->p_tex_bits+= get_bits_diff(s);
    }
}
void mpeg1_encode_mb(MpegEncContext *s, DCTELEM block[6][64], int motion_x, int motion_y)
{
    if (s->chroma_format == CHROMA_420) mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 6);
    else                                mpeg1_encode_mb_internal(s, block, motion_x, motion_y, 8);
}
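
/* Motion vector differences are wrapped into the f_code range ("modulo"
 * coding, via sign_extend below), then written as a magnitude-class VLC,
 * a sign bit and, for f_code > 1, bit_size residual bits. */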
// RAL: Parameter added: f_or_b_code
static void mpeg1_encode_motion(MpegEncContext *s, int val, int f_or_b_code)
{
    if (val == 0) {
        /* zero vector */
        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[0][1],
                 ff_mpeg12_mbMotionVectorTable[0][0]);
    } else {
        int code, sign, bits;
        int bit_size = f_or_b_code - 1;
        int range = 1 << bit_size;
        /* modulo encoding */
        val = sign_extend(val, 5 + bit_size);

        if (val >= 0) {
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 0;
        } else {
            val = -val;
            val--;
            code = (val >> bit_size) + 1;
            bits = val & (range - 1);
            sign = 1;
        }

        assert(code > 0 && code <= 16);

        put_bits(&s->pb,
                 ff_mpeg12_mbMotionVectorTable[code][1],
                 ff_mpeg12_mbMotionVectorTable[code][0]);

        put_bits(&s->pb, 1, sign);
        if (bit_size > 0) {
            put_bits(&s->pb, bit_size, bits);
        }
    }
}
void ff_mpeg1_encode_init(MpegEncContext *s)
{
    static int done=0;

    ff_mpeg12_common_init(s);

    if(!done){
        int f_code;
        int mv;
        int i;

        done=1;
        init_rl(&ff_rl_mpeg1, ff_mpeg12_static_rl_table_store[0]);
        init_rl(&ff_rl_mpeg2, ff_mpeg12_static_rl_table_store[1]);

        for(i=0; i<64; i++)
        {
            mpeg1_max_level[0][i]= ff_rl_mpeg1.max_level[0][i];
            mpeg1_index_run[0][i]= ff_rl_mpeg1.index_run[0][i];
        }

        init_uni_ac_vlc(&ff_rl_mpeg1, uni_mpeg1_ac_vlc_len);
        if(s->intra_vlc_format)
            init_uni_ac_vlc(&ff_rl_mpeg2, uni_mpeg2_ac_vlc_len);

        /* build unified dc encoding tables */
        for(i=-255; i<256; i++)
        {
            int adiff, index;
            int bits, code;
            int diff=i;

            adiff = FFABS(diff);
            if(diff<0) diff--;
            index = av_log2(2*adiff);

            bits= ff_mpeg12_vlc_dc_lum_bits[index] + index;
            code= (ff_mpeg12_vlc_dc_lum_code[index]<<index) + (diff & ((1 << index) - 1));
            mpeg1_lum_dc_uni[i+255]= bits + (code<<8);

            bits= ff_mpeg12_vlc_dc_chroma_bits[index] + index;
            code= (ff_mpeg12_vlc_dc_chroma_code[index]<<index) + (diff & ((1 << index) - 1));
            mpeg1_chr_dc_uni[i+255]= bits + (code<<8);
        }

        for(f_code=1; f_code<=MAX_FCODE; f_code++){
            for(mv=-MAX_MV; mv<=MAX_MV; mv++){
                int len;

                if(mv==0) len= ff_mpeg12_mbMotionVectorTable[0][1];
                else{
                    int val, bit_size, code;

                    bit_size = f_code - 1;

                    val=mv;
                    if (val < 0)
                        val = -val;
                    val--;
                    code = (val >> bit_size) + 1;
                    if(code<17){
                        len= ff_mpeg12_mbMotionVectorTable[code][1] + 1 + bit_size;
                    }else{
                        len= ff_mpeg12_mbMotionVectorTable[16][1] + 2 + bit_size;
                    }
                }

                mv_penalty[f_code][mv+MAX_MV]= len;
            }
        }

        for(f_code=MAX_FCODE; f_code>0; f_code--){
            for(mv=-(8<<f_code); mv<(8<<f_code); mv++){
                fcode_tab[mv+MAX_MV]= f_code;
            }
        }
    }
    s->me.mv_penalty= mv_penalty;
    s->fcode_tab= fcode_tab;
    if(s->codec_id == CODEC_ID_MPEG1VIDEO){
        s->min_qcoeff=-255;
        s->max_qcoeff= 255;
    }else{
        s->min_qcoeff=-2047;
        s->max_qcoeff= 2047;
    }
    if (s->intra_vlc_format) {
        s->intra_ac_vlc_length=
        s->intra_ac_vlc_last_length= uni_mpeg2_ac_vlc_len;
    } else {
        s->intra_ac_vlc_length=
        s->intra_ac_vlc_last_length= uni_mpeg1_ac_vlc_len;
    }
    s->inter_ac_vlc_length=
    s->inter_ac_vlc_last_length= uni_mpeg1_ac_vlc_len;
}
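
/* Intra DC: differences with |diff| < 256 come straight from the precomputed
 * mpeg1_lum/chr_dc_uni tables (bit count in the low byte, code above it);
 * larger MPEG-2 differences are built on the fly. */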
static inline void encode_dc(MpegEncContext *s, int diff, int component)
{
    if(((unsigned) (diff+255)) >= 511){
        int index;

        if(diff<0){
            index= av_log2_16bit(-2*diff);
            diff--;
        }else{
            index= av_log2_16bit(2*diff);
        }
        if (component == 0) {
            put_bits(
                &s->pb,
                ff_mpeg12_vlc_dc_lum_bits[index] + index,
                (ff_mpeg12_vlc_dc_lum_code[index]<<index) + (diff & ((1 << index) - 1)));
        }else{
            put_bits(
                &s->pb,
                ff_mpeg12_vlc_dc_chroma_bits[index] + index,
                (ff_mpeg12_vlc_dc_chroma_code[index]<<index) + (diff & ((1 << index) - 1)));
        }
    }else{
        if (component == 0) {
            put_bits(
                &s->pb,
                mpeg1_lum_dc_uni[diff+255]&0xFF,
                mpeg1_lum_dc_uni[diff+255]>>8);
        } else {
            put_bits(
                &s->pb,
                mpeg1_chr_dc_uni[diff+255]&0xFF,
                mpeg1_chr_dc_uni[diff+255]>>8);
        }
    }
}
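
/* Encode one 8x8 block: intra blocks start with a DC difference, inter blocks
 * may use the short code for a first coefficient of +/-1, and the remaining AC
 * coefficients use the run/level VLC with a 6-bit-run escape. */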
static void mpeg1_encode_block(MpegEncContext *s,
                               DCTELEM *block,
                               int n)
{
    int alevel, level, last_non_zero, dc, diff, i, j, run, last_index, sign;
    int code, component;
    const uint16_t (*table_vlc)[2] = ff_rl_mpeg1.table_vlc;

    last_index = s->block_last_index[n];

    /* DC coef */
    if (s->mb_intra) {
        component = (n <= 3 ? 0 : (n&1) + 1);
        dc = block[0]; /* overflow is impossible */
        diff = dc - s->last_dc[component];
        encode_dc(s, diff, component);
        s->last_dc[component] = dc;
        i = 1;
        if (s->intra_vlc_format)
            table_vlc = ff_rl_mpeg2.table_vlc;
    } else {
        /* encode the first coefficient: needs to be done here because
           it is handled slightly differently */
        level = block[0];
        if (abs(level) == 1) {
            code = ((uint32_t)level >> 31); /* the sign bit */
            put_bits(&s->pb, 2, code | 0x02);
            i = 1;
        } else {
            i = 0;
            last_non_zero = -1;
            goto next_coef;
        }
    }

    /* now quantize & encode AC coefs */
    last_non_zero = i - 1;
    for(;i<=last_index;i++) {
        j = s->intra_scantable.permutated[i];
        level = block[j];
    next_coef:
        /* encode using VLC */
        if (level != 0) {
            run = i - last_non_zero - 1;

            alevel= level;
            MASK_ABS(sign, alevel)
            sign&=1;

            if (alevel <= mpeg1_max_level[0][run]){
                code= mpeg1_index_run[0][run] + alevel - 1;
                /* store the vlc & sign at once */
                put_bits(&s->pb, table_vlc[code][1]+1, (table_vlc[code][0]<<1) + sign);
            } else {
                /* escape seems to be pretty rare <5% so I do not optimize it */
                put_bits(&s->pb, table_vlc[111][1], table_vlc[111][0]);
                /* escape: only clip in this case */
                put_bits(&s->pb, 6, run);
                if(s->codec_id == CODEC_ID_MPEG1VIDEO){
                    if (alevel < 128) {
                        put_sbits(&s->pb, 8, level);
                    } else {
                        if (level < 0) {
                            put_bits(&s->pb, 16, 0x8001 + level + 255);
                        } else {
                            put_sbits(&s->pb, 16, level);
                        }
                    }
                }else{
                    put_sbits(&s->pb, 12, level);
                }
            }
            last_non_zero = i;
        }
    }
    /* end of block */
    put_bits(&s->pb, table_vlc[112][1], table_vlc[112][0]);
}
#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM
#define COMMON_OPTS\
    {TIMECODE_OPT(MpegEncContext,\
     AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)},\
    { "intra_vlc",           "Use MPEG-2 intra VLC table.",                    OFFSET(intra_vlc_format),    FF_OPT_TYPE_INT, { 0 }, 0, 1, VE },\
    { "drop_frame_timecode", "Timecode is in drop frame format.",              OFFSET(drop_frame_timecode), FF_OPT_TYPE_INT, { 0 }, 0, 1, VE}, \
    { "scan_offset",         "Reserve space for SVCD scan offset user data.",  OFFSET(scan_offset),         FF_OPT_TYPE_INT, { 0 }, 0, 1, VE },

static const AVOption mpeg1_options[] = {
    COMMON_OPTS
    { NULL },
};

static const AVOption mpeg2_options[] = {
    COMMON_OPTS
    { "non_linear_quant",    "Use nonlinear quantizer.",    OFFSET(q_scale_type),   FF_OPT_TYPE_INT, { 0 }, 0, 1, VE },
    { "alternate_scan",      "Enable alternate scantable.", OFFSET(alternate_scan), FF_OPT_TYPE_INT, { 0 }, 0, 1, VE },
    { NULL },
};

#define mpeg12_class(x)\
static const AVClass mpeg## x ##_class = {\
    .class_name = "mpeg" #x "video encoder",\
    .item_name  = av_default_item_name,\
    .option     = mpeg## x ##_options,\
    .version    = LIBAVUTIL_VERSION_INT,\
};

mpeg12_class(1)
mpeg12_class(2)
AVCodec ff_mpeg1video_encoder = {
    .name                 = "mpeg1video",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = CODEC_ID_MPEG1VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode               = MPV_encode_picture,
    .close                = MPV_encode_end,
    .supported_framerates = ff_frame_rate_tab+1,
    .pix_fmts             = (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-1 video"),
    .priv_class           = &mpeg1_class,
};

AVCodec ff_mpeg2video_encoder = {
    .name                 = "mpeg2video",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = CODEC_ID_MPEG2VIDEO,
    .priv_data_size       = sizeof(MpegEncContext),
    .init                 = encode_init,
    .encode               = MPV_encode_picture,
    .close                = MPV_encode_end,
    .supported_framerates = ff_frame_rate_tab+1,
    .pix_fmts             = (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE},
    .capabilities         = CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS,
    .long_name            = NULL_IF_CONFIG_SMALL("MPEG-2 video"),
    .priv_class           = &mpeg2_class,
};