/*
 * ASUS V1/V2 codec
 * Copyright (c) 2003 Michael Niedermayer
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * ASUS V1/V2 codec.
 */

#include "avcodec.h"
#include "internal.h"
#include "libavutil/common.h"
#include "put_bits.h"
#include "dsputil.h"
#include "mpeg12data.h"

//#undef NDEBUG
//#include <assert.h>

#define VLC_BITS 6
#define ASV2_LEVEL_VLC_BITS 10

typedef struct ASV1Context{
    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame picture;
    PutBitContext pb;
    GetBitContext gb;
    ScanTable scantable;
    int inv_qscale;
    int mb_width;
    int mb_height;
    int mb_width2;
    int mb_height2;
    DECLARE_ALIGNED(16, DCTELEM, block)[6][64];
    uint16_t intra_matrix[64];
    int q_intra_matrix[64];
    uint8_t *bitstream_buffer;
    unsigned int bitstream_buffer_size;
} ASV1Context;

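/* Scan order shared by ASV1 and ASV2; entries are positions inside the 8x8
 * coefficient block, grouped in fours so that one coded-coefficient-pattern
 * (ccp) code covers one group along the scan. */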
static const uint8_t scantab[64]={
    0x00,0x08,0x01,0x09,0x10,0x18,0x11,0x19,
    0x02,0x0A,0x03,0x0B,0x12,0x1A,0x13,0x1B,
    0x04,0x0C,0x05,0x0D,0x20,0x28,0x21,0x29,
    0x06,0x0E,0x07,0x0F,0x14,0x1C,0x15,0x1D,
    0x22,0x2A,0x23,0x2B,0x30,0x38,0x31,0x39,
    0x16,0x1E,0x17,0x1F,0x24,0x2C,0x25,0x2D,
    0x32,0x3A,0x33,0x3B,0x26,0x2E,0x27,0x2F,
    0x34,0x3C,0x35,0x3D,0x36,0x3E,0x37,0x3F,
};

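/* VLC code tables as {code, bit length} pairs: ccp_tab/level_tab are used by
 * ASV1, dc_ccp_tab/ac_ccp_tab/asv2_level_tab by ASV2. */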
static const uint8_t ccp_tab[17][2]={
    {0x2,2}, {0x7,5}, {0xB,5}, {0x3,5},
    {0xD,5}, {0x5,5}, {0x9,5}, {0x1,5},
    {0xE,5}, {0x6,5}, {0xA,5}, {0x2,5},
    {0xC,5}, {0x4,5}, {0x8,5}, {0x3,2},
    {0xF,5}, //EOB
};

static const uint8_t level_tab[7][2]={
    {3,4}, {3,3}, {3,2}, {0,3}, {2,2}, {2,3}, {2,4}
};

static const uint8_t dc_ccp_tab[8][2]={
    {0x1,2}, {0xD,4}, {0xF,4}, {0xC,4},
    {0x5,3}, {0xE,4}, {0x4,3}, {0x0,2},
};

static const uint8_t ac_ccp_tab[16][2]={
    {0x00,2}, {0x3B,6}, {0x0A,4}, {0x3A,6},
    {0x02,3}, {0x39,6}, {0x3C,6}, {0x38,6},
    {0x03,3}, {0x3D,6}, {0x08,4}, {0x1F,5},
    {0x09,4}, {0x0B,4}, {0x0D,4}, {0x0C,4},
};

static const uint8_t asv2_level_tab[63][2]={
    {0x3F,10},{0x2F,10},{0x37,10},{0x27,10},{0x3B,10},{0x2B,10},{0x33,10},{0x23,10},
    {0x3D,10},{0x2D,10},{0x35,10},{0x25,10},{0x39,10},{0x29,10},{0x31,10},{0x21,10},
    {0x1F, 8},{0x17, 8},{0x1B, 8},{0x13, 8},{0x1D, 8},{0x15, 8},{0x19, 8},{0x11, 8},
    {0x0F, 6},{0x0B, 6},{0x0D, 6},{0x09, 6},
    {0x07, 4},{0x05, 4},
    {0x03, 2},
    {0x00, 5},
    {0x02, 2},
    {0x04, 4},{0x06, 4},
    {0x08, 6},{0x0C, 6},{0x0A, 6},{0x0E, 6},
    {0x10, 8},{0x18, 8},{0x14, 8},{0x1C, 8},{0x12, 8},{0x1A, 8},{0x16, 8},{0x1E, 8},
    {0x20,10},{0x30,10},{0x28,10},{0x38,10},{0x24,10},{0x34,10},{0x2C,10},{0x3C,10},
    {0x22,10},{0x32,10},{0x2A,10},{0x3A,10},{0x26,10},{0x36,10},{0x2E,10},{0x3E,10},
};

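/* Runtime VLC reader tables, built once from the code tables above. */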
static VLC ccp_vlc;
static VLC level_vlc;
static VLC dc_ccp_vlc;
static VLC ac_ccp_vlc;
static VLC asv2_level_vlc;

static av_cold void init_vlcs(ASV1Context *a){
    static int done = 0;

    if (!done) {
        done = 1;

        INIT_VLC_STATIC(&ccp_vlc, VLC_BITS, 17,
                        &ccp_tab[0][1], 2, 1,
                        &ccp_tab[0][0], 2, 1, 64);
        INIT_VLC_STATIC(&dc_ccp_vlc, VLC_BITS, 8,
                        &dc_ccp_tab[0][1], 2, 1,
                        &dc_ccp_tab[0][0], 2, 1, 64);
        INIT_VLC_STATIC(&ac_ccp_vlc, VLC_BITS, 16,
                        &ac_ccp_tab[0][1], 2, 1,
                        &ac_ccp_tab[0][0], 2, 1, 64);
        INIT_VLC_STATIC(&level_vlc, VLC_BITS, 7,
                        &level_tab[0][1], 2, 1,
                        &level_tab[0][0], 2, 1, 64);
        INIT_VLC_STATIC(&asv2_level_vlc, ASV2_LEVEL_VLC_BITS, 63,
                        &asv2_level_tab[0][1], 2, 1,
                        &asv2_level_tab[0][0], 2, 1, 1024);
    }
}

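/* ASV2 stores bits LSB-first within each byte, so values are bit-reversed on
 * read and write here (and per byte in decode_frame/encode_frame). */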
//FIXME write a reversed bitstream reader to avoid the double reverse
static inline int asv2_get_bits(GetBitContext *gb, int n){
    return av_reverse[ get_bits(gb, n) << (8-n) ];
}

static inline void asv2_put_bits(PutBitContext *pb, int n, int v){
    put_bits(pb, n, av_reverse[ v << (8-n) ]);
}

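/* Coefficient levels: small magnitudes map directly to VLC entries; anything
 * outside that range is coded as the escape entry plus a raw 8-bit value. */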
static inline int asv1_get_level(GetBitContext *gb){
    int code= get_vlc2(gb, level_vlc.table, VLC_BITS, 1);

    if(code==3) return get_sbits(gb, 8);
    else        return code - 3;
}

static inline int asv2_get_level(GetBitContext *gb){
    int code= get_vlc2(gb, asv2_level_vlc.table, ASV2_LEVEL_VLC_BITS, 1);

    if(code==31) return (int8_t)asv2_get_bits(gb, 8);
    else         return code - 31;
}

static inline void asv1_put_level(PutBitContext *pb, int level){
    unsigned int index= level + 3;

    if(index <= 6) put_bits(pb, level_tab[index][1], level_tab[index][0]);
    else{
        put_bits(pb, level_tab[3][1], level_tab[3][0]);
        put_sbits(pb, 8, level);
    }
}

static inline void asv2_put_level(PutBitContext *pb, int level){
    unsigned int index= level + 31;

    if(index <= 62) put_bits(pb, asv2_level_tab[index][1], asv2_level_tab[index][0]);
    else{
        put_bits(pb, asv2_level_tab[31][1], asv2_level_tab[31][0]);
        asv2_put_bits(pb, 8, level&0xFF);
    }
}

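/* Decode one ASV1 block: 8-bit DC, then up to 10 groups of four coefficients,
 * each introduced by a ccp VLC; the EOB code terminates the block. */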
static inline int asv1_decode_block(ASV1Context *a, DCTELEM block[64]){
    int i;

    block[0]= 8*get_bits(&a->gb, 8);

    for(i=0; i<11; i++){
        const int ccp= get_vlc2(&a->gb, ccp_vlc.table, VLC_BITS, 1);

        if(ccp){
            if(ccp == 16) break;
            if(ccp < 0 || i>=10){
                av_log(a->avctx, AV_LOG_ERROR, "coded coeff pattern damaged\n");
                return -1;
            }

            if(ccp&8) block[a->scantable.permutated[4*i+0]]= (asv1_get_level(&a->gb) * a->intra_matrix[4*i+0])>>4;
            if(ccp&4) block[a->scantable.permutated[4*i+1]]= (asv1_get_level(&a->gb) * a->intra_matrix[4*i+1])>>4;
            if(ccp&2) block[a->scantable.permutated[4*i+2]]= (asv1_get_level(&a->gb) * a->intra_matrix[4*i+2])>>4;
            if(ccp&1) block[a->scantable.permutated[4*i+3]]= (asv1_get_level(&a->gb) * a->intra_matrix[4*i+3])>>4;
        }
    }

    return 0;
}

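/* Decode one ASV2 block: a 4-bit count of coded groups, an 8-bit DC value, a
 * ccp for the three coefficients following DC, then one AC ccp per group. */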
static inline int asv2_decode_block(ASV1Context *a, DCTELEM block[64]){
    int i, count, ccp;

    count= asv2_get_bits(&a->gb, 4);

    block[0]= 8*asv2_get_bits(&a->gb, 8);

    ccp= get_vlc2(&a->gb, dc_ccp_vlc.table, VLC_BITS, 1);
    if(ccp){
        if(ccp&4) block[a->scantable.permutated[1]]= (asv2_get_level(&a->gb) * a->intra_matrix[1])>>4;
        if(ccp&2) block[a->scantable.permutated[2]]= (asv2_get_level(&a->gb) * a->intra_matrix[2])>>4;
        if(ccp&1) block[a->scantable.permutated[3]]= (asv2_get_level(&a->gb) * a->intra_matrix[3])>>4;
    }

    for(i=1; i<count+1; i++){
        const int ccp= get_vlc2(&a->gb, ac_ccp_vlc.table, VLC_BITS, 1);

        if(ccp){
            if(ccp&8) block[a->scantable.permutated[4*i+0]]= (asv2_get_level(&a->gb) * a->intra_matrix[4*i+0])>>4;
            if(ccp&4) block[a->scantable.permutated[4*i+1]]= (asv2_get_level(&a->gb) * a->intra_matrix[4*i+1])>>4;
            if(ccp&2) block[a->scantable.permutated[4*i+2]]= (asv2_get_level(&a->gb) * a->intra_matrix[4*i+2])>>4;
            if(ccp&1) block[a->scantable.permutated[4*i+3]]= (asv2_get_level(&a->gb) * a->intra_matrix[4*i+3])>>4;
        }
    }

    return 0;
}

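/* Quantize and code one ASV1 block; all-zero groups are held back and emitted
 * as ccp=0 codes only when a later nonzero group follows, so trailing zero
 * groups are absorbed by the EOB code. */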
static inline void asv1_encode_block(ASV1Context *a, DCTELEM block[64]){
    int i;
    int nc_count=0;

    put_bits(&a->pb, 8, (block[0] + 32)>>6);
    block[0]= 0;

    for(i=0; i<10; i++){
        const int index= scantab[4*i];
        int ccp=0;

        if( (block[index + 0] = (block[index + 0]*a->q_intra_matrix[index + 0] + (1<<15))>>16) ) ccp |= 8;
        if( (block[index + 8] = (block[index + 8]*a->q_intra_matrix[index + 8] + (1<<15))>>16) ) ccp |= 4;
        if( (block[index + 1] = (block[index + 1]*a->q_intra_matrix[index + 1] + (1<<15))>>16) ) ccp |= 2;
        if( (block[index + 9] = (block[index + 9]*a->q_intra_matrix[index + 9] + (1<<15))>>16) ) ccp |= 1;

        if(ccp){
            for(;nc_count; nc_count--)
                put_bits(&a->pb, ccp_tab[0][1], ccp_tab[0][0]);

            put_bits(&a->pb, ccp_tab[ccp][1], ccp_tab[ccp][0]);

            if(ccp&8) asv1_put_level(&a->pb, block[index + 0]);
            if(ccp&4) asv1_put_level(&a->pb, block[index + 8]);
            if(ccp&2) asv1_put_level(&a->pb, block[index + 1]);
            if(ccp&1) asv1_put_level(&a->pb, block[index + 9]);
        }else{
            nc_count++;
        }
    }

    put_bits(&a->pb, ccp_tab[16][1], ccp_tab[16][0]);
}

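/* Quantize and code one ASV2 block: find the last nonzero group, then emit the
 * group count, the DC value and one ccp (plus levels) per coded group. */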
static inline void asv2_encode_block(ASV1Context *a, DCTELEM block[64]){
    int i;
    int count=0;

    for(count=63; count>3; count--){
        const int index= scantab[count];

        if( (block[index]*a->q_intra_matrix[index] + (1<<15))>>16 )
            break;
    }

    count >>= 2;

    asv2_put_bits(&a->pb, 4, count);
    asv2_put_bits(&a->pb, 8, (block[0] + 32)>>6);
    block[0]= 0;

    for(i=0; i<=count; i++){
        const int index= scantab[4*i];
        int ccp=0;

        if( (block[index + 0] = (block[index + 0]*a->q_intra_matrix[index + 0] + (1<<15))>>16) ) ccp |= 8;
        if( (block[index + 8] = (block[index + 8]*a->q_intra_matrix[index + 8] + (1<<15))>>16) ) ccp |= 4;
        if( (block[index + 1] = (block[index + 1]*a->q_intra_matrix[index + 1] + (1<<15))>>16) ) ccp |= 2;
        if( (block[index + 9] = (block[index + 9]*a->q_intra_matrix[index + 9] + (1<<15))>>16) ) ccp |= 1;

        assert(i || ccp<8);
        if(i) put_bits(&a->pb, ac_ccp_tab[ccp][1], ac_ccp_tab[ccp][0]);
        else  put_bits(&a->pb, dc_ccp_tab[ccp][1], dc_ccp_tab[ccp][0]);

        if(ccp){
            if(ccp&8) asv2_put_level(&a->pb, block[index + 0]);
            if(ccp&4) asv2_put_level(&a->pb, block[index + 8]);
            if(ccp&2) asv2_put_level(&a->pb, block[index + 1]);
            if(ccp&1) asv2_put_level(&a->pb, block[index + 9]);
        }
    }
}

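/* A macroblock consists of 6 blocks: 4 luma (16x16) plus one Cb and one Cr. */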
static inline int decode_mb(ASV1Context *a, DCTELEM block[6][64]){
    int i;

    a->dsp.clear_blocks(block[0]);

    if(a->avctx->codec_id == CODEC_ID_ASV1){
        for(i=0; i<6; i++){
            if( asv1_decode_block(a, block[i]) < 0)
                return -1;
        }
    }else{
        for(i=0; i<6; i++){
            if( asv2_decode_block(a, block[i]) < 0)
                return -1;
        }
    }
    return 0;
}

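/* Upper bound, in bytes, on the coded size of one macroblock; used below to
 * detect imminent output-buffer overflow before encoding each macroblock. */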
#define MAX_MB_SIZE (30*16*16*3/2/8)

static inline int encode_mb(ASV1Context *a, DCTELEM block[6][64]){
    int i;

    if (a->pb.buf_end - a->pb.buf - (put_bits_count(&a->pb)>>3) < MAX_MB_SIZE) {
        av_log(a->avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    if(a->avctx->codec_id == CODEC_ID_ASV1){
        for(i=0; i<6; i++)
            asv1_encode_block(a, block[i]);
    }else{
        for(i=0; i<6; i++)
            asv2_encode_block(a, block[i]);
    }
    return 0;
}

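/* Inverse-transform the 6 blocks of one macroblock into the output picture;
 * chroma is skipped when CODEC_FLAG_GRAY is set. */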
static inline void idct_put(ASV1Context *a, int mb_x, int mb_y){
    DCTELEM (*block)[64]= a->block;
    int linesize= a->picture.linesize[0];

    uint8_t *dest_y  = a->picture.data[0] + (mb_y * 16* linesize              ) + mb_x * 16;
    uint8_t *dest_cb = a->picture.data[1] + (mb_y * 8 * a->picture.linesize[1]) + mb_x * 8;
    uint8_t *dest_cr = a->picture.data[2] + (mb_y * 8 * a->picture.linesize[2]) + mb_x * 8;

    a->dsp.idct_put(dest_y                 , linesize, block[0]);
    a->dsp.idct_put(dest_y              + 8, linesize, block[1]);
    a->dsp.idct_put(dest_y + 8*linesize    , linesize, block[2]);
    a->dsp.idct_put(dest_y + 8*linesize + 8, linesize, block[3]);

    if(!(a->avctx->flags&CODEC_FLAG_GRAY)){
        a->dsp.idct_put(dest_cb, a->picture.linesize[1], block[4]);
        a->dsp.idct_put(dest_cr, a->picture.linesize[2], block[5]);
    }
}

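/* Fetch the 6 blocks of one macroblock from the source picture and apply the
 * forward DCT; chroma is skipped when CODEC_FLAG_GRAY is set. */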
static inline void dct_get(ASV1Context *a, int mb_x, int mb_y){
    DCTELEM (*block)[64]= a->block;
    int linesize= a->picture.linesize[0];
    int i;

    uint8_t *ptr_y  = a->picture.data[0] + (mb_y * 16* linesize              ) + mb_x * 16;
    uint8_t *ptr_cb = a->picture.data[1] + (mb_y * 8 * a->picture.linesize[1]) + mb_x * 8;
    uint8_t *ptr_cr = a->picture.data[2] + (mb_y * 8 * a->picture.linesize[2]) + mb_x * 8;

    a->dsp.get_pixels(block[0], ptr_y                 , linesize);
    a->dsp.get_pixels(block[1], ptr_y              + 8, linesize);
    a->dsp.get_pixels(block[2], ptr_y + 8*linesize    , linesize);
    a->dsp.get_pixels(block[3], ptr_y + 8*linesize + 8, linesize);
    for(i=0; i<4; i++)
        a->dsp.fdct(block[i]);

    if(!(a->avctx->flags&CODEC_FLAG_GRAY)){
        a->dsp.get_pixels(block[4], ptr_cb, a->picture.linesize[1]);
        a->dsp.get_pixels(block[5], ptr_cr, a->picture.linesize[2]);
        for(i=4; i<6; i++)
            a->dsp.fdct(block[i]);
    }
}

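/* Decode one frame: the input is copied into a padded buffer (32-bit
 * byte-swapped for ASV1, per-byte bit-reversed for ASV2), then the full
 * macroblock grid is decoded, followed by the partial right column and bottom
 * row when the frame size is not a multiple of 16. */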
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *data_size,
                        AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    ASV1Context * const a = avctx->priv_data;
    AVFrame *picture = data;
    AVFrame * const p= &a->picture;
    int mb_x, mb_y;

    if(p->data[0])
        avctx->release_buffer(avctx, p);

    p->reference= 0;
    if(avctx->get_buffer(avctx, p) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    p->pict_type= AV_PICTURE_TYPE_I;
    p->key_frame= 1;

    av_fast_padded_malloc(&a->bitstream_buffer, &a->bitstream_buffer_size,
                          buf_size);
    if (!a->bitstream_buffer)
        return AVERROR(ENOMEM);

    if(avctx->codec_id == CODEC_ID_ASV1)
        a->dsp.bswap_buf((uint32_t*)a->bitstream_buffer, (const uint32_t*)buf, buf_size/4);
    else{
        int i;
        for(i=0; i<buf_size; i++)
            a->bitstream_buffer[i]= av_reverse[ buf[i] ];
    }

    init_get_bits(&a->gb, a->bitstream_buffer, buf_size*8);

    for(mb_y=0; mb_y<a->mb_height2; mb_y++){
        for(mb_x=0; mb_x<a->mb_width2; mb_x++){
            if( decode_mb(a, a->block) <0)
                return -1;

            idct_put(a, mb_x, mb_y);
        }
    }

    if(a->mb_width2 != a->mb_width){
        mb_x= a->mb_width2;
        for(mb_y=0; mb_y<a->mb_height2; mb_y++){
            if( decode_mb(a, a->block) <0)
                return -1;

            idct_put(a, mb_x, mb_y);
        }
    }

    if(a->mb_height2 != a->mb_height){
        mb_y= a->mb_height2;
        for(mb_x=0; mb_x<a->mb_width; mb_x++){
            if( decode_mb(a, a->block) <0)
                return -1;

            idct_put(a, mb_x, mb_y);
        }
    }

    *picture = a->picture;
    *data_size = sizeof(AVPicture);

    emms_c();

    return (get_bits_count(&a->gb)+31)/32*4;
}

#if CONFIG_ASV1_ENCODER || CONFIG_ASV2_ENCODER
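/* Encode one frame: DCT and code the full macroblock grid plus any partial
 * right column / bottom row, pad the bitstream to a 32-bit boundary, then
 * byte-swap (ASV1) or bit-reverse (ASV2) the output in place. */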
static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                        const AVFrame *pict, int *got_packet)
{
    ASV1Context * const a = avctx->priv_data;
    AVFrame * const p= &a->picture;
    int size, ret;
    int mb_x, mb_y;

    if ((ret = ff_alloc_packet2(avctx, pkt, a->mb_height*a->mb_width*MAX_MB_SIZE +
                                FF_MIN_BUFFER_SIZE)) < 0)
        return ret;

    init_put_bits(&a->pb, pkt->data, pkt->size);

    *p = *pict;
    p->pict_type= AV_PICTURE_TYPE_I;
    p->key_frame= 1;

    for(mb_y=0; mb_y<a->mb_height2; mb_y++){
        for(mb_x=0; mb_x<a->mb_width2; mb_x++){
            dct_get(a, mb_x, mb_y);
            encode_mb(a, a->block);
        }
    }

    if(a->mb_width2 != a->mb_width){
        mb_x= a->mb_width2;
        for(mb_y=0; mb_y<a->mb_height2; mb_y++){
            dct_get(a, mb_x, mb_y);
            encode_mb(a, a->block);
        }
    }

    if(a->mb_height2 != a->mb_height){
        mb_y= a->mb_height2;
        for(mb_x=0; mb_x<a->mb_width; mb_x++){
            dct_get(a, mb_x, mb_y);
            encode_mb(a, a->block);
        }
    }
    emms_c();

    avpriv_align_put_bits(&a->pb);
    while(put_bits_count(&a->pb)&31)
        put_bits(&a->pb, 8, 0);

    size= put_bits_count(&a->pb)/32;

    if(avctx->codec_id == CODEC_ID_ASV1)
        a->dsp.bswap_buf((uint32_t*)pkt->data, (uint32_t*)pkt->data, size);
    else{
        int i;
        for(i=0; i<4*size; i++)
            pkt->data[i] = av_reverse[pkt->data[i]];
    }

    pkt->size   = size*4;
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}
#endif /* CONFIG_ASV1_ENCODER || CONFIG_ASV2_ENCODER */

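/* Setup shared by decoder and encoder: macroblock counts rounded up
 * (mb_width/mb_height) and truncated (mb_width2/mb_height2) to whole 16x16
 * macroblocks. */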
static av_cold void common_init(AVCodecContext *avctx){
    ASV1Context * const a = avctx->priv_data;

    ff_dsputil_init(&a->dsp, avctx);

    a->mb_width   = (avctx->width  + 15) / 16;
    a->mb_height  = (avctx->height + 15) / 16;
    a->mb_width2  = (avctx->width  + 0) / 16;
    a->mb_height2 = (avctx->height + 0) / 16;

    avctx->coded_frame= &a->picture;
    a->avctx= avctx;
}

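/* Decoder setup: build VLC and scan tables and derive the dequantization
 * matrix from the inverse qscale stored in the first extradata byte. */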
static av_cold int decode_init(AVCodecContext *avctx){
    ASV1Context * const a = avctx->priv_data;
    AVFrame *p= &a->picture;
    int i;
    const int scale= avctx->codec_id == CODEC_ID_ASV1 ? 1 : 2;

    common_init(avctx);
    init_vlcs(a);
    ff_init_scantable(a->dsp.idct_permutation, &a->scantable, scantab);
    avctx->pix_fmt= PIX_FMT_YUV420P;

    if(avctx->extradata_size < 1 || (a->inv_qscale= avctx->extradata[0]) == 0){
        av_log(avctx, AV_LOG_ERROR, "illegal qscale 0\n");
        if(avctx->codec_id == CODEC_ID_ASV1)
            a->inv_qscale= 6;
        else
            a->inv_qscale= 10;
    }

    for(i=0; i<64; i++){
        int index= scantab[i];

        a->intra_matrix[i]= 64*scale*ff_mpeg1_default_intra_matrix[index] / a->inv_qscale;
    }

    p->qstride= a->mb_width;
    p->qscale_table= av_malloc( p->qstride * a->mb_height);
    p->quality= (32*scale + a->inv_qscale/2)/a->inv_qscale;
    memset(p->qscale_table, p->quality, p->qstride*a->mb_height);

    return 0;
}

#if CONFIG_ASV1_ENCODER || CONFIG_ASV2_ENCODER
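/* Encoder setup: derive the inverse qscale from global_quality, store it in
 * the extradata together with the "ASUS" tag, and precompute the forward
 * quantization matrix. */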
static av_cold int encode_init(AVCodecContext *avctx){
    ASV1Context * const a = avctx->priv_data;
    int i;
    const int scale= avctx->codec_id == CODEC_ID_ASV1 ? 1 : 2;

    common_init(avctx);

    if(avctx->global_quality == 0) avctx->global_quality= 4*FF_QUALITY_SCALE;

    a->inv_qscale= (32*scale*FF_QUALITY_SCALE + avctx->global_quality/2) / avctx->global_quality;

    avctx->extradata= av_mallocz(8);
    avctx->extradata_size=8;
    ((uint32_t*)avctx->extradata)[0]= av_le2ne32(a->inv_qscale);
    ((uint32_t*)avctx->extradata)[1]= av_le2ne32(AV_RL32("ASUS"));

    for(i=0; i<64; i++){
        int q= 32*scale*ff_mpeg1_default_intra_matrix[i];
        a->q_intra_matrix[i]= ((a->inv_qscale<<16) + q/2) / q;
    }

    return 0;
}
#endif /* CONFIG_ASV1_ENCODER || CONFIG_ASV2_ENCODER */

static av_cold int decode_end(AVCodecContext *avctx){
    ASV1Context * const a = avctx->priv_data;

    av_freep(&a->bitstream_buffer);
    av_freep(&a->picture.qscale_table);
    a->bitstream_buffer_size=0;

    if(a->picture.data[0])
        avctx->release_buffer(avctx, &a->picture);

    return 0;
}

AVCodec ff_asv1_decoder = {
    .name           = "asv1",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_ASV1,
    .priv_data_size = sizeof(ASV1Context),
    .init           = decode_init,
    .close          = decode_end,
    .decode         = decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("ASUS V1"),
};

AVCodec ff_asv2_decoder = {
    .name           = "asv2",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_ASV2,
    .priv_data_size = sizeof(ASV1Context),
    .init           = decode_init,
    .close          = decode_end,
    .decode         = decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("ASUS V2"),
};

#if CONFIG_ASV1_ENCODER
AVCodec ff_asv1_encoder = {
    .name           = "asv1",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_ASV1,
    .priv_data_size = sizeof(ASV1Context),
    .init           = encode_init,
    .encode2        = encode_frame,
    .pix_fmts       = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
    .long_name      = NULL_IF_CONFIG_SMALL("ASUS V1"),
};
#endif

#if CONFIG_ASV2_ENCODER
AVCodec ff_asv2_encoder = {
    .name           = "asv2",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_ASV2,
    .priv_data_size = sizeof(ASV1Context),
    .init           = encode_init,
    .encode2        = encode_frame,
    .pix_fmts       = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
    .long_name      = NULL_IF_CONFIG_SMALL("ASUS V2"),
};
#endif