Reviewed-by: Paul B Mahol <onemda@gmail.com>
Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
libavcodec/huffyuv.h

@@ -67,6 +67,12 @@ typedef struct HYuvContext {
    int version;
    int yuy2;   //use yuy2 instead of 422P
    int bgr32;  //use bgr32 instead of bgr24
    int bps;
    int alpha;
    int chroma;
    int yuv;
    int chroma_h_shift;
    int chroma_v_shift;
    int width, height;
    int flags;
    int context;
libavcodec/huffyuvdec.c

@@ -32,6 +32,7 @@
#include "get_bits.h"
#include "huffyuv.h"
#include "thread.h"
#include "libavutil/pixdesc.h"

#define classic_shift_luma_table_size 42
static const unsigned char classic_shift_luma[classic_shift_luma_table_size + FF_INPUT_BUFFER_PADDING_SIZE] = {

@@ -264,12 +265,16 @@ static av_cold int decode_init(AVCodecContext *avctx)
        if ((avctx->bits_per_coded_sample & 7) &&
            avctx->bits_per_coded_sample != 12)
            s->version = 1; // do such files exist at all?
        else
        else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
            s->version = 2;
        else
            s->version = 3;
    } else
        s->version = 0;

    if (s->version == 2) {
    s->bps = 8;
    s->chroma = 1;
    if (s->version >= 2) {
        int method, interlace;

        if (avctx->extradata_size < 4)

@@ -278,9 +283,18 @@ static av_cold int decode_init(AVCodecContext *avctx)
        method = ((uint8_t*)avctx->extradata)[0];
        s->decorrelate = method & 64 ? 1 : 0;
        s->predictor = method & 63;
        s->bitstream_bpp = ((uint8_t*)avctx->extradata)[1];
        if (s->bitstream_bpp == 0)
            s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        if (s->version == 2) {
            s->bitstream_bpp = ((uint8_t*)avctx->extradata)[1];
            if (s->bitstream_bpp == 0)
                s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        } else {
            s->bps = (avctx->extradata[1] >> 4) + 1;
            s->chroma_h_shift = avctx->extradata[1] & 3;
            s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
            s->yuv = !!(((uint8_t*)avctx->extradata)[2] & 1);
            s->chroma = !!(((uint8_t*)avctx->extradata)[2] & 3);
            s->alpha = !!(((uint8_t*)avctx->extradata)[2] & 4);
        }
        interlace = (((uint8_t*)avctx->extradata)[2] & 0x30) >> 4;
        s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
        s->context = ((uint8_t*)avctx->extradata)[2] & 0x40 ? 1 : 0;

@@ -318,29 +332,59 @@ static av_cold int decode_init(AVCodecContext *avctx)
        return AVERROR_INVALIDDATA;
    }

    switch (s->bitstream_bpp) {
    case 12:
        avctx->pix_fmt = AV_PIX_FMT_YUV420P;
        break;
    case 16:
        if (s->yuy2) {
            avctx->pix_fmt = AV_PIX_FMT_YUYV422;
        } else {
            avctx->pix_fmt = AV_PIX_FMT_YUV422P;
    if (s->version <= 2) {
        switch (s->bitstream_bpp) {
        case 12:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            s->yuv = 1;
            break;
        case 16:
            if (s->yuy2) {
                avctx->pix_fmt = AV_PIX_FMT_YUYV422;
            } else {
                avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            }
            s->yuv = 1;
            break;
        case 24:
        case 32:
            if (s->bgr32) {
                avctx->pix_fmt = AV_PIX_FMT_RGB32;
                s->alpha = 1;
            } else {
                avctx->pix_fmt = AV_PIX_FMT_BGR24;
            }
            break;
        default:
            return AVERROR_INVALIDDATA;
        }
        break;
    case 24:
    case 32:
        if (s->bgr32) {
            avctx->pix_fmt = AV_PIX_FMT_RGB32;
        } else {
            avctx->pix_fmt = AV_PIX_FMT_BGR24;
        av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
                                         &s->chroma_h_shift,
                                         &s->chroma_v_shift);
    } else {
        switch ( (s->chroma<<10) | (s->yuv<<9) | (s->alpha<<8) | ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2)) {
        case 0x670:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P;
            break;
        case 0x671:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            break;
        case 0x672:
            avctx->pix_fmt = AV_PIX_FMT_YUV411P;
            break;
        case 0x674:
            avctx->pix_fmt = AV_PIX_FMT_YUV440P;
            break;
        case 0x675:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            break;
        case 0x67A:
            avctx->pix_fmt = AV_PIX_FMT_YUV410P;
            break;
        }
        break;
    default:
        return AVERROR_INVALIDDATA;
    }

    if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
        av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
        return AVERROR_INVALIDDATA;
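
The version 3 branch above picks the output pixel format from a single packed key built out of the new extradata fields, so the 0x67X case values become readable once decomposed: chroma contributes 0x400, yuv 0x200, alpha 0x100, (bps-1)<<4 gives 0x70 for 8 bits, and the low nibble holds the chroma shifts (h_shift | v_shift<<2). For 4:2:0 the shifts are 1/1, so the key is 0x400 | 0x200 | 0x70 | 1 | 4 = 0x675, which is why that case maps to AV_PIX_FMT_YUV420P. A small standalone sketch (illustrative only, not part of the patch) that rebuilds the key:

#include <stdio.h>

/* Illustrative only: rebuilds the packed key used by the version-3
 * pixel-format switch in decode_init(). */
static unsigned hyuv3_key(int chroma, int yuv, int alpha,
                          int bps, int h_shift, int v_shift)
{
    return (chroma << 10) | (yuv << 9) | (alpha << 8) |
           ((bps - 1) << 4) | h_shift | (v_shift << 2);
}

int main(void)
{
    /* 8-bit YUV 4:2:0: luma + two chroma planes, no alpha, shifts 1/1 */
    printf("0x%X\n", hyuv3_key(1, 1, 0, 8, 1, 1)); /* prints 0x675 */
    /* 8-bit YUV 4:4:4: no subsampling */
    printf("0x%X\n", hyuv3_key(1, 1, 0, 8, 0, 0)); /* prints 0x670 */
    return 0;
}
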
@@ -370,7 +414,7 @@ static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
    for (i = 0; i < 6; i++)
        s->vlc[i].table = NULL;

    if (s->version == 2) {
    if (s->version >= 2) {
        if (read_huffman_tables(s, ((uint8_t*)avctx->extradata) + 4,
                                avctx->extradata_size) < 0)
            return AVERROR_INVALIDDATA;

@@ -417,6 +461,25 @@ static void decode_422_bitstream(HYuvContext *s, int count)
    }
}

static void decode_plane_bitstream(HYuvContext *s, int count, int plane)
{
    int i;

    count/=2;

    if (count >= (get_bits_left(&s->gb)) / (31 * 2)) {
        for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
            s->temp[0][2*i    ] = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3);
            s->temp[0][2*i + 1] = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3);
        }
    } else {
        for(i=0; i<count; i++){
            s->temp[0][2*i    ] = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3);
            s->temp[0][2*i + 1] = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3);
        }
    }
}

static void decode_gray_bitstream(HYuvContext *s, int count)
{
    int i;

@@ -546,7 +609,72 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
    s->last_slice_end = 0;

    if (s->bitstream_bpp < 24) {
    if (s->version > 2) {
        int plane;
        for(plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
            int left, lefttop, y;
            int w = width;
            int h = height;
            int fake_stride = fake_ystride;

            if (s->chroma && (plane == 1 || plane == 2)) {
                w >>= s->chroma_h_shift;
                h >>= s->chroma_v_shift;
                fake_stride = plane == 1 ? fake_ustride : fake_vstride;
            }

            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_plane_bitstream(s, w, plane);
                left = s->dsp.add_hfyu_left_prediction(p->data[plane], s->temp[0], w, 0);

                for (y = 1; y < h; y++) {
                    uint8_t *dst = p->data[plane] + p->linesize[plane]*y;

                    decode_plane_bitstream(s, w, plane);
                    left = s->dsp.add_hfyu_left_prediction(dst, s->temp[0], w, left);
                    if (s->predictor == PLANE) {
                        if (y > s->interlaced) {
                            s->dsp.add_bytes(dst, dst - fake_stride, w);
                        }
                    }
                }
                break;
            case MEDIAN:
                decode_plane_bitstream(s, w, plane);
                left= s->dsp.add_hfyu_left_prediction(p->data[plane], s->temp[0], w, 0);

                y = 1;

                /* second line is left predicted for interlaced case */
                if (s->interlaced) {
                    decode_plane_bitstream(s, w, plane);
                    left = s->dsp.add_hfyu_left_prediction(p->data[plane] + p->linesize[plane], s->temp[0], w, left);
                    y++;
                }

                lefttop = p->data[plane][0];
                decode_plane_bitstream(s, w, plane);
                s->dsp.add_hfyu_median_prediction(p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
                y++;

                for (; y<h; y++) {
                    uint8_t *dst;

                    decode_plane_bitstream(s, w, plane);

                    dst = p->data[plane] + p->linesize[plane] * y;

                    s->dsp.add_hfyu_median_prediction(dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
                }

                break;
            }
        }
        draw_slice(s, p, height);
    } else if (s->bitstream_bpp < 24) {
        int y, cy;
        int lefty, leftu, leftv;
        int lefttopy, lefttopu, lefttopv;
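
The new decode loop above walks 1 + 2*s->chroma + s->alpha planes and shrinks the chroma plane dimensions by the stored shifts; the encoder hunk further down mirrors the same geometry. A minimal sketch of that per-plane sizing (hypothetical helper, not taken from the patch):

#include <stdio.h>

/* Hypothetical helper showing how the new per-plane loops size each plane:
 * luma and alpha planes keep full size, chroma planes are shifted down. */
static void plane_geometry(int width, int height, int chroma, int alpha,
                           int h_shift, int v_shift)
{
    int nb_planes = 1 + 2 * chroma + alpha;
    for (int plane = 0; plane < nb_planes; plane++) {
        int w = width, h = height;
        if (chroma && (plane == 1 || plane == 2)) {
            w >>= h_shift;
            h >>= v_shift;
        }
        printf("plane %d: %dx%d\n", plane, w, h);
    }
}

int main(void)
{
    plane_geometry(640, 480, 1, 0, 1, 1); /* yuv420p: 640x480, 320x240, 320x240 */
    return 0;
}
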
libavcodec/huffyuvenc.c

@@ -31,6 +31,7 @@
#include "huffman.h"
#include "internal.h"
#include "put_bits.h"
#include "libavutil/pixdesc.h"

static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst,
                                      const uint8_t *src, int w, int left)

@@ -145,6 +146,7 @@ static av_cold int encode_init(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int i, j;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);

    ff_huffyuv_common_init(avctx);

@@ -163,6 +165,14 @@ static av_cold int encode_init(AVCodecContext *avctx)
    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;

    s->bps = 8;
    s->yuv = !(desc->flags & AV_PIX_FMT_FLAG_RGB) && desc->nb_components >= 2;
    s->chroma = desc->nb_components > 2;
    s->alpha = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
    av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
                                     &s->chroma_h_shift,
                                     &s->chroma_v_shift);
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:

@@ -172,6 +182,12 @@ static av_cold int encode_init(AVCodecContext *avctx)
        }
        s->bitstream_bpp = avctx->pix_fmt == AV_PIX_FMT_YUV420P ? 12 : 16;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV440P:
        s->version = 3;
        break;
    case AV_PIX_FMT_RGB32:
        s->bitstream_bpp = 32;
        break;
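
Instead of hard-coding per-format properties, encode_init now derives bps, yuv, chroma, alpha and the chroma shifts from the pixel format descriptor. A minimal standalone check of that derivation for the newly supported planar formats, using only public libavutil calls (illustrative sketch, not part of the patch; assumes linking against libavutil):

#include <stdio.h>
#include <libavutil/pixdesc.h>

int main(void)
{
    enum AVPixelFormat fmts[] = { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV411P,
                                  AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P };
    for (int i = 0; i < 4; i++) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmts[i]);
        int hs, vs;
        av_pix_fmt_get_chroma_sub_sample(fmts[i], &hs, &vs);
        /* same derivation as the patch: planar YUV, no alpha */
        int yuv    = !(desc->flags & AV_PIX_FMT_FLAG_RGB) && desc->nb_components >= 2;
        int chroma = desc->nb_components > 2;
        int alpha  = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
        printf("%-8s yuv=%d chroma=%d alpha=%d shifts=%d/%d\n",
               desc->name, yuv, chroma, alpha, hs, vs);
    }
    return 0;
}
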
@@ -182,8 +198,9 @@ static av_cold int encode_init(AVCodecContext *avctx)
        av_log(avctx, AV_LOG_ERROR, "format not supported\n");
        return AVERROR(EINVAL);
    }
    avctx->bits_per_coded_sample = s->bitstream_bpp;
    s->decorrelate = s->bitstream_bpp >= 24;
    s->decorrelate = s->bitstream_bpp >= 24 && !s->yuv;
    s->predictor = avctx->prediction_method;
    s->interlaced = avctx->flags&CODEC_FLAG_INTERLACED_ME ? 1 : 0;
    if (avctx->context_model == 1) {

@@ -209,11 +226,23 @@ static av_cold int encode_init(AVCodecContext *avctx)
                   "by huffyuv; use vcodec=ffvhuff\n");
            return AVERROR(EINVAL);
        }

        if (s->version > 2) {
            av_log(avctx, AV_LOG_ERROR,
                   "Error: ver>2 is not supported "
                   "by huffyuv; use vcodec=ffvhuff\n");
            return AVERROR(EINVAL);
        }

        if (s->interlaced != ( s->height > 288 ))
            av_log(avctx, AV_LOG_INFO,
                   "using huffyuv 2.2.0 or newer interlacing flag\n");
    }

    if (s->version > 2 && avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
        av_log(avctx, AV_LOG_ERROR, "Ver > 2 is under development, files encoded with it may not be decodable with future versions!!!\n"
               "Use vstrict=-2 / -strict -2 to use it anyway.\n");
        return AVERROR(EINVAL);
    }

    if (s->bitstream_bpp >= 24 && s->predictor == MEDIAN) {
        av_log(avctx, AV_LOG_ERROR,
               "Error: RGB is incompatible with median predictor\n");

@@ -221,11 +250,20 @@ static av_cold int encode_init(AVCodecContext *avctx)
    }

    ((uint8_t*)avctx->extradata)[0] = s->predictor | (s->decorrelate << 6);
    ((uint8_t*)avctx->extradata)[1] = s->bitstream_bpp;
    ((uint8_t*)avctx->extradata)[2] = s->interlaced ? 0x10 : 0x20;
    if (s->context)
        ((uint8_t*)avctx->extradata)[2] |= 0x40;
    ((uint8_t*)avctx->extradata)[3] = 0;
    if (s->version < 3) {
        ((uint8_t*)avctx->extradata)[1] = s->bitstream_bpp;
        ((uint8_t*)avctx->extradata)[3] = 0;
    } else {
        ((uint8_t*)avctx->extradata)[1] = ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2);
        if (s->chroma)
            ((uint8_t*)avctx->extradata)[2] |= s->yuv ? 1 : 2;
        if (s->alpha)
            ((uint8_t*)avctx->extradata)[2] |= 4;
        ((uint8_t*)avctx->extradata)[3] = 1;
    }
    s->avctx->extradata_size = 4;

    if (avctx->stats_in) {
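
For version 3 the extradata layout changes: byte 1 packs (bps-1)<<4 together with the chroma shifts, byte 2 carries the plane flags (bit 0 planar YUV, bit 1 non-YUV chroma, bit 2 alpha, plus the existing interlace/context bits), and byte 3 becomes 1 so the decoder's extradata[3] == 0 test keeps treating old files as version 2. As a worked example, 8-bit YUV 4:2:0 gives extradata[1] = ((8-1)<<4) | 1 | (1<<2) = 0x75, which the decoder unpacks back to bps 8 and shifts 1/1. A tiny round-trip sketch (illustrative only, not part of the patch):

#include <assert.h>
#include <stdio.h>

/* Illustrative round-trip of the version-3 extradata byte 1
 * (bit depth + chroma shifts), mirroring encode_init()/decode_init(). */
int main(void)
{
    int bps = 8, h_shift = 1, v_shift = 1;          /* e.g. 8-bit 4:2:0 */

    unsigned char b1 = ((bps - 1) << 4) | h_shift | (v_shift << 2);
    printf("extradata[1] = 0x%02X\n", b1);          /* 0x75 */

    assert(((b1 >> 4) + 1) == bps);                 /* decoder: s->bps */
    assert((b1 & 3) == h_shift);                    /* s->chroma_h_shift */
    assert(((b1 >> 2) & 3) == v_shift);             /* s->chroma_v_shift */
    return 0;
}
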
@@ -345,6 +383,54 @@ static int encode_422_bitstream(HYuvContext *s, int offset, int count)
    return 0;
}

static int encode_plane_bitstream(HYuvContext *s, int count, int plane)
{
    int i;

    if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 4 * count) {
        av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

#define LOAD2\
    int y0 = s->temp[0][2 * i];\
    int y1 = s->temp[0][2 * i + 1];
#define STAT2\
    s->stats[plane][y0]++;\
    s->stats[plane][y1]++;
#define WRITE2\
    put_bits(&s->pb, s->len[plane][y0], s->bits[plane][y0]);\
    put_bits(&s->pb, s->len[plane][y1], s->bits[plane][y1]);

    count /= 2;

    if (s->flags & CODEC_FLAG_PASS1) {
        for (i = 0; i < count; i++) {
            LOAD2;
            STAT2;
        }
    }
    if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
        return 0;

    if (s->context) {
        for (i = 0; i < count; i++) {
            LOAD2;
            STAT2;
            WRITE2;
        }
    } else {
        for (i = 0; i < count; i++) {
            LOAD2;
            WRITE2;
        }
    }

#undef LOAD2
#undef STAT2
#undef WRITE2
    return 0;
}

static int encode_gray_bitstream(HYuvContext *s, int count)
{
    int i;

@@ -634,6 +720,59 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
            }
            encode_bgra_bitstream(s, width, 3);
        }
    } else if (s->yuv) {
        int plane;
        for (plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
            int left, y;
            int w = width;
            int h = height;
            int fake_stride = fake_ystride;

            if (s->chroma && (plane == 1 || plane == 2)) {
                w >>= s->chroma_h_shift;
                h >>= s->chroma_v_shift;
                fake_stride = plane == 1 ? fake_ustride : fake_vstride;
            }

            left = sub_left_prediction(s, s->temp[0], p->data[plane], w , 0);

            encode_plane_bitstream(s, w, plane);

            if (s->predictor==MEDIAN) {
                int lefttop;
                y = 1;
                if (s->interlaced) {
                    left = sub_left_prediction(s, s->temp[0], p->data[plane] + p->linesize[plane], w , left);

                    encode_plane_bitstream(s, w, plane);
                    y++;
                }

                lefttop = p->data[plane][0];

                for (; y < h; y++) {
                    uint8_t *dst = p->data[plane] + p->linesize[plane] * y;

                    s->dsp.sub_hfyu_median_prediction(s->temp[0], dst - fake_stride, dst, w , &left, &lefttop);

                    encode_plane_bitstream(s, w, plane);
                }
            } else {
                for (y = 1; y < h; y++) {
                    uint8_t *dst = p->data[plane] + p->linesize[plane] * y;

                    if (s->predictor == PLANE && s->interlaced < y) {
                        s->dsp.diff_bytes(s->temp[1], dst, dst - fake_stride, w);

                        left = sub_left_prediction(s, s->temp[0], s->temp[1], w , left);
                    } else {
                        left = sub_left_prediction(s, s->temp[0], dst, w , left);
                    }

                    encode_plane_bitstream(s, w, plane);
                }
            }
        }
    } else {
        av_log(avctx, AV_LOG_ERROR, "Format not supported!\n");
    }

@@ -715,7 +854,9 @@ AVCodec ff_ffvhuff_encoder = {
    .encode2        = encode_frame,
    .close          = encode_end,
    .pix_fmts       = (const enum AVPixelFormat[]){
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE
    },
};
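
Because version 3 is still gated behind experimental compliance (see the strict_std_compliance check in encode_init above), opening the ffvhuff encoder with one of the newly added planar formats needs the compliance level lowered, as the patch's own error message suggests with -strict -2. A minimal, hedged sketch against the public libavcodec API of this era (error handling omitted, width/height values are placeholders):

#include <libavcodec/avcodec.h>

int main(void)
{
    avcodec_register_all();

    AVCodec *codec = avcodec_find_encoder_by_name("ffvhuff");
    AVCodecContext *ctx = avcodec_alloc_context3(codec);

    ctx->width     = 640;
    ctx->height    = 480;
    ctx->time_base = (AVRational){1, 25};
    ctx->pix_fmt   = AV_PIX_FMT_YUV444P;            /* new in this patch */
    /* version > 2 refuses to encode without experimental compliance */
    ctx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;

    return avcodec_open2(ctx, codec, NULL) < 0;     /* 0 on success */
}

On the command line this corresponds to something like -c:v ffvhuff -pix_fmt yuv444p -strict -2.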