Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
@@ -2159,7 +2159,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     if (cur->reference == 0)
         cur->reference = DELAYED_PIC_REF;
 
-    out = h->delayed_pic[0];
+    out     = h->delayed_pic[0];
     out_idx = 0;
     for (i = 1; h->delayed_pic[i] &&
                 !h->delayed_pic[i]->f.key_frame &&
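For context: the loop this hunk touches scans the decoder's delayed-picture buffer and picks the entry with the lowest picture order count (POC) as the next frame to output in display order, stopping early at a key frame. A minimal standalone sketch of that selection idea (the DelayedPic type and pick_output() are invented for illustration, not FFmpeg API):

    #include <stddef.h>

    /* Simplified stand-in for a decoded picture held back for reordering. */
    typedef struct DelayedPic {
        int poc;        /* picture order count, i.e. display order */
        int key_frame;  /* a key frame bounds the reordering window */
    } DelayedPic;

    /* Return the index of the lowest-POC picture, scanning only until the
     * first key frame, mirroring the loop in decode_postinit(). */
    static size_t pick_output(DelayedPic *const *delayed, size_t n)
    {
        size_t i, out_idx = 0;

        for (i = 1; i < n && delayed[i] && !delayed[i]->key_frame; i++)
            if (delayed[i]->poc < delayed[out_idx]->poc)
                out_idx = i;
        return out_idx;
    }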
@@ -2983,8 +2983,7 @@ static int field_end(H264Context *h, int in_setup)
      * past end by one (callers fault) and resync_mb_y != 0
      * causes problems for the first MB line, too.
      */
-    if (CONFIG_ERROR_RESILIENCE &&
-        !FIELD_PICTURE(h) && h->current_slice && !h->sps.new) {
+    if (CONFIG_ERROR_RESILIENCE && !FIELD_PICTURE(h) && h->current_slice && !h->sps.new) {
         h->er.cur_pic = h->cur_pic_ptr;
         ff_er_frame_end(&h->er);
     }
@@ -3302,8 +3301,8 @@ static int h264_slice_header_init(H264Context *h, int reinit)
         c->height = h->height;
         c->linesize = h->linesize;
         c->uvlinesize = h->uvlinesize;
-        c->chroma_x_shift = h->chroma_x_shift;
-        c->chroma_y_shift = h->chroma_y_shift;
+        c->chroma_x_shift = h->chroma_x_shift;
+        c->chroma_y_shift = h->chroma_y_shift;
         c->qscale = h->qscale;
         c->droppable = h->droppable;
         c->data_partitioning = h->data_partitioning;
@@ -4758,7 +4757,9 @@ static int decode_nal_units(H264Context *h, const uint8_t *buf, int buf_size,
                                    decode_rbsp_trailing(h, ptr + dst_length - 1));
 
         if (h->avctx->debug & FF_DEBUG_STARTCODE)
-            av_log(h->avctx, AV_LOG_DEBUG, "NAL %d/%d at %d/%d length %d pass %d\n", hx->nal_unit_type, hx->nal_ref_idc, buf_index, buf_size, dst_length, pass);
+            av_log(h->avctx, AV_LOG_DEBUG,
+                   "NAL %d/%d at %d/%d length %d pass %d\n",
+                   hx->nal_unit_type, hx->nal_ref_idc, buf_index, buf_size, dst_length, pass);
 
         if (h->is_avc && (nalsize != consumed) && nalsize)
             av_log(h->avctx, AV_LOG_DEBUG,
@@ -4860,7 +4861,7 @@ again:
             }
 
             h->cur_pic_ptr->f.key_frame |=
-                (hx->nal_unit_type == NAL_IDR_SLICE);
+                (hx->nal_unit_type == NAL_IDR_SLICE);
 
             if (hx->nal_unit_type == NAL_IDR_SLICE ||
                 h->recovery_frame == h->frame_num) {
@@ -1684,7 +1684,6 @@ decode_cabac_residual_internal(H264Context *h, int16_t *block,
         }
     }
 
-
 #define STORE_BLOCK(type) \
     do { \
         uint8_t *ctx = coeff_abs_level1_ctx[node_ctx] + abs_level_m1_ctx_base; \
@@ -1728,11 +1727,11 @@ decode_cabac_residual_internal(H264Context *h, int16_t *block,
             } \
         } while ( coeff_count );
 
-    if (h->pixel_shift) {
-        STORE_BLOCK(int32_t)
-    } else {
-        STORE_BLOCK(int16_t)
-    }
+    if (h->pixel_shift) {
+        STORE_BLOCK(int32_t)
+    } else {
+        STORE_BLOCK(int16_t)
+    }
 #ifdef CABAC_ON_STACK
     h->cabac.range = cc.range ;
     h->cabac.low = cc.low ;
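The lines above rely on a small templating trick: STORE_BLOCK is expanded once for int16_t and once for int32_t, and the run-time flag h->pixel_shift selects which expansion executes, so the same CABAC residual routine serves both 8-bit streams (16-bit coefficients) and high-bit-depth streams (32-bit coefficients). A compressed sketch of that dispatch pattern (STORE_COEFF and store_coeff are illustrative names, not the real macro):

    #include <stdint.h>

    /* Store one decoded level into a coefficient buffer whose element type
     * depends on the bit depth of the stream. */
    #define STORE_COEFF(type) \
        ((type *)block)[index] = (type)level;

    static void store_coeff(void *block, int index, int level, int pixel_shift)
    {
        if (pixel_shift) {      /* high bit depth: 32-bit coefficients */
            STORE_COEFF(int32_t)
        } else {                /* 8-bit content: 16-bit coefficients  */
            STORE_COEFF(int16_t)
        }
    }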
@@ -866,7 +866,7 @@ decode_intra_mb:
             }
 
             for(list=0; list<h->list_count; list++){
-                int ref_count= IS_REF0(mb_type) ? 1 : local_ref_count[list];
+                int ref_count = IS_REF0(mb_type) ? 1 : local_ref_count[list];
                 for(i=0; i<4; i++){
                     if(IS_DIRECT(h->sub_mb_type[i])) continue;
                     if(IS_DIR(h->sub_mb_type[i], 0, list)){
@@ -948,11 +948,11 @@ decode_intra_mb:
                    if(IS_DIR(mb_type, 0, list)){
                        if(local_ref_count[list]==1){
                            val= 0;
-                       }else if(local_ref_count[list]==2){
+                       } else if(local_ref_count[list]==2){
                            val= get_bits1(&h->gb)^1;
                        }else{
                            val= get_ue_golomb_31(&h->gb);
-                           if(val >= local_ref_count[list]){
+                           if (val >= local_ref_count[list]){
                                av_log(h->avctx, AV_LOG_ERROR, "ref %u overflow\n", val);
                                return -1;
                            }
@@ -976,13 +976,13 @@ decode_intra_mb:
                for(i=0; i<2; i++){
                    unsigned int val;
                    if(IS_DIR(mb_type, i, list)){
-                       if(local_ref_count[list] == 1){
+                       if(local_ref_count[list] == 1) {
                            val= 0;
-                       }else if(local_ref_count[list] == 2){
+                       } else if(local_ref_count[list] == 2) {
                            val= get_bits1(&h->gb)^1;
                        }else{
                            val= get_ue_golomb_31(&h->gb);
-                           if(val >= local_ref_count[list]){
+                           if (val >= local_ref_count[list]){
                                av_log(h->avctx, AV_LOG_ERROR, "ref %u overflow\n", val);
                                return -1;
                            }
@@ -1015,11 +1015,11 @@ decode_intra_mb:
                    if(IS_DIR(mb_type, i, list)){ //FIXME optimize
                        if(local_ref_count[list]==1){
                            val= 0;
-                       }else if(local_ref_count[list]==2){
+                       } else if(local_ref_count[list]==2){
                            val= get_bits1(&h->gb)^1;
                        }else{
                            val= get_ue_golomb_31(&h->gb);
-                           if(val >= local_ref_count[list]){
+                           if (val >= local_ref_count[list]){
                                av_log(h->avctx, AV_LOG_ERROR, "ref %u overflow\n", val);
                                return -1;
                            }
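All three of these hunks touch the same CAVLC pattern for reference indices: with one active reference the index is not coded at all, with exactly two it is a single truncated bit read inverted (get_bits1(...) ^ 1), and otherwise it is an Exp-Golomb code that must be range-checked against the active reference count. A self-contained sketch of that logic (BitReader, read_bit and read_ue are simplified stand-ins, not GetBitContext):

    #include <stddef.h>
    #include <stdint.h>

    typedef struct BitReader {
        const uint8_t *buf;
        size_t pos;                          /* current bit position */
    } BitReader;

    static int read_bit(BitReader *br)
    {
        int b = (br->buf[br->pos >> 3] >> (7 - (br->pos & 7))) & 1;
        br->pos++;
        return b;
    }

    /* Unsigned Exp-Golomb, ue(v): count leading zeros, then read that many bits.
     * No end-of-buffer checking; this is only a sketch. */
    static unsigned read_ue(BitReader *br)
    {
        unsigned zeros = 0, val = 1;
        while (!read_bit(br))
            zeros++;
        while (zeros--)
            val = (val << 1) | read_bit(br);
        return val - 1;
    }

    /* Decode one reference index; returns -1 on overflow ("ref %u overflow"). */
    static int decode_ref_idx(BitReader *br, unsigned ref_count)
    {
        unsigned val;
        if (ref_count == 1) {
            val = 0;                         /* not present in the bitstream */
        } else if (ref_count == 2) {
            val = read_bit(br) ^ 1;          /* truncated code: one bit, inverted */
        } else {
            val = read_ue(br);               /* ue(v) */
            if (val >= ref_count)
                return -1;
        }
        return (int)val;
    }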
@@ -1142,12 +1142,12 @@ decode_intra_mb:
                for(chroma_idx=0; chroma_idx<2; chroma_idx++){
                    const uint32_t *qmul = h->dequant4_coeff[chroma_idx+1+(IS_INTRA( mb_type ) ? 0:3)][h->chroma_qp[chroma_idx]];
                    int16_t *mb = h->mb + (16*(16 + 16*chroma_idx) << pixel_shift);
-                   for (i8x8=0; i8x8<num_c8x8; i8x8++) {
-                       for (i4x4=0; i4x4<4; i4x4++) {
-                           const int index= 16 + 16*chroma_idx + 8*i8x8 + i4x4;
+                   for (i8x8 = 0; i8x8<num_c8x8; i8x8++) {
+                       for (i4x4 = 0; i4x4 < 4; i4x4++) {
+                           const int index = 16 + 16*chroma_idx + 8*i8x8 + i4x4;
                            if (decode_residual(h, gb, mb, index, scan + 1, qmul, 15) < 0)
                                return -1;
-                           mb += 16<<pixel_shift;
+                           mb += 16 << pixel_shift;
                        }
                    }
                }
@@ -138,8 +138,8 @@ static av_noinline void FUNC(hl_decode_mb)(H264Context *h)
     if (SIMPLE || !CONFIG_GRAY || !(h->flags & CODEC_FLAG_GRAY)) {
         if (!h->sps.chroma_format_idc) {
             for (i = 0; i < 8; i++) {
-                memset(dest_cb + i*uvlinesize, 1 << (bit_depth - 1), 8);
-                memset(dest_cr + i*uvlinesize, 1 << (bit_depth - 1), 8);
+                memset(dest_cb + i * uvlinesize, 1 << (bit_depth - 1), 8);
+                memset(dest_cr + i * uvlinesize, 1 << (bit_depth - 1), 8);
             }
         } else {
             const uint8_t *src_cb = h->intra_pcm_ptr + 256;
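The memset hunk handles monochrome streams: when sps.chroma_format_idc is 0 there are no chroma samples to decode, so the chroma planes are filled with the neutral mid-level 1 << (bit_depth - 1) (128 for 8-bit), which displays as gray. A minimal 8-bit-only sketch of that fill (fill_neutral_chroma_8bit is an illustrative name; memset writes bytes, so higher bit depths would need per-sample stores instead):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Paint one 8x8 block of each 8-bit chroma plane with the neutral value
     * 1 << (8 - 1) == 128, i.e. "no color". */
    static void fill_neutral_chroma_8bit(uint8_t *dest_cb, uint8_t *dest_cr,
                                         ptrdiff_t uvlinesize)
    {
        for (int i = 0; i < 8; i++) {
            memset(dest_cb + i * uvlinesize, 1 << 7, 8);
            memset(dest_cr + i * uvlinesize, 1 << 7, 8);
        }
    }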
@@ -663,7 +663,7 @@ static void fill_decode_caches(H264Context *h, int mb_type)
                ref_cache[4 - 1 * 8] = topright_type ? LIST_NOT_USED
                                                     : PART_NOT_AVAILABLE;
            }
-           if(ref_cache[2 - 1*8] < 0 || ref_cache[4 - 1*8] < 0){
+           if(ref_cache[2 - 1*8] < 0 || ref_cache[4 - 1 * 8] < 0) {
                if (USES_LIST(topleft_type, list)) {
                    const int b_xy = h->mb2b_xy[topleft_xy] + 3 + b_stride +
                                     (h->topleft_partition & 2 * b_stride);
@@ -26,6 +26,7 @@
  */
 
 #include <stdint.h>
+
 #include "libavutil/attributes.h"
 #include "libavutil/avassert.h"
@@ -33,11 +33,11 @@ struct vda_buffer {
 };
 
 /* Decoder callback that adds the vda frame to the queue in display order. */
-static void vda_decoder_callback (void *vda_hw_ctx,
-                                  CFDictionaryRef user_info,
-                                  OSStatus status,
-                                  uint32_t infoFlags,
-                                  CVImageBufferRef image_buffer)
+static void vda_decoder_callback(void *vda_hw_ctx,
+                                 CFDictionaryRef user_info,
+                                 OSStatus status,
+                                 uint32_t infoFlags,
+                                 CVImageBufferRef image_buffer)
 {
     struct vda_context *vda_ctx = vda_hw_ctx;