@@ -157,6 +157,13 @@ int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s)
             } else if (avctx->pix_fmt == AV_PIX_FMT_RGB24) {
                 if (s->maxval > 255)
                     avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
+            } else if (avctx->pix_fmt == AV_PIX_FMT_YUV420P && s->maxval < 65536) {
+                if (s->maxval < 512)
+                    avctx->pix_fmt = AV_PIX_FMT_YUV420P9BE;
+                else if (s->maxval < 1024)
+                    avctx->pix_fmt = AV_PIX_FMT_YUV420P10BE;
+                else
+                    avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
             } else {
                 av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format\n");
                 avctx->pix_fmt = AV_PIX_FMT_NONE;
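
(Editor's note, not part of the patch.) The hunk above maps a PGMYUV maxval above 255 to a high-bit-depth planar format instead of rejecting it: 9-bit for maxval < 512, 10-bit for maxval < 1024, and a rescaled 16-bit format otherwise. A minimal stand-alone sketch of the same mapping, with a hypothetical helper name:

    #include "libavutil/pixfmt.h"

    /* Illustrative only: pick a planar YUV 4:2:0 pixel format from the
     * PGMYUV maxval field, mirroring the ranges used in the hunk above. */
    static enum AVPixelFormat yuv420p_fmt_from_maxval(int maxval)
    {
        if (maxval < 256)   return AV_PIX_FMT_YUV420P;     /* plain 8-bit samples  */
        if (maxval < 512)   return AV_PIX_FMT_YUV420P9BE;  /* stored as-is, 9-bit  */
        if (maxval < 1024)  return AV_PIX_FMT_YUV420P10BE; /* stored as-is, 10-bit */
        if (maxval < 65536) return AV_PIX_FMT_YUV420P16;   /* rescaled to 16-bit   */
        return AV_PIX_FMT_NONE;                            /* out of range         */
    }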
@@ -166,7 +173,7 @@ int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s)
     }else
         s->maxval=1;
     /* more check if YUV420 */
-    if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
+    if (av_pix_fmt_descriptors[avctx->pix_fmt].flags & PIX_FMT_PLANAR) {
         if ((avctx->width & 1) != 0)
             return AVERROR_INVALIDDATA;
         h = (avctx->height * 2);
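
(Editor's note, not part of the patch.) This hunk relaxes the extra PGMYUV sanity check from "exactly AV_PIX_FMT_YUV420P" to "any planar format", since the format chosen above may now be one of the 9/10/16-bit variants. The check itself is unchanged: the PGM header height covers the luma plane plus both appended chroma planes, i.e. 3/2 of the picture height, so the width must be even and the real height is recovered as header_height * 2 / 3 just below the shown context (hence h = avctx->height * 2 here). For example, a 352x288 picture is stored as a 352x432 PGM, and 432 * 2 / 3 = 288.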
@@ -135,12 +135,16 @@ static int pnm_decode_frame(AVCodecContext *avctx, void *data,
         }
         break;
     case AV_PIX_FMT_YUV420P:
+    case AV_PIX_FMT_YUV420P9BE:
+    case AV_PIX_FMT_YUV420P10BE:
     {
         unsigned char *ptr1, *ptr2;
         n = avctx->width;
         ptr = p->data[0];
         linesize = p->linesize[0];
+        if (s->maxval >= 256)
+            n *= 2;
         if (s->bytestream + n * avctx->height * 3 / 2 > s->bytestream_end)
             return AVERROR_INVALIDDATA;
         for (i = 0; i < avctx->height; i++) {
@@ -162,6 +166,47 @@ static int pnm_decode_frame(AVCodecContext *avctx, void *data,
         }
     }
     break;
+    case AV_PIX_FMT_YUV420P16:
+    {
+        uint16_t *ptr1, *ptr2;
+        const int f = (65535 * 32768 + s->maxval / 2) / s->maxval;
+        unsigned int j, v;
+        n = avctx->width * 2;
+        ptr = p->data[0];
+        linesize = p->linesize[0];
+        if (s->bytestream + n * avctx->height * 3 / 2 > s->bytestream_end)
+            return AVERROR_INVALIDDATA;
+        for (i = 0; i < avctx->height; i++) {
+            for (j = 0; j < n / 2; j++) {
+                v = av_be2ne16(((uint16_t *)s->bytestream)[j]);
+                ((uint16_t *)ptr)[j] = (v * f + 16384) >> 15;
+            }
+            s->bytestream += n;
+            ptr += linesize;
+        }
+        ptr1 = (uint16_t*)p->data[1];
+        ptr2 = (uint16_t*)p->data[2];
+        n >>= 1;
+        h = avctx->height >> 1;
+        for (i = 0; i < h; i++) {
+            for (j = 0; j < n / 2; j++) {
+                v = av_be2ne16(((uint16_t *)s->bytestream)[j]);
+                ptr1[j] = (v * f + 16384) >> 15;
+            }
+            s->bytestream += n;
+            for (j = 0; j < n / 2; j++) {
+                v = av_be2ne16(((uint16_t *)s->bytestream)[j]);
+                ptr2[j] = (v * f + 16384) >> 15;
+            }
+            s->bytestream += n;
+            ptr1 += p->linesize[1] / 2;
+            ptr2 += p->linesize[2] / 2;
+        }
+    }
+    break;
     case AV_PIX_FMT_RGB32:
         ptr = p->data[0];
         linesize = p->linesize[0];
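
(Editor's note, not part of the patch.) The new AV_PIX_FMT_YUV420P16 case expands samples from the file's 0..maxval range to the full 0..65535 range with a rounded Q15 fixed-point multiply: f is roughly 65535/maxval scaled by 2^15, and each big-endian sample v becomes (v * f + 16384) >> 15. A small self-contained sketch of that arithmetic (expected values below worked out by hand for maxval = 4095):

    #include <stdint.h>
    #include <stdio.h>

    /* Q15 fixed-point rescale from 0..maxval to 0..65535, mirroring the
     * expression used in the patch. */
    static uint16_t expand_to_16bit(unsigned v, unsigned maxval)
    {
        const unsigned f = (65535u * 32768u + maxval / 2) / maxval;
        return (uint16_t)((v * f + 16384) >> 15);
    }

    int main(void)
    {
        /* 12-bit source (maxval = 4095): 0 -> 0, 2048 -> 32776, 4095 -> 65535 */
        printf("%u %u %u\n",
               expand_to_16bit(0, 4095),
               expand_to_16bit(2048, 4095),
               expand_to_16bit(4095, 4095));
        return 0;
    }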
@@ -19,6 +19,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */
+#include "libavutil/pixdesc.h"
 #include "avcodec.h"
 #include "bytestream.h"
 #include "internal.h"
@@ -76,6 +77,11 @@ static int pnm_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
         n = avctx->width;
         h1 = (h * 3) / 2;
         break;
+    case AV_PIX_FMT_YUV420P16BE:
+        c = '5';
+        n = avctx->width * 2;
+        h1 = (h * 3) / 2;
+        break;
     default:
         return -1;
     }
@@ -83,8 +89,9 @@ static int pnm_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
              "P%c\n%d %d\n", c, avctx->width, h1);
     s->bytestream += strlen(s->bytestream);
     if (avctx->pix_fmt != AV_PIX_FMT_MONOWHITE) {
+        int maxdepth = (1 << (av_pix_fmt_descriptors[avctx->pix_fmt].comp[0].depth_minus1 + 1)) - 1;
         snprintf(s->bytestream, s->bytestream_end - s->bytestream,
-                 "%d\n", (avctx->pix_fmt != AV_PIX_FMT_GRAY16BE && avctx->pix_fmt != AV_PIX_FMT_RGB48BE) ? 255 : 65535);
+                 "%d\n", maxdepth);
         s->bytestream += strlen(s->bytestream);
     }
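
(Editor's note, not part of the patch.) With the maxdepth change the encoder no longer hard-codes 255 or 65535; it derives the PNM maxval from the pixel format descriptor, (1 << depth) - 1 for the first component, via the av_pix_fmt_descriptors[] table of that era (today one would call av_pix_fmt_desc_get()). Combined with the new AV_PIX_FMT_YUV420P16BE case, a 640x480 16-bit frame is written as a plain P5 file whose header reads, line by line:

    P5
    640 720
    65535

where 720 = 480 * 3 / 2 (luma plus the two chroma planes) and 65535 = (1 << 16) - 1.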
@@ -96,7 +103,7 @@ static int pnm_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
         ptr += linesize;
     }
-    if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
+    if (avctx->pix_fmt == AV_PIX_FMT_YUV420P || avctx->pix_fmt == AV_PIX_FMT_YUV420P16BE) {
         h >>= 1;
         n >>= 1;
         ptr1 = p->data[1];
@@ -141,7 +148,9 @@ AVCodec ff_pgmyuv_encoder = {
     .priv_data_size = sizeof(PNMContext),
     .init = ff_pnm_init,
     .encode2 = pnm_encode_frame,
-    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
+    .pix_fmts = (const enum AVPixelFormat[]){
+        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV420P16BE, AV_PIX_FMT_NONE
+    },
     .long_name = NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
 };
 #endif
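
(Editor's note, not part of the patch.) End-to-end, these hunks let the pgmyuv codec round-trip 16-bit 4:2:0 content: the encoder accepts AV_PIX_FMT_YUV420P16BE and writes maxval 65535, and the decoder maps any maxval between 1024 and 65535 back to a 16-bit planar frame. An untested command-line sketch, with placeholder file names, assuming a build with the patch applied:

    ffmpeg -i input.mkv -frames:v 1 -pix_fmt yuv420p16be frame.pgmyuv
    ffmpeg -i frame.pgmyuv -f null -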