/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

#define pixdesc_has_alpha(pixdesc) \
    ((pixdesc)->nb_components == 2 || (pixdesc)->nb_components == 4 || (pixdesc)->flags & PIX_FMT_PAL)

typedef struct PixFmtInfo {
    uint8_t color_type;  /**< color type (see FF_COLOR_xxx constants) */
    uint8_t padded_size; /**< padded size in bits if different from the non-padded size */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUYV422] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_UYVY422] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV410P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV411P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV440P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV420P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV420P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P16BE] = {
        .color_type = FF_COLOR_YUV,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUVA422P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUVA444P] = {
        .color_type = FF_COLOR_YUV,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ422P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ444P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ440P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR24] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_ARGB] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA64BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA64LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB565BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB565LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB555BE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB555LE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB444BE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB444LE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY16LE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY8] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY8A] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_MONOWHITE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_MONOBLACK] = {
        .color_type = FF_COLOR_GRAY,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_UYYVYY411] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_ABGR] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGRA64BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGRA64LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR565BE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR565LE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR555BE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR555LE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR444BE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR444LE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB8] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB4] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [PIX_FMT_BGR8] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR4] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .color_type = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [PIX_FMT_NV12] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_NV21] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_BGRA] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA] = {
        .color_type = FF_COLOR_RGB,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
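
/* Illustrative note (not part of the original source): for PIX_FMT_YUV420P the
 * descriptor gives log2_chroma_w == log2_chroma_h == 1, so a caller sees the
 * chroma planes subsampled by 2 in both directions:
 *
 *     int hs, vs;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &hs, &vs);  // hs == 1, vs == 1
 */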
int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    int i, j, nb_planes = 0, linesizes[4];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return AVERROR(EINVAL);

    for (i = 0; i < desc->nb_components; i++)
        nb_planes = FFMAX(desc->comp[i].plane, nb_planes);
    nb_planes++;

    av_image_fill_linesizes(linesizes, pix_fmt, width);
    for (i = 0; i < nb_planes; i++) {
        int h, shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        const unsigned char *s = src->data[i];
        h = (height + (1 << shift) - 1) >> shift;

        for (j = 0; j < h; j++) {
            memcpy(dest, s, linesizes[i]);
            dest += linesizes[i];
            s += src->linesize[i];
        }
    }

    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return size;
    }

    if (desc->flags & PIX_FMT_PAL) {
        uint32_t *d32 = (uint32_t *)(((size_t)dest + 3) & ~3);
        for (i = 0; i < 256; i++)
            AV_WL32(d32 + i, AV_RN32(src->data[1] + 4*i));
    }

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (av_image_check_size(width, height, 0, NULL))
        return -1;
    if (av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_PSEUDOPAL)
        // do not include palette for these pseudo-paletted formats
        return width * height;
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
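
/* A minimal usage sketch (illustrative only, not part of this file): pack a
 * frame into one contiguous buffer. "fmt", "w" and "h" stand for hypothetical
 * caller values and error handling is elided:
 *
 *     int size = avpicture_get_size(fmt, w, h);
 *     uint8_t *buf = av_malloc(size);
 *     AVPicture pic;
 *     avpicture_fill(&pic, buf, fmt, w, h);  // pic.data[]/pic.linesize[] now point into buf
 */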
static int get_pix_fmt_depth(int *min, int *max, enum PixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i;

    if (!desc->nb_components) {
        *min = *max = 0;
        return AVERROR(EINVAL);
    }

    *min = INT_MAX, *max = -INT_MAX;
    for (i = 0; i < desc->nb_components; i++) {
        *min = FFMIN(desc->comp[i].depth_minus1+1, *min);
        *max = FFMAX(desc->comp[i].depth_minus1+1, *max);
    }
    return 0;
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc;
    const AVPixFmtDescriptor *dst_desc;
    int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
    int ret, loss;

    if (dst_pix_fmt >= PIX_FMT_NB || dst_pix_fmt <= PIX_FMT_NONE)
        return ~0;

    src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;

    if ((ret = get_pix_fmt_depth(&src_min_depth, &src_max_depth, src_pix_fmt)) < 0)
        return ret;
    if ((ret = get_pix_fmt_depth(&dst_min_depth, &dst_max_depth, dst_pix_fmt)) < 0)
        return ret;
    if (dst_min_depth < src_min_depth ||
        dst_max_depth < src_max_depth)
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;

    pf = &pix_fmt_info[dst_pix_fmt];
    switch (pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pixdesc_has_alpha(dst_desc) && (pixdesc_has_alpha(src_desc) && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (dst_pix_fmt == PIX_FMT_PAL8 &&
        (src_pix_fmt != PIX_FMT_PAL8 && (ps->color_type != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && has_alpha))))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    return info->padded_size ?
        info->padded_size : av_get_bits_per_pixel(desc);
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int i;

    if (loss_ptr) /* all losses count (for backward compatibility) */
        *loss_ptr = 0;

    dst_pix_fmt = PIX_FMT_NONE; /* so first iteration doesn't have to be treated special */
    for (i = 0; i < FFMIN(PIX_FMT_NB, 64); i++) {
        if (pix_fmt_mask & (1ULL << i))
            dst_pix_fmt = avcodec_find_best_pix_fmt2(dst_pix_fmt, i, src_pix_fmt, has_alpha, loss_ptr);
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
                                            enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss1, loss2, loss_order1, loss_order2, i, loss_mask;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        ~(FF_LOSS_DEPTH|FF_LOSS_COLORSPACE),
        ~(FF_LOSS_RESOLUTION | FF_LOSS_DEPTH | FF_LOSS_COLORSPACE | FF_LOSS_ALPHA |
          FF_LOSS_COLORQUANT | FF_LOSS_CHROMA),
        0x80000, // non-zero entry that combines all loss variants including future additions
        0,
    };

    loss_mask = loss_ptr ? ~*loss_ptr : ~0; /* use loss mask if provided */
    dst_pix_fmt = PIX_FMT_NONE;
    loss1 = avcodec_get_pix_fmt_loss(dst_pix_fmt1, src_pix_fmt, has_alpha) & loss_mask;
    loss2 = avcodec_get_pix_fmt_loss(dst_pix_fmt2, src_pix_fmt, has_alpha) & loss_mask;

    /* try with successive loss */
    for (i = 0; loss_mask_order[i] != 0 && dst_pix_fmt == PIX_FMT_NONE; i++) {
        loss_order1 = loss1 & loss_mask_order[i];
        loss_order2 = loss2 & loss_mask_order[i];

        if (loss_order1 == 0 && loss_order2 == 0) { /* use format with smallest depth */
            dst_pix_fmt = avg_bits_per_pixel(dst_pix_fmt2) < avg_bits_per_pixel(dst_pix_fmt1) ? dst_pix_fmt2 : dst_pix_fmt1;
        } else if (loss_order1 == 0 || loss_order2 == 0) { /* use format with no loss */
            dst_pix_fmt = loss_order2 ? dst_pix_fmt1 : dst_pix_fmt2;
        }
    }

    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
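
/* A usage sketch (illustrative, not from the original source): choose the least
 * lossy destination among an arbitrary candidate mask for a YUV420P source.
 * Here RGB24 wins over GRAY8, since converting to gray additionally discards
 * chroma:
 *
 *     int loss;
 *     int64_t mask = (1LL << PIX_FMT_RGB24) | (1LL << PIX_FMT_GRAY8);
 *     enum PixelFormat best = avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV420P, 0, &loss);
 */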
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for (w = width; w >= 4; w -= 4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for (; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for (w = width; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for (; height > 0; height--) {
        for (w = width; w > 0; w--) {
            int tmp = 0;
            for (i = 0; i < 8; i++) {
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(enum PixelFormat fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[fmt];
    int i;
    int planes[4] = { 0 };

    if (info->color_type != FF_COLOR_YUV &&
        info->color_type != FF_COLOR_YUV_JPEG)
        return 0;

    /* set the used planes */
    for (i = 0; i < desc->nb_components; i++)
        planes[desc->comp[i].plane] = 1;

    /* if there is an unused plane, the format is not planar */
    for (i = 0; i < desc->nb_components; i++)
        if (!planes[i])
            return 0;
    return 1;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    if (is_yuv_planar(pix_fmt)) {
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
        dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
        dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
    } else {
        if (top_band % (1 << y_shift) || left_band % (1 << x_shift))
            return -1;
        if (left_band) //FIXME add support for this too
            return -1;
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    }

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(pix_fmt)) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }

    return 0;
}

#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
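/* Worked example (added for illustration, values chosen arbitrarily): for the
 * vertically adjacent samples 10, 20, 30, 20, 10 the filter computes
 * (-10 + 4*20 + 2*30 + 4*20 - 10 + 4) >> 3 = 204 >> 3 = 25, i.e. a rounded
 * weighted average biased toward the lines nearest the one being rebuilt. */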
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum  = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum  = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0  = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = av_malloc(width);
    if (!buf)
        return; /* out of memory: leave the picture untouched rather than crash */
    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0  = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for (i = 0; i < 3; i++) {
        if (i == 1) {
            switch (pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}