/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

typedef struct PixFmtInfo {
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t padded_size;  /**< padded size in bits if different from the non-padded size */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUYV422] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_UYVY422] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV410P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV411P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV440P] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV420P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV420P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV422P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUV444P16BE] = {
        .color_type = FF_COLOR_YUV,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_YUVA444P] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_YUV,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ422P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ444P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [PIX_FMT_YUVJ440P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR24] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_ARGB] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA64BE] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA64LE] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB565BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB565LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB555BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB555LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB444BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB444LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY16LE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY8] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_GRAY8A] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_MONOWHITE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [PIX_FMT_MONOBLACK] = {
        .color_type = FF_COLOR_GRAY,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_UYYVYY411] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_ABGR] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGRA64BE] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGRA64LE] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR565BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR565LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR555BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR555LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR444BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_BGR444LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [PIX_FMT_RGB8] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB4] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [PIX_FMT_BGR8] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR4] = {
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [PIX_FMT_NV12] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_NV21] = {
        .color_type = FF_COLOR_YUV,
    },
    [PIX_FMT_BGRA] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
    [PIX_FMT_RGBA] = {
        .is_alpha   = 1,
        .color_type = FF_COLOR_RGB,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
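
/* Usage sketch (illustrative, not part of the original file): the shifts
 * returned above are log2 subsampling factors, so the chroma plane
 * dimensions of a width x height picture can be derived like this.  The
 * variable names are hypothetical.
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // YUV420P gives h_shift == 1 and v_shift == 1, i.e. chroma is
 *     // subsampled by 2 in both directions.
 *     int chroma_w = (width  + (1 << h_shift) - 1) >> h_shift;
 *     int chroma_h = (height + (1 << v_shift) - 1) >> v_shift;
 */
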
int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    int i, j, nb_planes = 0, linesizes[4];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return AVERROR(EINVAL);

    for (i = 0; i < desc->nb_components; i++)
        nb_planes = FFMAX(desc->comp[i].plane, nb_planes);
    nb_planes++;

    av_image_fill_linesizes(linesizes, pix_fmt, width);
    for (i = 0; i < nb_planes; i++) {
        int h, shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        const unsigned char *s = src->data[i];
        h = (height + (1 << shift) - 1) >> shift;

        for (j = 0; j < h; j++) {
            memcpy(dest, s, linesizes[i]);
            dest += linesizes[i];
            s += src->linesize[i];
        }
    }

    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return size;
    }

    if (desc->flags & PIX_FMT_PAL)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}
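
/* Usage sketch (illustrative only): serializing an AVPicture into one
 * contiguous buffer with avpicture_layout().  The variable names and the
 * error handling are hypothetical; the calls themselves are the ones
 * defined in this file.
 *
 *     int size = avpicture_get_size(pix_fmt, width, height);
 *     uint8_t *flat = size < 0 ? NULL : av_malloc(size);
 *     if (flat && avpicture_layout(&pic, pix_fmt, width, height, flat, size) < 0)
 *         av_freep(&flat); // layout failed, e.g. destination buffer too small
 */
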
int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (av_image_check_size(width, height, 0, NULL))
        return -1;
    if (av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_PSEUDOPAL)
        // do not include palette for these pseudo-paletted formats
        return width * height;
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
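
/* Usage sketch (illustrative only): the classic pattern of sizing a buffer
 * with avpicture_get_size() and wrapping it with avpicture_fill().  Names
 * such as 'buf' and 'pic' are hypothetical.
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_RGB24, width, height);
 *     uint8_t *buf = size < 0 ? NULL : av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_RGB24, width, height);
 *     // ... use pic.data[0] / pic.linesize[0] ...
 *     av_free(buf); // the AVPicture does not own the buffer
 */
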
static int get_pix_fmt_depth(int *min, int *max, enum PixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i;

    if (!desc->nb_components) {
        *min = *max = 0;
        return AVERROR(EINVAL);
    }

    *min = INT_MAX, *max = -INT_MAX;
    for (i = 0; i < desc->nb_components; i++) {
        *min = FFMIN(desc->comp[i].depth_minus1+1, *min);
        *max = FFMAX(desc->comp[i].depth_minus1+1, *max);
    }
    return 0;
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc;
    const AVPixFmtDescriptor *dst_desc;
    int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
    int ret, loss;

    if (dst_pix_fmt >= PIX_FMT_NB || dst_pix_fmt <= PIX_FMT_NONE)
        return ~0;

    src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;

    if ((ret = get_pix_fmt_depth(&src_min_depth, &src_max_depth, src_pix_fmt)) < 0)
        return ret;
    if ((ret = get_pix_fmt_depth(&dst_min_depth, &dst_max_depth, dst_pix_fmt)) < 0)
        return ret;
    if (dst_min_depth < src_min_depth ||
        dst_max_depth < src_max_depth)
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;

    pf = &pix_fmt_info[dst_pix_fmt];
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (dst_pix_fmt == PIX_FMT_PAL8 &&
        (src_pix_fmt != PIX_FMT_PAL8 && (ps->color_type != FF_COLOR_GRAY || (ps->is_alpha && has_alpha))))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
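
/* Usage sketch (illustrative only): the return value is a bit mask of the
 * FF_LOSS_* flags set above, so a caller can test for one specific kind of
 * degradation before converting.
 *
 *     int loss = avcodec_get_pix_fmt_loss(PIX_FMT_RGB565LE, PIX_FMT_RGB24, 0);
 *     if (loss & FF_LOSS_DEPTH)
 *         ; // destination has fewer bits per component (expected here)
 *     if (loss & FF_LOSS_ALPHA)
 *         ; // alpha would be dropped (not the case: has_alpha is 0)
 */
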
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    return info->padded_size ?
        info->padded_size : av_get_bits_per_pixel(desc);
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int i;

    if (loss_ptr) /* all losses count (for backward compatibility) */
        *loss_ptr = 0;

    dst_pix_fmt = PIX_FMT_NONE; /* so first iteration doesn't have to be treated special */
    for(i = 0; i< FFMIN(PIX_FMT_NB, 64); i++){
        if (pix_fmt_mask & (1ULL << i))
            dst_pix_fmt = avcodec_find_best_pix_fmt2(dst_pix_fmt, i, src_pix_fmt, has_alpha, loss_ptr);
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
                                            enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss1, loss2, loss_order1, loss_order2, i, loss_mask;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        ~(FF_LOSS_RESOLUTION | FF_LOSS_DEPTH | FF_LOSS_COLORSPACE | FF_LOSS_ALPHA |
          FF_LOSS_COLORQUANT | FF_LOSS_CHROMA),
        0x80000, //non zero entry that combines all loss variants including future additions
        0,
    };

    loss_mask= loss_ptr?~*loss_ptr:~0; /* use loss mask if provided */
    dst_pix_fmt = PIX_FMT_NONE;
    loss1 = avcodec_get_pix_fmt_loss(dst_pix_fmt1, src_pix_fmt, has_alpha) & loss_mask;
    loss2 = avcodec_get_pix_fmt_loss(dst_pix_fmt2, src_pix_fmt, has_alpha) & loss_mask;

    /* try with successive loss */
    for(i = 0;loss_mask_order[i] != 0 && dst_pix_fmt == PIX_FMT_NONE;i++) {
        loss_order1 = loss1 & loss_mask_order[i];
        loss_order2 = loss2 & loss_mask_order[i];

        if (loss_order1 == 0 && loss_order2 == 0){ /* use format with smallest depth */
            dst_pix_fmt = avg_bits_per_pixel(dst_pix_fmt2) < avg_bits_per_pixel(dst_pix_fmt1) ? dst_pix_fmt2 : dst_pix_fmt1;
        } else if (loss_order1 == 0 || loss_order2 == 0) { /* use format with no loss */
            dst_pix_fmt = loss_order2 ? dst_pix_fmt1 : dst_pix_fmt2;
        }
    }

    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
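
/* Usage sketch (illustrative only): picking the better of two candidate
 * destination formats for a given source.  For a YUV420P source, RGB24
 * loses only the colorspace while GRAY8 additionally loses chroma, so the
 * call below is expected to return PIX_FMT_RGB24.  On input, *loss_ptr
 * holds the losses the caller is willing to ignore (0 = all losses count);
 * on return it holds the losses of the chosen format.
 *
 *     int loss = 0;
 *     enum PixelFormat best =
 *         avcodec_find_best_pix_fmt2(PIX_FMT_RGB24, PIX_FMT_GRAY8,
 *                                    PIX_FMT_YUV420P, 0, &loss);
 */
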
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}
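
/* Usage sketch (illustrative only): allocating a picture with
 * avpicture_alloc(), copying into it, and releasing it with
 * avpicture_free().  'dst' and 'src' are hypothetical.
 *
 *     AVPicture dst;
 *     if (avpicture_alloc(&dst, PIX_FMT_YUV420P, width, height) >= 0) {
 *         av_picture_copy(&dst, &src, PIX_FMT_YUV420P, width, height);
 *         // ... use dst ...
 *         avpicture_free(&dst);
 *     }
 */
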
/* return true if yuv planar */
static inline int is_yuv_planar(enum PixelFormat fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[fmt];
    int i;
    int planes[4] = { 0 };

    if (info->color_type != FF_COLOR_YUV &&
        info->color_type != FF_COLOR_YUV_JPEG)
        return 0;

    /* set the used planes */
    for (i = 0; i < desc->nb_components; i++)
        planes[desc->comp[i].plane] = 1;

    /* if there is an unused plane, the format is not planar */
    for (i = 0; i < desc->nb_components; i++)
        if (!planes[i])
            return 0;
    return 1;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    if (is_yuv_planar(pix_fmt)) {
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
        dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
        dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
    } else{
        if(top_band % (1<<y_shift) || left_band % (1<<x_shift))
            return -1;
        if(left_band) //FIXME add support for this too
            return -1;
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    }

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
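
/* Usage sketch (illustrative only): cropping does not copy any pixels, it
 * only offsets the data pointers into the source picture, so 'cropped'
 * below stays valid exactly as long as 'src' does.  Even band sizes keep
 * the chroma pointers of 4:2:0 data aligned to whole chroma rows.
 *
 *     AVPicture cropped;
 *     if (av_picture_crop(&cropped, &src, PIX_FMT_YUV420P, 32, 16) == 0) {
 *         // cropped.data[] now points 32 lines down and 16 pixels right
 *     }
 */
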
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(pix_fmt)) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i],dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
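
/* Usage sketch (illustrative only): adding black borders around a YUV420P
 * picture.  The 'height' and 'width' arguments are the padded (destination)
 * dimensions, the destination must already be allocated at that size, and
 * 'color' holds one fill value per plane (16/128/128 is black in YUV).
 * All names below are hypothetical.
 *
 *     int color[3] = { 16, 128, 128 };
 *     AVPicture padded;
 *     if (avpicture_alloc(&padded, PIX_FMT_YUV420P,
 *                         width + padleft + padright,
 *                         height + padtop + padbottom) >= 0)
 *         av_picture_pad(&padded, &src,
 *                        height + padtop + padbottom,
 *                        width + padleft + padright,
 *                        PIX_FMT_YUV420P,
 *                        padtop, padbottom, padleft, padright, color);
 */
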
#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}
static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = av_malloc(width);
    if (!buf)
        return; /* allocation failed: leave the field untouched */
    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
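
/* Usage sketch (illustrative only): deinterlacing a decoded frame in place.
 * Passing the same AVPicture as source and destination selects the in-place
 * code path above; width and height must be multiples of 4.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P,
 *                               width, height) < 0) {
 *         // unsupported pixel format or unaligned dimensions
 *     }
 */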