/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */

#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
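
/*
 * Illustrative usage sketch (not part of the original file): querying the
 * chroma subsampling of a 4:2:0 format. For PIX_FMT_YUV420P both shifts
 * are 1, i.e. each chroma plane is half the luma size in both directions.
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // h_shift == 1, v_shift == 1
 */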

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
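
/*
 * Illustrative usage sketch (not part of the original file): wrapping an
 * existing buffer as an AVPicture without copying. The required buffer
 * size comes from avpicture_get_size() further below.
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_RGB24, 640, 480);
 *     uint8_t *buf = av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_RGB24, 640, 480);
 *     // ... use pic.data / pic.linesize, then av_free(buf)
 */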

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    int i, j, nb_planes = 0, linesizes[4];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return AVERROR(EINVAL);

    for (i = 0; i < desc->nb_components; i++)
        nb_planes = FFMAX(desc->comp[i].plane, nb_planes);
    nb_planes++;

    av_image_fill_linesizes(linesizes, pix_fmt, width);
    for (i = 0; i < nb_planes; i++) {
        int h, shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        const unsigned char *s = src->data[i];
        h = (height + (1 << shift) - 1) >> shift;

        for (j = 0; j < h; j++) {
            memcpy(dest, s, linesizes[i]);
            dest += linesizes[i];
            s += src->linesize[i];
        }
    }

    if (desc->flags & PIX_FMT_PAL)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}
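
/*
 * Illustrative usage sketch (not part of the original file): serializing a
 * picture into one contiguous buffer, e.g. before writing it out as a raw
 * frame. The destination must be at least avpicture_get_size() bytes;
 * 'pic', 'pix_fmt', 'w' and 'h' stand for whatever picture is being exported.
 *
 *     int size = avpicture_get_size(pix_fmt, w, h);
 *     uint8_t *flat = av_malloc(size);
 *     if (flat && avpicture_layout(&pic, pix_fmt, w, h, flat, size) == size) {
 *         // 'flat' now holds the planes back to back, plane 0 first
 *     }
 */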

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (av_image_check_size(width, height, 0, NULL))
        return -1;
    if (av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_PSEUDOPAL)
        // do not include palette for these pseudo-paletted formats
        return width * height;
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
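
/*
 * Worked example (illustrative, not part of the original file): for a
 * 640x480 frame, PIX_FMT_RGB24 needs 640*480*3 = 921600 bytes, while
 * PIX_FMT_YUV420P needs 640*480 * 3/2 = 460800 bytes (a full-size luma
 * plane plus two quarter-size chroma planes).
 */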

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
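
/*
 * Illustrative example (not part of the original file): converting
 * PIX_FMT_RGB24 to PIX_FMT_YUV420P loses both the color space and chroma
 * resolution, so the returned mask contains FF_LOSS_COLORSPACE and
 * FF_LOSS_RESOLUTION.
 *
 *     int loss = avcodec_get_pix_fmt_loss(PIX_FMT_YUV420P, PIX_FMT_RGB24, 0);
 *     // (loss & FF_LOSS_COLORSPACE) != 0 and (loss & FF_LOSS_RESOLUTION) != 0
 */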

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}
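
/*
 * Worked example (illustrative, not part of the original file): for
 * PIX_FMT_YUV420P the planar branch gives the luma depth plus two chroma
 * planes subsampled by one bit in each direction, i.e.
 * 8 + ((2 * 8) >> (1 + 1)) = 8 + 4 = 12 bits per pixel on average.
 */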

static enum PixelFormat avcodec_find_best_pix_fmt1(enum PixelFormat *pix_fmt_list,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    i = 0;
    while (pix_fmt_list[i] != PIX_FMT_NONE) {
        enum PixelFormat pix_fmt = pix_fmt_list[i];

        if (i > PIX_FMT_NB) {
            av_log(NULL, AV_LOG_ERROR, "Pixel format list longer than expected, "
                   "it is either not properly terminated or contains duplicates\n");
            return PIX_FMT_NONE;
        }

        loss = avcodec_get_pix_fmt_loss(pix_fmt, src_pix_fmt, has_alpha) & loss_mask;
        if (loss == 0) {
            dist = avg_bits_per_pixel(pix_fmt);
            if (dist < min_dist) {
                min_dist = dist;
                dst_pix_fmt = pix_fmt;
            }
        }
        i++;
    }
    return dst_pix_fmt;
}

#if FF_API_FIND_BEST_PIX_FMT
enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat list[64 + 1]; /* up to 64 formats plus the terminator */
    int i, j = 0;

    // test only the first 64 pixel formats to avoid undefined behaviour
    for (i = 0; i < 64; i++) {
        if (pix_fmt_mask & (1ULL << i))
            list[j++] = i;
    }
    list[j] = PIX_FMT_NONE;

    return avcodec_find_best_pix_fmt2(list, src_pix_fmt, has_alpha, loss_ptr);
}
#endif /* FF_API_FIND_BEST_PIX_FMT */

enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat *pix_fmt_list,
                                            enum PixelFormat src_pix_fmt,
                                            int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_list, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
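
/*
 * Illustrative usage sketch (not part of the original file): picking the
 * best destination format from a hypothetical list of supported formats
 * for an RGB24 source. PIX_FMT_YUV420P would lose color space and chroma
 * resolution, so PIX_FMT_RGB24 itself is chosen with zero loss.
 *
 *     enum PixelFormat list[] = { PIX_FMT_YUV420P, PIX_FMT_RGB24, PIX_FMT_NONE };
 *     int loss;
 *     enum PixelFormat best = avcodec_find_best_pix_fmt2(list, PIX_FMT_RGB24, 0, &loss);
 *     // best == PIX_FMT_RGB24, loss == 0
 */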

void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}
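
/*
 * Illustrative usage sketch (not part of the original file): allocating a
 * picture owned by the caller and releasing it again. avpicture_free()
 * only frees the data buffer, not the AVPicture structure itself.
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_YUV420P, 320, 240) == 0) {
 *         // ... fill / use pic.data and pic.linesize ...
 *         avpicture_free(&pic);
 *     }
 */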

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
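
/*
 * Illustrative usage sketch (not part of the original file): cropping is a
 * zero-copy operation; the destination just points into the source planes,
 * so the source must stay valid while the crop is in use. Even band sizes
 * keep the chroma planes aligned with the luma plane for 4:2:0. 'src_pic'
 * stands for an existing picture.
 *
 *     AVPicture cropped;
 *     if (av_picture_crop(&cropped, &src_pic, PIX_FMT_YUV420P, 16, 16) == 0) {
 *         // cropped shares src_pic's buffers, shifted down/right by 16 pixels
 *     }
 */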

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
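
/*
 * Illustrative usage sketch (not part of the original file): adding a
 * 16-pixel black border on every side of a 4:2:0 picture. Here 'height'
 * and 'width' are the padded (destination) dimensions, 'color' gives one
 * fill value per plane, and 'w', 'h' and 'src_pic' stand for the source
 * picture being padded.
 *
 *     int black[3] = { 16, 128, 128 };
 *     AVPicture padded;
 *     avpicture_alloc(&padded, PIX_FMT_YUV420P, w + 32, h + 32);
 *     av_picture_pad(&padded, &src_pic, h + 32, w + 32, PIX_FMT_YUV420P,
 *                    16, 16, 16, 16, black);
 */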

#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
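/*
 * Worked example (illustrative, not part of the original file): for a flat
 * area where every input sample equals v, the weighted sum is
 * -v + 4v + 2v + 4v - v = 8v, and (8v + 4) >> 3 rounds back to v, so the
 * filter preserves constant regions while blending across the two fields.
 */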
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = av_malloc(width);
    if (!buf) /* guard against allocation failure */
        return;

    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
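
/*
 * Illustrative usage sketch (not part of the original file): in-place
 * deinterlacing of a 4:2:0 frame. Width and height must be multiples of 4
 * and the format must be one of the planar YUV/gray formats accepted above;
 * 'pic', 'w' and 'h' stand for the frame being processed.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, w, h) < 0) {
 *         // unsupported format or unsuitable dimensions
 *     }
 */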