/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "libavutil/colorspace.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX_EXTERNAL
#include "x86/dsputil_mmx.h"
#endif

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#if HAVE_MMX_EXTERNAL
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

#define pixdesc_has_alpha(pixdesc) \
    ((pixdesc)->nb_components == 2 || (pixdesc)->nb_components == 4 || (pixdesc)->flags & PIX_FMT_PAL)

typedef struct PixFmtInfo {
    uint8_t color_type;  /**< color type (see FF_COLOR_xxx constants) */
    uint8_t padded_size; /**< padded size in bits if different from the non-padded size */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[AV_PIX_FMT_NB] = {
    /* YUV formats */
    [AV_PIX_FMT_YUV420P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUYV422] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_UYVY422] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV410P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV411P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV440P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P9LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P9LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P9LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P9BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P9BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P9BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P10LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P10LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P10LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P10BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P10BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P10BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P12LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P12LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P12LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P12BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P12BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P12BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P14LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P14LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P14LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P14BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P14BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P14BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P16LE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV420P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV422P16BE] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUV444P16BE] = {
        .color_type = FF_COLOR_YUV,
    },

    /* YUV formats with alpha plane */
    [AV_PIX_FMT_YUVA420P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUVA422P] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_YUVA444P] = {
        .color_type = FF_COLOR_YUV,
    },

    /* JPEG YUV */
    [AV_PIX_FMT_YUVJ420P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [AV_PIX_FMT_YUVJ422P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [AV_PIX_FMT_YUVJ444P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },
    [AV_PIX_FMT_YUVJ440P] = {
        .color_type = FF_COLOR_YUV_JPEG,
    },

    /* RGB formats */
    [AV_PIX_FMT_RGB24] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR24] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_ARGB] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGBA64BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGBA64LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB565BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB565LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB555BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_RGB555LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_RGB444BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_RGB444LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },

    /* gray / mono formats */
    [AV_PIX_FMT_GRAY16BE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [AV_PIX_FMT_GRAY16LE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [AV_PIX_FMT_GRAY8] = {
        .color_type = FF_COLOR_GRAY,
    },
    [AV_PIX_FMT_GRAY8A] = {
        .color_type = FF_COLOR_GRAY,
    },
    [AV_PIX_FMT_MONOWHITE] = {
        .color_type = FF_COLOR_GRAY,
    },
    [AV_PIX_FMT_MONOBLACK] = {
        .color_type = FF_COLOR_GRAY,
    },

    /* paletted formats */
    [AV_PIX_FMT_PAL8] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_UYYVYY411] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_ABGR] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR48BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR48LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGRA64BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGRA64LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR565BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_BGR565LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_BGR555BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_BGR555LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_BGR444BE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_BGR444LE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 16,
    },
    [AV_PIX_FMT_RGB8] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB4] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGB4_BYTE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [AV_PIX_FMT_BGR8] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR4] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_BGR4_BYTE] = {
        .color_type  = FF_COLOR_RGB,
        .padded_size = 8,
    },
    [AV_PIX_FMT_NV12] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_NV21] = {
        .color_type = FF_COLOR_YUV,
    },
    [AV_PIX_FMT_BGRA] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_RGBA] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP9BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP9LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP10BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP10LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP12BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP12LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP14BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP14LE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP16BE] = {
        .color_type = FF_COLOR_RGB,
    },
    [AV_PIX_FMT_GBRP16LE] = {
        .color_type = FF_COLOR_RGB,
    },
};

void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
    *h_shift = desc->log2_chroma_w;
    *v_shift = desc->log2_chroma_h;
}

int ff_is_hwaccel_pix_fmt(enum AVPixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
    return desc->flags & PIX_FMT_HWACCEL;
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum AVPixelFormat pix_fmt, int width, int height)
{
    return av_image_fill_arrays(picture->data, picture->linesize,
                                ptr, pix_fmt, width, height, 1);
}

int avpicture_layout(const AVPicture* src, enum AVPixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    return av_image_copy_to_buffer(dest, dest_size,
                                   (const uint8_t * const*)src->data, src->linesize,
                                   pix_fmt, width, height, 1);
}

int avpicture_get_size(enum AVPixelFormat pix_fmt, int width, int height)
{
    return av_image_get_buffer_size(pix_fmt, width, height, 1);
}
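
/* Minimal usage sketch (not part of this file's API, values are invented for
 * illustration): a caller can size a packed buffer with avpicture_get_size()
 * and then map an AVPicture onto it with avpicture_fill().
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(AV_PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = size >= 0 ? av_malloc(size) : NULL;
 *     if (buf)
 *         avpicture_fill(&pic, buf, AV_PIX_FMT_YUV420P, 640, 480);
 */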
static int get_pix_fmt_depth(int *min, int *max, enum AVPixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
    int i;

    if (!desc || !desc->nb_components) {
        *min = *max = 0;
        return AVERROR(EINVAL);
    }

    *min = INT_MAX, *max = -INT_MAX;
    for (i = 0; i < desc->nb_components; i++) {
        *min = FFMIN(desc->comp[i].depth_minus1+1, *min);
        *max = FFMAX(desc->comp[i].depth_minus1+1, *max);
    }
    return 0;
}

int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = av_pix_fmt_desc_get(src_pix_fmt);
    const AVPixFmtDescriptor *dst_desc = av_pix_fmt_desc_get(dst_pix_fmt);
    int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
    int ret, loss;

    if (dst_pix_fmt >= AV_PIX_FMT_NB || dst_pix_fmt <= AV_PIX_FMT_NONE)
        return ~0;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;

    if ((ret = get_pix_fmt_depth(&src_min_depth, &src_max_depth, src_pix_fmt)) < 0)
        return ret;
    if ((ret = get_pix_fmt_depth(&dst_min_depth, &dst_max_depth, dst_pix_fmt)) < 0)
        return ret;
    if (dst_min_depth < src_min_depth ||
        dst_max_depth < src_max_depth)
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;

    pf = &pix_fmt_info[dst_pix_fmt];
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pixdesc_has_alpha(dst_desc) && (pixdesc_has_alpha(src_desc) && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (dst_pix_fmt == AV_PIX_FMT_PAL8 &&
        (src_pix_fmt != AV_PIX_FMT_PAL8 && (ps->color_type != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && has_alpha))))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
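
/* Illustration only (formats chosen arbitrarily): the return value is a
 * bitmask of FF_LOSS_* flags, so a caller typically tests individual bits.
 *
 *     int loss = avcodec_get_pix_fmt_loss(AV_PIX_FMT_RGB565LE, AV_PIX_FMT_RGB24, 0);
 *     if (loss & FF_LOSS_DEPTH)
 *         av_log(NULL, AV_LOG_DEBUG, "conversion reduces bit depth\n");
 */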
static int avg_bits_per_pixel(enum AVPixelFormat pix_fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);

    return info->padded_size ?
           info->padded_size : av_get_bits_per_pixel(desc);
}

#if FF_API_FIND_BEST_PIX_FMT
enum AVPixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum AVPixelFormat src_pix_fmt,
                                             int has_alpha, int *loss_ptr)
{
    enum AVPixelFormat dst_pix_fmt;
    int i;

    if (loss_ptr) /* all losses count (for backward compatibility) */
        *loss_ptr = 0;

    dst_pix_fmt = AV_PIX_FMT_NONE; /* so first iteration doesn't have to be treated special */
    for(i = 0; i< FFMIN(AV_PIX_FMT_NB, 64); i++){
        if (pix_fmt_mask & (1ULL << i))
            dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt, i, src_pix_fmt, has_alpha, loss_ptr);
    }
    return dst_pix_fmt;
}
#endif /* FF_API_FIND_BEST_PIX_FMT */

enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
                                                  enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
    enum AVPixelFormat dst_pix_fmt;
    int loss1, loss2, loss_order1, loss_order2, i, loss_mask;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~FF_LOSS_COLORSPACE,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        ~(FF_LOSS_DEPTH|FF_LOSS_COLORSPACE),
        ~(FF_LOSS_RESOLUTION | FF_LOSS_DEPTH | FF_LOSS_COLORSPACE | FF_LOSS_ALPHA |
          FF_LOSS_COLORQUANT | FF_LOSS_CHROMA),
        0x80000, //non zero entry that combines all loss variants including future additions
        0,
    };

    loss_mask= loss_ptr?~*loss_ptr:~0; /* use loss mask if provided */
    dst_pix_fmt = AV_PIX_FMT_NONE;
    loss1 = avcodec_get_pix_fmt_loss(dst_pix_fmt1, src_pix_fmt, has_alpha) & loss_mask;
    loss2 = avcodec_get_pix_fmt_loss(dst_pix_fmt2, src_pix_fmt, has_alpha) & loss_mask;

    /* try with successive loss */
    for(i = 0;loss_mask_order[i] != 0 && dst_pix_fmt == AV_PIX_FMT_NONE;i++) {
        loss_order1 = loss1 & loss_mask_order[i];
        loss_order2 = loss2 & loss_mask_order[i];

        if (loss_order1 == 0 && loss_order2 == 0 && dst_pix_fmt2 != AV_PIX_FMT_NONE && dst_pix_fmt1 != AV_PIX_FMT_NONE){ /* use format with smallest depth */
            dst_pix_fmt = avg_bits_per_pixel(dst_pix_fmt2) < avg_bits_per_pixel(dst_pix_fmt1) ? dst_pix_fmt2 : dst_pix_fmt1;
        } else if (loss_order1 == 0 || loss_order2 == 0) { /* use format with no loss */
            dst_pix_fmt = loss_order2 ? dst_pix_fmt1 : dst_pix_fmt2;
        }
    }

    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}

#if AV_HAVE_INCOMPATIBLE_FORK_ABI
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat *pix_fmt_list,
                                              enum AVPixelFormat src_pix_fmt,
                                              int has_alpha, int *loss_ptr){
    return avcodec_find_best_pix_fmt_of_list(pix_fmt_list, src_pix_fmt, has_alpha, loss_ptr);
}
#else
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
                                              enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
    return avcodec_find_best_pix_fmt_of_2(dst_pix_fmt1, dst_pix_fmt2, src_pix_fmt, has_alpha, loss_ptr);
}
#endif

enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(enum AVPixelFormat *pix_fmt_list,
                                                     enum AVPixelFormat src_pix_fmt,
                                                     int has_alpha, int *loss_ptr){
    int i;

    enum AVPixelFormat best = AV_PIX_FMT_NONE;

    for(i=0; pix_fmt_list[i] != AV_PIX_FMT_NONE; i++)
        best = avcodec_find_best_pix_fmt_of_2(best, pix_fmt_list[i], src_pix_fmt, has_alpha, loss_ptr);

    return best;
}
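
/* Sketch of how a caller might use the list variant; the candidate list and
 * source format below are invented for the example. The list must be
 * terminated by AV_PIX_FMT_NONE, and *loss_ptr on input holds the losses the
 * caller is willing to accept (0 = none).
 *
 *     enum AVPixelFormat candidates[] = {
 *         AV_PIX_FMT_YUV420P, AV_PIX_FMT_RGB24, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE
 *     };
 *     int loss = 0; // no loss acceptable a priori; receives the actual loss
 *     enum AVPixelFormat best =
 *         avcodec_find_best_pix_fmt_of_list(candidates, AV_PIX_FMT_YUVA444P, 1, &loss);
 */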
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum AVPixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, (const uint8_t **)src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum AVPixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(enum AVPixelFormat fmt)
{
    const PixFmtInfo *info = &pix_fmt_info[fmt];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
    int i;
    int planes[4] = { 0 };

    if (info->color_type != FF_COLOR_YUV &&
        info->color_type != FF_COLOR_YUV_JPEG)
        return 0;

    /* set the used planes */
    for (i = 0; i < desc->nb_components; i++)
        planes[desc->comp[i].plane] = 1;

    /* if there is an unused plane, the format is not planar */
    for (i = 0; i < desc->nb_components; i++)
        if (!planes[i])
            return 0;
    return 1;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum AVPixelFormat pix_fmt, int top_band, int left_band)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= AV_PIX_FMT_NB)
        return -1;

    y_shift = desc->log2_chroma_h;
    x_shift = desc->log2_chroma_w;

    if (is_yuv_planar(pix_fmt)) {
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
        dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
        dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
    } else{
        if(top_band % (1<<y_shift) || left_band % (1<<x_shift))
            return -1;
        if(left_band) //FIXME add support for this too
            return -1;
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    }

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum AVPixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= AV_PIX_FMT_NB ||
        !is_yuv_planar(pix_fmt)) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? desc->log2_chroma_w : 0;
        y_shift = i ? desc->log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i],dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }

    return 0;
}

#if !HAVE_MMX_EXTERNAL

/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif /* !HAVE_MMX_EXTERNAL */
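
/* Worked example of the vertical [-1 4 2 4 -1] / 8 kernel implemented above,
 * using invented sample values: for column samples 10, 20, 30, 40, 50
 * (lum_m4 .. lum), the output is clip((-10 + 4*20 + 2*30 + 4*40 - 50 + 4) >> 3)
 * = clip(244 >> 3) = 30, i.e. a smoothed estimate of the missing line that
 * weights the two nearest lines of the opposite field most heavily.
 */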
/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = av_malloc(width);

    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum AVPixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != AV_PIX_FMT_YUV420P &&
        pix_fmt != AV_PIX_FMT_YUVJ420P &&
        pix_fmt != AV_PIX_FMT_YUV422P &&
        pix_fmt != AV_PIX_FMT_YUVJ422P &&
        pix_fmt != AV_PIX_FMT_YUV444P &&
        pix_fmt != AV_PIX_FMT_YUV411P &&
        pix_fmt != AV_PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case AV_PIX_FMT_YUVJ420P:
            case AV_PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case AV_PIX_FMT_YUV422P:
            case AV_PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case AV_PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == AV_PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
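
/* Usage sketch with illustrative values only: in-place deinterlacing of a
 * 640x480 YUV420P picture. Width and height must both be multiples of 4 and
 * the format must be one of those accepted above.
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, AV_PIX_FMT_YUV420P, 640, 480) >= 0) {
 *         // ... fill pic with interlaced data ...
 *         if (avpicture_deinterlace(&pic, &pic, AV_PIX_FMT_YUV420P, 640, 480) < 0)
 *             av_log(NULL, AV_LOG_WARNING, "deinterlace failed\n");
 *         avpicture_free(&pic);
 *     }
 */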