/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/imgconvert.c
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "colorspace.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */

typedef struct PixFmtInfo {
    const char *name;
    uint8_t nb_channels;    /**< number of channels (including alpha) */
    uint8_t color_type;     /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;     /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1;   /**< true if alpha can be specified */
    uint8_t is_hwaccel : 1; /**< true if this is an HW accelerated format */
    uint8_t x_chroma_shift; /**< X chroma subsampling factor is 2 ^ shift */
    uint8_t y_chroma_shift; /**< Y chroma subsampling factor is 2 ^ shift */
    uint8_t depth;          /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .name = "yuv420p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_YUV422P] = {
        .name = "yuv422p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV444P] = {
        .name = "yuv444p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUYV422] = {
        .name = "yuyv422",
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_UYVY422] = {
        .name = "uyvy422",
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV410P] = {
        .name = "yuv410p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 2, .y_chroma_shift = 2,
    },
    [PIX_FMT_YUV411P] = {
        .name = "yuv411p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 2, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV440P] = {
        .name = "yuv440p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 1,
    },
    [PIX_FMT_YUV420PLE] = {
        .name = "yuv420ple",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_YUV422PLE] = {
        .name = "yuv422ple",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV444PLE] = {
        .name = "yuv444ple",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV420PBE] = {
        .name = "yuv420pbe",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_YUV422PBE] = {
        .name = "yuv422pbe",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUV444PBE] = {
        .name = "yuv444pbe",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .name = "yuva420p",
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .name = "yuvj420p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_YUVJ422P] = {
        .name = "yuvj422p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUVJ444P] = {
        .name = "yuvj444p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_YUVJ440P] = {
        .name = "yuvj440p",
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 1,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .name = "rgb24",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR24] = {
        .name = "bgr24",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_ARGB] = {
        .name = "argb",
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB48BE] = {
        .name = "rgb48be",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB48LE] = {
        .name = "rgb48le",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB565BE] = {
        .name = "rgb565be",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB565LE] = {
        .name = "rgb565le",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB555BE] = {
        .name = "rgb555be",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB555LE] = {
        .name = "rgb555le",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .name = "gray16be",
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .name = "gray16le",
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .name = "gray",
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .name = "monow",
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .name = "monob",
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .name = "pal8",
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_XVMC_MPEG2_MC] = {
        .name = "xvmcmc",
        .is_hwaccel = 1,
    },
    [PIX_FMT_XVMC_MPEG2_IDCT] = {
        .name = "xvmcidct",
        .is_hwaccel = 1,
    },
    [PIX_FMT_VDPAU_MPEG1] = {
        .name = "vdpau_mpeg1",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VDPAU_MPEG2] = {
        .name = "vdpau_mpeg2",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VDPAU_H264] = {
        .name = "vdpau_h264",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VDPAU_WMV3] = {
        .name = "vdpau_wmv3",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VDPAU_VC1] = {
        .name = "vdpau_vc1",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_UYYVYY411] = {
        .name = "uyyvyy411",
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 2, .y_chroma_shift = 0,
    },
    [PIX_FMT_ABGR] = {
        .name = "abgr",
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR565BE] = {
        .name = "bgr565be",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR565LE] = {
        .name = "bgr565le",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR555BE] = {
        .name = "bgr555be",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR555LE] = {
        .name = "bgr555le",
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB8] = {
        .name = "rgb8",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB4] = {
        .name = "rgb4",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .name = "rgb4_byte",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR8] = {
        .name = "bgr8",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR4] = {
        .name = "bgr4",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .name = "bgr4_byte",
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_NV12] = {
        .name = "nv12",
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_NV21] = {
        .name = "nv21",
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_BGRA] = {
        .name = "bgra",
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },
    [PIX_FMT_RGBA] = {
        .name = "rgba",
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
        .x_chroma_shift = 0, .y_chroma_shift = 0,
    },

    /* VA API formats */
    [PIX_FMT_VAAPI_MOCO] = {
        .name = "vaapi_moco",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VAAPI_IDCT] = {
        .name = "vaapi_idct",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
    [PIX_FMT_VAAPI_VLD] = {
        .name = "vaapi_vld",
        .is_hwaccel = 1,
        .x_chroma_shift = 1, .y_chroma_shift = 1,
    },
};
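
/* return the chroma subsampling shifts (log2 of the horizontal and
   vertical subsampling factors) of the given pixel format */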
void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = pix_fmt_info[pix_fmt].x_chroma_shift;
    *v_shift = pix_fmt_info[pix_fmt].y_chroma_shift;
}

const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return pix_fmt_info[pix_fmt].name;
}

static enum PixelFormat avcodec_get_pix_fmt_internal(const char *name)
{
    int i;

    for (i = 0; i < PIX_FMT_NB; i++)
        if (pix_fmt_info[i].name && !strcmp(pix_fmt_info[i].name, name))
            return i;
    return PIX_FMT_NONE;
}

#if HAVE_BIGENDIAN
#   define X_NE(be, le) be
#else
#   define X_NE(be, le) le
#endif

enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    enum PixelFormat pix_fmt;

    if (!strcmp(name, "rgb32"))
        name = X_NE("argb", "bgra");
    else if (!strcmp(name, "bgr32"))
        name = X_NE("abgr", "rgba");

    pix_fmt = avcodec_get_pix_fmt_internal(name);
    if (pix_fmt == PIX_FMT_NONE) {
        char name2[32];

        snprintf(name2, sizeof(name2), "%s%s", name, X_NE("be", "le"));
        pix_fmt = avcodec_get_pix_fmt_internal(name2);
    }
    return pix_fmt;
}
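
/* print a one-line description of pix_fmt into buf; a negative pix_fmt
   prints the column headers instead */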
void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf (buf, buf_size,
                  "name " " nb_channels" " depth" " is_alpha");
    else {
        PixFmtInfo info = pix_fmt_info[pix_fmt];
        char is_alpha_char = info.is_alpha ? 'y' : 'n';

        snprintf (buf, buf_size,
                  "%-10s" " %1d " " %2d " " %c ",
                  info.name,
                  info.nb_channels,
                  info.depth,
                  is_alpha_char);
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return pix_fmt_info[pix_fmt].is_hwaccel;
}
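
/* fill pal with the fixed palette implied by the given pseudo-paletted
   format (rgb8, bgr8, rgb4_byte, bgr4_byte, gray8); return -1 otherwise */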
int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < 256; i++) {
        int r, g, b;

        switch (pix_fmt) {
        case PIX_FMT_RGB8:
            r = (i>>5    ) * 36;
            g = ((i>>2)&7) * 36;
            b = (i&3     ) * 85;
            break;
        case PIX_FMT_BGR8:
            b = (i>>6    ) * 85;
            g = ((i>>3)&7) * 36;
            r = (i&7     ) * 36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r = (i>>3    ) * 255;
            g = ((i>>1)&3) * 85;
            b = (i&1     ) * 255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b = (i>>3    ) * 255;
            g = ((i>>1)&3) * 85;
            r = (i&1     ) * 255;
            break;
        case PIX_FMT_GRAY8:
            r = b = g = i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g << 8) + (r << 16);
    }

    return 0;
}
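
/* set picture->linesize[] for the given format and width; chroma line
   sizes are rounded up according to the horizontal chroma shift */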
int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    int w2;
    const PixFmtInfo *pinfo;

    memset(picture->linesize, 0, sizeof(picture->linesize));

    pinfo = &pix_fmt_info[pix_fmt];
    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
        w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        break;
    case PIX_FMT_YUV420PLE:
    case PIX_FMT_YUV422PLE:
    case PIX_FMT_YUV444PLE:
    case PIX_FMT_YUV420PBE:
    case PIX_FMT_YUV422PBE:
    case PIX_FMT_YUV444PBE:
        w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
        picture->linesize[0] = 2*width;
        picture->linesize[1] = 2*w2;
        picture->linesize[2] = 2*w2;
        break;
    case PIX_FMT_YUVA420P:
        w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        picture->linesize[3] = width;
        break;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        break;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        picture->linesize[0] = width * 3;
        break;
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
        picture->linesize[0] = width * 4;
        break;
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
        picture->linesize[0] = width * 6;
        break;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYVY422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYYVYY411:
        picture->linesize[0] = width + width/2;
        break;
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
        picture->linesize[0] = width / 2;
        break;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->linesize[0] = (width + 7) >> 3;
        break;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        picture->linesize[0] = width;
        break;
    default:
        return -1;
    }
    return 0;
}
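
/* set picture->data[] to point into the buffer at ptr, using the
   previously filled linesizes; return the required buffer size or -1 */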
int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    int size, h2, size2;
    const PixFmtInfo *pinfo;

    pinfo = &pix_fmt_info[pix_fmt];
    size = picture->linesize[0] * height;
    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
    case PIX_FMT_YUV420PLE:
    case PIX_FMT_YUV422PLE:
    case PIX_FMT_YUV444PLE:
    case PIX_FMT_YUV420PBE:
    case PIX_FMT_YUV422PBE:
    case PIX_FMT_YUV444PBE:
        h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_YUVA420P:
        h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = picture->data[1] + size2 + size2;
        return 2 * size + 2 * size2;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
        size2 = picture->linesize[1] * h2 * 2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
    case PIX_FMT_UYVY422:
    case PIX_FMT_UYYVYY411:
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->data[0] = ptr;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        size2 = (size + 3) & ~3;
        picture->data[0] = ptr;
        picture->data[1] = ptr + size2; /* palette is stored here as 256 32 bit words */
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size2 + 256 * 4;
    default:
        picture->data[0] = NULL;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return -1;
    }
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (ff_fill_linesize(picture, pix_fmt, width))
        return -1;

    return ff_fill_pointer(picture, ptr, pix_fmt, height);
}
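
/* copy the image described by src, plane after plane and line by line with
   no padding, into the flat buffer dest; return the laid-out size in bytes */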
int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i=0; i<data_planes; i++) {
        if (i == 1) {
            w = width >> pf->x_chroma_shift;
            h = height >> pf->y_chroma_shift;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for(j=0; j<h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;

    if(avcodec_check_dimensions(NULL, width, height))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE)))
        loss |= FF_LOSS_DEPTH;
    if (pf->x_chroma_shift > ps->x_chroma_shift ||
        pf->y_chroma_shift > ps->y_chroma_shift)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
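
/* average number of bits per pixel of the format, chroma subsampling
   included; used below to prefer the most compact lossless match */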
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (pf->x_chroma_shift == 0 && pf->y_chroma_shift == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (pf->x_chroma_shift + pf->y_chroma_shift));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = -1;
    min_dist = 0x7fffffff;
    for(i = 0;i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return -1;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}

void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if((!dst) || (!src))
        return;
    for(;height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}
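
/* return the number of bytes occupied by one line of the given plane */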
int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
    case FF_PIXEL_PLANAR:
        if (plane == 1 || plane == 2)
            width = -((-width) >> pf->x_chroma_shift);
        return (width * pf->depth + 7) >> 3;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }
    return -1;
}

void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for(i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h = -((-height) >> pf->y_chroma_shift);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        ff_img_copy_plane(dst->data[1], dst->linesize[1],
                          src->data[1], src->linesize[1],
                          4, 256);
        break;
    }
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp = 0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}
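
/* allocate the image data for picture and fill in its pointers; pseudo-
   paletted formats get the systematic palette installed in data[1] */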
int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if(size<0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if(picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
        ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = pix_fmt_info[pix_fmt].y_chroma_shift;
    x_shift = pix_fmt_info[pix_fmt].x_chroma_shift;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? pix_fmt_info[pix_fmt].x_chroma_shift : 0;
        y_shift = i ? pix_fmt_info[pix_fmt].y_chroma_shift : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}

/* NOTE: we scan all the pixels to have an exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
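
/* MMX implementations of the [-1 4 2 4 -1]/8 deinterlacing filter used
   by deinterlace_line() and deinterlace_line_inplace() below */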
#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        movd_r2m(mm2,lum_m4[0]);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,dst[0]);
#endif

/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
        dst+=4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
    }
#endif
}

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);
    if (!buf) /* avoid dereferencing a failed allocation */
        return;

    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}
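
/* deinterlace src into dst; only a few YUV planar formats and gray8 are
   supported, width and height must be multiples of 4, and src may be the
   same picture as dst for in-place operation */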
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}