/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/imgconvert.c
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "colorspace.h"
#include "libavutil/pixdesc.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};
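
/**
 * Return the chroma subsampling shifts stored in the pixel format
 * descriptor table. For example, PIX_FMT_YUV420P yields h_shift == 1 and
 * v_shift == 1 (chroma at half resolution in both directions), while
 * PIX_FMT_YUV444P yields 0 and 0.
 */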
void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}

const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

static enum PixelFormat avcodec_get_pix_fmt_internal(const char *name)
{
    int i;

    for (i = 0; i < PIX_FMT_NB; i++)
        if (av_pix_fmt_descriptors[i].name && !strcmp(av_pix_fmt_descriptors[i].name, name))
            return i;
    return PIX_FMT_NONE;
}

#if HAVE_BIGENDIAN
# define X_NE(be, le) be
#else
# define X_NE(be, le) le
#endif
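
/**
 * Look up a pixel format by name. Endian-neutral aliases are resolved first
 * ("rgb32"/"bgr32" map to the packed 32-bit format matching the host byte
 * order); if the plain name is unknown, a native-endian "be"/"le" suffix is
 * tried, so e.g. "gray16" resolves to PIX_FMT_GRAY16BE or PIX_FMT_GRAY16LE
 * depending on the platform.
 */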
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    enum PixelFormat pix_fmt;

    if (!strcmp(name, "rgb32"))
        name = X_NE("argb", "bgra");
    else if (!strcmp(name, "bgr32"))
        name = X_NE("abgr", "rgba");

    pix_fmt = avcodec_get_pix_fmt_internal(name);
    if (pix_fmt == PIX_FMT_NONE) {
        char name2[32];

        snprintf(name2, sizeof(name2), "%s%s", name, X_NE("be", "le"));
        pix_fmt = avcodec_get_pix_fmt_internal(name2);
    }
    return pix_fmt;
}

void avcodec_pix_fmt_string(char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf(buf, buf_size,
                 "name " " nb_channels" " depth" " is_alpha");
    else {
        PixFmtInfo info = pix_fmt_info[pix_fmt];
        char is_alpha_char = info.is_alpha ? 'y' : 'n';

        snprintf(buf, buf_size,
                 "%-11s %5d %9d %6c",
                 av_pix_fmt_descriptors[pix_fmt].name,
                 info.nb_channels,
                 info.depth,
                 is_alpha_char);
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}
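
/**
 * Fill a 256-entry palette with the fixed color ramp implied by a
 * pseudo-paletted format: RGB8 uses a 3:3:2 bit split (R in the top three
 * bits, then G, then B), BGR8 puts B in the top two bits followed by three
 * bits each of G and R, RGB4_BYTE/BGR4_BYTE use a 1:2:1 split, and GRAY8 is
 * a linear gray ramp. Entries are stored as 0x00RRGGBB words; the alpha
 * byte is left at zero.
 */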
int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < 256; i++) {
        int r, g, b;

        switch (pix_fmt) {
        case PIX_FMT_RGB8:
            r = (i>>5)*36;
            g = ((i>>2)&7)*36;
            b = (i&3)*85;
            break;
        case PIX_FMT_BGR8:
            b = (i>>6)*85;
            g = ((i>>3)&7)*36;
            r = (i&7)*36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r = (i>>3)*255;
            g = ((i>>1)&3)*85;
            b = (i&1)*255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b = (i>>3)*255;
            g = ((i>>1)&3)*85;
            r = (i&1)*255;
            break;
        case PIX_FMT_GRAY8:
            r = b = g = i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g<<8) + (r<<16);
    }

    return 0;
}
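
/**
 * Fill picture->linesize[] with the per-plane line size in bytes for the
 * given pixel format and width; chroma planes are rounded up according to
 * the format's horizontal subsampling. Returns 0 on success, -1 for
 * unsupported formats. For example, PIX_FMT_YUV420P with width 100 gives
 * linesizes {100, 50, 50, 0}.
 */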
int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    int w2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    memset(picture->linesize, 0, sizeof(picture->linesize));

    switch (pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        break;
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = 2*width;
        picture->linesize[1] = 2*w2;
        picture->linesize[2] = 2*w2;
        break;
    case PIX_FMT_YUVA420P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        picture->linesize[3] = width;
        break;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = 2 * w2;
        break;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        picture->linesize[0] = width * 3;
        break;
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
        picture->linesize[0] = width * 4;
        break;
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
        picture->linesize[0] = width * 6;
        break;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYVY422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYYVYY411:
        picture->linesize[0] = width + width/2;
        break;
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
        picture->linesize[0] = width / 2;
        break;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->linesize[0] = (width + 7) >> 3;
        break;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        picture->linesize[0] = width;
        break;
    default:
        return -1;
    }
    return 0;
}
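
/**
 * Lay the planes out back to back in the buffer pointed to by ptr, using
 * the linesizes set up by ff_fill_linesize(), and return the total number
 * of bytes required. For pseudo-paletted formats the 256-entry 32-bit
 * palette is placed (4-byte aligned) right after the pixel data. ptr may be
 * NULL when only the returned size is of interest, which is how
 * avpicture_get_size() uses this via avpicture_fill().
 */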
int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    int size, h2, size2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    size = picture->linesize[0] * height;
    switch (pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_YUVA420P:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = picture->data[1] + size2 + size2;
        return 2 * size + 2 * size2;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size + size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
    case PIX_FMT_UYVY422:
    case PIX_FMT_UYYVYY411:
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->data[0] = ptr;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        size2 = (size + 3) & ~3;
        picture->data[0] = ptr;
        picture->data[1] = ptr + size2; /* the palette is stored here as 256 32-bit words */
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size2 + 256 * 4;
    default:
        picture->data[0] = NULL;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return -1;
    }
}
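
/*
 * Illustrative usage sketch for the fill/size helpers:
 *
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = av_malloc(size);
 *     AVPicture pic;
 *     avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480);
 *     // pic.data[] / pic.linesize[] now describe the buffer; release with av_free(buf)
 */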
int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if (avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (ff_fill_linesize(picture, pix_fmt, width))
        return -1;

    return ff_fill_pointer(picture, ptr, pix_fmt, height);
}

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i = 0; i < data_planes; i++) {
        if (i == 1) {
            w = ((width >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = height >> desc->log2_chroma_h;
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for (j = 0; j < h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (avcodec_check_dimensions(NULL, width, height))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
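
/**
 * Compare source and destination formats and return a bitmask of FF_LOSS_*
 * flags describing what a conversion would lose: bit depth, chroma
 * resolution, color space, alpha, chroma itself, or color quantization
 * (palette). For example, converting PIX_FMT_RGB24 to PIX_FMT_GRAY8 reports
 * FF_LOSS_COLORSPACE | FF_LOSS_CHROMA.
 */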
int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch (pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;
    return loss;
}

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch (pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for (i = 0; i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}
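
/*
 * Worked example of the format negotiation below: with a candidate mask of
 * (1ULL << PIX_FMT_YUV420P) | (1ULL << PIX_FMT_RGB24) and a PIX_FMT_YUV422P
 * source, the relaxation order settles on PIX_FMT_YUV420P and reports
 * FF_LOSS_RESOLUTION in *loss_ptr, since only chroma resolution is given up.
 */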
enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for (;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}

void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if ((!dst) || (!src))
        return;
    for (; height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch (pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
        break;
    case FF_PIXEL_PLANAR:
        if (plane == 1 || plane == 2)
            width = -((-width) >> desc->log2_chroma_w);

        return (width * pf->depth + 7) >> 3;
        break;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }

    return -1;
}
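
/**
 * Copy the image data of src into dst plane by plane (both pictures must
 * already be allocated with the same pixel format and dimensions), using
 * the plane byte width and the subsampled height for chroma planes; for
 * paletted formats the 1024-byte palette in data[1] is copied as well.
 */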
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for (i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h = -((-height) >> desc->log2_chroma_h);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        memcpy(dst->data[1], src->data[1], 4*256);
        break;
    }
}
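
/*
 * The ff_shrinkNN() helpers below downscale a plane by a fixed integer
 * factor with a simple box filter: each destination pixel is the rounded
 * average of an NxN block of source pixels, e.g. for 2x2
 *     d = (s00 + s01 + s10 + s11 + 2) >> 2.
 */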
/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for (w = width; w >= 4; w -= 4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for (; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for (w = width; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for (; height > 0; height--) {
        for (w = width; w > 0; w--) {
            int tmp = 0;
            for (i = 0; i < 8; i++) {
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}
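
/*
 * Illustrative usage sketch for the allocation helpers:
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_YUV420P, 640, 480) < 0)
 *         return -1;
 *     // ... use pic.data[] / pic.linesize[] ...
 *     avpicture_free(&pic);
 */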
int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if (size < 0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if (picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
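
/**
 * Draw a border of padtop/padbottom/padleft/padright pixels around src
 * inside dst (whose dimensions are the padded width and height), filling
 * the border of each plane with the per-plane value from color[]. Only
 * planar YUV formats are supported; the pad sizes are shifted by the chroma
 * subsampling for the U and V planes.
 */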
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}

/* NOTE: we scan all the pixels to have exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch (pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}

#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
                    movd_m2r(lum_m4[0],mm0);\
                    movd_m2r(lum_m3[0],mm1);\
                    movd_m2r(lum_m2[0],mm2);\
                    movd_m2r(lum_m1[0],mm3);\
                    movd_m2r(lum[0],mm4);\
                    punpcklbw_r2r(mm7,mm0);\
                    movd_r2m(mm2,lum_m4[0]);\
                    punpcklbw_r2r(mm7,mm1);\
                    punpcklbw_r2r(mm7,mm2);\
                    punpcklbw_r2r(mm7,mm3);\
                    punpcklbw_r2r(mm7,mm4);\
                    paddw_r2r(mm3,mm1);\
                    psllw_i2r(1,mm2);\
                    paddw_r2r(mm4,mm0);\
                    psllw_i2r(2,mm1);\
                    paddw_r2r(mm6,mm2);\
                    paddw_r2r(mm2,mm1);\
                    psubusw_r2r(mm0,mm1);\
                    psrlw_i2r(3,mm1);\
                    packuswb_r2r(mm7,mm1);\
                    movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
                    movd_m2r(lum_m4[0],mm0);\
                    movd_m2r(lum_m3[0],mm1);\
                    movd_m2r(lum_m2[0],mm2);\
                    movd_m2r(lum_m1[0],mm3);\
                    movd_m2r(lum[0],mm4);\
                    punpcklbw_r2r(mm7,mm0);\
                    punpcklbw_r2r(mm7,mm1);\
                    punpcklbw_r2r(mm7,mm2);\
                    punpcklbw_r2r(mm7,mm3);\
                    punpcklbw_r2r(mm7,mm4);\
                    paddw_r2r(mm3,mm1);\
                    psllw_i2r(1,mm2);\
                    paddw_r2r(mm4,mm0);\
                    psllw_i2r(2,mm1);\
                    paddw_r2r(mm6,mm2);\
                    paddw_r2r(mm2,mm1);\
                    psubusw_r2r(mm0,mm1);\
                    psrlw_i2r(3,mm1);\
                    packuswb_r2r(mm7,mm1);\
                    movd_r2m(mm1,dst[0]);
#endif
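
/*
 * The deinterlacing kernel below filters five vertically adjacent lines
 * with the weights [-1 4 2 4 -1] and a normalization by 8, i.e. per pixel
 *     out = clip((-lum_m4 + 4*lum_m3 + 2*lum_m2 + 4*lum_m1 - lum + 4) >> 3)
 * which is what both the C fallback and the MMX macros above compute.
 */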
/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7, mm7);
        movq_m2r(ff_pw_4, mm6);
    }
    for (; size > 3; size -= 4) {
        DEINT_LINE_LUM
        lum_m4 += 4;
        lum_m3 += 4;
        lum_m2 += 4;
        lum_m1 += 4;
        lum += 4;
        dst += 4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7, mm7);
        movq_m2r(ff_pw_4, mm6);
    }
    for (; size > 3; size -= 4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4 += 4;
        lum_m3 += 4;
        lum_m2 += 4;
        lum_m1 += 4;
        lum += 4;
    }
#endif
}

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);

    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}
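
/**
 * Deinterlace each plane with the filter above. The operation is done in
 * place when src == dst; only PIX_FMT_YUV420P, YUV422P, YUV444P, YUV411P
 * and GRAY8 are supported, and width and height must be multiples of 4.
 * Returns 0 on success, -1 otherwise.
 */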
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for (i = 0; i < 3; i++) {
        if (i == 1) {
            switch (pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}