/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/imgconvert.c
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "colorspace.h"
#include "libavutil/pixdesc.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;
/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
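/* Illustrative usage sketch (an editorial addition, not part of the original
 * source): the shifts describe chroma subsampling, so for a 4:2:0 format the
 * chroma plane is half the luma size in each direction.
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // h_shift == 1, v_shift == 1: chroma_width  = luma_width  >> 1,
 *     //                             chroma_height = luma_height >> 1
 */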
const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}
#endif

void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf (buf, buf_size,
                  "name " " nb_channels" " depth" " is_alpha"
                  );
    else {
        PixFmtInfo info = pix_fmt_info[pix_fmt];
        char is_alpha_char = info.is_alpha ? 'y' : 'n';
        snprintf (buf, buf_size,
                  "%-11s %5d %9d %6c",
                  av_pix_fmt_descriptors[pix_fmt].name,
                  info.nb_channels,
                  info.depth,
                  is_alpha_char
                  );
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt){
    int i;

    for(i=0; i<256; i++){
        int r, g, b;

        switch(pix_fmt) {
        case PIX_FMT_RGB8:
            r = (i>>5    )*36;
            g = ((i>>2)&7)*36;
            b = (i&3     )*85;
            break;
        case PIX_FMT_BGR8:
            b = (i>>6    )*85;
            g = ((i>>3)&7)*36;
            r = (i&7     )*36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r = (i>>3    )*255;
            g = ((i>>1)&3)*85;
            b = (i&1     )*255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b = (i>>3    )*255;
            g = ((i>>1)&3)*85;
            r = (i&1     )*255;
            break;
        case PIX_FMT_GRAY8:
            r = b = g = i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g<<8) + (r<<16);
    }

    return 0;
}
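/* Worked example (an editorial addition, not part of the original source):
 * for PIX_FMT_RGB8 an index is split as rrrgggbb, so i = 0xE4 (11100100b)
 * gives r = (0xE4>>5)*36 = 252, g = ((0xE4>>2)&7)*36 = 36, b = (0xE4&3)*85 = 0,
 * and pal[0xE4] = 0 + (36<<8) + (252<<16) = 0x00FC2400. */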
int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    int w2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    memset(picture->linesize, 0, sizeof(picture->linesize));

    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        break;
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = 2*width;
        picture->linesize[1] = 2*w2;
        picture->linesize[2] = 2*w2;
        break;
    case PIX_FMT_YUVA420P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        picture->linesize[3] = width;
        break;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = 2 * w2;
        break;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        picture->linesize[0] = width * 3;
        break;
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
        picture->linesize[0] = width * 4;
        break;
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
        picture->linesize[0] = width * 6;
        break;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYVY422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYYVYY411:
        picture->linesize[0] = width + width/2;
        break;
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
        picture->linesize[0] = width / 2;
        break;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->linesize[0] = (width + 7) >> 3;
        break;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        picture->linesize[0] = width;
        break;
    default:
        return -1;
    }
    return 0;
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    int size, h2, size2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    size = picture->linesize[0] * height;
    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_YUVA420P:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = picture->data[1] + size2 + size2;
        return 2 * size + 2 * size2;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size + size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
    case PIX_FMT_UYVY422:
    case PIX_FMT_UYYVYY411:
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->data[0] = ptr;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        size2 = (size + 3) & ~3;
        picture->data[0] = ptr;
        picture->data[1] = ptr + size2; /* palette is stored here as 256 32 bit words */
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size2 + 256 * 4;
    default:
        picture->data[0] = NULL;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return -1;
    }
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (ff_fill_linesize(picture, pix_fmt, width))
        return -1;

    return ff_fill_pointer(picture, ptr, pix_fmt, height);
}
int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i=0; i<data_planes; i++) {
        if (i == 1) {
            w = ((width >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = height >> desc->log2_chroma_h;
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for(j=0; j<h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
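/* Illustrative usage sketch (an editorial addition, not part of the original
 * source): the usual pattern is to size a buffer with avpicture_get_size()
 * and then wire up the plane pointers and linesizes with avpicture_fill().
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = size >= 0 ? av_malloc(size) : NULL;
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480);
 */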
int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;
    return loss;
}

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for(i = 0;i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
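/* Illustrative usage sketch (an editorial addition, not part of the original
 * source): the mask holds one bit per candidate destination format; here the
 * caller offers YUV420P and RGB24 for an RGBA source whose alpha matters.
 *
 *     int loss;
 *     enum PixelFormat dst =
 *         avcodec_find_best_pix_fmt((1LL << PIX_FMT_YUV420P) |
 *                                   (1LL << PIX_FMT_RGB24),
 *                                   PIX_FMT_RGBA, 1, &loss);
 *     // loss now carries the FF_LOSS_* flags of the chosen conversion
 */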
void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if((!dst) || (!src))
        return;
    for(;height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}
int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
    case FF_PIXEL_PLANAR:
        if (plane == 1 || plane == 2)
            width = -((-width) >> desc->log2_chroma_w);
        return (width * pf->depth + 7) >> 3;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }
    return -1;
}
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for(i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h = -((-height) >> desc->log2_chroma_h);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        memcpy(dst->data[1], src->data[1], 4*256);
        break;
    }
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}
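/* Worked example (an editorial addition, not part of the original source):
 * each output pixel of ff_shrink22() is the rounded average of a 2x2 block,
 * e.g. inputs 10, 20, 30, 40 give (10 + 20 + 30 + 40 + 2) >> 2 = 25;
 * the "+ 2" term rounds instead of truncating. */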
/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}
int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if(size<0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if(picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
        ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
/* NOTE: we scan all the pixels to have an exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
    movd_m2r(lum_m4[0],mm0);\
    movd_m2r(lum_m3[0],mm1);\
    movd_m2r(lum_m2[0],mm2);\
    movd_m2r(lum_m1[0],mm3);\
    movd_m2r(lum[0],mm4);\
    punpcklbw_r2r(mm7,mm0);\
    movd_r2m(mm2,lum_m4[0]);\
    punpcklbw_r2r(mm7,mm1);\
    punpcklbw_r2r(mm7,mm2);\
    punpcklbw_r2r(mm7,mm3);\
    punpcklbw_r2r(mm7,mm4);\
    paddw_r2r(mm3,mm1);\
    psllw_i2r(1,mm2);\
    paddw_r2r(mm4,mm0);\
    psllw_i2r(2,mm1);\
    paddw_r2r(mm6,mm2);\
    paddw_r2r(mm2,mm1);\
    psubusw_r2r(mm0,mm1);\
    psrlw_i2r(3,mm1);\
    packuswb_r2r(mm7,mm1);\
    movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
    movd_m2r(lum_m4[0],mm0);\
    movd_m2r(lum_m3[0],mm1);\
    movd_m2r(lum_m2[0],mm2);\
    movd_m2r(lum_m1[0],mm3);\
    movd_m2r(lum[0],mm4);\
    punpcklbw_r2r(mm7,mm0);\
    punpcklbw_r2r(mm7,mm1);\
    punpcklbw_r2r(mm7,mm2);\
    punpcklbw_r2r(mm7,mm3);\
    punpcklbw_r2r(mm7,mm4);\
    paddw_r2r(mm3,mm1);\
    psllw_i2r(1,mm2);\
    paddw_r2r(mm4,mm0);\
    psllw_i2r(2,mm1);\
    paddw_r2r(mm6,mm2);\
    paddw_r2r(mm2,mm1);\
    psubusw_r2r(mm0,mm1);\
    psrlw_i2r(3,mm1);\
    packuswb_r2r(mm7,mm1);\
    movd_r2m(mm1,dst[0]);
#endif
/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
        dst+=4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
    }
#endif
}
/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}
static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);
    if (!buf)
        return; /* out of memory: leave the field untouched */
    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
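/* Illustrative usage sketch (an editorial addition, not part of the original
 * source): deinterlacing may be done in place by passing the same picture as
 * source and destination, for the YUV planar / gray formats accepted above.
 *
 *     int ret = avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, width, height);
 *     // ret < 0 means unsupported format or width/height not a multiple of 4
 */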