  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. /**
  19. * @file
  20. * Intel Quick Sync Video VPP base function
  21. */
  22. #include "libavutil/common.h"
  23. #include "libavutil/mathematics.h"
  24. #include "libavutil/hwcontext.h"
  25. #include "libavutil/hwcontext_qsv.h"
  26. #include "libavutil/time.h"
  27. #include "libavutil/pixdesc.h"
  28. #include "internal.h"
  29. #include "qsvvpp.h"
  30. #include "video.h"
/* True if the mfx memory-type mask refers to GPU video memory (decoder or VPP target). */
#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
/* True if the mask refers to opaque surfaces managed internally by libmfx. */
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
/* True if the mask refers to CPU (system) memory. */
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

/* One entry of the input/output frame pools: pairs an AVFrame with the
 * mfxFrameSurface1 that libmfx operates on. A slot is "free" when
 * surface is NULL (see clear_unused_frames()/get_free_frame()). */
typedef struct QSVFrame {
    AVFrame *frame;                    // owned AVFrame backing this slot
    mfxFrameSurface1 *surface;         // surface handed to libmfx; NULL when slot is free
    mfxFrameSurface1 surface_internal; /* for system memory */
    struct QSVFrame *next;             // singly-linked pool list
} QSVFrame;
/* abstract struct for all QSV filters */
struct QSVVPPContext {
    mfxSession session;                // "slave" MFX session owned by this VPP instance
    int (*filter_frame) (AVFilterLink *outlink, AVFrame *frame);/* callback */
    enum AVPixelFormat out_sw_format;  /* Real output format */
    mfxVideoParam vpp_param;           // VPP configuration handed to MFXVideoVPP_Init()
    mfxFrameInfo *frame_infos;         /* frame info for each input */

    /* members related to the input/output surface */
    int in_mem_mode;                   // MFX_MEMTYPE_* of the input surfaces
    int out_mem_mode;                  // MFX_MEMTYPE_* of the output surfaces
    QSVFrame *in_frame_list;           // pool of input QSVFrame slots
    QSVFrame *out_frame_list;          // pool of output QSVFrame slots
    int nb_surface_ptrs_in;
    int nb_surface_ptrs_out;
    mfxFrameSurface1 **surface_ptrs_in;   // borrowed pointers into the input hw frames ctx
    mfxFrameSurface1 **surface_ptrs_out;  // borrowed pointers into the output hw frames ctx

    /* MFXVPP extern parameters */
    mfxExtOpaqueSurfaceAlloc opaque_alloc; // ext buffer used in opaque-memory mode
    mfxExtBuffer **ext_buffers;            // ExtParam array passed to libmfx
    int nb_ext_buffers;
};
/* Device handle types probed on the "master" session, in order of
 * preference: VA-API (Linux), then D3D9/D3D11 (Windows). */
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

/* 90 kHz timebase used for mfx surface timestamps. */
static const AVRational default_tb = { 1, 90000 };

/* Mapping from individual MFX_IOPATTERN_* flags to log descriptions,
 * used by ff_qsvvpp_print_iopattern(). */
static const struct {
    int mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    {MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"   },
    {MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface"  },
    {MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface"  },
    {MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface"  },
    {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
    {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
};
  79. int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
  80. const char *extra_string)
  81. {
  82. const char *desc = NULL;
  83. for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
  84. if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
  85. desc = qsv_iopatterns[i].desc;
  86. }
  87. }
  88. if (!desc)
  89. desc = "unknown iopattern";
  90. av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
  91. return 0;
  92. }
/* Mapping from mfxStatus codes to AVERROR codes plus log descriptions,
 * consumed by qsv_map_error(). Warnings (MFX_WRN_*) map to 0 (success). */
static const struct {
    mfxStatus mfxerr;
    int averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                              },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                        },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                         },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                          },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"            },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer"     },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                       },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"      },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                      },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"       },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"            },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"        },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"      },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                    },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                          },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"        },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"             },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"                   },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                        },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"        },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"             },
    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"               },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                          },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"             },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"                 },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"        },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"                   },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"                   },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                       },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"        },
};
  131. static int qsv_map_error(mfxStatus mfx_err, const char **desc)
  132. {
  133. int i;
  134. for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
  135. if (qsv_errors[i].mfxerr == mfx_err) {
  136. if (desc)
  137. *desc = qsv_errors[i].desc;
  138. return qsv_errors[i].averr;
  139. }
  140. }
  141. if (desc)
  142. *desc = "unknown error";
  143. return AVERROR_UNKNOWN;
  144. }
  145. int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
  146. const char *error_string)
  147. {
  148. const char *desc;
  149. int ret;
  150. ret = qsv_map_error(err, &desc);
  151. av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
  152. return ret;
  153. }
  154. int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
  155. const char *warning_string)
  156. {
  157. const char *desc;
  158. int ret;
  159. ret = qsv_map_error(err, &desc);
  160. av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
  161. return ret;
  162. }
  163. /* functions for frameAlloc */
  164. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  165. mfxFrameAllocResponse *resp)
  166. {
  167. QSVVPPContext *s = pthis;
  168. int i;
  169. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  170. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  171. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  172. return MFX_ERR_UNSUPPORTED;
  173. if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
  174. resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
  175. if (!resp->mids)
  176. return AVERROR(ENOMEM);
  177. for (i = 0; i < s->nb_surface_ptrs_in; i++)
  178. resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
  179. resp->NumFrameActual = s->nb_surface_ptrs_in;
  180. } else {
  181. resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
  182. if (!resp->mids)
  183. return AVERROR(ENOMEM);
  184. for (i = 0; i < s->nb_surface_ptrs_out; i++)
  185. resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
  186. resp->NumFrameActual = s->nb_surface_ptrs_out;
  187. }
  188. return MFX_ERR_NONE;
  189. }
/* mfxFrameAllocator.Free callback: only the mid array was allocated in
 * frame_alloc(); the surfaces themselves belong to the hw frames contexts. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}
/* mfxFrameAllocator.Lock callback: mapping video memory to system memory
 * is intentionally unsupported by this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.Unlock callback: counterpart of frame_lock(), also
 * intentionally unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.GetHDL callback: the MemId stored in frame_alloc()
 * already is the native surface handle, so hand it back directly. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
  208. static int pix_fmt_to_mfx_fourcc(int format)
  209. {
  210. switch (format) {
  211. case AV_PIX_FMT_YUV420P:
  212. return MFX_FOURCC_YV12;
  213. case AV_PIX_FMT_NV12:
  214. return MFX_FOURCC_NV12;
  215. case AV_PIX_FMT_YUYV422:
  216. return MFX_FOURCC_YUY2;
  217. case AV_PIX_FMT_BGRA:
  218. return MFX_FOURCC_RGB4;
  219. }
  220. return MFX_FOURCC_NV12;
  221. }
  222. static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
  223. {
  224. switch (frame->format) {
  225. case AV_PIX_FMT_NV12:
  226. case AV_PIX_FMT_P010:
  227. surface->Data.Y = frame->data[0];
  228. surface->Data.UV = frame->data[1];
  229. break;
  230. case AV_PIX_FMT_YUV420P:
  231. surface->Data.Y = frame->data[0];
  232. surface->Data.U = frame->data[1];
  233. surface->Data.V = frame->data[2];
  234. break;
  235. case AV_PIX_FMT_YUYV422:
  236. surface->Data.Y = frame->data[0];
  237. surface->Data.U = frame->data[0] + 1;
  238. surface->Data.V = frame->data[0] + 3;
  239. break;
  240. case AV_PIX_FMT_RGB32:
  241. surface->Data.B = frame->data[0];
  242. surface->Data.G = frame->data[0] + 1;
  243. surface->Data.R = frame->data[0] + 2;
  244. surface->Data.A = frame->data[0] + 3;
  245. break;
  246. default:
  247. return MFX_ERR_UNSUPPORTED;
  248. }
  249. surface->Data.Pitch = frame->linesize[0];
  250. return 0;
  251. }
/* fill the surface info */
/*
 * Populate an mfxFrameInfo from a filter link. For AV_PIX_FMT_QSV links
 * the info is copied from the first surface of the hw frames context;
 * for raw links it is derived from the pixel format descriptor.
 * Common fields (crop, frame rate, SAR) are filled for both cases.
 * Returns 0 on success or a negative AVERROR code.
 */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat pix_fmt;
    AVHWFramesContext *frames_ctx;
    AVQSVFramesContext *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        /* hw link: reuse the surface description from the frames context */
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        /* libmfx wants 32-aligned buffer dimensions; the visible size
         * goes into CropW/CropH below */
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        /* >8-bit formats are stored MSB-aligned (shifted) */
        frameinfo->Shift          = desc->comp[0].depth > 8;
        /* derive chroma subsampling from the descriptor */
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    /* default to square pixels when the SAR is unset */
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
    return 0;
}
  294. static void clear_unused_frames(QSVFrame *list)
  295. {
  296. while (list) {
  297. if (list->surface && !list->surface->Data.Locked) {
  298. list->surface = NULL;
  299. av_frame_free(&list->frame);
  300. }
  301. list = list->next;
  302. }
  303. }
  304. static void clear_frame_list(QSVFrame **list)
  305. {
  306. while (*list) {
  307. QSVFrame *frame;
  308. frame = *list;
  309. *list = (*list)->next;
  310. av_frame_free(&frame->frame);
  311. av_freep(&frame);
  312. }
  313. }
  314. static QSVFrame *get_free_frame(QSVFrame **list)
  315. {
  316. QSVFrame *out = *list;
  317. for (; out; out = out->next) {
  318. if (!out->surface)
  319. break;
  320. }
  321. if (!out) {
  322. out = av_mallocz(sizeof(*out));
  323. if (!out) {
  324. av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
  325. return NULL;
  326. }
  327. out->next = *list;
  328. *list = out;
  329. }
  330. return out;
  331. }
  332. /* get the input surface */
  333. static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
  334. {
  335. QSVFrame *qsv_frame;
  336. AVFilterContext *ctx = inlink->dst;
  337. clear_unused_frames(s->in_frame_list);
  338. qsv_frame = get_free_frame(&s->in_frame_list);
  339. if (!qsv_frame)
  340. return NULL;
  341. /* Turn AVFrame into mfxFrameSurface1.
  342. * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
  343. * mfxFrameSurface1 is stored in AVFrame->data[3];
  344. * for system memory mode, raw video data is stored in
  345. * AVFrame, we should map it into mfxFrameSurface1.
  346. */
  347. if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
  348. if (picref->format != AV_PIX_FMT_QSV) {
  349. av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
  350. return NULL;
  351. }
  352. qsv_frame->frame = av_frame_clone(picref);
  353. qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
  354. } else {
  355. /* make a copy if the input is not padded as libmfx requires */
  356. if (picref->height & 31 || picref->linesize[0] & 31) {
  357. qsv_frame->frame = ff_get_video_buffer(inlink,
  358. FFALIGN(inlink->w, 32),
  359. FFALIGN(inlink->h, 32));
  360. if (!qsv_frame->frame)
  361. return NULL;
  362. qsv_frame->frame->width = picref->width;
  363. qsv_frame->frame->height = picref->height;
  364. if (av_frame_copy(qsv_frame->frame, picref) < 0) {
  365. av_frame_free(&qsv_frame->frame);
  366. return NULL;
  367. }
  368. av_frame_copy_props(qsv_frame->frame, picref);
  369. } else
  370. qsv_frame->frame = av_frame_clone(picref);
  371. if (map_frame_to_surface(qsv_frame->frame,
  372. &qsv_frame->surface_internal) < 0) {
  373. av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
  374. return NULL;
  375. }
  376. qsv_frame->surface = &qsv_frame->surface_internal;
  377. }
  378. qsv_frame->surface->Info = s->frame_infos[FF_INLINK_IDX(inlink)];
  379. qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
  380. inlink->time_base, default_tb);
  381. qsv_frame->surface->Info.PicStruct =
  382. !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
  383. (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
  384. MFX_PICSTRUCT_FIELD_BFF);
  385. if (qsv_frame->frame->repeat_pict == 1)
  386. qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
  387. else if (qsv_frame->frame->repeat_pict == 2)
  388. qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
  389. else if (qsv_frame->frame->repeat_pict == 4)
  390. qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
  391. return qsv_frame;
  392. }
  393. /* get the output surface */
  394. static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
  395. {
  396. AVFilterContext *ctx = outlink->src;
  397. QSVFrame *out_frame;
  398. int ret;
  399. clear_unused_frames(s->out_frame_list);
  400. out_frame = get_free_frame(&s->out_frame_list);
  401. if (!out_frame)
  402. return NULL;
  403. /* For video memory, get a hw frame;
  404. * For system memory, get a sw frame and map it into a mfx_surface. */
  405. if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
  406. out_frame->frame = av_frame_alloc();
  407. if (!out_frame->frame)
  408. return NULL;
  409. ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
  410. if (ret < 0) {
  411. av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
  412. return NULL;
  413. }
  414. out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
  415. } else {
  416. /* Get a frame with aligned dimensions.
  417. * Libmfx need system memory being 128x64 aligned */
  418. out_frame->frame = ff_get_video_buffer(outlink,
  419. FFALIGN(outlink->w, 128),
  420. FFALIGN(outlink->h, 64));
  421. if (!out_frame->frame)
  422. return NULL;
  423. out_frame->frame->width = outlink->w;
  424. out_frame->frame->height = outlink->h;
  425. ret = map_frame_to_surface(out_frame->frame,
  426. &out_frame->surface_internal);
  427. if (ret < 0)
  428. return NULL;
  429. out_frame->surface = &out_frame->surface_internal;
  430. }
  431. out_frame->surface->Info = s->vpp_param.vpp.Out;
  432. return out_frame;
  433. }
/* create the QSV session */
/*
 * Set up the VPP's "slave" MFX session:
 *   1. determine the input memory mode and collect the input surface pool
 *      from the inlink's hw frames context (or fall back to system memory);
 *   2. if the output is AV_PIX_FMT_QSV, create an output hw frames context
 *      and collect its surface pool;
 *   3. query the master session's implementation/version/device handle and
 *      create a matching slave session, joining it to the master when the
 *      runtime supports it;
 *   4. install either the opaque-surface ext buffer or the external frame
 *      allocator, depending on the memory modes.
 * Returns 0 on success or a negative AVERROR code.
 */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink *inlink = avctx->inputs[0];
    AVFilterLink *outlink = avctx->outputs[0];
    AVQSVFramesContext *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;
    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    /* Input side: take memory mode and surfaces from the hw frames ctx,
     * or default to system memory when only a device ctx is present. */
    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    /* Output side: build a fresh hw frames context for QSV output. */
    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        /* opaque input implies opaque output; otherwise use video memory */
        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        if (avctx->extra_hw_frames > 0)
            out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        /* hand ownership of the new frames ctx to the outlink */
        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    /* probe the supported device handle types in order of preference */
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
        return AVERROR_UNKNOWN;
    }

    /* handle (and handle_type) were set above whenever GetHandle succeeded;
     * reaching this point with ret == 0 implies that path was taken */
    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* joining sessions lets master and slave share the device queue */
    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        /* opaque mode: describe both surface pools via an ext buffer */
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        /* video memory: install our pass-through frame allocator */
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}
  570. int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
  571. {
  572. int i;
  573. int ret;
  574. QSVVPPContext *s;
  575. s = av_mallocz(sizeof(*s));
  576. if (!s)
  577. return AVERROR(ENOMEM);
  578. s->filter_frame = param->filter_frame;
  579. if (!s->filter_frame)
  580. s->filter_frame = ff_filter_frame;
  581. s->out_sw_format = param->out_sw_format;
  582. /* create the vpp session */
  583. ret = init_vpp_session(avctx, s);
  584. if (ret < 0)
  585. goto failed;
  586. s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
  587. if (!s->frame_infos) {
  588. ret = AVERROR(ENOMEM);
  589. goto failed;
  590. }
  591. /* Init each input's information */
  592. for (i = 0; i < avctx->nb_inputs; i++) {
  593. ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
  594. if (ret < 0)
  595. goto failed;
  596. }
  597. /* Update input's frame info according to crop */
  598. for (i = 0; i < param->num_crop; i++) {
  599. QSVVPPCrop *crop = param->crop + i;
  600. if (crop->in_idx > avctx->nb_inputs) {
  601. ret = AVERROR(EINVAL);
  602. goto failed;
  603. }
  604. s->frame_infos[crop->in_idx].CropX = crop->x;
  605. s->frame_infos[crop->in_idx].CropY = crop->y;
  606. s->frame_infos[crop->in_idx].CropW = crop->w;
  607. s->frame_infos[crop->in_idx].CropH = crop->h;
  608. }
  609. s->vpp_param.vpp.In = s->frame_infos[0];
  610. ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
  611. if (ret < 0) {
  612. av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
  613. goto failed;
  614. }
  615. if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
  616. s->nb_ext_buffers = param->num_ext_buf + 1;
  617. s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
  618. if (!s->ext_buffers) {
  619. ret = AVERROR(ENOMEM);
  620. goto failed;
  621. }
  622. s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
  623. for (i = 1; i < param->num_ext_buf; i++)
  624. s->ext_buffers[i] = param->ext_buf[i - 1];
  625. s->vpp_param.ExtParam = s->ext_buffers;
  626. s->vpp_param.NumExtParam = s->nb_ext_buffers;
  627. } else {
  628. s->vpp_param.NumExtParam = param->num_ext_buf;
  629. s->vpp_param.ExtParam = param->ext_buf;
  630. }
  631. s->vpp_param.AsyncDepth = 1;
  632. if (IS_SYSTEM_MEMORY(s->in_mem_mode))
  633. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
  634. else if (IS_VIDEO_MEMORY(s->in_mem_mode))
  635. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
  636. else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
  637. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  638. if (IS_SYSTEM_MEMORY(s->out_mem_mode))
  639. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  640. else if (IS_VIDEO_MEMORY(s->out_mem_mode))
  641. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  642. else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
  643. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
  644. /* Print input memory mode */
  645. ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
  646. /* Print output memory mode */
  647. ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
  648. ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
  649. if (ret < 0) {
  650. ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
  651. goto failed;
  652. } else if (ret > 0)
  653. ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
  654. *vpp = s;
  655. return 0;
  656. failed:
  657. ff_qsvvpp_free(&s);
  658. return ret;
  659. }
  660. int ff_qsvvpp_free(QSVVPPContext **vpp)
  661. {
  662. QSVVPPContext *s = *vpp;
  663. if (!s)
  664. return 0;
  665. if (s->session) {
  666. MFXVideoVPP_Close(s->session);
  667. MFXClose(s->session);
  668. }
  669. /* release all the resources */
  670. clear_frame_list(&s->in_frame_list);
  671. clear_frame_list(&s->out_frame_list);
  672. av_freep(&s->surface_ptrs_in);
  673. av_freep(&s->surface_ptrs_out);
  674. av_freep(&s->ext_buffers);
  675. av_freep(&s->frame_infos);
  676. av_freep(vpp);
  677. return 0;
  678. }
/**
 * Run one input frame through the VPP and forward every produced output
 * frame via s->filter_frame(). One input may yield multiple outputs
 * (libmfx signals this with MFX_ERR_MORE_SURFACE), hence the outer loop.
 * Returns 0/positive mfx warning on success, a negative code on error;
 * MFX_ERR_MORE_DATA is translated to AVERROR(EAGAIN).
 */
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    mfxSyncPoint sync;
    QSVFrame *in_frame, *out_frame;
    int ret, filter_ret;

    /* wrap the input AVFrame into an mfx surface */
    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        /* retry while the hardware reports itself busy */
        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }

        /* wait (up to 1000 ms) for the async VPP operation to complete */
        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        /* convert the 90 kHz mfx timestamp back to the output timebase */
        out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
                                             default_tb, outlink->time_base);

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&out_frame->frame);
            ret = filter_ret;
            break;
        }
        /* ownership passed to filter_frame(); detach it from the pool slot */
        out_frame->frame = NULL;
    } while(ret == MFX_ERR_MORE_SURFACE);

    return ret;
}