You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

879 lines
30KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. /**
  19. * @file
  20. * Intel Quick Sync Video VPP base function
  21. */
  22. #include "libavutil/common.h"
  23. #include "libavutil/mathematics.h"
  24. #include "libavutil/hwcontext.h"
  25. #include "libavutil/hwcontext_qsv.h"
  26. #include "libavutil/time.h"
  27. #include "libavutil/pixdesc.h"
  28. #include "internal.h"
  29. #include "qsvvpp.h"
  30. #include "video.h"
/* Classify an mfxMemType bit mask by memory class. */
#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

/* Device handle types probed in order when wiring the slave session to the
 * native device (VAAPI display, D3D9 device manager, D3D11 device). */
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

/* 90 kHz timebase used when rescaling AVFrame pts to mfx surface timestamps. */
static const AVRational default_tb = { 1, 90000 };

/* Human-readable descriptions for MFX_IOPATTERN_* values, used for logging. */
static const struct {
    int mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    {MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"   },
    {MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface"  },
    {MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface"  },
    {MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface"  },
    {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
    {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
};
  52. int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
  53. const char *extra_string)
  54. {
  55. const char *desc = NULL;
  56. for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
  57. if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
  58. desc = qsv_iopatterns[i].desc;
  59. }
  60. }
  61. if (!desc)
  62. desc = "unknown iopattern";
  63. av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
  64. return 0;
  65. }
/* Mapping from libmfx status codes to (AVERROR code, description).
 * Warnings (MFX_WRN_*) map to 0 so callers treat them as success. */
static const struct {
    mfxStatus   mfxerr;
    int         averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                              },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                        },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                         },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                          },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"            },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer"     },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                       },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"      },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                      },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"       },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"            },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"        },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"      },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                    },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                          },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"        },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"             },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"                   },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                        },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"        },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"             },
    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"               },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                          },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"             },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"                 },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"        },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"                   },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"                   },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                       },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"        },
};
  104. static int qsv_map_error(mfxStatus mfx_err, const char **desc)
  105. {
  106. int i;
  107. for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
  108. if (qsv_errors[i].mfxerr == mfx_err) {
  109. if (desc)
  110. *desc = qsv_errors[i].desc;
  111. return qsv_errors[i].averr;
  112. }
  113. }
  114. if (desc)
  115. *desc = "unknown error";
  116. return AVERROR_UNKNOWN;
  117. }
  118. int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
  119. const char *error_string)
  120. {
  121. const char *desc;
  122. int ret;
  123. ret = qsv_map_error(err, &desc);
  124. av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
  125. return ret;
  126. }
  127. int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
  128. const char *warning_string)
  129. {
  130. const char *desc;
  131. int ret;
  132. ret = qsv_map_error(err, &desc);
  133. av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
  134. return ret;
  135. }
  136. /* functions for frameAlloc */
  137. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  138. mfxFrameAllocResponse *resp)
  139. {
  140. QSVVPPContext *s = pthis;
  141. int i;
  142. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  143. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  144. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  145. return MFX_ERR_UNSUPPORTED;
  146. if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
  147. resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
  148. if (!resp->mids)
  149. return AVERROR(ENOMEM);
  150. for (i = 0; i < s->nb_surface_ptrs_in; i++)
  151. resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
  152. resp->NumFrameActual = s->nb_surface_ptrs_in;
  153. } else {
  154. resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
  155. if (!resp->mids)
  156. return AVERROR(ENOMEM);
  157. for (i = 0; i < s->nb_surface_ptrs_out; i++)
  158. resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
  159. resp->NumFrameActual = s->nb_surface_ptrs_out;
  160. }
  161. return MFX_ERR_NONE;
  162. }
/* libmfx allocator Free callback: only the MemId table allocated in
 * frame_alloc is released; the surfaces themselves belong to the
 * AVHWFramesContext pools. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}
/* Lock/Unlock are deliberately unsupported by this external allocator;
 * NOTE(review): presumably libmfx never maps these video-memory surfaces
 * to system memory through this path — confirm against the SDK contract. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* GetHDL callback: the MemId stored by frame_alloc is already the native
 * handle, so it is returned as-is. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
  181. static int pix_fmt_to_mfx_fourcc(int format)
  182. {
  183. switch (format) {
  184. case AV_PIX_FMT_YUV420P:
  185. return MFX_FOURCC_YV12;
  186. case AV_PIX_FMT_NV12:
  187. return MFX_FOURCC_NV12;
  188. case AV_PIX_FMT_YUYV422:
  189. return MFX_FOURCC_YUY2;
  190. case AV_PIX_FMT_BGRA:
  191. return MFX_FOURCC_RGB4;
  192. }
  193. return MFX_FOURCC_NV12;
  194. }
  195. static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
  196. {
  197. switch (frame->format) {
  198. case AV_PIX_FMT_NV12:
  199. case AV_PIX_FMT_P010:
  200. surface->Data.Y = frame->data[0];
  201. surface->Data.UV = frame->data[1];
  202. break;
  203. case AV_PIX_FMT_YUV420P:
  204. surface->Data.Y = frame->data[0];
  205. surface->Data.U = frame->data[1];
  206. surface->Data.V = frame->data[2];
  207. break;
  208. case AV_PIX_FMT_YUYV422:
  209. surface->Data.Y = frame->data[0];
  210. surface->Data.U = frame->data[0] + 1;
  211. surface->Data.V = frame->data[0] + 3;
  212. break;
  213. case AV_PIX_FMT_RGB32:
  214. surface->Data.B = frame->data[0];
  215. surface->Data.G = frame->data[0] + 1;
  216. surface->Data.R = frame->data[0] + 2;
  217. surface->Data.A = frame->data[0] + 3;
  218. break;
  219. default:
  220. return MFX_ERR_UNSUPPORTED;
  221. }
  222. surface->Data.Pitch = frame->linesize[0];
  223. return 0;
  224. }
/* fill the surface info */
/**
 * Populate an mfxFrameInfo from an AVFilterLink.
 *
 * For AV_PIX_FMT_QSV links the info is copied from the first surface of the
 * link's hw frames context; for software formats it is derived from the pixel
 * format descriptor. Crop/rate/aspect fields always come from the link.
 *
 * @return 0 on success, AVERROR(EINVAL) if a QSV link has no hw frames
 *         context, AVERROR_BUG for an unknown pixel format
 */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat pix_fmt;
    AVHWFramesContext *frames_ctx;
    AVQSVFramesContext *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);
        frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        /* all surfaces in a pool share one description */
        *frameinfo = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX = 0;
        frameinfo->CropY = 0;
        /* buffer dimensions are 32-aligned; the visible area is CropW/CropH */
        frameinfo->Width = FFALIGN(link->w, 32);
        frameinfo->Height = FFALIGN(link->h, 32);
        frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        /* >8-bit data is stored MSB-aligned (shifted) for libmfx */
        frameinfo->Shift = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW = link->w;
    frameinfo->CropH = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    /* libmfx rejects a 0 aspect ratio, so default to 1:1 */
    frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
    return 0;
}
  267. static void clear_unused_frames(QSVFrame *list)
  268. {
  269. while (list) {
  270. /* list->queued==1 means the frame is not cached in VPP
  271. * process any more, it can be released to pool. */
  272. if ((list->queued == 1) && !list->surface.Data.Locked) {
  273. av_frame_free(&list->frame);
  274. list->queued = 0;
  275. }
  276. list = list->next;
  277. }
  278. }
  279. static void clear_frame_list(QSVFrame **list)
  280. {
  281. while (*list) {
  282. QSVFrame *frame;
  283. frame = *list;
  284. *list = (*list)->next;
  285. av_frame_free(&frame->frame);
  286. av_freep(&frame);
  287. }
  288. }
  289. static QSVFrame *get_free_frame(QSVFrame **list)
  290. {
  291. QSVFrame *out = *list;
  292. for (; out; out = out->next) {
  293. if (!out->queued) {
  294. out->queued = 1;
  295. break;
  296. }
  297. }
  298. if (!out) {
  299. out = av_mallocz(sizeof(*out));
  300. if (!out) {
  301. av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
  302. return NULL;
  303. }
  304. out->queued = 1;
  305. out->next = *list;
  306. *list = out;
  307. }
  308. return out;
  309. }
  310. /* get the input surface */
  311. static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
  312. {
  313. QSVFrame *qsv_frame;
  314. AVFilterContext *ctx = inlink->dst;
  315. clear_unused_frames(s->in_frame_list);
  316. qsv_frame = get_free_frame(&s->in_frame_list);
  317. if (!qsv_frame)
  318. return NULL;
  319. /* Turn AVFrame into mfxFrameSurface1.
  320. * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
  321. * mfxFrameSurface1 is stored in AVFrame->data[3];
  322. * for system memory mode, raw video data is stored in
  323. * AVFrame, we should map it into mfxFrameSurface1.
  324. */
  325. if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
  326. if (picref->format != AV_PIX_FMT_QSV) {
  327. av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
  328. return NULL;
  329. }
  330. qsv_frame->frame = av_frame_clone(picref);
  331. qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
  332. } else {
  333. /* make a copy if the input is not padded as libmfx requires */
  334. if (picref->height & 31 || picref->linesize[0] & 31) {
  335. qsv_frame->frame = ff_get_video_buffer(inlink,
  336. FFALIGN(inlink->w, 32),
  337. FFALIGN(inlink->h, 32));
  338. if (!qsv_frame->frame)
  339. return NULL;
  340. qsv_frame->frame->width = picref->width;
  341. qsv_frame->frame->height = picref->height;
  342. if (av_frame_copy(qsv_frame->frame, picref) < 0) {
  343. av_frame_free(&qsv_frame->frame);
  344. return NULL;
  345. }
  346. av_frame_copy_props(qsv_frame->frame, picref);
  347. } else
  348. qsv_frame->frame = av_frame_clone(picref);
  349. if (map_frame_to_surface(qsv_frame->frame,
  350. &qsv_frame->surface) < 0) {
  351. av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
  352. return NULL;
  353. }
  354. }
  355. qsv_frame->surface.Info = s->frame_infos[FF_INLINK_IDX(inlink)];
  356. qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
  357. inlink->time_base, default_tb);
  358. qsv_frame->surface.Info.PicStruct =
  359. !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
  360. (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
  361. MFX_PICSTRUCT_FIELD_BFF);
  362. if (qsv_frame->frame->repeat_pict == 1)
  363. qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
  364. else if (qsv_frame->frame->repeat_pict == 2)
  365. qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
  366. else if (qsv_frame->frame->repeat_pict == 4)
  367. qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
  368. return qsv_frame;
  369. }
/* get the output surface */
/**
 * Obtain an output QSVFrame for the next VPP result.
 *
 * Video memory: allocates a hw frame from the outlink's pool and reads the
 * mfxFrameSurface1 from data[3]. System memory: allocates an aligned sw
 * buffer and maps its planes into the surface.
 *
 * @return the prepared QSVFrame, or NULL on error (the node stays in the
 *         list marked queued and is reclaimed by clear_unused_frames later)
 */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame *out_frame;
    int ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * For system memory, get a sw frame and map it into a mfx_surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions.
         * Libmfx need system memory being 128x64 aligned */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        /* restore the visible dimensions after the padded allocation */
        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface);
        if (ret < 0)
            return NULL;
    }

    out_frame->surface.Info = s->vpp_param.vpp.Out;

    return out_frame;
}
/* create the QSV session */
/**
 * Create and configure the VPP's own ("slave") mfx session.
 *
 * Determines input/output memory modes from the filter links, creates the
 * output hw frames context when the output is AV_PIX_FMT_QSV, then clones
 * the implementation/version of the device's master session into a new
 * session, attaches the device handle, and installs either opaque surface
 * lists or an external frame allocator, depending on the memory mode.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink *inlink = avctx->inputs[0];
    AVFilterLink *outlink = avctx->outputs[0];
    AVQSVFramesContext *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;
    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    if (inlink->hw_frames_ctx) {
        /* hw input: reuse the caller's surface pool and its memory type */
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        /* sw input: only a device is provided */
        device_ref = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        /* hw output: build a fresh frames context on the same device */
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        /* opaque input implies opaque output */
        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format = AV_PIX_FMT_QSV;
        out_frames_ctx->width = FFALIGN(outlink->w, 32);
        out_frames_ctx->height = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        if (avctx->extra_hw_frames > 0)
            out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
        out_frames_hwctx->frame_type = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    /* probe the master session for a usable native device handle */
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* joining sessions is only done on runtime >= 1.25 */
    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        /* opaque mode: hand libmfx the surface lists via an ext buffer */
        s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        /* video memory: install the external allocator defined above */
        mfxFrameAllocator frame_allocator = {
            .pthis = s,
            .Alloc = frame_alloc,
            .Lock = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}
/* Size of one async-fifo entry: a frame pointer followed by its sync point
 * (written/read as two separate av_fifo_generic_* calls). */
static unsigned int qsv_fifo_item_size(void)
{
    return sizeof(mfxSyncPoint) + sizeof(QSVFrame*);
}

/* Number of complete entries currently queued in the async fifo. */
static unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
{
    return av_fifo_size(fifo)/qsv_fifo_item_size();
}
  554. int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
  555. {
  556. int i;
  557. int ret;
  558. QSVVPPContext *s;
  559. s = av_mallocz(sizeof(*s));
  560. if (!s)
  561. return AVERROR(ENOMEM);
  562. s->filter_frame = param->filter_frame;
  563. if (!s->filter_frame)
  564. s->filter_frame = ff_filter_frame;
  565. s->out_sw_format = param->out_sw_format;
  566. /* create the vpp session */
  567. ret = init_vpp_session(avctx, s);
  568. if (ret < 0)
  569. goto failed;
  570. s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
  571. if (!s->frame_infos) {
  572. ret = AVERROR(ENOMEM);
  573. goto failed;
  574. }
  575. /* Init each input's information */
  576. for (i = 0; i < avctx->nb_inputs; i++) {
  577. ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
  578. if (ret < 0)
  579. goto failed;
  580. }
  581. /* Update input's frame info according to crop */
  582. for (i = 0; i < param->num_crop; i++) {
  583. QSVVPPCrop *crop = param->crop + i;
  584. if (crop->in_idx > avctx->nb_inputs) {
  585. ret = AVERROR(EINVAL);
  586. goto failed;
  587. }
  588. s->frame_infos[crop->in_idx].CropX = crop->x;
  589. s->frame_infos[crop->in_idx].CropY = crop->y;
  590. s->frame_infos[crop->in_idx].CropW = crop->w;
  591. s->frame_infos[crop->in_idx].CropH = crop->h;
  592. }
  593. s->vpp_param.vpp.In = s->frame_infos[0];
  594. ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
  595. if (ret < 0) {
  596. av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
  597. goto failed;
  598. }
  599. if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
  600. s->nb_ext_buffers = param->num_ext_buf + 1;
  601. s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
  602. if (!s->ext_buffers) {
  603. ret = AVERROR(ENOMEM);
  604. goto failed;
  605. }
  606. s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
  607. for (i = 1; i < param->num_ext_buf; i++)
  608. s->ext_buffers[i] = param->ext_buf[i - 1];
  609. s->vpp_param.ExtParam = s->ext_buffers;
  610. s->vpp_param.NumExtParam = s->nb_ext_buffers;
  611. } else {
  612. s->vpp_param.NumExtParam = param->num_ext_buf;
  613. s->vpp_param.ExtParam = param->ext_buf;
  614. }
  615. s->got_frame = 0;
  616. /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
  617. s->async_fifo = av_fifo_alloc((param->async_depth + 1) * qsv_fifo_item_size());
  618. s->async_depth = param->async_depth;
  619. if (!s->async_fifo) {
  620. ret = AVERROR(ENOMEM);
  621. goto failed;
  622. }
  623. s->vpp_param.AsyncDepth = param->async_depth;
  624. if (IS_SYSTEM_MEMORY(s->in_mem_mode))
  625. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
  626. else if (IS_VIDEO_MEMORY(s->in_mem_mode))
  627. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
  628. else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
  629. s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  630. if (IS_SYSTEM_MEMORY(s->out_mem_mode))
  631. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  632. else if (IS_VIDEO_MEMORY(s->out_mem_mode))
  633. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  634. else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
  635. s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
  636. /* Print input memory mode */
  637. ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
  638. /* Print output memory mode */
  639. ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
  640. ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
  641. if (ret < 0) {
  642. ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
  643. goto failed;
  644. } else if (ret > 0)
  645. ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
  646. *vpp = s;
  647. return 0;
  648. failed:
  649. ff_qsvvpp_free(&s);
  650. return ret;
  651. }
/**
 * Destroy a QSVVPPContext created by ff_qsvvpp_create() and set *vpp to NULL.
 * Safe to call with *vpp == NULL. The VPP component and session are closed
 * before the frame lists are released.
 *
 * @return always 0
 */
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_fifo_free(s->async_fifo);
    av_freep(vpp);
    return 0;
}
/**
 * Push one input frame through VPP and forward any completed outputs.
 *
 * When s->eof is set, first drains every pending entry from the async fifo.
 * A NULL picref performs only that drain. Otherwise the frame is submitted,
 * outputs are queued (frame pointer + sync point as one fifo entry), and
 * once the queue exceeds async_depth the oldest entry is synced and passed
 * to s->filter_frame.
 *
 * @return 0 or a negative AVERROR code; AVERROR(EAGAIN) when VPP needs
 *         more input before producing output
 */
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    mfxSyncPoint sync;
    QSVFrame *in_frame, *out_frame, *tmp;
    int ret, filter_ret;

    /* at EOF, flush everything still queued in the async fifo */
    while (s->eof && qsv_fifo_size(s->async_fifo)) {
        av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
        av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);
        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        filter_ret = s->filter_frame(outlink, tmp->frame);
        if (filter_ret < 0) {
            av_frame_free(&tmp->frame);
            ret = filter_ret;
            /* NOTE(review): only leaves this drain loop; execution falls
             * through to the submit path below — confirm this is intended */
            break;
        }
        tmp->queued--;
        s->got_frame = 1;
        tmp->frame = NULL;
    };

    if (!picref)
        return 0;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        /* retry while the device is busy */
        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
                                               &out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }
        out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
                                             default_tb, outlink->time_base);

        out_frame->queued++;
        /* one logical fifo entry = frame pointer, then its sync point */
        av_fifo_generic_write(s->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(s->async_fifo, &sync, sizeof(sync), NULL);

        if (qsv_fifo_size(s->async_fifo) > s->async_depth) {
            /* queue full: retire the oldest output */
            av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
            av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);

            do {
                ret = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);

            filter_ret = s->filter_frame(outlink, tmp->frame);
            if (filter_ret < 0) {
                av_frame_free(&tmp->frame);
                ret = filter_ret;
                break;
            }

            tmp->queued--;
            s->got_frame = 1;
            tmp->frame = NULL;
        }
        /* MFX_ERR_MORE_SURFACE: same input yields further outputs (e.g.
         * frame-rate conversion), so loop to fetch another surface */
    } while(ret == MFX_ERR_MORE_SURFACE);

    return ret;
}