You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1681 lines
59KB

  1. /*
  2. * H.264 hardware encoding using nvidia nvenc
  3. * Copyright (c) 2014 Timo Rothenpieler <timo@rothenpieler.org>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #include "config.h"
  22. #if defined(_WIN32)
  23. #include <windows.h>
  24. #else
  25. #include <dlfcn.h>
  26. #endif
  27. #include "libavutil/imgutils.h"
  28. #include "libavutil/avassert.h"
  29. #include "libavutil/mem.h"
  30. #include "libavutil/hwcontext.h"
  31. #include "internal.h"
  32. #include "thread.h"
  33. #include "nvenc.h"
  34. #if CONFIG_CUDA
  35. #include "libavutil/hwcontext_cuda.h"
  36. #endif
  37. #if defined(_WIN32)
  38. #define LOAD_FUNC(l, s) GetProcAddress(l, s)
  39. #define DL_CLOSE_FUNC(l) FreeLibrary(l)
  40. #else
  41. #define LOAD_FUNC(l, s) dlsym(l, s)
  42. #define DL_CLOSE_FUNC(l) dlclose(l)
  43. #endif
/* Pixel formats accepted by the nvenc encoders, terminated by AV_PIX_FMT_NONE.
 * AV_PIX_FMT_CUDA (frames already resident on the GPU) is only offered when
 * FFmpeg was configured with CUDA support. */
const enum AVPixelFormat ff_nvenc_pix_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_YUV444P,
#if CONFIG_CUDA
    AV_PIX_FMT_CUDA,
#endif
    AV_PIX_FMT_NONE
};
/* Bookkeeping entry carried through the encoder's internal queues: depending
 * on the processing stage it holds either a raw timestamp or the input
 * surface the data originated from. */
typedef struct NvencData
{
    union {
        int64_t timestamp;      /* timestamp value queued alongside a frame */
        NvencSurface *surface;  /* input surface associated with this entry */
    } u;
} NvencData;
/* String-to-number mapping entry for option parsing (levels, etc.).
 * Tables of these are NULL-terminated on the `str` field. */
typedef struct NvencValuePair
{
    const char *str; /* user-visible option string */
    uint32_t num;    /* corresponding NVENC enum value */
} NvencValuePair;
/* User-visible H.264 level strings mapped to NVENC level enums.
 * Both "x" and "x.0" spellings are accepted; NULL-terminated. */
static const NvencValuePair nvenc_h264_level_pairs[] = {
    { "auto", NV_ENC_LEVEL_AUTOSELECT },
    { "1"   , NV_ENC_LEVEL_H264_1     },
    { "1.0" , NV_ENC_LEVEL_H264_1     },
    { "1b"  , NV_ENC_LEVEL_H264_1b    },
    { "1.0b", NV_ENC_LEVEL_H264_1b    },
    { "1.1" , NV_ENC_LEVEL_H264_11    },
    { "1.2" , NV_ENC_LEVEL_H264_12    },
    { "1.3" , NV_ENC_LEVEL_H264_13    },
    { "2"   , NV_ENC_LEVEL_H264_2     },
    { "2.0" , NV_ENC_LEVEL_H264_2     },
    { "2.1" , NV_ENC_LEVEL_H264_21    },
    { "2.2" , NV_ENC_LEVEL_H264_22    },
    { "3"   , NV_ENC_LEVEL_H264_3     },
    { "3.0" , NV_ENC_LEVEL_H264_3     },
    { "3.1" , NV_ENC_LEVEL_H264_31    },
    { "3.2" , NV_ENC_LEVEL_H264_32    },
    { "4"   , NV_ENC_LEVEL_H264_4     },
    { "4.0" , NV_ENC_LEVEL_H264_4     },
    { "4.1" , NV_ENC_LEVEL_H264_41    },
    { "4.2" , NV_ENC_LEVEL_H264_42    },
    { "5"   , NV_ENC_LEVEL_H264_5     },
    { "5.0" , NV_ENC_LEVEL_H264_5     },
    { "5.1" , NV_ENC_LEVEL_H264_51    },
    { NULL }
};
/* User-visible HEVC level strings mapped to NVENC level enums.
 * Both "x" and "x.0" spellings are accepted; NULL-terminated. */
static const NvencValuePair nvenc_hevc_level_pairs[] = {
    { "auto", NV_ENC_LEVEL_AUTOSELECT },
    { "1"   , NV_ENC_LEVEL_HEVC_1     },
    { "1.0" , NV_ENC_LEVEL_HEVC_1     },
    { "2"   , NV_ENC_LEVEL_HEVC_2     },
    { "2.0" , NV_ENC_LEVEL_HEVC_2     },
    { "2.1" , NV_ENC_LEVEL_HEVC_21    },
    { "3"   , NV_ENC_LEVEL_HEVC_3     },
    { "3.0" , NV_ENC_LEVEL_HEVC_3     },
    { "3.1" , NV_ENC_LEVEL_HEVC_31    },
    { "4"   , NV_ENC_LEVEL_HEVC_4     },
    { "4.0" , NV_ENC_LEVEL_HEVC_4     },
    { "4.1" , NV_ENC_LEVEL_HEVC_41    },
    { "5"   , NV_ENC_LEVEL_HEVC_5     },
    { "5.0" , NV_ENC_LEVEL_HEVC_5     },
    { "5.1" , NV_ENC_LEVEL_HEVC_51    },
    { "5.2" , NV_ENC_LEVEL_HEVC_52    },
    { "6"   , NV_ENC_LEVEL_HEVC_6     },
    { "6.0" , NV_ENC_LEVEL_HEVC_6     },
    { "6.1" , NV_ENC_LEVEL_HEVC_61    },
    { "6.2" , NV_ENC_LEVEL_HEVC_62    },
    { NULL }
};
/* Mapping from NVENC status codes to AVERROR codes, with a short description
 * used in log messages. Searched linearly by nvenc_map_error(). */
static const struct {
    NVENCSTATUS nverr;
    int         averr;
    const char *desc;
} nvenc_errors[] = {
    { NV_ENC_SUCCESS,                      0,                "success"                  },
    { NV_ENC_ERR_NO_ENCODE_DEVICE,         AVERROR(ENOENT),  "no encode device"         },
    { NV_ENC_ERR_UNSUPPORTED_DEVICE,       AVERROR(ENOSYS),  "unsupported device"       },
    { NV_ENC_ERR_INVALID_ENCODERDEVICE,    AVERROR(EINVAL),  "invalid encoder device"   },
    { NV_ENC_ERR_INVALID_DEVICE,           AVERROR(EINVAL),  "invalid device"           },
    { NV_ENC_ERR_DEVICE_NOT_EXIST,         AVERROR(EIO),     "device does not exist"    },
    { NV_ENC_ERR_INVALID_PTR,              AVERROR(EFAULT),  "invalid ptr"              },
    { NV_ENC_ERR_INVALID_EVENT,            AVERROR(EINVAL),  "invalid event"            },
    { NV_ENC_ERR_INVALID_PARAM,            AVERROR(EINVAL),  "invalid param"            },
    { NV_ENC_ERR_INVALID_CALL,             AVERROR(EINVAL),  "invalid call"             },
    { NV_ENC_ERR_OUT_OF_MEMORY,            AVERROR(ENOMEM),  "out of memory"            },
    { NV_ENC_ERR_ENCODER_NOT_INITIALIZED,  AVERROR(EINVAL),  "encoder not initialized"  },
    { NV_ENC_ERR_UNSUPPORTED_PARAM,        AVERROR(ENOSYS),  "unsupported param"        },
    { NV_ENC_ERR_LOCK_BUSY,                AVERROR(EAGAIN),  "lock busy"                },
    { NV_ENC_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOBUFS), "not enough buffer"        },
    { NV_ENC_ERR_INVALID_VERSION,          AVERROR(EINVAL),  "invalid version"          },
    { NV_ENC_ERR_MAP_FAILED,               AVERROR(EIO),     "map failed"               },
    { NV_ENC_ERR_NEED_MORE_INPUT,          AVERROR(EAGAIN),  "need more input"          },
    { NV_ENC_ERR_ENCODER_BUSY,             AVERROR(EAGAIN),  "encoder busy"             },
    { NV_ENC_ERR_EVENT_NOT_REGISTERD,      AVERROR(EBADF),   "event not registered"     },
    { NV_ENC_ERR_GENERIC,                  AVERROR_UNKNOWN,  "generic error"            },
    { NV_ENC_ERR_INCOMPATIBLE_CLIENT_KEY,  AVERROR(EINVAL),  "incompatible client key"  },
    { NV_ENC_ERR_UNIMPLEMENTED,            AVERROR(ENOSYS),  "unimplemented"            },
    { NV_ENC_ERR_RESOURCE_REGISTER_FAILED, AVERROR(EIO),     "resource register failed" },
    { NV_ENC_ERR_RESOURCE_NOT_REGISTERED,  AVERROR(EBADF),   "resource not registered"  },
    { NV_ENC_ERR_RESOURCE_NOT_MAPPED,      AVERROR(EBADF),   "resource not mapped"      },
};
  146. static int nvenc_map_error(NVENCSTATUS err, const char **desc)
  147. {
  148. int i;
  149. for (i = 0; i < FF_ARRAY_ELEMS(nvenc_errors); i++) {
  150. if (nvenc_errors[i].nverr == err) {
  151. if (desc)
  152. *desc = nvenc_errors[i].desc;
  153. return nvenc_errors[i].averr;
  154. }
  155. }
  156. if (desc)
  157. *desc = "unknown error";
  158. return AVERROR_UNKNOWN;
  159. }
  160. static int nvenc_print_error(void *log_ctx, NVENCSTATUS err,
  161. const char *error_string)
  162. {
  163. const char *desc;
  164. int ret;
  165. ret = nvenc_map_error(err, &desc);
  166. av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
  167. return ret;
  168. }
  169. static int input_string_to_uint32(AVCodecContext *avctx, const NvencValuePair *pair, const char *input, uint32_t *output)
  170. {
  171. for (; pair->str; ++pair) {
  172. if (!strcmp(input, pair->str)) {
  173. *output = pair->num;
  174. return 0;
  175. }
  176. }
  177. return AVERROR(EINVAL);
  178. }
/* Push one timestamp onto the FIFO (stored as raw int64_t bytes). */
static void timestamp_queue_enqueue(AVFifoBuffer* queue, int64_t timestamp)
{
    av_fifo_generic_write(queue, &timestamp, sizeof(timestamp), NULL);
}
  183. static int64_t timestamp_queue_dequeue(AVFifoBuffer* queue)
  184. {
  185. int64_t timestamp = AV_NOPTS_VALUE;
  186. if (av_fifo_size(queue) > 0)
  187. av_fifo_generic_read(queue, &timestamp, sizeof(timestamp), NULL);
  188. return timestamp;
  189. }
/* Resolve symbol `s` from the already-opened CUDA library into function
 * pointer `f` (cast to type `t`); on failure log fatally and jump to the
 * caller's local `error` label. Requires `dl_fn` and `avctx` in scope. */
#define CHECK_LOAD_FUNC(t, f, s) \
do { \
    (f) = (t)LOAD_FUNC(dl_fn->cuda_lib, s); \
    if (!(f)) { \
        av_log(avctx, AV_LOG_FATAL, "Failed loading %s from CUDA library\n", s); \
        goto error; \
    } \
} while (0)
/* Bind the CUDA driver entry points used by nvenc.
 * With CONFIG_CUDA the symbols are resolved at link time; otherwise the
 * driver library (nvcuda.dll / libcuda.so) is loaded dynamically and each
 * function is looked up by name via CHECK_LOAD_FUNC.
 * Returns 1 on success, 0 on failure (boolean convention, not AVERROR). */
static av_cold int nvenc_dyload_cuda(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

#if CONFIG_CUDA
    /* Linked directly against CUDA: no dynamic loading needed. */
    dl_fn->cu_init = cuInit;
    dl_fn->cu_device_get_count = cuDeviceGetCount;
    dl_fn->cu_device_get = cuDeviceGet;
    dl_fn->cu_device_get_name = cuDeviceGetName;
    dl_fn->cu_device_compute_capability = cuDeviceComputeCapability;
    dl_fn->cu_ctx_create = cuCtxCreate_v2;
    dl_fn->cu_ctx_pop_current = cuCtxPopCurrent_v2;
    dl_fn->cu_ctx_destroy = cuCtxDestroy_v2;

    return 1;
#else
    if (dl_fn->cuda_lib)
        return 1; /* already loaded */

#if defined(_WIN32)
    dl_fn->cuda_lib = LoadLibrary(TEXT("nvcuda.dll"));
#else
    dl_fn->cuda_lib = dlopen("libcuda.so", RTLD_LAZY);
#endif

    if (!dl_fn->cuda_lib) {
        av_log(avctx, AV_LOG_FATAL, "Failed loading CUDA library\n");
        goto error;
    }

    CHECK_LOAD_FUNC(PCUINIT, dl_fn->cu_init, "cuInit");
    CHECK_LOAD_FUNC(PCUDEVICEGETCOUNT, dl_fn->cu_device_get_count, "cuDeviceGetCount");
    CHECK_LOAD_FUNC(PCUDEVICEGET, dl_fn->cu_device_get, "cuDeviceGet");
    CHECK_LOAD_FUNC(PCUDEVICEGETNAME, dl_fn->cu_device_get_name, "cuDeviceGetName");
    CHECK_LOAD_FUNC(PCUDEVICECOMPUTECAPABILITY, dl_fn->cu_device_compute_capability, "cuDeviceComputeCapability");
    CHECK_LOAD_FUNC(PCUCTXCREATE, dl_fn->cu_ctx_create, "cuCtxCreate_v2");
    CHECK_LOAD_FUNC(PCUCTXPOPCURRENT, dl_fn->cu_ctx_pop_current, "cuCtxPopCurrent_v2");
    CHECK_LOAD_FUNC(PCUCTXDESTROY, dl_fn->cu_ctx_destroy, "cuCtxDestroy_v2");

    return 1;

error:
    if (dl_fn->cuda_lib)
        DL_CLOSE_FUNC(dl_fn->cuda_lib);
    dl_fn->cuda_lib = NULL;

    return 0;
#endif
}
/* Log a failed CUDA call and report success (1) / failure (0) as a boolean. */
static av_cold int check_cuda_errors(AVCodecContext *avctx, CUresult err, const char *func)
{
    if (err != CUDA_SUCCESS) {
        av_log(avctx, AV_LOG_FATAL, ">> %s - failed with error code 0x%x\n", func, err);
        return 0;
    }

    return 1;
}

/* Convenience wrapper: evaluates `f`, logs it by its source text via #f, and
 * jumps to the caller's local `error` label on failure. Intentionally shares
 * the function's name — later call sites expand to the macro, which in turn
 * calls the function (macro recursion is blocked by the preprocessor). */
#define check_cuda_errors(f) if (!check_cuda_errors(avctx, f, #f)) goto error
/* Verify that a CUDA device capable of the requested encode is present.
 * Picks a minimum SM version based on codec and pixel format (H.264 4:4:4,
 * HEVC and the lossless presets require SM 5.2; plain H.264 needs SM 3.0),
 * enumerates all CUDA devices and records the capable ones in
 * dl_fn->nvenc_devices. Returns 1 on success, 0 on failure. */
static av_cold int nvenc_check_cuda(AVCodecContext *avctx)
{
    int device_count = 0;
    CUdevice cu_device = 0;
    char gpu_name[128];
    int smminor = 0, smmajor = 0;
    int i, smver, target_smver;
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        target_smver = ctx->data_pix_fmt == AV_PIX_FMT_YUV444P ? 0x52 : 0x30;
        break;
    case AV_CODEC_ID_H265:
        target_smver = 0x52;
        break;
    default:
        av_log(avctx, AV_LOG_FATAL, "Unknown codec name\n");
        goto error;
    }

    /* Matches both "lossless" and "losslesshp" presets. */
    if (!strncmp(ctx->preset, "lossless", 8))
        target_smver = 0x52;

    if (!nvenc_dyload_cuda(avctx))
        return 0;

    if (dl_fn->nvenc_device_count > 0)
        return 1; /* devices already enumerated by a previous call */

    check_cuda_errors(dl_fn->cu_init(0));

    check_cuda_errors(dl_fn->cu_device_get_count(&device_count));

    if (!device_count) {
        av_log(avctx, AV_LOG_FATAL, "No CUDA capable devices found\n");
        goto error;
    }

    av_log(avctx, AV_LOG_VERBOSE, "%d CUDA capable devices found\n", device_count);

    dl_fn->nvenc_device_count = 0;

    for (i = 0; i < device_count; ++i) {
        check_cuda_errors(dl_fn->cu_device_get(&cu_device, i));
        check_cuda_errors(dl_fn->cu_device_get_name(gpu_name, sizeof(gpu_name), cu_device));
        check_cuda_errors(dl_fn->cu_device_compute_capability(&smmajor, &smminor, cu_device));

        /* Pack major/minor into one comparable value, e.g. 5.2 -> 0x52. */
        smver = (smmajor << 4) | smminor;

        av_log(avctx, AV_LOG_VERBOSE, "[ GPU #%d - < %s > has Compute SM %d.%d, NVENC %s ]\n", i, gpu_name, smmajor, smminor, (smver >= target_smver) ? "Available" : "Not Available");

        /* NOTE(review): no bounds check against the nvenc_devices array size
         * here — assumes it can hold device_count entries; confirm against
         * the declaration in nvenc.h. */
        if (smver >= target_smver)
            dl_fn->nvenc_devices[dl_fn->nvenc_device_count++] = cu_device;
    }

    if (!dl_fn->nvenc_device_count) {
        av_log(avctx, AV_LOG_FATAL, "No NVENC capable devices found\n");
        goto error;
    }

    return 1;

error:
    dl_fn->nvenc_device_count = 0;

    return 0;
}
/* Load the NVENC runtime and populate the API function list.
 * Picks the 64-bit or 32-bit DLL on Windows (based on pointer size) or
 * libnvidia-encode.so.1 elsewhere, then calls NvEncodeAPICreateInstance to
 * fill dl_fn->nvenc_funcs. Returns 1 on success, 0 on failure. */
static av_cold int nvenc_dyload_nvenc(AVCodecContext *avctx)
{
    PNVENCODEAPICREATEINSTANCE nvEncodeAPICreateInstance = 0;
    NVENCSTATUS nvstatus;
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    if (!nvenc_check_cuda(avctx))
        return 0;

    if (dl_fn->nvenc_lib)
        return 1; /* already loaded */

#if defined(_WIN32)
    if (sizeof(void*) == 8) {
        dl_fn->nvenc_lib = LoadLibrary(TEXT("nvEncodeAPI64.dll"));
    } else {
        dl_fn->nvenc_lib = LoadLibrary(TEXT("nvEncodeAPI.dll"));
    }
#else
    dl_fn->nvenc_lib = dlopen("libnvidia-encode.so.1", RTLD_LAZY);
#endif

    if (!dl_fn->nvenc_lib) {
        av_log(avctx, AV_LOG_FATAL, "Failed loading the nvenc library\n");
        goto error;
    }

    nvEncodeAPICreateInstance = (PNVENCODEAPICREATEINSTANCE)LOAD_FUNC(dl_fn->nvenc_lib, "NvEncodeAPICreateInstance");

    if (!nvEncodeAPICreateInstance) {
        av_log(avctx, AV_LOG_FATAL, "Failed to load nvenc entrypoint\n");
        goto error;
    }

    dl_fn->nvenc_funcs.version = NV_ENCODE_API_FUNCTION_LIST_VER;

    nvstatus = nvEncodeAPICreateInstance(&dl_fn->nvenc_funcs);

    if (nvstatus != NV_ENC_SUCCESS) {
        nvenc_print_error(avctx, nvstatus, "Failed to create nvenc instance");
        goto error;
    }

    av_log(avctx, AV_LOG_VERBOSE, "Nvenc initialized successfully\n");

    return 1;

error:
    if (dl_fn->nvenc_lib)
        DL_CLOSE_FUNC(dl_fn->nvenc_lib);

    dl_fn->nvenc_lib = NULL;

    return 0;
}
  343. static av_cold void nvenc_unload_nvenc(AVCodecContext *avctx)
  344. {
  345. NvencContext *ctx = avctx->priv_data;
  346. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  347. DL_CLOSE_FUNC(dl_fn->nvenc_lib);
  348. dl_fn->nvenc_lib = NULL;
  349. dl_fn->nvenc_device_count = 0;
  350. #if !CONFIG_CUDA
  351. DL_CLOSE_FUNC(dl_fn->cuda_lib);
  352. dl_fn->cuda_lib = NULL;
  353. #endif
  354. dl_fn->cu_init = NULL;
  355. dl_fn->cu_device_get_count = NULL;
  356. dl_fn->cu_device_get = NULL;
  357. dl_fn->cu_device_get_name = NULL;
  358. dl_fn->cu_device_compute_capability = NULL;
  359. dl_fn->cu_ctx_create = NULL;
  360. dl_fn->cu_ctx_pop_current = NULL;
  361. dl_fn->cu_ctx_destroy = NULL;
  362. av_log(avctx, AV_LOG_VERBOSE, "Nvenc unloaded\n");
  363. }
/* Select or create the CUDA context used for encoding.
 * For AV_PIX_FMT_CUDA input the context comes from the caller's
 * hw_frames_ctx (and data_pix_fmt becomes the frames' software format);
 * otherwise a new context is created on the GPU selected by the "gpu"
 * option. Returns 0 on success or a negative AVERROR code. */
static av_cold int nvenc_setup_device(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    CUresult cu_res;
    CUcontext cu_context_curr;

    ctx->data_pix_fmt = avctx->pix_fmt;

#if CONFIG_CUDA
    if (avctx->pix_fmt == AV_PIX_FMT_CUDA) {
        AVHWFramesContext *frames_ctx;
        AVCUDADeviceContext *device_hwctx;

        if (!avctx->hw_frames_ctx) {
            av_log(avctx, AV_LOG_ERROR, "hw_frames_ctx must be set when using GPU frames as input\n");
            return AVERROR(EINVAL);
        }

        /* Reuse the caller's CUDA context instead of creating our own. */
        frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        device_hwctx = frames_ctx->device_ctx->hwctx;
        ctx->cu_context = device_hwctx->cuda_ctx;
        ctx->data_pix_fmt = frames_ctx->sw_format;

        return 0;
    }
#endif

    if (ctx->gpu >= dl_fn->nvenc_device_count) {
        av_log(avctx, AV_LOG_FATAL, "Requested GPU %d, but only %d GPUs are available!\n", ctx->gpu, dl_fn->nvenc_device_count);
        return AVERROR(EINVAL);
    }

    ctx->cu_context = NULL;

    cu_res = dl_fn->cu_ctx_create(&ctx->cu_context_internal, 4, dl_fn->nvenc_devices[ctx->gpu]); // CU_CTX_SCHED_BLOCKING_SYNC=4, avoid CPU spins

    if (cu_res != CUDA_SUCCESS) {
        av_log(avctx, AV_LOG_FATAL, "Failed creating CUDA context for NVENC: 0x%x\n", (int)cu_res);
        return AVERROR_EXTERNAL;
    }

    /* Pop the freshly created context so it is not left current on this
     * thread; it remains owned via cu_context_internal. */
    cu_res = dl_fn->cu_ctx_pop_current(&cu_context_curr);

    if (cu_res != CUDA_SUCCESS) {
        av_log(avctx, AV_LOG_FATAL, "Failed popping CUDA context: 0x%x\n", (int)cu_res);
        return AVERROR_EXTERNAL;
    }

    ctx->cu_context = ctx->cu_context_internal;

    return 0;
}
  404. static av_cold int nvenc_open_session(AVCodecContext *avctx)
  405. {
  406. NvencContext *ctx = avctx->priv_data;
  407. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  408. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  409. NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS encode_session_params = { 0 };
  410. NVENCSTATUS nv_status;
  411. encode_session_params.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
  412. encode_session_params.apiVersion = NVENCAPI_VERSION;
  413. encode_session_params.device = ctx->cu_context;
  414. encode_session_params.deviceType = NV_ENC_DEVICE_TYPE_CUDA;
  415. nv_status = p_nvenc->nvEncOpenEncodeSessionEx(&encode_session_params, &ctx->nvencoder);
  416. if (nv_status != NV_ENC_SUCCESS) {
  417. ctx->nvencoder = NULL;
  418. return nvenc_print_error(avctx, nv_status, "OpenEncodeSessionEx failed");
  419. }
  420. return 0;
  421. }
  422. static av_cold void set_constqp(AVCodecContext *avctx)
  423. {
  424. NvencContext *ctx = avctx->priv_data;
  425. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CONSTQP;
  426. ctx->encode_config.rcParams.constQP.qpInterB = avctx->global_quality;
  427. ctx->encode_config.rcParams.constQP.qpInterP = avctx->global_quality;
  428. ctx->encode_config.rcParams.constQP.qpIntra = avctx->global_quality;
  429. }
  430. static av_cold void set_vbr(AVCodecContext *avctx)
  431. {
  432. NvencContext *ctx = avctx->priv_data;
  433. ctx->encode_config.rcParams.enableMinQP = 1;
  434. ctx->encode_config.rcParams.enableMaxQP = 1;
  435. ctx->encode_config.rcParams.minQP.qpInterB = avctx->qmin;
  436. ctx->encode_config.rcParams.minQP.qpInterP = avctx->qmin;
  437. ctx->encode_config.rcParams.minQP.qpIntra = avctx->qmin;
  438. ctx->encode_config.rcParams.maxQP.qpInterB = avctx->qmax;
  439. ctx->encode_config.rcParams.maxQP.qpInterP = avctx->qmax;
  440. ctx->encode_config.rcParams.maxQP.qpIntra = avctx->qmax;
  441. }
  442. static av_cold void set_lossless(AVCodecContext *avctx)
  443. {
  444. NvencContext *ctx = avctx->priv_data;
  445. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CONSTQP;
  446. ctx->encode_config.rcParams.constQP.qpInterB = 0;
  447. ctx->encode_config.rcParams.constQP.qpInterP = 0;
  448. ctx->encode_config.rcParams.constQP.qpIntra = 0;
  449. }
/* Map libavcodec rate-control settings onto NVENC rcParams.
 * Mode priority: lossless > cbr > global_quality (constqp) > qmin/qmax VBR
 * > plain VBR with a default QP. Also derives maxBitRate and vbvBufferSize
 * fallbacks. Side effect: sets avctx->qmin/qmax to -1 in lossless and
 * constqp modes so QP clamping does not interfere with the fixed QP. */
static av_cold void nvenc_setup_rate_control(AVCodecContext *avctx, int lossless)
{
    NvencContext *ctx = avctx->priv_data;
    int qp_inter_p;

    if (avctx->bit_rate > 0) {
        ctx->encode_config.rcParams.averageBitRate = avctx->bit_rate;
    } else if (ctx->encode_config.rcParams.averageBitRate > 0) {
        /* No user bitrate: cap maxBitRate at the preset's average bitrate. */
        ctx->encode_config.rcParams.maxBitRate = ctx->encode_config.rcParams.averageBitRate;
    }

    if (avctx->rc_max_rate > 0)
        ctx->encode_config.rcParams.maxBitRate = avctx->rc_max_rate;

    if (lossless) {
        if (avctx->codec->id == AV_CODEC_ID_H264)
            ctx->encode_config.encodeCodecConfig.h264Config.qpPrimeYZeroTransformBypassFlag = 1;

        set_lossless(avctx);

        /* Disable QP clamping so it cannot fight the forced QP 0. */
        avctx->qmin = -1;
        avctx->qmax = -1;
    } else if (ctx->cbr) {
        if (!ctx->twopass) {
            ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR;
        } else {
            ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_2_PASS_QUALITY;

            if (avctx->codec->id == AV_CODEC_ID_H264) {
                ctx->encode_config.encodeCodecConfig.h264Config.adaptiveTransformMode = NV_ENC_H264_ADAPTIVE_TRANSFORM_ENABLE;
                ctx->encode_config.encodeCodecConfig.h264Config.fmoMode = NV_ENC_H264_FMO_DISABLE;
            }
        }

        /* CBR streams additionally carry buffering-period and picture-timing
         * SEI messages. */
        if (avctx->codec->id == AV_CODEC_ID_H264) {
            ctx->encode_config.encodeCodecConfig.h264Config.outputBufferingPeriodSEI = 1;
            ctx->encode_config.encodeCodecConfig.h264Config.outputPictureTimingSEI = 1;
        } else if (avctx->codec->id == AV_CODEC_ID_H265) {
            ctx->encode_config.encodeCodecConfig.hevcConfig.outputBufferingPeriodSEI = 1;
            ctx->encode_config.encodeCodecConfig.hevcConfig.outputPictureTimingSEI = 1;
        }
    } else if (avctx->global_quality > 0) {
        set_constqp(avctx);

        avctx->qmin = -1;
        avctx->qmax = -1;
    } else {
        if (avctx->qmin >= 0 && avctx->qmax >= 0) {
            set_vbr(avctx);

            qp_inter_p = (avctx->qmax + 3 * avctx->qmin) / 4; // biased towards Qmin

            if (ctx->twopass) {
                ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_2_PASS_VBR;
                if (avctx->codec->id == AV_CODEC_ID_H264) {
                    ctx->encode_config.encodeCodecConfig.h264Config.adaptiveTransformMode = NV_ENC_H264_ADAPTIVE_TRANSFORM_ENABLE;
                    ctx->encode_config.encodeCodecConfig.h264Config.fmoMode = NV_ENC_H264_FMO_DISABLE;
                }
            } else {
                ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_VBR_MINQP;
            }
        } else {
            qp_inter_p = 26; // default to 26

            if (ctx->twopass) {
                ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_2_PASS_VBR;
            } else {
                ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_VBR;
            }
        }

        /* Seed the rate control with initial QPs; intra/B values come from
         * the i/b quant factors and offsets when both are set, clipped to
         * the valid 0..51 range. */
        ctx->encode_config.rcParams.enableInitialRCQP = 1;
        ctx->encode_config.rcParams.initialRCQP.qpInterP = qp_inter_p;

        if (avctx->i_quant_factor != 0.0 && avctx->b_quant_factor != 0.0) {
            ctx->encode_config.rcParams.initialRCQP.qpIntra = av_clip(
                qp_inter_p * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
            ctx->encode_config.rcParams.initialRCQP.qpInterB = av_clip(
                qp_inter_p * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);
        } else {
            ctx->encode_config.rcParams.initialRCQP.qpIntra = qp_inter_p;
            ctx->encode_config.rcParams.initialRCQP.qpInterB = qp_inter_p;
        }
    }

    if (avctx->rc_buffer_size > 0) {
        ctx->encode_config.rcParams.vbvBufferSize = avctx->rc_buffer_size;
    } else if (ctx->encode_config.rcParams.averageBitRate > 0) {
        /* Default the VBV buffer to two seconds' worth of average bitrate. */
        ctx->encode_config.rcParams.vbvBufferSize = 2 * ctx->encode_config.rcParams.averageBitRate;
    }
}
  527. static av_cold int nvenc_setup_h264_config(AVCodecContext *avctx, int lossless)
  528. {
  529. NvencContext *ctx = avctx->priv_data;
  530. int res;
  531. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourMatrix = avctx->colorspace;
  532. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourPrimaries = avctx->color_primaries;
  533. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.transferCharacteristics = avctx->color_trc;
  534. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoFullRangeFlag = (avctx->color_range == AVCOL_RANGE_JPEG
  535. || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ420P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ422P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ444P);
  536. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourDescriptionPresentFlag =
  537. (avctx->colorspace != 2 || avctx->color_primaries != 2 || avctx->color_trc != 2);
  538. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoSignalTypePresentFlag =
  539. (ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourDescriptionPresentFlag
  540. || ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoFormat != 5
  541. || ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoFullRangeFlag != 0);
  542. ctx->encode_config.encodeCodecConfig.h264Config.sliceMode = 3;
  543. ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData = 1;
  544. ctx->encode_config.encodeCodecConfig.h264Config.disableSPSPPS = (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) ? 1 : 0;
  545. ctx->encode_config.encodeCodecConfig.h264Config.repeatSPSPPS = (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) ? 0 : 1;
  546. ctx->encode_config.encodeCodecConfig.h264Config.outputAUD = 1;
  547. if (!ctx->profile && !lossless) {
  548. switch (avctx->profile) {
  549. case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
  550. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_444_GUID;
  551. break;
  552. case FF_PROFILE_H264_BASELINE:
  553. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID;
  554. break;
  555. case FF_PROFILE_H264_MAIN:
  556. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_MAIN_GUID;
  557. break;
  558. case FF_PROFILE_H264_HIGH:
  559. case FF_PROFILE_UNKNOWN:
  560. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID;
  561. break;
  562. default:
  563. av_log(avctx, AV_LOG_WARNING, "Unsupported profile requested, falling back to high\n");
  564. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID;
  565. break;
  566. }
  567. } else if (!lossless) {
  568. if (!strcmp(ctx->profile, "high")) {
  569. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID;
  570. avctx->profile = FF_PROFILE_H264_HIGH;
  571. } else if (!strcmp(ctx->profile, "main")) {
  572. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_MAIN_GUID;
  573. avctx->profile = FF_PROFILE_H264_MAIN;
  574. } else if (!strcmp(ctx->profile, "baseline")) {
  575. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID;
  576. avctx->profile = FF_PROFILE_H264_BASELINE;
  577. } else if (!strcmp(ctx->profile, "high444p")) {
  578. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_444_GUID;
  579. avctx->profile = FF_PROFILE_H264_HIGH_444_PREDICTIVE;
  580. } else {
  581. av_log(avctx, AV_LOG_FATAL, "Profile \"%s\" is unknown! Supported profiles: high, main, baseline\n", ctx->profile);
  582. return AVERROR(EINVAL);
  583. }
  584. }
  585. // force setting profile as high444p if input is AV_PIX_FMT_YUV444P
  586. if (ctx->data_pix_fmt == AV_PIX_FMT_YUV444P) {
  587. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_444_GUID;
  588. avctx->profile = FF_PROFILE_H264_HIGH_444_PREDICTIVE;
  589. }
  590. ctx->encode_config.encodeCodecConfig.h264Config.chromaFormatIDC = avctx->profile == FF_PROFILE_H264_HIGH_444_PREDICTIVE ? 3 : 1;
  591. if (ctx->level) {
  592. res = input_string_to_uint32(avctx, nvenc_h264_level_pairs, ctx->level, &ctx->encode_config.encodeCodecConfig.h264Config.level);
  593. if (res) {
  594. av_log(avctx, AV_LOG_FATAL, "Level \"%s\" is unknown! Supported levels: auto, 1, 1b, 1.1, 1.2, 1.3, 2, 2.1, 2.2, 3, 3.1, 3.2, 4, 4.1, 4.2, 5, 5.1\n", ctx->level);
  595. return res;
  596. }
  597. } else {
  598. ctx->encode_config.encodeCodecConfig.h264Config.level = NV_ENC_LEVEL_AUTOSELECT;
  599. }
  600. return 0;
  601. }
/* Fill in the HEVC-specific parts of the encoder configuration: VUI colour
 * signalling, slice/SPS-PPS handling, level and tier. Only the Main profile
 * is available in this SDK version. Returns 0 or a negative AVERROR code. */
static av_cold int nvenc_setup_hevc_config(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    int res;

    /* VUI colour description mirrored from the AVCodecContext; full range is
     * signalled for JPEG-range input or the deprecated YUVJ pixel formats. */
    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.colourMatrix = avctx->colorspace;
    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.colourPrimaries = avctx->color_primaries;
    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.transferCharacteristics = avctx->color_trc;
    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.videoFullRangeFlag = (avctx->color_range == AVCOL_RANGE_JPEG
        || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ420P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ422P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ444P);

    /* 2 == "unspecified" for all three colour description fields. */
    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.colourDescriptionPresentFlag =
        (avctx->colorspace != 2 || avctx->color_primaries != 2 || avctx->color_trc != 2);

    ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.videoSignalTypePresentFlag =
        (ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.colourDescriptionPresentFlag
        || ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.videoFormat != 5
        || ctx->encode_config.encodeCodecConfig.hevcConfig.hevcVUIParameters.videoFullRangeFlag != 0);

    /* sliceMode 3 with sliceModeData 1: one slice per picture. */
    ctx->encode_config.encodeCodecConfig.hevcConfig.sliceMode = 3;
    ctx->encode_config.encodeCodecConfig.hevcConfig.sliceModeData = 1;

    /* With global headers the parameter sets live in extradata, so suppress
     * them in-band; otherwise repeat them with every IDR frame. */
    ctx->encode_config.encodeCodecConfig.hevcConfig.disableSPSPPS = (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) ? 1 : 0;
    ctx->encode_config.encodeCodecConfig.hevcConfig.repeatSPSPPS = (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) ? 0 : 1;
    ctx->encode_config.encodeCodecConfig.hevcConfig.outputAUD = 1;

    /* No other profile is supported in the current SDK version 5 */
    ctx->encode_config.profileGUID = NV_ENC_HEVC_PROFILE_MAIN_GUID;
    avctx->profile = FF_PROFILE_HEVC_MAIN;

    if (ctx->level) {
        res = input_string_to_uint32(avctx, nvenc_hevc_level_pairs, ctx->level, &ctx->encode_config.encodeCodecConfig.hevcConfig.level);

        if (res) {
            av_log(avctx, AV_LOG_FATAL, "Level \"%s\" is unknown! Supported levels: auto, 1, 2, 2.1, 3, 3.1, 4, 4.1, 5, 5.1, 5.2, 6, 6.1, 6.2\n", ctx->level);
            return res;
        }
    } else {
        ctx->encode_config.encodeCodecConfig.hevcConfig.level = NV_ENC_LEVEL_AUTOSELECT;
    }

    if (ctx->tier) {
        if (!strcmp(ctx->tier, "main")) {
            ctx->encode_config.encodeCodecConfig.hevcConfig.tier = NV_ENC_TIER_HEVC_MAIN;
        } else if (!strcmp(ctx->tier, "high")) {
            ctx->encode_config.encodeCodecConfig.hevcConfig.tier = NV_ENC_TIER_HEVC_HIGH;
        } else {
            av_log(avctx, AV_LOG_FATAL, "Tier \"%s\" is unknown! Supported tiers: main, high\n", ctx->tier);
            return AVERROR(EINVAL);
        }
    }

    return 0;
}
  646. static av_cold int nvenc_setup_codec_config(AVCodecContext *avctx, int lossless)
  647. {
  648. switch (avctx->codec->id) {
  649. case AV_CODEC_ID_H264:
  650. return nvenc_setup_h264_config(avctx, lossless);
  651. case AV_CODEC_ID_H265:
  652. return nvenc_setup_hevc_config(avctx);
  653. /* Earlier switch/case will return if unknown codec is passed. */
  654. }
  655. return 0;
  656. }
  657. static av_cold int nvenc_setup_encoder(AVCodecContext *avctx)
  658. {
  659. NvencContext *ctx = avctx->priv_data;
  660. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  661. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  662. NV_ENC_PRESET_CONFIG preset_config = { 0 };
  663. GUID encoder_preset = NV_ENC_PRESET_HQ_GUID;
  664. GUID codec;
  665. NVENCSTATUS nv_status = NV_ENC_SUCCESS;
  666. AVCPBProperties *cpb_props;
  667. int num_mbs;
  668. int isLL = 0;
  669. int lossless = 0;
  670. int res = 0;
  671. int dw, dh;
  672. ctx->last_dts = AV_NOPTS_VALUE;
  673. ctx->encode_config.version = NV_ENC_CONFIG_VER;
  674. ctx->init_encode_params.version = NV_ENC_INITIALIZE_PARAMS_VER;
  675. preset_config.version = NV_ENC_PRESET_CONFIG_VER;
  676. preset_config.presetCfg.version = NV_ENC_CONFIG_VER;
  677. if (ctx->preset) {
  678. if (!strcmp(ctx->preset, "slow")) {
  679. encoder_preset = NV_ENC_PRESET_HQ_GUID;
  680. ctx->twopass = 1;
  681. } else if (!strcmp(ctx->preset, "medium")) {
  682. encoder_preset = NV_ENC_PRESET_HQ_GUID;
  683. ctx->twopass = 0;
  684. } else if (!strcmp(ctx->preset, "fast")) {
  685. encoder_preset = NV_ENC_PRESET_HP_GUID;
  686. ctx->twopass = 0;
  687. } else if (!strcmp(ctx->preset, "hq")) {
  688. encoder_preset = NV_ENC_PRESET_HQ_GUID;
  689. } else if (!strcmp(ctx->preset, "hp")) {
  690. encoder_preset = NV_ENC_PRESET_HP_GUID;
  691. } else if (!strcmp(ctx->preset, "bd")) {
  692. encoder_preset = NV_ENC_PRESET_BD_GUID;
  693. } else if (!strcmp(ctx->preset, "ll")) {
  694. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_DEFAULT_GUID;
  695. isLL = 1;
  696. } else if (!strcmp(ctx->preset, "llhp")) {
  697. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_HP_GUID;
  698. isLL = 1;
  699. } else if (!strcmp(ctx->preset, "llhq")) {
  700. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID;
  701. isLL = 1;
  702. } else if (!strcmp(ctx->preset, "lossless")) {
  703. encoder_preset = NV_ENC_PRESET_LOSSLESS_DEFAULT_GUID;
  704. lossless = 1;
  705. } else if (!strcmp(ctx->preset, "losslesshp")) {
  706. encoder_preset = NV_ENC_PRESET_LOSSLESS_HP_GUID;
  707. lossless = 1;
  708. } else if (!strcmp(ctx->preset, "default")) {
  709. encoder_preset = NV_ENC_PRESET_DEFAULT_GUID;
  710. } else {
  711. av_log(avctx, AV_LOG_FATAL, "Preset \"%s\" is unknown! Supported presets: slow, medium, fast, hp, hq, bd, ll, llhp, llhq, lossless, losslesshp, default\n", ctx->preset);
  712. return AVERROR(EINVAL);
  713. }
  714. }
  715. if (ctx->twopass < 0) {
  716. ctx->twopass = isLL;
  717. }
  718. switch (avctx->codec->id) {
  719. case AV_CODEC_ID_H264:
  720. codec = NV_ENC_CODEC_H264_GUID;
  721. break;
  722. case AV_CODEC_ID_H265:
  723. codec = NV_ENC_CODEC_HEVC_GUID;
  724. break;
  725. default:
  726. av_log(avctx, AV_LOG_ERROR, "Unknown codec name\n");
  727. return AVERROR(EINVAL);
  728. }
  729. nv_status = p_nvenc->nvEncGetEncodePresetConfig(ctx->nvencoder, codec, encoder_preset, &preset_config);
  730. if (nv_status != NV_ENC_SUCCESS) {
  731. return nvenc_print_error(avctx, nv_status, "GetEncodePresetConfig failed");
  732. }
  733. ctx->init_encode_params.encodeGUID = codec;
  734. ctx->init_encode_params.encodeHeight = avctx->height;
  735. ctx->init_encode_params.encodeWidth = avctx->width;
  736. if (avctx->sample_aspect_ratio.num && avctx->sample_aspect_ratio.den &&
  737. (avctx->sample_aspect_ratio.num != 1 || avctx->sample_aspect_ratio.num != 1)) {
  738. av_reduce(&dw, &dh,
  739. avctx->width * avctx->sample_aspect_ratio.num,
  740. avctx->height * avctx->sample_aspect_ratio.den,
  741. 1024 * 1024);
  742. ctx->init_encode_params.darHeight = dh;
  743. ctx->init_encode_params.darWidth = dw;
  744. } else {
  745. ctx->init_encode_params.darHeight = avctx->height;
  746. ctx->init_encode_params.darWidth = avctx->width;
  747. }
  748. // De-compensate for hardware, dubiously, trying to compensate for
  749. // playback at 704 pixel width.
  750. if (avctx->width == 720 &&
  751. (avctx->height == 480 || avctx->height == 576)) {
  752. av_reduce(&dw, &dh,
  753. ctx->init_encode_params.darWidth * 44,
  754. ctx->init_encode_params.darHeight * 45,
  755. 1024 * 1024);
  756. ctx->init_encode_params.darHeight = dh;
  757. ctx->init_encode_params.darWidth = dw;
  758. }
  759. ctx->init_encode_params.frameRateNum = avctx->time_base.den;
  760. ctx->init_encode_params.frameRateDen = avctx->time_base.num * avctx->ticks_per_frame;
  761. num_mbs = ((avctx->width + 15) >> 4) * ((avctx->height + 15) >> 4);
  762. ctx->max_surface_count = (num_mbs >= 8160) ? 32 : 48;
  763. if (ctx->buffer_delay >= ctx->max_surface_count)
  764. ctx->buffer_delay = ctx->max_surface_count - 1;
  765. ctx->init_encode_params.enableEncodeAsync = 0;
  766. ctx->init_encode_params.enablePTD = 1;
  767. ctx->init_encode_params.presetGUID = encoder_preset;
  768. ctx->init_encode_params.encodeConfig = &ctx->encode_config;
  769. memcpy(&ctx->encode_config, &preset_config.presetCfg, sizeof(ctx->encode_config));
  770. ctx->encode_config.version = NV_ENC_CONFIG_VER;
  771. if (avctx->refs >= 0) {
  772. /* 0 means "let the hardware decide" */
  773. switch (avctx->codec->id) {
  774. case AV_CODEC_ID_H264:
  775. ctx->encode_config.encodeCodecConfig.h264Config.maxNumRefFrames = avctx->refs;
  776. break;
  777. case AV_CODEC_ID_H265:
  778. ctx->encode_config.encodeCodecConfig.hevcConfig.maxNumRefFramesInDPB = avctx->refs;
  779. break;
  780. /* Earlier switch/case will return if unknown codec is passed. */
  781. }
  782. }
  783. if (avctx->gop_size > 0) {
  784. if (avctx->max_b_frames >= 0) {
  785. /* 0 is intra-only, 1 is I/P only, 2 is one B Frame, 3 two B frames, and so on. */
  786. ctx->encode_config.frameIntervalP = avctx->max_b_frames + 1;
  787. }
  788. ctx->encode_config.gopLength = avctx->gop_size;
  789. switch (avctx->codec->id) {
  790. case AV_CODEC_ID_H264:
  791. ctx->encode_config.encodeCodecConfig.h264Config.idrPeriod = avctx->gop_size;
  792. break;
  793. case AV_CODEC_ID_H265:
  794. ctx->encode_config.encodeCodecConfig.hevcConfig.idrPeriod = avctx->gop_size;
  795. break;
  796. /* Earlier switch/case will return if unknown codec is passed. */
  797. }
  798. } else if (avctx->gop_size == 0) {
  799. ctx->encode_config.frameIntervalP = 0;
  800. ctx->encode_config.gopLength = 1;
  801. switch (avctx->codec->id) {
  802. case AV_CODEC_ID_H264:
  803. ctx->encode_config.encodeCodecConfig.h264Config.idrPeriod = 1;
  804. break;
  805. case AV_CODEC_ID_H265:
  806. ctx->encode_config.encodeCodecConfig.hevcConfig.idrPeriod = 1;
  807. break;
  808. /* Earlier switch/case will return if unknown codec is passed. */
  809. }
  810. }
  811. /* when there're b frames, set dts offset */
  812. if (ctx->encode_config.frameIntervalP >= 2)
  813. ctx->last_dts = -2;
  814. nvenc_setup_rate_control(avctx, lossless);
  815. if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
  816. ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FIELD;
  817. } else {
  818. ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME;
  819. }
  820. res = nvenc_setup_codec_config(avctx, lossless);
  821. if (res)
  822. return res;
  823. nv_status = p_nvenc->nvEncInitializeEncoder(ctx->nvencoder, &ctx->init_encode_params);
  824. if (nv_status != NV_ENC_SUCCESS) {
  825. return nvenc_print_error(avctx, nv_status, "InitializeEncoder failed");
  826. }
  827. if (ctx->encode_config.frameIntervalP > 1)
  828. avctx->has_b_frames = 2;
  829. if (ctx->encode_config.rcParams.averageBitRate > 0)
  830. avctx->bit_rate = ctx->encode_config.rcParams.averageBitRate;
  831. cpb_props = ff_add_cpb_side_data(avctx);
  832. if (!cpb_props)
  833. return AVERROR(ENOMEM);
  834. cpb_props->max_bitrate = ctx->encode_config.rcParams.maxBitRate;
  835. cpb_props->avg_bitrate = avctx->bit_rate;
  836. cpb_props->buffer_size = ctx->encode_config.rcParams.vbvBufferSize;
  837. return 0;
  838. }
/**
 * Allocate the input and output resources for surface slot @p idx.
 *
 * For CUDA input (AV_PIX_FMT_CUDA) only an AVFrame reference holder is
 * allocated; the actual device memory is mapped per-frame later.  For system
 * memory input an NVENC input buffer is created, with dimensions rounded up
 * to a multiple of 32 as required by the hardware.  In both cases a
 * bitstream output buffer is created.
 *
 * @return 0 on success, a negative AVERROR code on failure; on failure any
 *         resource allocated earlier in this call is released again.
 */
static av_cold int nvenc_alloc_surface(AVCodecContext *avctx, int idx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
    NVENCSTATUS nv_status;
    NV_ENC_CREATE_BITSTREAM_BUFFER allocOut = { 0 };
    allocOut.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER;

    /* Translate the software pixel format into the NVENC buffer format.
     * Note YUV420P maps to YV12 (Y, V, U plane order). */
    switch (ctx->data_pix_fmt) {
    case AV_PIX_FMT_YUV420P:
        ctx->surfaces[idx].format = NV_ENC_BUFFER_FORMAT_YV12_PL;
        break;
    case AV_PIX_FMT_NV12:
        ctx->surfaces[idx].format = NV_ENC_BUFFER_FORMAT_NV12_PL;
        break;
    case AV_PIX_FMT_YUV444P:
        ctx->surfaces[idx].format = NV_ENC_BUFFER_FORMAT_YUV444_PL;
        break;
    default:
        av_log(avctx, AV_LOG_FATAL, "Invalid input pixel format\n");
        return AVERROR(EINVAL);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_CUDA) {
        /* CUDA frames are mapped on demand; just hold a frame reference. */
        ctx->surfaces[idx].in_ref = av_frame_alloc();
        if (!ctx->surfaces[idx].in_ref)
            return AVERROR(ENOMEM);
    } else {
        NV_ENC_CREATE_INPUT_BUFFER allocSurf = { 0 };
        allocSurf.version = NV_ENC_CREATE_INPUT_BUFFER_VER;
        /* NVENC requires input buffer dimensions aligned to 32. */
        allocSurf.width = (avctx->width + 31) & ~31;
        allocSurf.height = (avctx->height + 31) & ~31;
        allocSurf.memoryHeap = NV_ENC_MEMORY_HEAP_SYSMEM_CACHED;
        allocSurf.bufferFmt = ctx->surfaces[idx].format;

        nv_status = p_nvenc->nvEncCreateInputBuffer(ctx->nvencoder, &allocSurf);
        if (nv_status != NV_ENC_SUCCESS) {
            return nvenc_print_error(avctx, nv_status, "CreateInputBuffer failed");
        }

        ctx->surfaces[idx].input_surface = allocSurf.inputBuffer;
        ctx->surfaces[idx].width = allocSurf.width;
        ctx->surfaces[idx].height = allocSurf.height;
    }

    ctx->surfaces[idx].lockCount = 0;

    /* 1MB is large enough to hold most output frames. NVENC increases this automaticaly if it's not enough. */
    allocOut.size = 1024 * 1024;

    allocOut.memoryHeap = NV_ENC_MEMORY_HEAP_SYSMEM_CACHED;

    nv_status = p_nvenc->nvEncCreateBitstreamBuffer(ctx->nvencoder, &allocOut);
    if (nv_status != NV_ENC_SUCCESS) {
        /* Roll back whatever the input-side allocation created above. */
        int err = nvenc_print_error(avctx, nv_status, "CreateBitstreamBuffer failed");
        if (avctx->pix_fmt != AV_PIX_FMT_CUDA)
            p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->surfaces[idx].input_surface);
        av_frame_free(&ctx->surfaces[idx].in_ref);
        return err;
    }

    ctx->surfaces[idx].output_surface = allocOut.bitstreamBuffer;
    ctx->surfaces[idx].size = allocOut.size;

    return 0;
}
  896. static av_cold int nvenc_setup_surfaces(AVCodecContext *avctx, int* surfaceCount)
  897. {
  898. int res;
  899. NvencContext *ctx = avctx->priv_data;
  900. ctx->surfaces = av_malloc(ctx->max_surface_count * sizeof(*ctx->surfaces));
  901. if (!ctx->surfaces) {
  902. return AVERROR(ENOMEM);
  903. }
  904. ctx->timestamp_list = av_fifo_alloc(ctx->max_surface_count * sizeof(int64_t));
  905. if (!ctx->timestamp_list)
  906. return AVERROR(ENOMEM);
  907. ctx->output_surface_queue = av_fifo_alloc(ctx->max_surface_count * sizeof(NvencSurface*));
  908. if (!ctx->output_surface_queue)
  909. return AVERROR(ENOMEM);
  910. ctx->output_surface_ready_queue = av_fifo_alloc(ctx->max_surface_count * sizeof(NvencSurface*));
  911. if (!ctx->output_surface_ready_queue)
  912. return AVERROR(ENOMEM);
  913. for (*surfaceCount = 0; *surfaceCount < ctx->max_surface_count; ++*surfaceCount) {
  914. res = nvenc_alloc_surface(avctx, *surfaceCount);
  915. if (res)
  916. return res;
  917. }
  918. return 0;
  919. }
  920. static av_cold int nvenc_setup_extradata(AVCodecContext *avctx)
  921. {
  922. NvencContext *ctx = avctx->priv_data;
  923. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  924. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  925. NVENCSTATUS nv_status;
  926. uint32_t outSize = 0;
  927. char tmpHeader[256];
  928. NV_ENC_SEQUENCE_PARAM_PAYLOAD payload = { 0 };
  929. payload.version = NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER;
  930. payload.spsppsBuffer = tmpHeader;
  931. payload.inBufferSize = sizeof(tmpHeader);
  932. payload.outSPSPPSPayloadSize = &outSize;
  933. nv_status = p_nvenc->nvEncGetSequenceParams(ctx->nvencoder, &payload);
  934. if (nv_status != NV_ENC_SUCCESS) {
  935. return nvenc_print_error(avctx, nv_status, "GetSequenceParams failed");
  936. }
  937. avctx->extradata_size = outSize;
  938. avctx->extradata = av_mallocz(outSize + AV_INPUT_BUFFER_PADDING_SIZE);
  939. if (!avctx->extradata) {
  940. return AVERROR(ENOMEM);
  941. }
  942. memcpy(avctx->extradata, tmpHeader, outSize);
  943. return 0;
  944. }
/**
 * Encoder init entry point: dynamically load the NVENC library, set up the
 * device and encode session, configure the encoder, allocate surfaces, and
 * (if global headers were requested) fetch the extradata.
 *
 * On any failure, all resources acquired so far are released in reverse
 * order before returning the error.
 */
av_cold int ff_nvenc_encode_init(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
    int res;
    int i;
    /* Tracks how many surfaces nvenc_setup_surfaces() managed to allocate,
     * so the error path frees exactly those. */
    int surfaceCount = 0;

    if (!nvenc_dyload_nvenc(avctx))
        return AVERROR_EXTERNAL;

    res = nvenc_setup_device(avctx);
    if (res)
        goto error;

    res = nvenc_open_session(avctx);
    if (res)
        goto error;

    res = nvenc_setup_encoder(avctx);
    if (res)
        goto error;

    res = nvenc_setup_surfaces(avctx, &surfaceCount);
    if (res)
        goto error;

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        res = nvenc_setup_extradata(avctx);
        if (res)
            goto error;
    }

    return 0;

error:
    /* Tear down in reverse order of acquisition.  The fifo/encoder/context
     * cleanup calls all tolerate NULL / never-initialized members. */
    av_fifo_freep(&ctx->timestamp_list);
    av_fifo_freep(&ctx->output_surface_ready_queue);
    av_fifo_freep(&ctx->output_surface_queue);

    for (i = 0; i < surfaceCount; ++i) {
        if (avctx->pix_fmt != AV_PIX_FMT_CUDA)
            p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->surfaces[i].input_surface);
        av_frame_free(&ctx->surfaces[i].in_ref);
        p_nvenc->nvEncDestroyBitstreamBuffer(ctx->nvencoder, ctx->surfaces[i].output_surface);
    }
    av_freep(&ctx->surfaces);

    if (ctx->nvencoder)
        p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
    ctx->nvencoder = NULL;

    if (ctx->cu_context_internal)
        dl_fn->cu_ctx_destroy(ctx->cu_context_internal);
    ctx->cu_context = ctx->cu_context_internal = NULL;

    nvenc_unload_nvenc(avctx);

    return res;
}
/**
 * Encoder close entry point: unmap/unregister any CUDA resources, destroy
 * all surfaces, the encode session and the CUDA context, and unload the
 * dynamically loaded NVENC library.
 */
av_cold int ff_nvenc_encode_close(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
    int i;

    av_fifo_freep(&ctx->timestamp_list);
    av_fifo_freep(&ctx->output_surface_ready_queue);
    av_fifo_freep(&ctx->output_surface_queue);

    if (avctx->pix_fmt == AV_PIX_FMT_CUDA) {
        /* CUDA path: input surfaces are mapped registrations, not owned
         * buffers — unmap them, then drop all frame registrations. */
        for (i = 0; i < ctx->max_surface_count; ++i) {
            if (ctx->surfaces[i].input_surface) {
                p_nvenc->nvEncUnmapInputResource(ctx->nvencoder, ctx->surfaces[i].in_map.mappedResource);
            }
        }
        for (i = 0; i < ctx->nb_registered_frames; i++) {
            if (ctx->registered_frames[i].regptr)
                p_nvenc->nvEncUnregisterResource(ctx->nvencoder, ctx->registered_frames[i].regptr);
        }
        ctx->nb_registered_frames = 0;
    }

    for (i = 0; i < ctx->max_surface_count; ++i) {
        /* Input buffers are only owned by us in the non-CUDA path. */
        if (avctx->pix_fmt != AV_PIX_FMT_CUDA)
            p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->surfaces[i].input_surface);
        av_frame_free(&ctx->surfaces[i].in_ref);
        p_nvenc->nvEncDestroyBitstreamBuffer(ctx->nvencoder, ctx->surfaces[i].output_surface);
    }
    av_freep(&ctx->surfaces);
    ctx->max_surface_count = 0;

    p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
    ctx->nvencoder = NULL;

    if (ctx->cu_context_internal)
        dl_fn->cu_ctx_destroy(ctx->cu_context_internal);
    ctx->cu_context = ctx->cu_context_internal = NULL;

    nvenc_unload_nvenc(avctx);

    return 0;
}
  1030. static NvencSurface *get_free_frame(NvencContext *ctx)
  1031. {
  1032. int i;
  1033. for (i = 0; i < ctx->max_surface_count; ++i) {
  1034. if (!ctx->surfaces[i].lockCount) {
  1035. ctx->surfaces[i].lockCount = 1;
  1036. return &ctx->surfaces[i];
  1037. }
  1038. }
  1039. return NULL;
  1040. }
  1041. static int nvenc_copy_frame(AVCodecContext *avctx, NvencSurface *inSurf,
  1042. NV_ENC_LOCK_INPUT_BUFFER *lockBufferParams, const AVFrame *frame)
  1043. {
  1044. uint8_t *buf = lockBufferParams->bufferDataPtr;
  1045. int off = inSurf->height * lockBufferParams->pitch;
  1046. if (frame->format == AV_PIX_FMT_YUV420P) {
  1047. av_image_copy_plane(buf, lockBufferParams->pitch,
  1048. frame->data[0], frame->linesize[0],
  1049. avctx->width, avctx->height);
  1050. buf += off;
  1051. av_image_copy_plane(buf, lockBufferParams->pitch >> 1,
  1052. frame->data[2], frame->linesize[2],
  1053. avctx->width >> 1, avctx->height >> 1);
  1054. buf += off >> 2;
  1055. av_image_copy_plane(buf, lockBufferParams->pitch >> 1,
  1056. frame->data[1], frame->linesize[1],
  1057. avctx->width >> 1, avctx->height >> 1);
  1058. } else if (frame->format == AV_PIX_FMT_NV12) {
  1059. av_image_copy_plane(buf, lockBufferParams->pitch,
  1060. frame->data[0], frame->linesize[0],
  1061. avctx->width, avctx->height);
  1062. buf += off;
  1063. av_image_copy_plane(buf, lockBufferParams->pitch,
  1064. frame->data[1], frame->linesize[1],
  1065. avctx->width, avctx->height >> 1);
  1066. } else if (frame->format == AV_PIX_FMT_YUV444P) {
  1067. av_image_copy_plane(buf, lockBufferParams->pitch,
  1068. frame->data[0], frame->linesize[0],
  1069. avctx->width, avctx->height);
  1070. buf += off;
  1071. av_image_copy_plane(buf, lockBufferParams->pitch,
  1072. frame->data[1], frame->linesize[1],
  1073. avctx->width, avctx->height);
  1074. buf += off;
  1075. av_image_copy_plane(buf, lockBufferParams->pitch,
  1076. frame->data[2], frame->linesize[2],
  1077. avctx->width, avctx->height);
  1078. } else {
  1079. av_log(avctx, AV_LOG_FATAL, "Invalid pixel format!\n");
  1080. return AVERROR(EINVAL);
  1081. }
  1082. return 0;
  1083. }
  1084. static int nvenc_find_free_reg_resource(AVCodecContext *avctx)
  1085. {
  1086. NvencContext *ctx = avctx->priv_data;
  1087. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  1088. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  1089. int i;
  1090. if (ctx->nb_registered_frames == FF_ARRAY_ELEMS(ctx->registered_frames)) {
  1091. for (i = 0; i < ctx->nb_registered_frames; i++) {
  1092. if (!ctx->registered_frames[i].mapped) {
  1093. if (ctx->registered_frames[i].regptr) {
  1094. p_nvenc->nvEncUnregisterResource(ctx->nvencoder,
  1095. ctx->registered_frames[i].regptr);
  1096. ctx->registered_frames[i].regptr = NULL;
  1097. }
  1098. return i;
  1099. }
  1100. }
  1101. } else {
  1102. return ctx->nb_registered_frames++;
  1103. }
  1104. av_log(avctx, AV_LOG_ERROR, "Too many registered CUDA frames\n");
  1105. return AVERROR(ENOMEM);
  1106. }
  1107. static int nvenc_register_frame(AVCodecContext *avctx, const AVFrame *frame)
  1108. {
  1109. NvencContext *ctx = avctx->priv_data;
  1110. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  1111. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  1112. AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
  1113. NV_ENC_REGISTER_RESOURCE reg;
  1114. int i, idx, ret;
  1115. for (i = 0; i < ctx->nb_registered_frames; i++) {
  1116. if (ctx->registered_frames[i].ptr == (CUdeviceptr)frame->data[0])
  1117. return i;
  1118. }
  1119. idx = nvenc_find_free_reg_resource(avctx);
  1120. if (idx < 0)
  1121. return idx;
  1122. reg.version = NV_ENC_REGISTER_RESOURCE_VER;
  1123. reg.resourceType = NV_ENC_INPUT_RESOURCE_TYPE_CUDADEVICEPTR;
  1124. reg.width = frames_ctx->width;
  1125. reg.height = frames_ctx->height;
  1126. reg.bufferFormat = ctx->surfaces[0].format;
  1127. reg.pitch = frame->linesize[0];
  1128. reg.resourceToRegister = frame->data[0];
  1129. ret = p_nvenc->nvEncRegisterResource(ctx->nvencoder, &reg);
  1130. if (ret != NV_ENC_SUCCESS) {
  1131. nvenc_print_error(avctx, ret, "Error registering an input resource");
  1132. return AVERROR_UNKNOWN;
  1133. }
  1134. ctx->registered_frames[idx].ptr = (CUdeviceptr)frame->data[0];
  1135. ctx->registered_frames[idx].regptr = reg.registeredResource;
  1136. return idx;
  1137. }
/**
 * Make the input frame available to the encoder through @p nvenc_frame.
 *
 * CUDA path: register (or reuse a registration of) the frame's device
 * memory, keep a reference to the frame, and map the registered resource as
 * the input surface.  System memory path: lock the pre-allocated input
 * buffer, copy the planes into it, and unlock it again.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int nvenc_upload_frame(AVCodecContext *avctx, const AVFrame *frame,
    NvencSurface *nvenc_frame)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;

    int res;
    NVENCSTATUS nv_status;

    if (avctx->pix_fmt == AV_PIX_FMT_CUDA) {
        int reg_idx = nvenc_register_frame(avctx, frame);
        if (reg_idx < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not register an input CUDA frame\n");
            return reg_idx;
        }

        /* Keep the frame alive while the encoder reads its device memory;
         * the reference is dropped in process_output_surface(). */
        res = av_frame_ref(nvenc_frame->in_ref, frame);
        if (res < 0)
            return res;

        nvenc_frame->in_map.version = NV_ENC_MAP_INPUT_RESOURCE_VER;
        nvenc_frame->in_map.registeredResource = ctx->registered_frames[reg_idx].regptr;
        nv_status = p_nvenc->nvEncMapInputResource(ctx->nvencoder, &nvenc_frame->in_map);
        if (nv_status != NV_ENC_SUCCESS) {
            av_frame_unref(nvenc_frame->in_ref);
            return nvenc_print_error(avctx, nv_status, "Error mapping an input resource");
        }

        ctx->registered_frames[reg_idx].mapped = 1;
        nvenc_frame->reg_idx = reg_idx;
        nvenc_frame->input_surface = nvenc_frame->in_map.mappedResource;
        return 0;
    } else {
        NV_ENC_LOCK_INPUT_BUFFER lockBufferParams = { 0 };

        lockBufferParams.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
        lockBufferParams.inputBuffer = nvenc_frame->input_surface;

        nv_status = p_nvenc->nvEncLockInputBuffer(ctx->nvencoder, &lockBufferParams);
        if (nv_status != NV_ENC_SUCCESS) {
            return nvenc_print_error(avctx, nv_status, "Failed locking nvenc input buffer");
        }

        res = nvenc_copy_frame(avctx, nvenc_frame, &lockBufferParams, frame);

        /* Unlock even if the copy failed; report the unlock error first. */
        nv_status = p_nvenc->nvEncUnlockInputBuffer(ctx->nvencoder, nvenc_frame->input_surface);
        if (nv_status != NV_ENC_SUCCESS) {
            return nvenc_print_error(avctx, nv_status, "Failed unlocking input buffer!");
        }

        return res;
    }
}
  1182. static void nvenc_codec_specific_pic_params(AVCodecContext *avctx,
  1183. NV_ENC_PIC_PARAMS *params)
  1184. {
  1185. NvencContext *ctx = avctx->priv_data;
  1186. switch (avctx->codec->id) {
  1187. case AV_CODEC_ID_H264:
  1188. params->codecPicParams.h264PicParams.sliceMode = ctx->encode_config.encodeCodecConfig.h264Config.sliceMode;
  1189. params->codecPicParams.h264PicParams.sliceModeData = ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData;
  1190. break;
  1191. case AV_CODEC_ID_H265:
  1192. params->codecPicParams.hevcPicParams.sliceMode = ctx->encode_config.encodeCodecConfig.hevcConfig.sliceMode;
  1193. params->codecPicParams.hevcPicParams.sliceModeData = ctx->encode_config.encodeCodecConfig.hevcConfig.sliceModeData;
  1194. break;
  1195. }
  1196. }
  1197. static int process_output_surface(AVCodecContext *avctx, AVPacket *pkt, NvencSurface *tmpoutsurf)
  1198. {
  1199. NvencContext *ctx = avctx->priv_data;
  1200. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  1201. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  1202. uint32_t slice_mode_data;
  1203. uint32_t *slice_offsets;
  1204. NV_ENC_LOCK_BITSTREAM lock_params = { 0 };
  1205. NVENCSTATUS nv_status;
  1206. int res = 0;
  1207. enum AVPictureType pict_type;
  1208. switch (avctx->codec->id) {
  1209. case AV_CODEC_ID_H264:
  1210. slice_mode_data = ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData;
  1211. break;
  1212. case AV_CODEC_ID_H265:
  1213. slice_mode_data = ctx->encode_config.encodeCodecConfig.hevcConfig.sliceModeData;
  1214. break;
  1215. default:
  1216. av_log(avctx, AV_LOG_ERROR, "Unknown codec name\n");
  1217. res = AVERROR(EINVAL);
  1218. goto error;
  1219. }
  1220. slice_offsets = av_mallocz(slice_mode_data * sizeof(*slice_offsets));
  1221. if (!slice_offsets)
  1222. return AVERROR(ENOMEM);
  1223. lock_params.version = NV_ENC_LOCK_BITSTREAM_VER;
  1224. lock_params.doNotWait = 0;
  1225. lock_params.outputBitstream = tmpoutsurf->output_surface;
  1226. lock_params.sliceOffsets = slice_offsets;
  1227. nv_status = p_nvenc->nvEncLockBitstream(ctx->nvencoder, &lock_params);
  1228. if (nv_status != NV_ENC_SUCCESS) {
  1229. res = nvenc_print_error(avctx, nv_status, "Failed locking bitstream buffer");
  1230. goto error;
  1231. }
  1232. if (res = ff_alloc_packet2(avctx, pkt, lock_params.bitstreamSizeInBytes,0)) {
  1233. p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
  1234. goto error;
  1235. }
  1236. memcpy(pkt->data, lock_params.bitstreamBufferPtr, lock_params.bitstreamSizeInBytes);
  1237. nv_status = p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
  1238. if (nv_status != NV_ENC_SUCCESS)
  1239. nvenc_print_error(avctx, nv_status, "Failed unlocking bitstream buffer, expect the gates of mordor to open");
  1240. if (avctx->pix_fmt == AV_PIX_FMT_CUDA) {
  1241. p_nvenc->nvEncUnmapInputResource(ctx->nvencoder, tmpoutsurf->in_map.mappedResource);
  1242. av_frame_unref(tmpoutsurf->in_ref);
  1243. ctx->registered_frames[tmpoutsurf->reg_idx].mapped = 0;
  1244. tmpoutsurf->input_surface = NULL;
  1245. }
  1246. switch (lock_params.pictureType) {
  1247. case NV_ENC_PIC_TYPE_IDR:
  1248. pkt->flags |= AV_PKT_FLAG_KEY;
  1249. case NV_ENC_PIC_TYPE_I:
  1250. pict_type = AV_PICTURE_TYPE_I;
  1251. break;
  1252. case NV_ENC_PIC_TYPE_P:
  1253. pict_type = AV_PICTURE_TYPE_P;
  1254. break;
  1255. case NV_ENC_PIC_TYPE_B:
  1256. pict_type = AV_PICTURE_TYPE_B;
  1257. break;
  1258. case NV_ENC_PIC_TYPE_BI:
  1259. pict_type = AV_PICTURE_TYPE_BI;
  1260. break;
  1261. default:
  1262. av_log(avctx, AV_LOG_ERROR, "Unknown picture type encountered, expect the output to be broken.\n");
  1263. av_log(avctx, AV_LOG_ERROR, "Please report this error and include as much information on how to reproduce it as possible.\n");
  1264. res = AVERROR_EXTERNAL;
  1265. goto error;
  1266. }
  1267. #if FF_API_CODED_FRAME
  1268. FF_DISABLE_DEPRECATION_WARNINGS
  1269. avctx->coded_frame->pict_type = pict_type;
  1270. FF_ENABLE_DEPRECATION_WARNINGS
  1271. #endif
  1272. ff_side_data_set_encoder_stats(pkt,
  1273. (lock_params.frameAvgQP - 1) * FF_QP2LAMBDA, NULL, 0, pict_type);
  1274. pkt->pts = lock_params.outputTimeStamp;
  1275. pkt->dts = timestamp_queue_dequeue(ctx->timestamp_list);
  1276. /* when there're b frame(s), set dts offset */
  1277. if (ctx->encode_config.frameIntervalP >= 2)
  1278. pkt->dts -= 1;
  1279. if (pkt->dts > pkt->pts)
  1280. pkt->dts = pkt->pts;
  1281. if (ctx->last_dts != AV_NOPTS_VALUE && pkt->dts <= ctx->last_dts)
  1282. pkt->dts = ctx->last_dts + 1;
  1283. ctx->last_dts = pkt->dts;
  1284. av_free(slice_offsets);
  1285. return 0;
  1286. error:
  1287. av_free(slice_offsets);
  1288. timestamp_queue_dequeue(ctx->timestamp_list);
  1289. return res;
  1290. }
  1291. static int output_ready(NvencContext *ctx, int flush)
  1292. {
  1293. int nb_ready, nb_pending;
  1294. nb_ready = av_fifo_size(ctx->output_surface_ready_queue) / sizeof(NvencSurface*);
  1295. nb_pending = av_fifo_size(ctx->output_surface_queue) / sizeof(NvencSurface*);
  1296. return nb_ready > 0 && (flush || nb_ready + nb_pending >= ctx->buffer_delay);
  1297. }
/**
 * Encode entry point.  Submits @p frame (or an EOS flush when frame is
 * NULL) to the hardware, shuffles surfaces between the pending and ready
 * FIFOs according to the encoder's reordering, and emits one packet via
 * process_output_surface() once enough output is buffered.
 *
 * @param got_packet set to 1 when @p pkt was filled, 0 otherwise.
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_nvenc_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
    const AVFrame *frame, int *got_packet)
{
    NVENCSTATUS nv_status;
    NvencSurface *tmpoutsurf, *inSurf;
    int res;

    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;

    NV_ENC_PIC_PARAMS pic_params = { 0 };
    pic_params.version = NV_ENC_PIC_PARAMS_VER;

    if (frame) {
        /* A surface must be free here: the buffering logic below never
         * keeps more surfaces in flight than the pool holds. */
        inSurf = get_free_frame(ctx);
        av_assert0(inSurf);

        res = nvenc_upload_frame(avctx, frame, inSurf);
        if (res) {
            inSurf->lockCount = 0;
            return res;
        }

        pic_params.inputBuffer = inSurf->input_surface;
        pic_params.bufferFmt = inSurf->format;
        pic_params.inputWidth = avctx->width;
        pic_params.inputHeight = avctx->height;
        pic_params.outputBitstream = inSurf->output_surface;
        pic_params.completionEvent = 0;

        if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
            if (frame->top_field_first) {
                pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;
            } else {
                pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
            }
        } else {
            pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
        }

        pic_params.encodePicFlags = 0;
        pic_params.inputTimeStamp = frame->pts;
        pic_params.inputDuration = 0;

        nvenc_codec_specific_pic_params(avctx, &pic_params);

        /* Remember the pts so dts can be reconstructed in output order. */
        timestamp_queue_enqueue(ctx->timestamp_list, frame->pts);
    } else {
        /* NULL frame signals end of stream: flush the encoder. */
        pic_params.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
    }

    nv_status = p_nvenc->nvEncEncodePicture(ctx->nvencoder, &pic_params);

    /* NEED_MORE_INPUT means the surface is buffered inside the encoder
     * (B-frame reordering); park it until output becomes available. */
    if (frame && nv_status == NV_ENC_ERR_NEED_MORE_INPUT)
        av_fifo_generic_write(ctx->output_surface_queue, &inSurf, sizeof(inSurf), NULL);

    if (nv_status != NV_ENC_SUCCESS && nv_status != NV_ENC_ERR_NEED_MORE_INPUT) {
        return nvenc_print_error(avctx, nv_status, "EncodePicture failed!");
    }

    if (nv_status != NV_ENC_ERR_NEED_MORE_INPUT) {
        /* Output became available: everything previously pending, plus the
         * current surface, is now ready for readback. */
        while (av_fifo_size(ctx->output_surface_queue) > 0) {
            av_fifo_generic_read(ctx->output_surface_queue, &tmpoutsurf, sizeof(tmpoutsurf), NULL);
            av_fifo_generic_write(ctx->output_surface_ready_queue, &tmpoutsurf, sizeof(tmpoutsurf), NULL);
        }

        if (frame)
            av_fifo_generic_write(ctx->output_surface_ready_queue, &inSurf, sizeof(inSurf), NULL);
    }

    if (output_ready(ctx, !frame)) {
        av_fifo_generic_read(ctx->output_surface_ready_queue, &tmpoutsurf, sizeof(tmpoutsurf), NULL);

        res = process_output_surface(avctx, pkt, tmpoutsurf);

        if (res)
            return res;

        av_assert0(tmpoutsurf->lockCount);
        tmpoutsurf->lockCount--;

        *got_packet = 1;
    } else {
        *got_packet = 0;
    }

    return 0;
}