I have been working on an NVEnc project but the images seem to come out blurry no matter what I do in the setup if I am using CBR as my rate control mode. I am adapting the code from the NVidia sample here: https://github.com/NVIDIA/video-sdk-samples/blob/master/nvEncBroadcastSample/nvEnc/nvCodec/nvEncoder/NvEncoder.cpp
I am using CBR as my rate control mode as suggested in this table from the nvidia docs.
m_encoder->SetEncoderParams(&m_encInitParams, m_codecId, NV_ENC_PRESET_LOW_LATENCY_HQ_GUID);
[...]
void NvEncoder::SetEncoderParams(NV_ENC_INITIALIZE_PARAMS* pIntializeParams, GUID codecGuid, GUID presetGuid)
{
if (!m_encoder)
{
NVENC_THROW_ERROR("Encoder Initialization failed", NV_ENC_ERR_NO_ENCODE_DEVICE);
return;
}
if (pIntializeParams == nullptr || pIntializeParams->encodeConfig == nullptr)
{
NVENC_THROW_ERROR("pInitializeParams and pInitializeParams->encodeConfig can't be NULL", NV_ENC_ERR_INVALID_PTR);
}
memset(pIntializeParams->encodeConfig, 0, sizeof(NV_ENC_CONFIG));
auto pEncodeConfig = pIntializeParams->encodeConfig;
memset(pIntializeParams, 0, sizeof(NV_ENC_INITIALIZE_PARAMS));
pIntializeParams->encodeConfig = pEncodeConfig;
pIntializeParams->encodeConfig->version = NV_ENC_CONFIG_VER;
pIntializeParams->version = NV_ENC_INITIALIZE_PARAMS_VER;
pIntializeParams->encodeGUID = codecGuid;
pIntializeParams->presetGUID = presetGuid;
pIntializeParams->encodeWidth = m_width;
pIntializeParams->encodeHeight = m_height;
pIntializeParams->darWidth = m_width;
pIntializeParams->darHeight = m_height;
pIntializeParams->maxEncodeWidth = m_width;
pIntializeParams->maxEncodeHeight = m_height;
pIntializeParams->frameRateNum = 60;
pIntializeParams->frameRateDen = 1;
pIntializeParams->enablePTD = 1;
pIntializeParams->reportSliceOffsets = 1;
pIntializeParams->enableSubFrameWrite = 0;
pIntializeParams->enableMEOnlyMode = m_motionEstimationOnly;
pIntializeParams->enableWeightedPrediction = 1;
#if defined(_WIN32)
pIntializeParams->enableEncodeAsync = true;
#endif
NV_ENC_PRESET_CONFIG presetConfig = { NV_ENC_PRESET_CONFIG_VER, { NV_ENC_CONFIG_VER } };
m_nvenc.nvEncGetEncodePresetConfig(m_encoder, codecGuid, presetGuid, &presetConfig);
memcpy(pIntializeParams->encodeConfig, &presetConfig.presetCfg, sizeof(NV_ENC_CONFIG));
pIntializeParams->encodeConfig->frameIntervalP = 1;
pIntializeParams->encodeConfig->gopLength = NVENC_INFINITE_GOPLENGTH;
pIntializeParams->encodeConfig->rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ;
pIntializeParams->encodeConfig->rcParams.averageBitRate = INT16_MAX;
//pIntializeParams->encodeConfig->rcParams.maxBitRate = 99999999999;
pIntializeParams->encodeConfig->rcParams.zeroReorderDelay = 1;
if (pIntializeParams->encodeGUID == NV_ENC_CODEC_H264_GUID)
{
pIntializeParams->encodeConfig->encodeCodecConfig.h264Config.idrPeriod = NVENC_INFINITE_GOPLENGTH; //pIntializeParams->encodeConfig->gopLength;
pIntializeParams->encodeConfig->encodeCodecConfig.h264Config.sliceMode = 1;
pIntializeParams->encodeConfig->encodeCodecConfig.h264Config.sliceModeData = INT16_MAX - 50;
pIntializeParams->encodeConfig->encodeCodecConfig.h264Config.repeatSPSPPS = 1;
}
else if (pIntializeParams->encodeGUID == NV_ENC_CODEC_HEVC_GUID)
{
pIntializeParams->encodeConfig->encodeCodecConfig.hevcConfig.pixelBitDepthMinus8 = 0;
pIntializeParams->encodeConfig->encodeCodecConfig.hevcConfig.idrPeriod = pIntializeParams->encodeConfig->gopLength;
}
}
These settings are generating images that look like this:
I'd really appreciate any pointers as to why my images are coming out so heavily pixelated when the settings appear to be correct AFAICT.
You are setting the average bitrate to 32768 (INT16_MAX)
pIntializeParams->encodeConfig->rcParams.averageBitRate = INT16_MAX;
I'd use 5000000 for HD and 20000000 for 4k as average bitrate.
FYI: NV_ENC_RC_PARAMS::averageBitRate is a 32 bit unsigned integer (uint32_t)