CUDA H.265 decoder initialization failure


I'm trying to decode an H.265 frame with nvidia_video_codec_sdk. The video size is 192x168, but cuvidCreateDecoder fails with CUDA_ERROR_INVALID_VALUE.

int NvDecoder::HandleVideoSequence(CUVIDEOFORMAT* pVideoFormat) {
  int nDecodeSurface = pVideoFormat->min_num_decode_surfaces;

  // eCodec has been set in the constructor (for parser). Here it's set again
  // for potential correction
  m_eCodec = pVideoFormat->codec;
  m_eChromaFormat = pVideoFormat->chroma_format;
  m_nBitDepthMinus8 = pVideoFormat->bit_depth_luma_minus8;
  m_nBPP = m_nBitDepthMinus8 > 0 ? 2 : 1;
  m_eOutputFormat = cudaVideoSurfaceFormat_NV12;
  m_videoFormat = *pVideoFormat;

  CUVIDDECODECREATEINFO videoDecodeCreateInfo = {0};
  videoDecodeCreateInfo.CodecType = pVideoFormat->codec;
  videoDecodeCreateInfo.ChromaFormat = pVideoFormat->chroma_format;
  videoDecodeCreateInfo.OutputFormat = m_eOutputFormat;
  videoDecodeCreateInfo.bitDepthMinus8 = pVideoFormat->bit_depth_luma_minus8;
  if (pVideoFormat->progressive_sequence)
    videoDecodeCreateInfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
  else
    videoDecodeCreateInfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Adaptive;
  videoDecodeCreateInfo.ulNumOutputSurfaces = 2;
  // With PreferCUVID, JPEG is still decoded by CUDA while video is decoded by
  // NVDEC hardware
  videoDecodeCreateInfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
  videoDecodeCreateInfo.ulNumDecodeSurfaces = nDecodeSurface;
  videoDecodeCreateInfo.vidLock = m_ctxLock;
  videoDecodeCreateInfo.ulWidth = pVideoFormat->coded_width;
  videoDecodeCreateInfo.ulHeight = pVideoFormat->coded_height;
  if (m_nMaxWidth < (int)pVideoFormat->coded_width)
    m_nMaxWidth = pVideoFormat->coded_width;
  if (m_nMaxHeight < (int)pVideoFormat->coded_height)
    m_nMaxHeight = pVideoFormat->coded_height;
  videoDecodeCreateInfo.ulMaxWidth = m_nMaxWidth;
  videoDecodeCreateInfo.ulMaxHeight = m_nMaxHeight;
  videoDecodeCreateInfo.ulTargetWidth = m_nWidth;
  videoDecodeCreateInfo.ulTargetHeight = m_nLumaHeight;
  m_nChromaHeight =
      (int)(ceil(m_nLumaHeight * GetChromaHeightFactor(m_eOutputFormat)));
  m_nNumChromaPlanes = GetChromaPlaneCount(m_eOutputFormat);
  m_nSurfaceHeight = videoDecodeCreateInfo.ulTargetHeight;
  m_nSurfaceWidth = videoDecodeCreateInfo.ulTargetWidth;
  m_displayRect.b = videoDecodeCreateInfo.display_area.bottom;
  m_displayRect.t = videoDecodeCreateInfo.display_area.top;
  m_displayRect.l = videoDecodeCreateInfo.display_area.left;
  m_displayRect.r = videoDecodeCreateInfo.display_area.right;
  videoDecodeCreateInfo.ulIntraDecodeOnly = 1;
  CUDA_DRVAPI_CALL(cuCtxPushCurrent(m_cuContext));
  NVDEC_API_CALL(cuvidCreateDecoder(&m_hDecoder, &videoDecodeCreateInfo)); // fails here with CUDA_ERROR_INVALID_VALUE
  CUDA_DRVAPI_CALL(cuCtxPopCurrent(NULL));
  return nDecodeSurface;
}

My environment:

  • NVIDIA GeForce RTX 3060
  • Driver 470.141.03, CUDA 11.4 (from nvidia-smi)
  • nvidia_video_codec_sdk 11.1.5

1 Answer

Best answer:

I have found the reason: the NVIDIA Video Codec SDK requires a minimum resolution of 144x144. Use cuvidGetDecoderCaps to query the device's decode capabilities, as in the sketch below.

[screenshot in original answer: cuvidGetDecoderCaps output showing the supported resolution range]
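For reference, here is a minimal sketch (not part of the original post) of how that capability check could look. It assumes 8-bit 4:2:0 HEVC and a CUDA context that is already current on the calling thread; the fields follow the CUVIDDECODECAPS struct from nvcuvid.h, and the function name is hypothetical.

#include <cstdio>
#include <nvcuvid.h>

// Sketch: query NVDEC capabilities for 8-bit 4:2:0 HEVC and verify that the
// stream's coded dimensions fall inside the supported range. Assumes a CUDA
// context is already current (e.g. via cuCtxPushCurrent).
bool IsHevcResolutionSupported(unsigned int width, unsigned int height) {
  CUVIDDECODECAPS decodeCaps = {};
  decodeCaps.eCodecType = cudaVideoCodec_HEVC;
  decodeCaps.eChromaFormat = cudaVideoChromaFormat_420;
  decodeCaps.nBitDepthMinus8 = 0;

  if (cuvidGetDecoderCaps(&decodeCaps) != CUDA_SUCCESS || !decodeCaps.bIsSupported) {
    std::printf("HEVC decoding is not supported on this GPU\n");
    return false;
  }

  std::printf("Supported resolution: %ux%u .. %ux%u\n",
              (unsigned)decodeCaps.nMinWidth, (unsigned)decodeCaps.nMinHeight,
              decodeCaps.nMaxWidth, decodeCaps.nMaxHeight);

  // A coded size below nMinWidth/nMinHeight (or above the maximum) will make
  // cuvidCreateDecoder fail with CUDA_ERROR_INVALID_VALUE.
  return width >= decodeCaps.nMinWidth && height >= decodeCaps.nMinHeight &&
         width <= decodeCaps.nMaxWidth && height <= decodeCaps.nMaxHeight;
}

A check like this could run at the start of HandleVideoSequence, before CUVIDDECODECREATEINFO is filled in, so an unsupported stream is rejected with a clear message instead of a failing cuvidCreateDecoder call.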