I am decoding an RTSP video stream with FFmpeg. At display time (the call to cv::imshow(...)), I get the following warning:
[swscaler @ 0d55e5c0] deprecated pixel format used, make sure you did set range correctly
I am converting the pixel format from AV_PIX_FMT_YUVJ420P to AV_PIX_FMT_YUV420P, but I still get the warning above. Any help is appreciated.
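For reference, the scale-and-display step boils down to the snippet below (trimmed from the full listing that follows, same variable names; the stream reports AV_PIX_FMT_YUVJ420P as pCodecCtx->pix_fmt):

// simplified version of the conversion/display step that triggers the warning
img_convert_ctx = sws_getCachedContext(img_convert_ctx,
    pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
    pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
    SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
    0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
cv::Mat img(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
cv::imshow("display", img);

The full function: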
int Decodestream()
{
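// register demuxers, codecs and devices, and bring up networking for the RTSP input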
av_register_all();
avdevice_register_all();
avcodec_register_all();
avformat_network_init();
const char *filenameSrc = "rtsp://192.168.1.67/gnz_media/second";
AVCodecContext *pCodecCtx;
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVCodec * pCodec;
AVFrame *pFrame, *pFrameRGB;
if(avformat_open_input(&pFormatCtx,filenameSrc,NULL,NULL) != 0)
{return -1;}
if(avformat_find_stream_info(pFormatCtx, NULL) < 0)
{return -1;}
av_dump_format(pFormatCtx, 0, filenameSrc, 0);
int videoStream = -1;
for(int i=0; i < pFormatCtx->nb_streams; i++)
{
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoStream = i;
break;
}
}
if(videoStream == -1) return -1 ;
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
{return -1;} //codec not found
if(avcodec_open2(pCodecCtx,pCodec,NULL) < 0)
{ return -1;}
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
uint8_t *buffer;
int numBytes;
AVPixelFormat pFormat;
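// remap the deprecated full-range JPEG (YUVJ*) pixel formats to their non-J equivalents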
switch (pFormatCtx->streams[videoStream]->codec->pix_fmt)
{
case AV_PIX_FMT_YUVJ420P : pFormat = AV_PIX_FMT_YUV420P; break;
case AV_PIX_FMT_YUVJ422P : pFormat = AV_PIX_FMT_YUV422P; break;
case AV_PIX_FMT_YUVJ444P : pFormat = AV_PIX_FMT_YUV444P; break;
case AV_PIX_FMT_YUVJ440P : pFormat = AV_PIX_FMT_YUV440P; break;
default:
pFormat = pFormatCtx->streams[videoStream]->codec->pix_fmt;
break;
}
numBytes = avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height); // the sws_scale destination below is BGR24
buffer = (uint8_t *) av_malloc(numBytes*sizeof(uint8_t));
avpicture_fill((AVPicture *) pFrameRGB, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
int res, frameFinished;
AVPacket packet;
struct SwsContext *img_convert_ctx = NULL; // reused across iterations by sws_getCachedContext
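// read packets from the RTSP stream; decode and display the video ones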
while((res = av_read_frame(pFormatCtx,&packet)) >= 0)
{
if(packet.stream_index == videoStream){
avcodec_decode_video2(pCodecCtx,pFrame,&frameFinished,&packet);
if(frameFinished){
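// convert the decoded frame to BGR24 so it can be wrapped in a cv::Mat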
img_convert_ctx = sws_getCachedContext(img_convert_ctx,
    pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
    pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
    SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
    0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
cv::Mat img(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
cv::imshow("display",img);
cv::waitKey(10);
}
}
av_free_packet(&packet); // release the packet allocated by av_read_frame
}
//Memory clean up code goes here
return 0;
}