I need to play RTSP video from an IP camera.
If I use ffplay or VLC, everything is fine.
But if I use FFmpeg API (C#, FFmpeg.AutoGen), then artifacts appear on the frames. What could be wrong with using FFmpeg API?
AVFormatContext* pFormatContext = ffmpeg.avformat_alloc_context();
int error;
error = ffmpeg.avformat_open_input(&pFormatContext, rtspUrl, null, null);
if (error != 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
if (error != 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
AVStream* pStream = null;
for (var i = 0; i < pFormatContext->nb_streams; i++)
{
if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
{
pStream = pFormatContext->streams[i];
}
}
if (pStream == null)
throw new ApplicationException(@"Could not found video stream.");
AVCodecContext codecContext = *pStream->codec;
int width = codecContext.width;
int height = codecContext.height;
AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
AVCodecID codecId = codecContext.codec_id;
AVPixelFormat destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecId == AVCodecID.AV_CODEC_ID_H264)
{
sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
}
SwsContext* pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt, width, height, destinationPixFmt, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
if (pConvertContext == null)
throw new ApplicationException(@"Could not initialize the conversion context.");
AVFrame* pConvertedFrame = ffmpeg.av_frame_alloc();
int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
IntPtr convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
byte_ptrArray4 dstData = new byte_ptrArray4();
int_array4 dstLinesize = new int_array4();
ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);
WorkEnabled = true;
AVCodec* pCodec = ffmpeg.avcodec_find_decoder(codecId);
if (pCodec == null)
throw new ApplicationException(@"Unsupported codec.");
AVCodecContext* pCodecContext = &codecContext;
if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
{
pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
}
error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
AVFrame* pDecodedFrame = ffmpeg.av_frame_alloc();
AVPacket packet = new AVPacket();
AVPacket* pPacket = &packet;
ffmpeg.av_init_packet(pPacket);
Bitmap bitmap = null;
Bitmap previousbBitmap = null;
while (WorkEnabled)
{
try
{
do
{
error = ffmpeg.av_read_frame(pFormatContext, pPacket);
if (error == ffmpeg.AVERROR_EOF)
{
Thread.Sleep(1000);
break;
}
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
if (pPacket->stream_index != pStream->index) continue;
error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) && WorkEnabled);
if (error == ffmpeg.AVERROR_EOF)
{
Thread.Sleep(1000);
continue;
}
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
if (pPacket->stream_index != pStream->index) continue;
// YUV->RGB
ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
}
catch (Exception)
{}
finally
{
ffmpeg.av_packet_unref(pPacket);
ffmpeg.av_frame_unref(pDecodedFrame);
}
bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
previousbBitmap = bitmap;
System.Windows.Application.Current.Dispatcher.Invoke(() => _pictureBox.Image = bitmap);
}
3