Receiving RTSP Stream Using FFmpeg Library

Output RTSP stream with ffmpeg

I solved the integer division by zero by building FFmpeg on my Windows instance and debugging the av_interleaved_write_frame call. It turned out the exception was caused by the pts not being set on the video stream object.

Adding the line below to the while loop in the main function fixed the problem:

video_st->pts.val += av_rescale_q(1, video_st->codec->time_base, video_st->time_base);
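
With the time bases used in the sample below (codec time_base 1/STREAM_FRAME_RATE = 1/25, stream time_base 1/90000), av_rescale_q(1, video_st->codec->time_base, video_st->time_base) works out to 90000 / 25 = 3600, i.e. the stream pts advances by 3600 ticks per encoded frame.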

Here's a sample that works to get an H264-encoded dummy stream to a Wowza server via FFmpeg's RTSP pipeline.

// Roughly based on: https://ffmpeg.org/doxygen/trunk/muxing_8c-source.html

#include <chrono>
#include <thread>
#include <tchar.h>

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libswscale/swscale.h>
#include <libavutil/time.h>
}

#pragma comment(lib,"libavformat/libavformat.a")
#pragma comment(lib,"libavcodec/libavcodec.a")
#pragma comment(lib,"libavutil/libavutil.a")
#pragma comment(lib,"libswscale/libswscale.a")
#pragma comment(lib,"x264.lib")
#pragma comment(lib,"libswresample/libswresample.a")

using namespace std;

static int video_is_eof;

#define STREAM_DURATION 20
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */ //AV_PIX_FMT_NV12;
#define VIDEO_CODEC_ID AV_CODEC_ID_H264

/* video output */
static AVFrame *frame;
static AVPicture src_picture, dst_picture;

/* Add an output stream. */
static AVStream *add_stream(AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
{
AVCodecContext *c;
AVStream *st = NULL; /* initialized, since st is returned even when the encoder lookup fails */

/* find the encoder */
*codec = avcodec_find_encoder(codec_id);
if (!(*codec)) {
av_log(NULL, AV_LOG_ERROR, "Could not find encoder for '%s'.\n", avcodec_get_name(codec_id));
}
else {
st = avformat_new_stream(oc, *codec);
if (!st) {
av_log(NULL, AV_LOG_ERROR, "Could not allocate stream.\n");
}
else {
st->id = oc->nb_streams - 1;
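/* 90000 Hz is the standard RTP clock rate for video, which the RTSP/RTP muxer expects */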
st->time_base.den = st->pts.den = 90000;
st->time_base.num = st->pts.num = 1;

c = st->codec;
c->codec_id = codec_id;
c->bit_rate = 400000;
c->width = 352;
c->height = 288;
c->time_base.den = STREAM_FRAME_RATE;
c->time_base.num = 1;
c->gop_size = 12; /* emit one intra frame every twelve frames at most */
c->pix_fmt = STREAM_PIX_FMT;
}
}

return st;
}

static int open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st)
{
int ret;
AVCodecContext *c = st->codec;

/* open the codec */
ret = avcodec_open2(c, codec, NULL);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Could not open video codec.\n", avcodec_get_name(c->codec_id));
}
else {

/* allocate and init a re-usable frame */
frame = av_frame_alloc();
if (!frame) {
av_log(NULL, AV_LOG_ERROR, "Could not allocate video frame.\n");
ret = -1;
}
else {
frame->format = c->pix_fmt;
frame->width = c->width;
frame->height = c->height;

/* Allocate the encoded raw picture. */
ret = avpicture_alloc(&dst_picture, c->pix_fmt, c->width, c->height);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Could not allocate picture.\n");
}
else {
/* copy data and linesize picture pointers to frame */
*((AVPicture *)frame) = dst_picture;
}
}
}

return ret;
}

/* Prepare a dummy image. */
static void fill_yuv_image(AVPicture *pict, int frame_index, int width, int height)
{
int x, y, i;

i = frame_index;

/* Y */
for (y = 0; y < height; y++)
for (x = 0; x < width; x++)
pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;

/* Cb and Cr */
for (y = 0; y < height / 2; y++) {
for (x = 0; x < width / 2; x++) {
pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
}
}
}

static int write_video_frame(AVFormatContext *oc, AVStream *st, int frameCount)
{
int ret = 0;
AVCodecContext *c = st->codec;

fill_yuv_image(&dst_picture, frameCount, c->width, c->height);

AVPacket pkt = { 0 };
int got_packet;
av_init_packet(&pkt);

/* encode the image */
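/* frame->pts is in codec time_base units (1/STREAM_FRAME_RATE seconds per tick) */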
frame->pts = frameCount;
ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error encoding video frame.\n");
}
else {
if (got_packet) {
pkt.stream_index = st->index;
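/* rescale the packet pts from codec ticks (1/25 s) to the 90 kHz stream time_base */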
pkt.pts = av_rescale_q_rnd(pkt.pts, c->time_base, st->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
ret = av_write_frame(oc, &pkt);

if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while writing video frame.\n");
}
}
}

return ret;
}

int _tmain(int argc, _TCHAR* argv[])
{
printf("starting...\n");

const char *url = "rtsp://test:password@192.168.33.19:1935/ffmpeg/0";
//const char *url = "rtsp://192.168.33.19:1935/ffmpeg/0";

AVFormatContext *outContext;
AVStream *video_st;
AVCodec *video_codec;
int ret = 0, frameCount = 0;
std::chrono::system_clock::time_point startSend; /* declared up front so the gotos below do not jump over its initialization */

av_log_set_level(AV_LOG_DEBUG);
//av_log_set_level(AV_LOG_TRACE);

av_register_all();
avformat_network_init();

avformat_alloc_output_context2(&outContext, NULL, "rtsp", url);

if (!outContext) {
av_log(NULL, AV_LOG_FATAL, "Could not allocate an output context for '%s'.\n", url);
goto end;
}

if (!outContext->oformat) {
av_log(NULL, AV_LOG_FATAL, "Could not create the output format for '%s'.\n", url);
goto end;
}

video_st = add_stream(outContext, &video_codec, VIDEO_CODEC_ID);

/* Now that all the parameters are set, we can open the video codec and allocate the necessary encode buffers. */
if (video_st) {
av_log(NULL, AV_LOG_DEBUG, "Video stream codec %s.\n ", avcodec_get_name(video_st->codec->codec_id));

ret = open_video(outContext, video_codec, video_st);
if (ret < 0) {
av_log(NULL, AV_LOG_FATAL, "Open video stream failed.\n");
goto end;
}
}
else {
av_log(NULL, AV_LOG_FATAL, "Add video stream for the codec '%s' failed.\n", avcodec_get_name(VIDEO_CODEC_ID));
goto end;
}

av_dump_format(outContext, 0, url, 1);

ret = avformat_write_header(outContext, NULL);
if (ret != 0) {
av_log(NULL, AV_LOG_ERROR, "Failed to connect to RTSP server for '%s'.\n", url);
goto end;
}

printf("Press any key to start streaming...\n");
getchar();

startSend = std::chrono::system_clock::now();

while (video_st) {
frameCount++;
auto startFrame = std::chrono::system_clock::now();

ret = write_video_frame(outContext, video_st, frameCount);

if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Write video frame failed for '%s'.\n", url);
goto end;
}

/* The fix described above: advance the stream pts by one encoded frame, rescaled to the stream time_base. */
video_st->pts.val += av_rescale_q(1, video_st->codec->time_base, video_st->time_base);

auto streamDuration = std::chrono::duration_cast<chrono::milliseconds>(std::chrono::system_clock::now() - startSend).count();

printf("Elapsed time %ldms, video stream pts %ld.\n", streamDuration, video_st->pts.val);

if (streamDuration / 1000.0 > STREAM_DURATION) {
break;
}
else {
auto frameDuration = std::chrono::duration_cast<chrono::milliseconds>(std::chrono::system_clock::now() - startFrame).count();
std::this_thread::sleep_for(std::chrono::milliseconds((long)(1000.0 / STREAM_FRAME_RATE - frameDuration)));
}
}

if (video_st) {
av_write_trailer(outContext); /* finalize the muxer; required after a successful avformat_write_header */
avcodec_close(video_st->codec);
av_free(src_picture.data[0]);
av_free(dst_picture.data[0]);
av_frame_free(&frame);
}

avformat_free_context(outContext);

end:
printf("finished.\n");

getchar();

return 0;
}

Possible for ffmpeg to downsample over time, serve RTSP?

Yes. All we have to do is add the -r 0.2 argument and re-encode the video.

It is also recommended to add -tune zerolatency (which avoids encoder-side buffering) or -g 1 (which makes every frame a key frame), in case video latency is relevant.


Example:

Receiving an RTSP stream from localhost and re-streaming it at 0.2fps (to localhost on a different port):

ffmpeg -rtsp_flags listen -rtsp_transport tcp -stimeout 1000000 -i rtsp://127.0.0.1:10000/live.stream -r 0.2 -vcodec libx264 -tune zerolatency -pix_fmt yuv420p -rtsp_transport tcp -f rtsp rtsp://127.0.0.1:20000/live.stream

Testing:

For testing I simulated the RTSP camera with FFmpeg (streaming synthetic video at 25fps).

The RTSP stream is captured by another FFmpeg process that reduces the rate to 0.2fps.

The 0.2fps video is captured and displayed using FFplay.

The test is implemented as a batch file:

::Play the video for testing
start ffplay -rtsp_flags listen -rtsp_transport tcp -flags low_delay -vf setpts=0 -listen_timeout 1000000 rtsp://127.0.0.1:20000/live.stream

::Wait 5 seconds
ping 127.0.0.1 -n 5 > nul

::Capture the RTSP camera at 25fps, convert to 0.2fps (with re-encoding)
start ffmpeg -rtsp_flags listen -rtsp_transport tcp -stimeout 1000000 -i rtsp://127.0.0.1:10000/live.stream -r 0.2 -vcodec libx264 -tune zerolatency -pix_fmt yuv420p -rtsp_transport tcp -f rtsp rtsp://127.0.0.1:20000/live.stream

::Wait 5 seconds
ping 127.0.0.1 -n 5 > nul

::Simulate an RTSP camera at 25fps
ffmpeg -re -f lavfi -i testsrc=size=192x108:rate=25 -vcodec libx264 -pix_fmt yuv420p -g 30 -rtsp_transport tcp -f rtsp -muxdelay 0.1 rtsp://127.0.0.1:10000/live.stream

The output starts out unstable and settles after a few frames.

(We may use the select filter to solve this; a sketch follows.)
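
For example, here is a rough sketch (assuming the source camera runs at 25fps, so passing one frame out of every 125 yields 0.2fps) that uses the select filter instead of -r:

ffmpeg -rtsp_flags listen -rtsp_transport tcp -stimeout 1000000 -i rtsp://127.0.0.1:10000/live.stream -vf "select='not(mod(n,125))'" -vsync vfr -vcodec libx264 -tune zerolatency -pix_fmt yuv420p -rtsp_transport tcp -f rtsp rtsp://127.0.0.1:20000/live.stream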

Sample frames:

[Six sample frame images omitted.]


