FFmpeg支持RTSP流接收功能,协议实现已经较为完善,利用FFmpeg还可以将RTSP收到的流录制为某种容器格式。这里我向大家介绍怎么用它的API来实现RTSP接收和录制这两个功能。
我把接收RTSP和录制文件的逻辑都用一个类RtspStreamMuxTask来处理,下面给出这个类的头文件和源文件。
RtspStreamMuxTask.h文件:
#ifndef RtspStreamMuxTask_H
#define RtspStreamMuxTask_H
#ifdef __cplusplus
extern "C" {
#endif
#ifdef HAVE_AV_CONFIG_H
#undef HAVE_AV_CONFIG_H
#endif
#include "./include/libavcodec/avcodec.h"
#include "./include/libavutil/mathematics.h"
#include "./include/libavutil/avutil.h"
#include "./include/libswscale/swscale.h"
#include "./include/libavutil/fifo.h"
#include "./include/libavformat/avformat.h"
#include "./include/libavutil/opt.h"
#include "./include/libavutil/error.h"
#include "./include/libswresample/swresample.h"
#ifdef __cplusplus
}
#endif
#pragma comment( lib, "avcodec.lib")
#pragma comment( lib, "avutil.lib")
#pragma comment( lib, "avformat.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment( lib, "swscale.lib" )
#ifndef CodecID
#define CodecID AVCodecID
#endif
// Receives an RTSP stream with FFmpeg and remuxes it into a container file.
// Usage: SetInputUrl() + SetOutputPath(), then StartRecvStream(); the work
// runs on a dedicated Win32 thread until StopRecvStream() is called.
class RtspStreamMuxTask
{
public:
RtspStreamMuxTask();
virtual ~RtspStreamMuxTask();
// Set the RTSP source URL; must be called before StartRecvStream().
void SetInputUrl(string rtspUrl);
// Set the output file path for the recorded container.
void SetOutputPath(string outputPath);
// Spawn the worker thread that opens input/output and pumps packets.
void StartRecvStream();
// Signal the worker to stop, join it, and release the input context.
void StopRecvStream();
void GetVideoSize(long & width, long & height) // get the video resolution
{
width = coded_width;
height = coded_height;
}
private:
// Worker-thread body: open input/output, mux until stopped, clean up.
void run();
int OpenInputStream();
void CloseInputStream();
// Packet pump: reads from the demuxer and writes to the muxer.
void readAndMux();
// Win32 thread entry point; pParam is the owning RtspStreamMuxTask*.
static DWORD WINAPI ReadingThrd(void * pParam);
int openOutputStream();
void closeOutputStream();
void ReleaseCodecs();
private:
string m_inputUrl;      // RTSP source URL
string m_outputFile;    // output container path
AVFormatContext* m_inputAVFormatCxt;    // demuxer context for the RTSP input
// Bitstream filters (legacy FFmpeg API) for AAC/H.264 packet conversion.
AVBitStreamFilterContext* m_bsfcAAC;
AVBitStreamFilterContext* m_bsfcH264;
int m_videoStreamIndex; // index of the video stream in the input, -1 if none
int m_audioStreamIndex; // index of the audio stream in the input, -1 if none
AVFormatContext* m_outputAVFormatCxt;   // muxer context for the output file
char m_tmpErrString[64];    // scratch buffer for av_strerror-style messages
bool m_stop_status;     // set true to ask the worker loop to exit
HANDLE m_hReadThread;   // worker thread handle, NULL when not running
BOOL m_bInputInited;    // input context opened successfully
BOOL m_bOutputInited;   // output context opened successfully
int coded_width, coded_height;  // video resolution reported by the input
int m_frame_rate;       // frames per second (defaults to 25)
};
#endif // RtspStreamMuxTask_H
RtspStreamMuxTask.cpp文件:
#include "stdafx.h"
#include "RtspStreamMuxTask.h"

#include <sstream>
#include <string>
#include <utility>
// Convert an integer to its decimal string representation.
// The file already relies on C++11 (nullptr), so delegate to
// std::to_string instead of hand-rolling an ostringstream round-trip.
// Kept as a free function for compatibility with existing callers.
std::string to_string(int n)
{
    return std::to_string(n);
}
//
// Construct an idle task: nothing is allocated until StartRecvStream().
RtspStreamMuxTask::RtspStreamMuxTask()
{
    // FFmpeg contexts and the worker handle start out empty.
    m_inputAVFormatCxt = nullptr;
    m_outputAVFormatCxt = nullptr;
    m_bsfcAAC = nullptr;
    m_bsfcH264 = nullptr;
    m_hReadThread = NULL;

    // No streams discovered yet.
    m_videoStreamIndex = -1;
    m_audioStreamIndex = -1;

    // State flags and cached stream properties.
    m_stop_status = false;
    m_bInputInited = FALSE;
    m_bOutputInited = FALSE;
    coded_width = coded_height = 0;
    m_frame_rate = 25; // default until the real rate is known

    /* register all codecs, demuxers and protocols with FFmpeg */
    avcodec_register_all();
    av_register_all();
}
// Destructor: make sure the worker thread is joined and the input
// context released before the object disappears.
RtspStreamMuxTask::~RtspStreamMuxTask()
{
    StopRecvStream();
}
// Remember the RTSP source URL; takes effect on the next StartRecvStream().
void RtspStreamMuxTask::SetInputUrl(string rtspUrl)
{
    // Sink parameter taken by value: move it instead of copying again.
    m_inputUrl = std::move(rtspUrl);
}
// Remember the output container file path; takes effect on the next StartRecvStream().
void RtspStreamMuxTask::SetOutputPath(string outputPath)
{
    // Sink parameter taken by value: move it instead of copying again.
    m_outputFile = std::move(outputPath);
}
void RtspStreamMuxTask::StartRecvStream()
{
if(m_inputUrl.empty())
return;
m_videoStreamIndex = -1;
m_audioStreamIndex = -1;
m_bInputInited = FALSE;
m_bOutputInited = FALSE;
coded_width = coded_height = 0;
DWORD threadID = 0;
m_hReadThread = CreateThread(NULL, 0, ReadingThrd, this, 0, &threadID);
}
void RtspStreamMuxTask::StopRecvStream()
{
m_stop_status = true;
if (m_hReadThread != NULL)
{
WaitForSingleObject(m_hReadThread, INFINITE);
CloseHandle(m_hReadThread);
m_hReadThread = NULL;
}
CloseInputStream();
}
// Win32 thread entry point: forwards to the owning task's run() loop.
DWORD WINAPI RtspStreamMuxTask::ReadingThrd(void * pParam)
{
    RtspStreamMuxTask* self = static_cast<RtspStreamMuxTask*>(pParam);
    self->run();
    OutputDebugString("ReadingThrd exited\n");
    return 0;
}
void RtspStreamMuxTask::run()
{
try
{
m_stop_status = false;
OpenInputStream();
openOutputStream();
m_stop_status = false;
readAndMux();
CloseInputStream();
closeOutputStream();
}
catch(std::exception& e)
{
TRACE("%s \n", e.what());
CloseInputStream();
}
}
int RtspStreamMuxTask::OpenInputStream()
{
if (m_inputAVFormatCxt)
{
string strError = ("already has input avformat");
TRACE("%s \n", strError.c_str());
return -1;
版权声明:本文为toshiba689原创文章,遵循 CC 4.0 BY-SA 版权协议,转载请附上原文出处链接和本声明。