This post implements a unicast RTSP server that pushes a live H.264 stream, based on the testOnDemandRTSPServer example in live555's testProgs directory and the DeviceSource.cpp file in the liveMedia directory. In my setup, the encoded stream captured from the camera is pushed into a ring-buffer queue, and the RTSP source simply dequeues frames from that queue.
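The ring-buffer queue itself is not part of the listing below; rtsp.cpp only relies on the ringQueue type, the rQueue_data element struct, and the rQueue_de() dequeue call. A minimal sketch of what that interface could look like follows; the field names, rQueue_create(), and rQueue_en() are assumptions inferred from the call sites, not the actual ringQueue.h.
ringQueue.h (assumed interface):
#ifndef _RING_QUEUE_H_
#define _RING_QUEUE_H_
#include <stdint.h>
// One queue element: a caller-provided buffer and its length. On dequeue,
// len is updated to the number of bytes actually copied into buffer.
typedef struct
{
    uint8_t *buffer;
    uint32_t len;
} rQueue_data;
typedef struct ringQueue ringQueue; // opaque; defined in the queue's source file
ringQueue *rQueue_create(uint32_t depth);          // hypothetical constructor
int rQueue_en(ringQueue *q, const rQueue_data *e); // enqueue; -1 when full
int rQueue_de(ringQueue *q, rQueue_data *e);       // dequeue; -1 when empty
#endif // _RING_QUEUE_H_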
rtsp.h:
#ifndef _RTSP_H_
#define _RTSP_H_
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
void create_rtsp_server(void);
class H264LiveServerMediaSession : public OnDemandServerMediaSubsession
{
public:
static H264LiveServerMediaSession *createNew(UsageEnvironment &env, Boolean reuseFirstSource);
void checkForAuxSDPLine1();
void afterPlayingDummy1();
protected:
H264LiveServerMediaSession(UsageEnvironment &env, Boolean reuseFirstSource);
virtual ~H264LiveServerMediaSession(void);
void setDoneFlag() { fDoneFlag = ~0; }
protected:
virtual char const *getAuxSDPLine(RTPSink *rtpSink, FramedSource *inputSource);
virtual FramedSource *createNewStreamSource(unsigned clientSessionId, unsigned &estBitrate);
virtual RTPSink *createNewRTPSink(Groupsock *rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource *inputSource);
private:
char *fAuxSDPLine;
char fDoneFlag;
RTPSink *fDummyRTPSink;
};
// A custom data source class that serves the live stream
class H264VideoStreamSource : public FramedSource
{
public:
static H264VideoStreamSource *createNew(UsageEnvironment &env);
unsigned maxFrameSize() const;
protected:
H264VideoStreamSource(UsageEnvironment &env);
virtual ~H264VideoStreamSource();
private:
virtual void doGetNextFrame();
virtual void doStopGettingFrames();
};
#endif // _RTSP_H_
rtsp.cpp:
#include <iostream>
#include <cstring>    // memcpy
#include <cstdint>    // uint8_t
#include <sys/time.h> // gettimeofday
#include "rtsp.h"
#include "ringQueue.h"
extern ringQueue *rQueue;
void create_rtsp_server(void)
{
TaskScheduler *scheduler;
UsageEnvironment *env;
RTSPServer *rtspServer;
scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
rtspServer = RTSPServer::createNew(*env, 8554);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
return;
}
ServerMediaSession *sms = ServerMediaSession::createNew(*env, "live"); // "live" becomes the stream name in the URL
sms->addSubsession(H264LiveServerMediaSession::createNew(*env, true));
rtspServer->addServerMediaSession(sms);
char *url = rtspServer->rtspURL(sms);
*env << "Play the stream using url " << url << "\n";
delete[] url;
env->taskScheduler().doEventLoop(); // enter the event loop (never returns)
}
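Since doEventLoop() never returns, create_rtsp_server() has to run on its own thread while the capture side keeps filling the queue. Below is a minimal usage sketch; camera_capture_loop() and rQueue_create() are hypothetical placeholders for the capture code, which this post does not cover.
main.cpp (usage sketch):
#include <thread>
#include "rtsp.h"
#include "ringQueue.h"
ringQueue *rQueue = NULL; // the queue rtsp.cpp declares as extern
extern void camera_capture_loop(ringQueue *q); // hypothetical producer
int main(void)
{
    rQueue = rQueue_create(32); // hypothetical constructor, see sketch above
    std::thread capture(camera_capture_loop, rQueue);
    create_rtsp_server(); // blocks inside doEventLoop()
    capture.join();
    return 0;
}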
// H264LiveServerMediaSession implementation:
H264LiveServerMediaSession *H264LiveServerMediaSession::createNew(UsageEnvironment &env, Boolean reuseFirstSource)
{
return new H264LiveServerMediaSession(env, reuseFirstSource);
}
H264LiveServerMediaSession::H264LiveServerMediaSession(UsageEnvironment &env, Boolean reuseFirstSource) : OnDemandServerMediaSubsession(env, reuseFirstSource)
{
fAuxSDPLine = NULL;
fDoneFlag = 0;
fDummyRTPSink = NULL;
}
H264LiveServerMediaSession::~H264LiveServerMediaSession()
{
delete[] fAuxSDPLine;
}
static void afterPlayingDummy(void *clientData)
{
H264LiveServerMediaSession *subsess = (H264LiveServerMediaSession *)clientData;
subsess->afterPlayingDummy1();
}
void H264LiveServerMediaSession::afterPlayingDummy1()
{
envir().taskScheduler().unscheduleDelayedTask(nextTask());
setDoneFlag();
}
static void checkForAuxSDPLine(void *clientData)
{
H264LiveServerMediaSession *subsess = (H264LiveServerMediaSession *)clientData;
subsess->checkForAuxSDPLine1();
}
void H264LiveServerMediaSession::checkForAuxSDPLine1()
{
nextTask() = NULL;
char const *dasl;
if (fAuxSDPLine != NULL)
{
setDoneFlag();
}
else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL)
{
fAuxSDPLine = strDup(dasl);
fDummyRTPSink = NULL;
setDoneFlag();
}
else if (!fDoneFlag)
{
// try again after a brief delay:
int uSecsToDelay = 100000; // 100 ms
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
(TaskFunc *)checkForAuxSDPLine, this);
}
}
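// Why the dummy sink below: the H.264 SDP needs a "sprop-parameter-sets"
// line built from the stream's SPS/PPS, which are only known after the
// framer has seen some data. So getAuxSDPLine() briefly starts a dummy
// sink on the source and polls (checkForAuxSDPLine) until the line exists.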
char const *H264LiveServerMediaSession::getAuxSDPLine(RTPSink *rtpSink, FramedSource *inputSource)
{
if (fAuxSDPLine != NULL)
{
return fAuxSDPLine;
}
if (fDummyRTPSink == NULL)
{
fDummyRTPSink = rtpSink;
fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
checkForAuxSDPLine(this);
}
envir().taskScheduler().doEventLoop(&fDoneFlag);
return fAuxSDPLine;
}
FramedSource *H264LiveServerMediaSession::createNewStreamSource(unsigned clientSessionId, unsigned &estBitrate)
{
estBitrate = 5000; // kbps, estimate
H264VideoStreamSource *videoSource = H264VideoStreamSource::createNew(envir());
if (videoSource == NULL)
{
return NULL;
}
return H264VideoStreamFramer::createNew(envir(), videoSource);
}
RTPSink *H264LiveServerMediaSession::createNewRTPSink(Groupsock *rtpGroupsock,
                                                      unsigned char rtpPayloadTypeIfDynamic,
                                                      FramedSource *inputSource)
{
// OutPacketBuffer::maxSize = 2000000;
return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
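The commented-out OutPacketBuffer::maxSize line above deserves a note: live555 truncates any frame larger than this static limit when packetizing, which typically corrupts large I-frames on the client. If your encoder produces big keyframes, raising the limit once, before any sink is created, avoids that. The exact value is a judgment call; it just has to exceed your largest encoded frame.
// e.g. near the top of create_rtsp_server():
OutPacketBuffer::maxSize = 2000000; // bytes; the library default is much smaller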
// H264VideoStreamSource implementation:
H264VideoStreamSource *H264VideoStreamSource::createNew(UsageEnvironment &env)
{
return new H264VideoStreamSource(env);
}
H264VideoStreamSource::H264VideoStreamSource(UsageEnvironment &env) : FramedSource(env)
{
}
H264VideoStreamSource::~H264VideoStreamSource()
{
}
unsigned int H264VideoStreamSource::maxFrameSize() const
{
return 100000; // sets the value used for fMaxSize
}
// Called from the event loop when the queue was empty; retries the read.
// doGetNextFrame() is public in FramedSource, so call through that type.
static void retryGetNextFrame(void *clientData)
{
    FramedSource *source = (FramedSource *)clientData;
    source->doGetNextFrame();
}
void H264VideoStreamSource::doGetNextFrame()
{
    rQueue_data e;
    static uint8_t buffer_data[1024 * 512] = {0};
    // The sink has not asked for data yet
    if (!isCurrentlyAwaitingData())
    {
        std::cout << "doGetNextFrame() called while not awaiting data" << std::endl;
        return;
    }
    // Take one encoded frame out of the queue
    e.buffer = buffer_data;
    e.len = sizeof(buffer_data);
    if (rQueue_de(rQueue, &e) == -1)
    {
        // Queue is empty: retry shortly instead of delivering a bogus frame
        nextTask() = envir().taskScheduler().scheduleDelayedTask(10000, // 10 ms
                                                                 (TaskFunc *)retryGetNextFrame, this);
        return;
    }
    // Truncate if the frame is larger than the downstream buffer
    if (e.len > fMaxSize)
    {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = e.len - fMaxSize;
    }
    else
    {
        fFrameSize = e.len;
    }
    gettimeofday(&fPresentationTime, NULL);
    memcpy(fTo, buffer_data, fFrameSize);
    FramedSource::afterGetting(this);
}
void H264VideoStreamSource::doStopGettingFrames()
{
    // Cancel any pending retry scheduled by doGetNextFrame()
    envir().taskScheduler().unscheduleDelayedTask(nextTask());
    std::cout << "doStopGettingFrames" << std::endl;
}
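For completeness, the producer side might look like the sketch below. The encoder call camera_read_encoded_frame() is a hypothetical placeholder; the real requirement is that each buffer enqueued is Annex-B data (NAL units with start codes), since the dequeued bytes are fed to H264VideoStreamFramer, which parses start codes downstream.
capture.cpp (hypothetical producer sketch):
#include <stdint.h>
#include "ringQueue.h"
// Hypothetical camera/encoder call: fills buf with one Annex-B encoded
// frame (start code + NAL units) and returns its length in bytes.
extern int camera_read_encoded_frame(uint8_t *buf, uint32_t maxLen);
void camera_capture_loop(ringQueue *q)
{
    static uint8_t frame[1024 * 512];
    for (;;)
    {
        int len = camera_read_encoded_frame(frame, sizeof(frame));
        if (len <= 0)
            continue;
        rQueue_data e;
        e.buffer = frame;
        e.len = (uint32_t)len;
        rQueue_en(q, &e); // drops or blocks when full, depending on the queue
    }
}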