Overview
Live555 is an open-source RTSP library written in C++. Its built-in demos stream media from files, and it already implements the streaming (packetization) of many types of audio and video.
RTSP in brief
RTSP is a media transport control protocol that runs over TCP; its main jobs are the handshake when a connection is established and the control of media playback. The media data itself is carried over RTP, while transport statistics and control feedback go over RTCP. Whether RTP and RTCP run over TCP or UDP is decided in the RTSP SETUP phase, according to the client's choice.
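That choice shows up in the Transport header of the client's SETUP request. The two lines below are illustrative values only, not taken from a real capture:

Transport: RTP/AVP;unicast;client_port=50000-50001      (RTP/RTCP over UDP, on ports proposed by the client)
Transport: RTP/AVP/TCP;unicast;interleaved=0-1           (RTP/RTCP interleaved on the existing RTSP TCP connection)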
Implementing custom data-stream transmission
First, implement a fairly generic MediaSubsession that inherits from OnDemandServerMediaSubsession:
#include "OnDemandServerMediaSubsession.hh"

class ComMediaSubsession: public OnDemandServerMediaSubsession
{
public:
    static ComMediaSubsession* createNew(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                                         bool reuseFirstSource, portNumBits initalNumPort = 6970);
protected:
    ComMediaSubsession(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                       bool reuseFirstSource, portNumBits initalNumPort);
    virtual ~ComMediaSubsession();
protected:
    // OnDemandServerMediaSubsession hooks: create the per-client data source and RTP sink
    virtual FramedSource* createNewStreamSource(unsigned clientsessionId, unsigned& estBitrate);
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);
public:
    int fStreamType;
    int fVideoType;
    int fChannelNO;
};
#include "ComMediaSubsession.h"
#include "ComFrameSource.h"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include "MP3ADURTPSink.hh"
#include "MPEG1or2AudioRTPSink.hh"
ComMediaSubsession::ComMediaSubsession(UsageEnvironment& env, int streamType, int videoType, int channelNO, bool reuseFirstSource, portNumBits initalNumPort)
    :OnDemandServerMediaSubsession(env, reuseFirstSource, initalNumPort), fStreamType(streamType), fVideoType(videoType), fChannelNO(channelNO)
{
}
ComMediaSubsession::~ComMediaSubsession()
{
}
ComMediaSubsession* ComMediaSubsession::createNew(UsageEnvironment& env, int streamType, int videoType, int channelNO,
                                                  bool reuseFirstSource, portNumBits initalNumPort)
{
    return new ComMediaSubsession(env, streamType, videoType, channelNO, reuseFirstSource, initalNumPort);
}
FramedSource* ComMediaSubsession::createNewStreamSource(unsigned clientsessionId, unsigned& estBitrate)
{
    if (fVideoType == 0x01)
    {
        // H.264 video: wrap the raw source in a framer that splits it into NAL units
        estBitrate = 2000; // kbps
        ComFrameSource* source = ComFrameSource::createNew(envir(), fStreamType, fChannelNO, 0);
        if (source == NULL)
        {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType); // project's own logging macro
            return NULL;
        }
        return H264VideoStreamFramer::createNew(envir(), source);
    }
    else if (fVideoType == 0x02)
    {
        // Second payload type: the raw source is handed straight to the RTP sink (see createNewRTPSink below)
        estBitrate = 128; // kbps
        ComFrameSource* source = ComFrameSource::createNew(envir(), fStreamType, fChannelNO, 1);
        if (source == NULL)
        {
            DBG_LIVE555_PRINT("create source failed videoType:%d!\n", fVideoType);
            return NULL;
        }
        return source;
    }
    return NULL;
}
RTPSink* ComMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
    if (fVideoType == 0x01)
    {
        // H.264 video
        return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    }
    else if (fVideoType == 0x02)
    {
        // Note: MP3ADURTPSink expects MP3 ADU audio frames; if this branch is really meant to
        // carry MPEG-4 video, use MPEG4ESVideoRTPSink here (plus an MPEG4VideoStreamFramer
        // in createNewStreamSource) instead.
        return MP3ADURTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    }
    return NULL;
}
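One caveat for the H.264 branch: H264VideoRTPSink learns the SPS/PPS parameter sets only from the stream itself, so the SDP returned to a DESCRIBE will usually lack sprop-parameter-sets unless getAuxSDPLine() is also overridden. The sketch below is not part of the code above; it is modeled on live555's H264VideoFileServerMediaSubsession and assumes you additionally declare in ComMediaSubsession the getAuxSDPLine() override, a checkForAuxSDPLine1() helper, the two static callbacks used here, and three members char* fAuxSDPLine; char fDoneFlag; RTPSink* fDummyRTPSink; initialized to NULL/0 in the constructor.

#include "strDup.hh" // for strDup()

void ComMediaSubsession::afterPlayingDummy(void* clientData)
{
    // The dummy playback ended; unblock getAuxSDPLine()
    ((ComMediaSubsession*)clientData)->fDoneFlag = ~0;
}

void ComMediaSubsession::checkForAuxSDPLine(void* clientData)
{
    ((ComMediaSubsession*)clientData)->checkForAuxSDPLine1();
}

void ComMediaSubsession::checkForAuxSDPLine1()
{
    char const* dasl;
    if (fAuxSDPLine != NULL)
    {
        fDoneFlag = ~0; // already captured earlier
    }
    else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL)
    {
        fAuxSDPLine = strDup(dasl); // the sink has now seen SPS/PPS
        fDummyRTPSink = NULL;
        fDoneFlag = ~0;
    }
    else
    {
        // Not available yet; poll again in ~100 ms
        envir().taskScheduler().scheduleDelayedTask(100000,
            (TaskFunc*)checkForAuxSDPLine, this);
    }
}

char const* ComMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource)
{
    if (fAuxSDPLine != NULL) return fAuxSDPLine;

    if (fDummyRTPSink == NULL)
    {
        // "Play" into the sink just long enough for it to parse SPS/PPS from the stream
        fDummyRTPSink = rtpSink;
        fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
        checkForAuxSDPLine(this);
    }
    envir().taskScheduler().doEventLoop(&fDoneFlag); // returns once fDoneFlag becomes non-zero
    return fAuxSDPLine;
}

The idea is simply to let the sink consume the new source until it has seen SPS/PPS, capture the resulting aux SDP line, and return it from getAuxSDPLine().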
Next, implement the data acquisition class (the custom FramedSource):
#include "FramedSource.hh"
class ComFrameSource:public FramedSource
{
public:
static ComFrameSource* createNew(UsageEnvironment& env, int streamtype, int channelno, int sourceType);
protected:
ComFrameSource(UsageEnvironment& env, long sourceHandle, int sourceType);
~ComFrameSource();
private:
virtual void doGetNextFrame();
public:
void doStopGetFrame();
public:
unsigned fLastBufSize;
unsigned fLeftDataSize;
int fSourceType;
int fFirstFrame;
};
#include "ComFrameSource.h"
ComFrameSource::ComFrameSource(UsageEnvironment& env, int sourceType):
FramedSource(env), fLastBufSize(0), fLeftDataSize(0), fSourceType(sourceType), fFirstFrame(1)
{
}
ComFrameSource::~ComFrameSource()
{
}
ComFrameSource* ComFrameSource::createNew(UsageEnvironment& env, int streamType, int channelNO, int sourceType)
{
return new ComFrameSource(env, sourceType);
}
void ComFrameSource::doGetNextFrame()
{
    // Fetch at most fMaxSize bytes of encoded data into fTo; getStreamData() stands for
    // the project's own data-access function.
    int ret = getStreamData(fSourceHandle, (char*)fTo, fMaxSize);
    if (ret <= 0)
    {
        // No data available yet: report a zero-length frame and let the consumer ask again
        fFrameSize = 0;
        nextTask() = envir().taskScheduler().scheduleDelayedTask(10,
                        (TaskFunc*)FramedSource::afterGetting, this);
        return;
    }

    fFrameSize = (unsigned)ret;  // number of bytes actually copied into fTo
    fNumTruncatedBytes = 0;      // getStreamData() must not write more than fMaxSize bytes
    gettimeofday(&fPresentationTime, NULL);

    // Switch back to the event loop and inform the downstream reader that data is ready
    nextTask() = envir().taskScheduler().scheduleDelayedTask(10,
                    (TaskFunc*)FramedSource::afterGetting, this);
}
void ComFrameSource::doStopGetFrame()
{
}
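Finally, a minimal wiring sketch, not part of the two classes above, showing how they could be plugged into a complete server. The stream name "live", the ports and the streamType/videoType/channelNO values are placeholders for your own configuration:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "ComMediaSubsession.h"

int main()
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    // Create the RTSP server on port 554, falling back to 8554 if 554 is unavailable
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 554);
    if (rtspServer == NULL)
    {
        rtspServer = RTSPServer::createNew(*env, 8554);
    }
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        return 1;
    }

    // One ServerMediaSession per stream name; add the custom subsession to it
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "live", "live",
                                                            "Session streamed by ComMediaSubsession");
    sms->addSubsession(ComMediaSubsession::createNew(*env, /*streamType*/0, /*videoType*/0x01,
                                                     /*channelNO*/0, /*reuseFirstSource*/true));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL: " << url << "\n";
    delete[] url;

    env->taskScheduler().doEventLoop(); // does not return
    return 0;
}

With this running, a player such as VLC or ffplay can open the printed rtsp:// URL; the SETUP/PLAY handling, RTP packetization and RTCP reporting are all done by live555, while ComFrameSource only has to keep filling fTo with encoded frames.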
This concludes the walkthrough of pushing a custom data stream with Live555.