Live555 Real-Time Video Streaming: Application Notes

Published 2023-07-26 09:18:45 · Author: 阿风小子
1. Linux environment
Download from the official site: http://www.live555.com/liveMedia/public/
live555 version: 2018.12.14
Reference: http://www.live555.com/liveMedia/faq.html — this FAQ is worth reading carefully.
2. Building
Configure for the target platform and generate the corresponding Makefile.
2.1 ARM platform:
Set the cross-compilation toolchain:
cp config.armlinux config.arm
vi config.arm
CROSS_COMPILE?= arm-buildroot-linux-uclibcgnueabi-
Generate the Makefile: ./genMakefiles arm
2.2 64-bit Linux (x86-64):
./genMakefiles linux-64bit
2.3 32-bit Linux (x86):
./genMakefiles linux
make
 
This produces mediaServer/live555MediaServer.
 
3. Testing
3.1 The build produces live555MediaServer under mediaServer/. Run it with a test file:
live555MediaServer test.264
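Once it starts, the server prints the stream's rtsp:// URL (it listens on port 554 when run as root, otherwise 8554); opening e.g. rtsp://<server-ip>:8554/test.264 in VLC or ffplay is a quick way to verify the build.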
If the following error appears: Correct this by increasing "OutPacketBuffer::maxSize" to at least 186818, before creating this 'RTPSink'. (Current value is 100000.)
raise OutPacketBuffer::maxSize in ServerMediaSession* createNewSMS() in DynamicRTSPServer.cpp:
 
if (strcmp(extension, ".264") == 0) {
    // Assumed to be a H.264 Video Elementary Stream file:
    NEW_SMS("H.264 Video");
    OutPacketBuffer::maxSize = 300000; //100000;// allow for some possibly large H.264 frames
    sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
  }
 
createNewSMS() is called when an RTSP SETUP request is handled.
3.2 testProgs
The testProgs directory contains various test programs; the official site documents what each one does and how to use it. They essentially all use a file as the input source. Below, two ways of using a live stream as the input source are described, based mainly on modifying testH264VideoStreamer and testOnDemandRTSPServer.
 
4. Using a live video stream as the input source instead of reading a file

The simplest approach: push the live stream into a FIFO (named pipe, or stdin) and pass the pipe's name to the server as the file name. This is not covered in detail here, but a minimal sketch follows.
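For reference, here is a minimal sketch of the feeding side, assuming a hypothetical GetEncodedFrame() that fills a buffer with one Annex-B H.264 frame from your encoder (live555MediaServer would then be started with the pipe's name, e.g. live555MediaServer test.264):

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

extern int GetEncodedFrame(char* buf, int maxLen); // hypothetical encoder hook

int main() {
    const char* fifoName = "test.264";
    mkfifo(fifoName, 0666);            // ignore failure if the pipe already exists
    int fd = open(fifoName, O_WRONLY); // blocks until the server opens it for reading
    if (fd < 0) { perror("open"); return 1; }

    static char frame[1024 * 1024];
    for (;;) {
        int len = GetEncodedFrame(frame, sizeof frame); // one frame of Annex-B NAL units
        if (len <= 0) break;
        if (write(fd, frame, len) < 0) { perror("write"); break; }
    }
    close(fd);
    return 0;
}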
 
4.1 Method 1: based on testH264VideoStreamer

Using "liveMedia/DeviceSource.cpp" as a model, define an H264LiveVideoSource class (deriving from FramedSource, as DeviceSource does) and fill in its members. In the original program, play() opens the input file:
 
void play() {
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource
    =ByteStreamFileSource::createNew(*env, inputFileName);
}
 
Here, H264LiveVideoSource replaces ByteStreamFileSource; the full H264LiveVideoSource code is given later.
In testH264VideoStreamer.cpp, modify main():
 
ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, "testStream", NULL,
             "Session streamed by \"testH264VideoStreamer\"",
                         True /*SSM*/);
 
Modify the play() function as follows:
 
void play() {
#if 1
  H264LiveVideoSource* fileSource = new H264LiveVideoSource(*env);
  if (fileSource == NULL) {
    *env << "Unable to create the live video source\n";
    exit(1);
  }
#else
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
  }
#endif
  FramedSource* videoES = fileSource;

  // Create a framer for the Video Elementary Stream:
  videoSource = H264VideoStreamFramer::createNew(*env, videoES);

  // Finally, start playing:
  *env << "Beginning to read...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
 
 
 
4.2 Method 2: based on testOnDemandRTSPServer
1) Set the variable "reuseFirstSource" to True.
2) Modeled on H264VideoFileServerMediaSubsession, create a new class H264LiveVideoServerMediaSubsession that implements the two pure virtual functions createNewStreamSource() and createNewRTPSink(). In createNewStreamSource(), use the H264LiveVideoSource above instead of ByteStreamFileSource.
 
Inheritance chain of H264VideoRTPSink:
H264VideoRTPSink -> H264or5VideoRTPSink -> VideoRTPSink -> MultiFramedRTPSink -> RTPSink -> MediaSink -> Medium
Inheritance chain of H264VideoRTPSource:
H264VideoRTPSource -> MultiFramedRTPSource -> RTPSource -> FramedSource -> MediaSource -> Medium
Inheritance chain of H264VideoStreamFramer:
H264VideoStreamFramer -> H264or5VideoStreamFramer -> MPEGVideoStreamFramer -> FramedFilter -> FramedSource -> MediaSource -> Medium
 
The concrete implementation follows.

H264LiveVideoServerMediaSubsession.hh
 
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH

#include "OnDemandServerMediaSubsession.hh"
#include "liveMedia.hh"
#include "UsageEnvironment.hh"
#include "GroupsockHelper.hh"

class H264LiveVideoServerMediaSubsession: public OnDemandServerMediaSubsession
{
public:
    H264LiveVideoServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubsession();

    static H264LiveVideoServerMediaSubsession* createNew(UsageEnvironment& env,
                                                         Boolean reuseFirstSource);

public: // redefined virtual functions
    virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                                unsigned& estBitrate);
      // "estBitrate" is the stream's estimated bitrate, in kbps
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                      unsigned char rtpPayloadTypeIfDynamic,
                                      FramedSource* inputSource);
    virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);

    static void afterPlayingDummy(void* ptr);
    static void chkForAuxSDPLine(void* ptr);
    void chkForAuxSDPLine1();

private:
    FramedSource* m_pSource;
    char* m_pSDPLine;
    RTPSink* m_pDummyRTPSink;
    char m_done;
};

#endif
 
 
H264LiveVideoServerMediaSubsession.cpp
 
H264LiveVideoServerMediaSubsession::H264LiveVideoServerMediaSubsession(
    UsageEnvironment& env, Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource)
{
    m_pSource = NULL;
    m_pSDPLine = NULL;
    m_pDummyRTPSink = NULL;
    m_done = 0;
}

H264LiveVideoServerMediaSubsession::~H264LiveVideoServerMediaSubsession()
{
    if (m_pSDPLine)
    {
        free(m_pSDPLine);
    }
}

H264LiveVideoServerMediaSubsession*
H264LiveVideoServerMediaSubsession::createNew(UsageEnvironment& env,
                                              Boolean reuseFirstSource)
{
    return new H264LiveVideoServerMediaSubsession(env, reuseFirstSource);
}

FramedSource* H264LiveVideoServerMediaSubsession::createNewStreamSource(
    unsigned clientSessionId, unsigned& estBitrate)
{
    estBitrate = 500; // kbps, estimate

    return H264VideoStreamFramer::createNew(envir(), new H264LiveVideoSource(envir()));
}

RTPSink* H264LiveVideoServerMediaSubsession::createNewRTPSink(
    Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
    FramedSource* inputSource)
{
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

char const* H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,
                                                              FramedSource* inputSource)
{
    if (m_pSDPLine)
    {
        return m_pSDPLine;
    }

    m_pDummyRTPSink = rtpSink;
    if (NULL == m_pDummyRTPSink) return NULL;

    // Start a dummy sink playing so the framer begins parsing SPS/PPS:
    //m_pDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
    m_pDummyRTPSink->startPlaying(*inputSource, 0, 0);

    // Poll until the aux SDP line becomes available, then leave the event loop.
    // (Note: m_done must be cleared and the check scheduled *before* entering
    // doEventLoop(), and auxSDPLine() read *after* it returns.)
    m_done = 0;
    chkForAuxSDPLine(this);
    envir().taskScheduler().doEventLoop(&m_done);

    char const* dasl = m_pDummyRTPSink->auxSDPLine();
    if (dasl)
        m_pSDPLine = strdup(dasl);

    m_pDummyRTPSink->stopPlaying();

    return m_pSDPLine;
}

void H264LiveVideoServerMediaSubsession::afterPlayingDummy(void* ptr)
{
    H264LiveVideoServerMediaSubsession* This = (H264LiveVideoServerMediaSubsession*)ptr;

    This->m_done = ~0;
}

void H264LiveVideoServerMediaSubsession::chkForAuxSDPLine(void* ptr)
{
    H264LiveVideoServerMediaSubsession* This = (H264LiveVideoServerMediaSubsession*)ptr;

    This->chkForAuxSDPLine1();
}

void H264LiveVideoServerMediaSubsession::chkForAuxSDPLine1()
{
    if (m_pDummyRTPSink->auxSDPLine())
    {
        m_done = ~0; // signal doEventLoop() in getAuxSDPLine() to return
    }
    else
    {
        // Try again shortly (FRAME_PER_SEC is assumed to be defined elsewhere, e.g. 25):
        double delay = 1000.0 / (FRAME_PER_SEC); // ms
        int to_delay = delay * 1000;             // us

        nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
                                                                 chkForAuxSDPLine, this);
    }
}
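A note on what getAuxSDPLine() is doing here: for a live source the SPS/PPS NAL units are not known up front, so the sprop parameter sets needed for the SDP cannot be returned immediately. Starting a dummy sink forces the framer to begin parsing the stream; chkForAuxSDPLine1() then re-checks auxSDPLine() from the event loop until it becomes available, and setting the m_done watch variable makes doEventLoop() return. H264VideoFileServerMediaSubsession uses essentially the same trick internally.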
 
Modify testOnDemandRTSPServer.cpp by adding the following code to main():
 
// A H.264 live video stream:
  {
    OutPacketBuffer::maxSize = 300000;
    char const* streamName = "h264LiveVideo";
    char const* inputFileName = "test";
    ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, streamName, streamName,
                                      descriptionString, True);
    if (NULL == sms) {
      printf("sms is null\n");
      exit(1);
    }
    sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(*env, True));
    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName, inputFileName);
  }
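After this change, announceStream() prints the session's URL; the live stream should then be reachable at rtsp://<server-ip>:8554/h264LiveVideo (assuming the default non-root port).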
 
H264LiveVideoSource.hh
 
#ifndef _H264_LIVE_VIDEO_SOURCE_HH
#define _H264_LIVE_VIDEO_SOURCE_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
#include "DeviceSource.hh"

class H264LiveVideoSource: public FramedSource {
public:
    H264LiveVideoSource(UsageEnvironment& env);
    virtual ~H264LiveVideoSource();

private:
    // redefined virtual functions:
    virtual void doGetNextFrame();
    //virtual void doStopGettingFrames();
    virtual unsigned maxFrameSize() const;
      // must match FramedSource's signature exactly to actually override it

    static void getNextFrame(void* ptr);
    void GetFrameData();

private:
    void* m_pToken;
    char* m_pFrameBuffer;
    char* fTruncatedBytes;
    int fTruncatedBytesNum;
};

#endif
 
H264LiveVideoSource.cpp
 
#include "H264LiveVideoSource.hh"
//#include "InputFile.hh"
#include "GroupsockHelper.hh"
 
#define FRAME_BUF_SIZE  (1024*1024)
#define FMAX (300000)
H264LiveVideoSource::H264LiveVideoSource(UsageEnvironment& env):FramedSource(env),
m_pToken(0),
m_pFrameBuffer(0),fTruncatedBytesNum(0),fTruncatedBytes(0)
{
    m_pFrameBuffer = new char[FRAME_BUF_SIZE];
    fTruncatedBytes = new char[FRAME_BUF_SIZE];
if(m_pFrameBuffer == NULL || fTruncatedBytes== NULL )
{
printf("[MEDIA SERVER] error malloc data buffer failed\n");
return;
}
memset(m_pFrameBuffer,0,FRAME_BUF_SIZE);
//fMaxSize =  FMAX;
    printf("[H264LiveVideoSource] fMaxSize:%d\n",fMaxSize);
}
 
H264LiveVideoSource::~H264LiveVideoSource()
{
    envir().taskScheduler().unscheduleDelayedTask(m_pToken);
 
if(m_pFrameBuffer)
{
    delete[] m_pFrameBuffer;
m_pFrameBuffer = NULL;
}
if(fTruncatedBytes)
{
    delete[] fTruncatedBytes;
fTruncatedBytes = NULL;
}
}
int H264LiveVideoSource::maxFrameSize()
{
    return FRAME_BUF_SIZE;
}
void H264LiveVideoSource::doGetNextFrame()
{
    int uSecsToDelay = 40000; // 40 ms
    m_pToken  = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
              (TaskFunc*)getNextFrame, this);
              //printf("m_pToken =%p \n" ,m_pToken);
}
void H264LiveVideoSource::getNextFrame(void *ptr)
{
    H264LiveVideoSource *p=(H264LiveVideoSource *)ptr;
    if(NULL  == p)
        printf("null point \n");
    p->GetFrameData();
}
 
#include <sys/types.h>
#include <sys/stat.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <limits.h>

typedef struct
{
    unsigned long long timeTick;   // time (ms)
    unsigned int dataLen;          // payload length
    unsigned char dataType;        // data type (DataType_E)
    unsigned char rsv[3];
    unsigned long long timeStamp;  // encoding timestamp (us)
    unsigned char iFrame;          // whether this is a key frame
    unsigned char frameRate;       // frame rate
    int encodeType;                // encoding type (VideoEncodeType_E)
    unsigned short width;          // video width
    unsigned short height;         // video height
    unsigned char rsv1[8];
    unsigned char data[0];
} IFVFrameHeader_S;
static int iframetype = 0; // set to 1 once the first key frame has been seen

void H264LiveVideoSource::GetFrameData()
{
#if 1
    // Replace ShareBufGetOneFrame()/g_BufHandle with whatever function
    // actually fetches one encoded frame from your capture/encoder pipeline:
    int read = ShareBufGetOneFrame(g_BufHandle[0], FRAME_BUF_SIZE, (char*)m_pFrameBuffer);
    if (read == 0)
    {
        printf("read byte = 0\n");
        fFrameSize = 0;
        doGetNextFrame(); // no data yet; poll again after the usual delay
        return;
    }

    IFVFrameHeader_S* pFrameHead = reinterpret_cast<IFVFrameHeader_S*>(m_pFrameBuffer);

    // Deliver nothing until the first key frame has arrived:
    if (iframetype == 0)
    {
        if (1 == pFrameHead->iFrame)
        {
            iframetype = 1;
        }
        else
        {
            fFrameSize = 0;
            doGetNextFrame();
            return;
        }
    }

    int framelen = pFrameHead->dataLen;
    if (framelen > (int)fMaxSize)
    {
        // The frame is larger than the sink's buffer: deliver what fits now,
        // save the overflow, and prepend it to the next delivery.
        fNumTruncatedBytes = framelen - fMaxSize;
        fTruncatedBytesNum = fNumTruncatedBytes;
        framelen = fMaxSize;
        memcpy(fTo, pFrameHead->data, framelen);
        memmove(fTruncatedBytes, pFrameHead->data + framelen, fTruncatedBytesNum);
        fFrameSize = framelen;
    }
    else if (fTruncatedBytesNum > 0)
    {
        // Prepend the bytes saved from the previous oversized frame
        // (this assumes fTruncatedBytesNum + framelen still fits in fMaxSize):
        memmove(fTo, fTruncatedBytes, fTruncatedBytesNum);
        memmove(fTo + fTruncatedBytesNum, pFrameHead->data, framelen);
        fFrameSize = framelen + fTruncatedBytesNum;
        fTruncatedBytesNum = 0;
        fNumTruncatedBytes = 0;
    }
    else
    {
        memcpy(fTo, pFrameHead->data, framelen);
        fFrameSize = framelen;
        fNumTruncatedBytes = 0;
    }

    fDurationInMicroseconds = 1000000 / 25; // assuming 25 fps
    gettimeofday(&fPresentationTime, NULL);
    FramedSource::afterGetting(this);
#else
    // Test variant: read from a plain file or FIFO instead of a live encoder.
    #define FIFO_NAME "./test.264"
    static int fd = -1;
    if (fd == -1)
    {
        fd = open(FIFO_NAME, O_RDONLY);
    }
    if (fd == -1)
    {
        printf("open file %s fail\n", FIFO_NAME);
        return;
    }
    int len = 0;
    if ((len = read(fd, fTo, fMaxSize)) > 0)
    {
        fFrameSize = len;
    }
    else
    {
        ::close(fd);
        fd = -1;
    }
    fDurationInMicroseconds = 1000000 / 25;
    gettimeofday(&fPresentationTime, NULL);
    FramedSource::afterGetting(this);
#endif
}
 
Common live555 modification points:

1. Maximum size of one input frame
StreamParser.cpp
#define BANK_SIZE 1500000 // the larger the frames, the larger this value must be

2. Maximum RTP buffer sizes
(1) Source side: MultiFramedRTPSource.cpp
BufferedPacket::BufferedPacket() defines the upper limit of the input buffer, i.e. the maximum size of a BufferedPacket:
#define MAX_PACKET_SIZE 65536
(2) Sink side: MultiFramedRTPSink.cpp
#define RTP_PAYLOAD_MAX_SIZE 1456 // (1500-14-20-8)/4*4: Ethernet=14, IP=20, UDP=8; kept a multiple of 4 bytes
MediaSink.cpp, static variable:
OutPacketBuffer::maxSize = 600000; // allow for some possibly large H.265 frames (100000 by default in this version)
Preferably an integer multiple of RTP_PAYLOAD_MAX_SIZE.
If the value is too small, the server keeps printing: Correct this by increasing "OutPacketBuffer::maxSize" to at least ...
 
3. Failure to obtain the IP address
RTSPServer::rtspURLPrefix() builds the stream URL via ourIPAddress(envir()). If address detection fails, patch ourIPAddress() to read the address of a known interface (eth0 here) directly:
 
GroupsockHelper.cpp, in ourIPAddress():

if (badAddressForUs(from)) {
    // Instead of only reporting the error, query the interface address directly:
    struct ifreq req;
    int ret = 0;
    char szIpBuf[32];
    int sock = socket(AF_INET, SOCK_DGRAM, 0);
    if (-1 != sock)
    {
        memset(&req, 0, sizeof(req));
        strncpy(req.ifr_name, "eth0", sizeof(req.ifr_name));
        ret = ioctl(sock, SIOCGIFADDR, &req);
        if (-1 == ret)
        {
            close(sock);
        }
        else
        {
            memset(szIpBuf, 0, sizeof(szIpBuf));
            strcpy(szIpBuf, inet_ntoa(((struct sockaddr_in*)&req.ifr_addr)->sin_addr));
            close(sock);
            fromAddr.sin_addr.s_addr = our_inet_addr(szIpBuf);
            from = fromAddr.sin_addr.s_addr;
        }
    }
    else
    {
        // original error path:
        char tmp[100];
        sprintf(tmp, "This computer has an invalid IP address: %s", AddressString(from).val());
        env.setResultMsg(tmp);
        from = 0;
    }
}

(This patch additionally needs <sys/ioctl.h>, <net/if.h> and <arpa/inet.h>.)
 
 
4. Memory leak
RTCPInstance::processIncomingReport() (RTCP.cpp) allocates the BYE 'reason' string with:
reason = new char[reasonLength + 1];
Add the following release at that allocation point, before allocating:
if (NULL != reason)
{
    delete[] reason;
    reason = NULL;
}
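Putting the two fragments together, a sketch of the patched allocation site (the surrounding BYE-handling code in RTCP.cpp is abbreviated):

// In RTCPInstance::processIncomingReport(), where the BYE reason is read:
if (NULL != reason) // free any previously received reason string first
{
    delete[] reason;
    reason = NULL;
}
reason = new char[reasonLength + 1];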
5. SEI data and DeltaTfiDivisor
H264or5VideoStreamParser::H264or5VideoStreamParser()
{
    // Per the H.264/H.265 specs, when no SEI data is filled in,
    // frame_field_info_present_flag is zero, so DeltaTfiDivisor must be set
    // to 2.0 for H.264 and 1.0 for H.265:
    if (fHNumber == 264) {
        DeltaTfiDivisor = 2.0;
    } else {
        DeltaTfiDivisor = 1.0;
    }
}
6. Long-running RTSP sessions
If pulling an RTSP stream for a long time fails with "Hit limit when reading incoming packet over TCP", consider raising maxRTCPPacketSize in RTCP.cpp:
static unsigned const maxRTCPPacketSize = 1456;
7. Latency grows the longer the stream plays
At the end of MultiFramedRTPSink::sendPacketIfNecessary() (MultiFramedRTPSink.cpp), each packet is sent after a delay of uSecondsToGo. Setting uSecondsToGo to 0 removes the per-packet delay; see the sketch below.
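A minimal sketch of the change, assuming the scheduling call at the end of sendPacketIfNecessary() looks as it does in the 2018 sources:

// At the end of MultiFramedRTPSink::sendPacketIfNecessary():
uSecondsToGo = 0; // don't pace the next packet; send it as soon as possible
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo,
               (TaskFunc*)sendNext, this);

Note that this disables the sink's output pacing, so packets may be sent in bursts.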
 
8. Trimming the source tree
Only the directories you actually use need to be kept; the rest can be deleted.
The liveMedia directory contains files for many media types; unneeded ones can also be deleted, but the corresponding createNew calls must then be removed from
MediaSubsession::createSourceObjects(), otherwise the build fails. An abbreviated illustration follows.
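For illustration, the dispatch in MediaSubsession::createSourceObjects() follows this pattern (abbreviated here; the exact branches vary by version):

if (strcmp(fCodecName, "H264") == 0) {
    fReadSource = fRTPSource = H264VideoRTPSource::createNew(
        env(), fRTPSocket, fRTPPayloadFormat, fRTPTimestampFrequency);
} else if (strcmp(fCodecName, "MPA") == 0) { // MPEG-1 or 2 audio
    // If the MPEG audio source files were deleted from liveMedia, this
    // branch must be removed too, or the link step fails:
    fReadSource = fRTPSource = MPEG1or2AudioRTPSource::createNew(
        env(), fRTPSocket, fRTPPayloadFormat, fRTPTimestampFrequency);
}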