
Linux: Fetching a real-time H264 video stream from shared memory with Live555 and forwarding it over RTSP (source code included)

1. Building the library

The version used here is the 2017.10.28 release of live555-latest.tar.gz.

Extract it: tar xzf live555-latest.tar.gz

Fix permissions: chmod -R 777 live

Select the cross-compilation config: cp config.armlinux config.arm

vi config.arm

CROSS_COMPILE?=        arm-buildroot-linux-uclibcgnueabi-

Generate the Makefiles: ./genMakefiles arm

make

This builds mediaServer/live555MediaServer.

Test it with an H.264 elementary-stream file: live555MediaServer test.264, then open the URL the server prints in a player such as VLC.

The server's auto-detected IP address kept coming out wrong, always 127.0.1.1 or 0.0.0.0, while the machine's actual addresses are 127.0.0.1 and 192.168.55.3.

The cause: this kernel does not support the SO_REUSEPORT socket option, so Live555's internal socket setup (and with it its IP-address lookup) fails. The fix is to comment out the SO_REUSEPORT block in setupDatagramSocket() in groupsock/GroupsockHelper.cpp:

#if 0
#if defined(__WIN32__) || defined(_WIN32)
  // Windoze doesn't properly handle SO_REUSEPORT or IP_MULTICAST_LOOP
#else
#ifdef SO_REUSEPORT
  if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT,
		 (const char*)&reuseFlag, sizeof reuseFlag) < 0) {
    socketErr(env, "setsockopt(SO_REUSEPORT) error: ");
    closeSocket(newSocket);
    return -1;
  }
#endif

#ifdef IP_MULTICAST_LOOP
  const u_int8_t loop = 1;
  if (setsockopt(newSocket, IPPROTO_IP, IP_MULTICAST_LOOP,
		 (const char*)&loop, sizeof loop) < 0) {
    socketErr(env, "setsockopt(IP_MULTICAST_LOOP) error: ");
    closeSocket(newSocket);
    return -1;
  }
#endif
#endif
#endif
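Commenting the whole block out works, but it also disables IP_MULTICAST_LOOP on systems that handle it fine. A gentler variant (my sketch, not from the original post) keeps the calls and merely tolerates kernels that reject the option at runtime:

#ifdef SO_REUSEPORT
  // Old kernels may define SO_REUSEPORT in their headers yet reject it at
  // runtime with ENOPROTOOPT (needs <errno.h>); treat only other errors as fatal:
  if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT,
                 (const char*)&reuseFlag, sizeof reuseFlag) < 0
      && env.getErrno() != ENOPROTOOPT) {
    socketErr(env, "setsockopt(SO_REUSEPORT) error: ");
    closeSocket(newSocket);
    return -1;
  }
#endif

Separately, if the auto-detected address is still wrong (a common cause is /etc/hosts mapping the hostname to 127.0.1.1), Live555 lets you pin the interface explicitly before the server is created:

ReceivingInterfaceAddr = our_inet_addr("192.168.55.3"); // in main(), before RTSPServer::createNew()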


References:

Analysis of the RTSP server creation process

A Windows example of capturing and streaming local content:

     http://blog.csdn.net/xiejiashu/article/details/8269873

2. Adding a class that fetches live frames

Create the RTSP server:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "H264LiveVideoServerMediaSubsession.hh"

UsageEnvironment* env;

// Every client must reuse the same input stream as the first client (rather
// than each getting its own source), because the input here is live:
Boolean reuseFirstSource = True;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
			   char const* streamName)
{
  char* url = rtspServer->rtspURL(sms);
  UsageEnvironment& env = rtspServer->envir();

  env << "\n"<<"Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}


int main(int argc, char** argv) 
{

    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);
    UserAuthenticationDatabase* authDB = NULL;  


#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

  // Create the RTSP server:
    // Note: binding port 554 requires root; live555 otherwise commonly uses 8554.
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }
    
  char const* descriptionString = "Session streamed by \"720p-stream\"";

  // Set up each of the possible streams that can be served by the
  // RTSP server.  Each such stream is implemented using a
  // "ServerMediaSession" object, plus one or more
  // "ServerMediaSubsession" objects for each audio/video substream.

     char const* streamName = "720p-stream";

  // A H.264 video elementary stream:

    ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);

    sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(*env, streamName, reuseFirstSource)); // replaces H264VideoFileServerMediaSubsession

    rtspServer->addServerMediaSession(sms);
    announceStream(rtspServer, sms, streamName);


  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).

  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
  } else {
    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return
  
  
  printf("Exit server.....!\n");

 
  return 0; // only to prevent compiler warning
}
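The post never shows H264LiveVideoServerMediaSubsession.hh; it ships with the downloadable source. A minimal declaration consistent with how the class is used, modeled on the stock H264VideoFileServerMediaSubsession (fChanID being the shared-memory channel name passed in as streamName, an assumption on my part), would look roughly like this:

#include "OnDemandServerMediaSubsession.hh"

class H264LiveVideoServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
  static H264LiveVideoServerMediaSubsession* createNew(UsageEnvironment& env,
                                                       char const* chanID,
                                                       Boolean reuseFirstSource);

  // Used by the static callbacks below to signal SDP-line readiness:
  void checkForAuxSDPLine1();
  void afterPlayingDummy1();

protected:
  H264LiveVideoServerMediaSubsession(UsageEnvironment& env, char const* chanID,
                                     Boolean reuseFirstSource);
  virtual ~H264LiveVideoServerMediaSubsession();

  void setDoneFlag() { fDoneFlag = ~0; }

  // The three virtuals implemented below:
  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
  virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                              unsigned& estBitrate);
  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                    unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* inputSource);

private:
  char* fAuxSDPLine;      // cached "a=fmtp:..." SDP line
  char fDoneFlag;         // used as the doEventLoop() watch variable
  RTPSink* fDummyRTPSink; // temporary sink used only to harvest the SDP line
  char const* fChanID;    // assumed: identifies the shared-memory video channel
};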


Create the session:

char const* H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  // A fixed SDP line could be returned here instead, if the stream's SPS/PPS were known in advance, e.g.:
  //return "a=fmtp:96 packetization-mode=1;profile-level-id=4D6028;sprop-parameter-sets=J01gKI1oBQBbpsgAAAMACAAAAwDweKEV,KO4D0kg=\r\n";
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
    // until we start reading the file.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
    // and we need to start reading data from our file until this changes.
    fDummyRTPSink = rtpSink;

    // Start reading the file:
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
  }
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}
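getAuxSDPLine() relies on two static callbacks, afterPlayingDummy() and checkForAuxSDPLine(), which the post omits. In the stock H264VideoFileServerMediaSubsession.cpp they look essentially like this (a sketch adapted to this class name):

static void afterPlayingDummy(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}

void H264LiveVideoServerMediaSubsession::afterPlayingDummy1() {
  // The dummy playback ended; stop polling and unblock getAuxSDPLine():
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  setDoneFlag();
}

static void checkForAuxSDPLine(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}

void H264LiveVideoServerMediaSubsession::checkForAuxSDPLine1() {
  char const* dasl;
  if (fAuxSDPLine != NULL) {
    setDoneFlag(); // SDP line already known
  } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
    fAuxSDPLine = strDup(dasl); // the framer has now seen SPS/PPS
    fDummyRTPSink = NULL;
    setDoneFlag();
  } else if (!fDoneFlag) {
    // Not ready yet; poll again in 100 ms:
    int uSecsToDelay = 100000;
    nextTask() = envir().taskScheduler()
        .scheduleDelayedTask(uSecsToDelay, (TaskFunc*)checkForAuxSDPLine, this);
  }
}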

FramedSource* H264LiveVideoServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)
{
    estBitrate = 500; // kbps, estimate

    // Create the live video source, reading from shared memory:
    H264LiveVideoSource* liveSource = H264LiveVideoSource::createNew(envir(), fChanID);
    if (liveSource == NULL) return NULL;

    // Create a framer for the video elementary stream.  A custom framer
    // subclass is used instead of the stock H264VideoStreamFramer so the
    // source can reach the framer's fNextPresentationTime (see below):
    H264VideoStreamFramerBase* pBase = H264VideoStreamFramerBase::createNew(envir(), liveSource);
    liveSource->SetBase(pBase);
    return pBase;
    //return H264VideoStreamFramer::createNew(envir(), liveSource);
}
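H264VideoStreamFramerBase is the author's own framer subclass and its definition is not shown either. Judging from its use here and in doGetNextFrame() below, it only needs to expose the framer's next presentation time (fNextPresentationTime is a protected member of H264or5VideoStreamFramer); a sketch, with constructor arguments assumed from the 2017-era H264VideoStreamFramer:

#include "H264VideoStreamFramer.hh"

class H264VideoStreamFramerBase : public H264VideoStreamFramer {
public:
  static H264VideoStreamFramerBase* createNew(UsageEnvironment& env,
                                              FramedSource* inputSource) {
    return new H264VideoStreamFramerBase(env, inputSource);
  }

  // Lets H264LiveVideoSource overwrite the next presentation time directly:
  struct timeval* GetNextPresentationTime() { return &fNextPresentationTime; }

protected:
  H264VideoStreamFramerBase(UsageEnvironment& env, FramedSource* inputSource)
    : H264VideoStreamFramer(env, inputSource, True /*createParser*/,
                            False /*includeStartCodeInOutput*/) {}
};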

RTPSink* H264LiveVideoServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
{
    printf("~~~~~%s line %d \n", __func__, __LINE__);
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

Fetch the video frames:
void H264LiveVideoSource::doGetNextFrame() {
    fFrameSize = 0;
#ifdef GET_VIDEO_FRAME_FROM_SHM
    char tmpbuf[SHM_VIDEO_STREAM_720P_MAX_FRAME_SIZE];
    int framelen = sizeof(tmpbuf);
    //if (SHM_VideoReadFrameWithExtras(shmVid, tmpbuf, &framelen, (char*)&struVExtra) > 1)
    int rtn = -1;
    do {
        // Poll the shared memory until a complete frame is available.
        // (Note: this busy-wait blocks the event loop while waiting.)
        rtn = SHM_VideoReadFrame(shmVid, tmpbuf, &framelen);
        if (rtn >= 2) break;
        usleep(10000);
    } while (rtn < 2);

    if (rtn > 1) {
        fFrameSize = framelen;
        if (fFrameSize > fMaxSize) {
            // Frame too big for the sink's buffer: deliver what fits now and
            // keep the tail for the next doGetNextFrame() call.
            fFrameSize = fMaxSize;
            fNumTruncatedBytes = framelen - fMaxSize;
            memmove(fTo, tmpbuf, fMaxSize);
            memmove(fTruncatedBytes, tmpbuf + fMaxSize, fNumTruncatedBytes);
            fTruncatedBytesNum = fNumTruncatedBytes;
        } else if (fTruncatedBytesNum > 0) {
            // Prepend the tail saved from the previous oversized frame
            // (this assumes tail + new frame still fit within fMaxSize):
            memmove(fTo, fTruncatedBytes, fTruncatedBytesNum);
            memmove(fTo + fTruncatedBytesNum, tmpbuf, framelen);
            fFrameSize += fTruncatedBytesNum;
            fTruncatedBytesNum = 0;
        } else {
            memmove(fTo, tmpbuf, framelen);
        }
    }
    fDurationInMicroseconds = 1000000 / 22; // the source runs at ~22 fps
    // Stamp the frame with the current wall-clock time, and force the framer's
    // fNextPresentationTime to match (see the latency discussion below):
    struct timeval* nextPT = m_pBase->GetNextPresentationTime();
    gettimeofday(&fPresentationTime, NULL);
    *nextPT = fPresentationTime;
#endif

    nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
    //FramedSource::afterGetting(this);
}
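The H264LiveVideoSource declaration is likewise only in the downloadable source. A minimal header consistent with the members used above (the SHM_* API and the SHM_VIDEO_STREAM_720P_MAX_FRAME_SIZE macro are the post's own; the exact types are my assumptions) might be:

#include "FramedSource.hh"

class H264VideoStreamFramerBase; // see the framer sketch above

class H264LiveVideoSource : public FramedSource {
public:
  static H264LiveVideoSource* createNew(UsageEnvironment& env, char const* chanID);
  void SetBase(H264VideoStreamFramerBase* pBase) { m_pBase = pBase; }

protected:
  H264LiveVideoSource(UsageEnvironment& env, char const* chanID);
  virtual ~H264LiveVideoSource();

private: // implements FramedSource:
  virtual void doGetNextFrame(); // shown above

private:
  void* shmVid;                        // handle onto the shared-memory channel (type assumed)
  H264VideoStreamFramerBase* m_pBase;  // the framer wrapping this source
  // Tail of a frame that did not fit into fMaxSize, carried into the next call:
  char fTruncatedBytes[SHM_VIDEO_STREAM_720P_MAX_FRAME_SIZE];
  int fTruncatedBytesNum;
};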

The longer a client plays, the larger the latency grows. Tracing it, fPresentationTime and the framer's fNextPresentationTime drift further and further apart. The first attempt was to write the current wall-clock time straight into fNextPresentationTime (the assignment already shown in doGetNextFrame() above):

    struct timeval* nextPT = m_pBase->GetNextPresentationTime();
    gettimeofday(&fPresentationTime, NULL);
    *nextPT = fPresentationTime;

Some delay still remained. Looking further, in liveMedia/MultiFramedRTPSink.cpp, MultiFramedRTPSink::sendPacketIfNecessary() schedules every outgoing packet after a delay of uSecondsToGo, so each frame is held back. Forcing uSecondsToGo to 0 eliminated the latency, though patching the library like this does not feel like the right solution:

void MultiFramedRTPSink::sendPacketIfNecessary() {

...

    //printf("MultiFramedRTPSink::sendPacketIfNecessary() delay uSecondsToGo %d \n", uSecondsToGo);
    uSecondsToGo = 0; // patch: send immediately instead of waiting

    // Delay this amount of time:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo, (TaskFunc*)sendNext, this);
  }
}
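Rather than zeroing uSecondsToGo inside the library, the usual Live555 convention for a live source is to report a frame duration of 0: the sink then applies no inter-packet pacing and sends each frame as soon as the source delivers it, so timing is driven by frame arrival from shared memory. A one-line alternative in our own code (my suggestion, untested here):

// In H264LiveVideoSource::doGetNextFrame(), instead of 1000000/22:
fDurationInMicroseconds = 0; // 0 = live source: MultiFramedRTPSink sends at once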


Build, then run the binary on the target over NFS.


Play the stream in VLC, using the URL printed at startup (e.g. rtsp://192.168.55.3/720p-stream).



Source code: http://download.csdn.net/download/xwu122930/10186456