
A network live-streaming system built with the live555 library — streaming an H.264 file

    Download the latest live555 source code and build it (the standard procedure is to run ./genMakefiles for your platform and then make); this produces the four static libraries libBasicUsageEnvironment.a, libgroupsock.a, libliveMedia.a and libUsageEnvironment.a. With these four libraries plus the test programs that ship with live555, a simple live-streaming server is easy to put together. Note that the streaming program live555 provides here can only "broadcast" video that has already been recorded to a file; this is live multicast streaming, which is different from on-demand playback. The code is as follows:

/*=============================================================================
      FileName: h264live.cpp
          Desc: use the live555 libraries to stream an H.264 file live
        Author: licaibiao
       Version: 
    LastChange: 2017-02-23 
*=============================================================================*/
#include "h264live.hh"

UsageEnvironment* env;
char const* inputFileName = "test.264";
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
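  // (Enlarge the output packet buffer first, so that occasional large H.264
  //  frames/NAL units fit without being truncated.)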
  OutPacketBuffer::maxSize = 100000;
  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp
  = RTCPInstance::createNew(*env, &rtcpGroupsock,
			    estimatedSessionBandwidth, CNAME,
			    videoSink, NULL /* we're a server */,
			    True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "testStream", inputFileName,
		   "Session streamed by \"testH264VideoStreamer\"",
					   True /*SSM*/);
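  // A PassiveServerMediaSubsession only describes the already-running multicast
  // stream; RTSP clients that set up this subsession join the multicast group
  // rather than receiving their own unicast copy.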
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

void afterPlaying(void* /*clientData*/) {
  *env << "...done reading from file\n";
  videoSink->stopPlaying();
  Medium::close(videoSource);
  // Note that this also closes the input file that this source read from.

  // Start playing once again:
  play();
}

void play() {
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
  }

  FramedSource* videoES = fileSource;

  // Create a framer for the Video Elementary Stream:
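  // (The framer splits the raw Annex-B byte stream into NAL units and assigns
  //  presentation times before the data reaches the RTP sink.)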
  videoSource = H264VideoStreamFramer::createNew(*env, videoES);

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
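
The h264live.hh header included at the top is not listed in the original post. A minimal sketch of what it presumably contains — nothing more than the live555 headers (and gethostname declaration) that the program uses — could look like this; the exact contents are an assumption:

/*=============================================================================
      FileName: h264live.hh
          Desc: assumed minimal header for h264live.cpp (the original header
                is not shown); it only pulls in the live555 headers and the
                declaration of gethostname() used by the program
*=============================================================================*/
#ifndef _H264LIVE_HH
#define _H264LIVE_HH

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <unistd.h>   // gethostname()

#endif // _H264LIVE_HH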

The Makefile is as follows:

INCLUDES 	 = -I./include/usageEnvironment/ -I./include/groupsock/ -I./include/liveMedia/ -I./include/basicUsageEnvironment/
COMPILE_OPTS =      $(INCLUDES) -I. -O2 -DSOCKLEN_T=socklen_t -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64
C 			 =         c
C_COMPILER   =        cc
C_FLAGS 	 =       $(COMPILE_OPTS) $(CPPFLAGS) $(CFLAGS)
CPP 		 =           cpp
CPLUSPLUS_COMPILER =    c++
CPLUSPLUS_FLAGS =   $(COMPILE_OPTS) -Wall -DBSD=1 $(CPPFLAGS) $(CXXFLAGS)
OBJ 		 =           o
LINK 		 =          c++ -o
LINK_OPTS    =     -L. $(LDFLAGS)
CONSOLE_LINK_OPTS = $(LINK_OPTS)

USAGE_ENVIRONMENT_LIB = ./lib/libUsageEnvironment.a
BASIC_USAGE_ENVIRONMENT_LIB = ./lib/libBasicUsageEnvironment.a
LIVEMEDIA_LIB = ./lib/libliveMedia.a
GROUPSOCK_LIB = ./lib/libgroupsock.a
LOCAL_LIBS 	  =  $(LIVEMEDIA_LIB) $(GROUPSOCK_LIB) $(BASIC_USAGE_ENVIRONMENT_LIB) $(USAGE_ENVIRONMENT_LIB)
LIBS          =  $(LOCAL_LIBS) 

MEDIA_SERVER_OBJS = h264live.$(OBJ)
APP = h264live


.$(C).$(OBJ):
	$(C_COMPILER) -c $(C_FLAGS) $<
.$(CPP).$(OBJ):
	$(CPLUSPLUS_COMPILER) -c $(CPLUSPLUS_FLAGS) $<

h264live: $(MEDIA_SERVER_OBJS) $(LOCAL_LIBS)
	$(LINK) $@ $(CONSOLE_LINK_OPTS) $(MEDIA_SERVER_OBJS) $(LIBS)

clean:
	-rm -rf *.$(OBJ) $(APP) core *.core *~ include/*~


The project directory looks like this:

# tree -L 2
.
├── h264live
├── h264live.cpp
├── h264live.hh
├── h264live.o
├── include
│   ├── basicUsageEnvironment
│   ├── groupsock
│   ├── liveMedia
│   └── usageEnvironment
├── lib
│   ├── libBasicUsageEnvironment.a
│   ├── libgroupsock.a
│   ├── libliveMedia.a
│   └── libUsageEnvironment.a
├── Makefile
└── test.264

Run the h264live binary directly, then open the URL "rtsp://192.168.0.127:8554/testStream" (replace the IP address with your server's) in an RTSP client such as VLC to watch the live stream.