FFMPEG 實時解碼網路H264碼流,RTP封裝

FFMPEG 實時解碼網路H264碼流,RTP封裝

初學FFMPEG和H264,解碼視訊流時遇到了很多麻煩,記錄一下研究成果。

我使用的是FFMPEG 2.5.2版本,使用av_parser_parse2重組影象幀時遇到了一些麻煩!

下面是主要程式碼:

RTP頭定義,

/*
 * Fixed 12-byte RTP header (RFC 3550, section 5.1).
 *
 * NOTE: the original used 'unsigned long' for timestamp/ssrc, which is
 * 8 bytes on LP64 platforms and silently breaks the wire layout
 * (sizeof would become 16, and every payload offset computed from
 * sizeof(RTP_FIXED_HEADER) would be wrong). Fixed-width 32-bit fields
 * keep the struct at exactly 12 bytes everywhere.
 *
 * Bit-field ordering within a byte is implementation-defined; this
 * layout matches little-endian MSVC/GCC, which is what the original
 * code targets.
 */
typedef struct
{
	/* byte 0 */
	unsigned char csrc_len:4;   /* CSRC count, expect 0 */
	unsigned char extension:1;  /* header-extension flag */
	unsigned char padding:1;    /* expect 0 */
	unsigned char version:2;    /* expect 2 */
	/* byte 1 */
	unsigned char payload:7;    /* payload type */
	unsigned char marker:1;     /* set on the last packet of a frame */
	/* bytes 2, 3 */
	unsigned short seq_no;      /* sequence number, network byte order */
	/* bytes 4-7 */
	unsigned int timestamp;     /* media timestamp, network byte order */
	/* bytes 8-11 */
	unsigned int ssrc;          /* synchronization source id */
} RTP_FIXED_HEADER;

初始化,

int CVPPMediaPlayer::init_decode()
{
	av_init_packet(&m_avpkt);

	m_codec = avcodec_find_decoder(CODEC_ID_H264);
	if(!m_codec){
		TRACE(_T("Codec not found\n"));
		return -1;
	}
	m_pCodecCtx = avcodec_alloc_context3(m_codec);
	if(!m_pCodecCtx){
		TRACE(_T("Could not allocate video codec context\n"));
		return -1;
	}

	m_pCodecParserCtx=av_parser_init(AV_CODEC_ID_H264);
	if (!m_pCodecParserCtx){
		TRACE(_T("Could not allocate video parser context\n"));
		return -1;
	}
	
	if(m_codec->capabilities&CODEC_CAP_TRUNCATED)
		m_pCodecCtx->flags|= CODEC_FLAG_TRUNCATED; 

	if (avcodec_open2(m_pCodecCtx, m_codec, NULL) < 0) {
		TRACE(_T("Could not open codec\n"));
		return -1;
	}

	m_picture = av_frame_alloc();
	m_pFrameRGB = av_frame_alloc();
	if(!m_picture || !m_pFrameRGB){
		TRACE(_T("Could not allocate video frame\n"));
		return -1;
	}

	m_PicBytes = 0;
	m_PicBuf = NULL;
	m_pImgCtx = NULL;

	return 0;
}
反初始化,
/*
 * Release everything created by init_decode(). Safe to call multiple
 * times: every pointer is checked before use and reset afterwards.
 */
void CVPPMediaPlayer::uninit_decode()
{
	if(m_pCodecCtx){
		// Close before freeing; the original called avcodec_close()
		// unconditionally, which could dereference a NULL context.
		avcodec_close(m_pCodecCtx);
		av_free(m_pCodecCtx);
		m_pCodecCtx = NULL;
	}
	if(m_picture){
		av_frame_free(&m_picture);   // av_frame_free() also NULLs the pointer
		m_picture = NULL;
	}
	if(m_pFrameRGB){
		av_frame_free(&m_pFrameRGB);
		m_pFrameRGB = NULL;
	}
	if(m_pImgCtx){
		sws_freeContext(m_pImgCtx);
		m_pImgCtx = NULL;            // was left dangling before
	}

	if(m_pCodecParserCtx){
		av_parser_close(m_pCodecParserCtx);
		m_pCodecParserCtx = NULL;    // was left dangling before
	}

	m_PicBytes = 0;
	if(m_PicBuf != NULL){
		// m_PicBuf is allocated with new[] in h264_decode(); plain
		// delete on a new[] array is undefined behavior.
		delete [] m_PicBuf;
		m_PicBuf = NULL;
	}
}

網路資料流接收,收到RTP包後去掉RTP包頭,在視訊幀分片前加上00 00 00 01;這裡沒有處理FU-A格式的H264資料,如果你的H264資料是FU-A打包的,需要做另外的處理。
/*
 * Strip the fixed RTP header from in_buf and prepend the Annex-B start
 * code 00 00 00 01 so FFmpeg's parser can find NALU boundaries.
 * out_buf receives the start code + payload, out_len its total length.
 * Returns FALSE when the packet is too short to contain any payload.
 * NOTE: FU-A fragmented payloads are NOT reassembled here.
 */
BOOL CVPPMediaPlayer::get_h264_data(char* in_buf, int in_len, char* out_buf, int &out_len)
{
	const char start_code[4] = {0x00, 0x00, 0x00, 0x01};
	const int rtp_head_len = (int)sizeof(RTP_FIXED_HEADER);

	// The original copied in_len - rtp_head_len bytes unconditionally:
	// for a short packet that is negative, memcpy converts it to a huge
	// size_t, and the copy overruns both buffers.
	if(in_buf == NULL || out_buf == NULL || in_len <= rtp_head_len){
		out_len = 0;
		return FALSE;
	}

	memcpy(out_buf, start_code, 4);
	memcpy(out_buf + 4, in_buf + rtp_head_len, in_len - rtp_head_len);
	out_len = in_len - rtp_head_len + 4;

	return TRUE;
}

// Blocking receive loop for the video RTP stream.
// Binds a UDP socket to the line's local video address, then repeatedly
// select()s with a timeout so the call-status flag can be polled; each
// received datagram has its RTP header stripped (get_h264_data) and the
// resulting Annex-B chunk is fed to h264_decode().
// Returns 0 on a normal hangup, -1 on any socket error; in both cases
// the socket is closed and WM_CLOSE_MEDIA_PLAYER is posted on error/end.
int CVPPMediaPlayer::video_recv_poll()
{
	char buf[RTP_RECV_BUF_LEN];       // raw RTP datagram
	char h264_buf[DECODE_BUF_SIZE];   // start code + H.264 payload
	int sockfd, max_fd;
	int ret = 0;
	int len, out_len, addr_len;
	int line;

	struct sockaddr_in local_addr;
	struct sockaddr_in remote_addr;

	fd_set rset;
	struct timeval timeout = {RTP_RECV_TIMEOUT, 0};

	RTP_FIXED_HEADER* p_rtp_head;
	int rtp_head_len = sizeof(RTP_FIXED_HEADER);

	// Bind to the local ip/port negotiated for this call line.
	memset(&local_addr, 0, sizeof(local_addr));
	local_addr.sin_family = AF_INET;
	local_addr.sin_addr.s_addr = inet_addr(g_vpp_call.lines[m_VcId].meida_info.local_ip);
	local_addr.sin_port = htons(g_vpp_call.lines[m_VcId].meida_info.local_video_port);

	sockfd = socket(PF_INET, SOCK_DGRAM, 0);
	if(sockfd <= 0){
		TRACE(_T("Create socket for video recv failed\n"));
		return -1;
	}

	if(bind(sockfd, (struct sockaddr *)&local_addr, sizeof(local_addr)) < 0){
		TRACE(_T("bind failed\n"));
		closesocket(sockfd);
		return -1;
	}

	while(1)
	{
		// select() may modify the timeout, so reset it every iteration.
		timeout.tv_sec = RTP_RECV_TIMEOUT; 
		timeout.tv_usec = 0;
		FD_ZERO(&rset);
		FD_SET(sockfd, &rset);
		max_fd = sockfd + 1;
		ret = select(max_fd, &rset, NULL, NULL, &timeout);
		// The call-status check runs even after a select error/timeout
		// so hanging up always terminates this thread promptly.
		if(g_vpp_call.lines[m_VcId].status == CALL_STATUS_IDLE){
			TRACE(_T("Thread for recv video rtp end!\n"));
			PostMessage(WM_CLOSE_MEDIA_PLAYER);
			closesocket(sockfd);
			return 0;
		}

		if(ret < 0){
			TRACE(_T("select error"));
			PostMessage(WM_CLOSE_MEDIA_PLAYER);
			closesocket(sockfd);
			return -1;
		}else if(ret == 0){
			// Timeout: no data this round, loop and re-check status.
			//TRACE(_T("select timeout %d s"), RTP_RECV_TIMEOUT);
			continue;
		}else{
			memset(buf, 0, sizeof(buf));
			memset(h264_buf, 0, sizeof(h264_buf));
			addr_len = sizeof(struct sockaddr);
			len = recvfrom(sockfd, buf, RTP_RECV_BUF_LEN, 0, (struct sockaddr *)&remote_addr, &addr_len);
			if(len <= 0){
				TRACE(_T("socket was shut, close it"));
				PostMessage(WM_CLOSE_MEDIA_PLAYER);
				closesocket(sockfd);
				return -1;
			}

			
			p_rtp_head = (RTP_FIXED_HEADER*)buf;
			TRACE(_T("recv video stream, len=%d, ssrc=0x%08x\n"), len, ntohl(p_rtp_head->ssrc));
			// NOTE(review): out_len is set to the receive-buffer size but
			// get_h264_data() overwrites it; presumably it was meant as a
			// capacity limit for h264_buf -- confirm DECODE_BUF_SIZE >=
			// RTP_RECV_BUF_LEN + 4, otherwise a full datagram can overflow.
			out_len = RTP_RECV_BUF_LEN;
			if(get_h264_data(buf, len, h264_buf, out_len)){
				h264_decode(h264_buf, out_len);
			}
		}
	}

	return 0;
}

重組視訊幀並解碼,解碼流程為 H264->YUV->RGB,得到RGB後直接顯示出來就行了。

這裡使用av_parser_parse2來將分片組成視訊幀,當返回值為0時表示前面的分片已經組成一幀完整的視訊幀,可以送給解碼函式進行解碼了。需要注意的是,此時當前的分片並沒有加入到視訊幀中(也就是通過當前的分片判斷出前面的所有分片已經組成了視訊幀),所以當前的分片需要加入到下一幀中。我就在這裡吃了大虧:沒有把當前的分片加入到下一幀中,導致解碼出來的畫面很模糊,糾結了好幾天。

當av_parser_parse2返回0時,將m_avpkt送給avcodec_decode_video2進行解碼,同時再次呼叫av_parser_parse2函式,將當前分片的buf送進去。

// Feed one Annex-B chunk (start code + NALU payload) to the H.264
// parser; when the parser signals that a complete access unit has been
// assembled, decode it, convert YUV->BGR24 and hand the RGB frame to
// display_picture(). Returns 0 normally, -1 on bad input or decode error.
//
// Parser contract used here: av_parser_parse2() returning 0 with a
// non-empty m_avpkt means the PREVIOUS chunks formed a whole frame and
// the CURRENT chunk was not consumed -- it belongs to the next frame,
// so it must be fed to the parser again after decoding (see the second
// av_parser_parse2 call at the bottom).
int CVPPMediaPlayer::h264_decode(char* buf, int buf_len)
{
	int got, len, paser_len;
	

	if(buf == NULL || buf_len == 0){
		return -1;
	}

	// Returns the number of bytes consumed; 0 means "frame complete,
	// current input not consumed".
	paser_len = av_parser_parse2(m_pCodecParserCtx, m_pCodecCtx, &m_avpkt.data, &m_avpkt.size, (uint8_t *)buf, buf_len,
		AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

	if(paser_len == 0){
		// Debug trace of the assembled picture type.
		switch(m_pCodecParserCtx->pict_type){
			case AV_PICTURE_TYPE_I: TRACE(_T("Type: I\n"));break;
			case AV_PICTURE_TYPE_P: TRACE(_T("Type: P\n"));break;
			case AV_PICTURE_TYPE_B: TRACE(_T("Type: B\n"));break;
			default: TRACE(_T("Type: Other\n"));break;
		}
		len = avcodec_decode_video2(m_pCodecCtx, m_picture, &got, &m_avpkt);
		if(len < 0){
			TRACE(_T("Error while decoding frame\n"));
			return -1;
		}

		if(got){
			TRACE(_T("Got picture\n"));
			// Lazily allocate the BGR24 buffer once the frame size is known.
			if(m_PicBytes == 0){
				m_PicBytes = avpicture_get_size(PIX_FMT_BGR24, m_pCodecCtx->width, m_pCodecCtx->height);
				m_PicBuf = new uint8_t[m_PicBytes];
				avpicture_fill((AVPicture *)m_pFrameRGB, m_PicBuf, PIX_FMT_BGR24,
					m_pCodecCtx->width, m_pCodecCtx->height);
			}

			// Lazily create the YUV -> BGR24 scaler at the same size.
			if(!m_pImgCtx){
				m_pImgCtx = sws_getContext(m_pCodecCtx->width, m_pCodecCtx->height, m_pCodecCtx->pix_fmt, m_pCodecCtx->width, m_pCodecCtx->height, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
			}

			// Flip the source vertically by pointing each plane at its
			// last row and negating the stride: the output is drawn via a
			// bottom-up Windows DIB, which would otherwise appear upside
			// down. Chroma planes are half height (assumes 4:2:0 input).
			m_picture->data[0] += m_picture->linesize[0]*(m_pCodecCtx->height-1);
			m_picture->linesize[0] *= -1;                      
			m_picture->data[1] += m_picture->linesize[1]*(m_pCodecCtx->height/2-1);
			m_picture->linesize[1] *= -1;
			m_picture->data[2] += m_picture->linesize[2]*(m_pCodecCtx->height/2-1);
			m_picture->linesize[2] *= -1;
			sws_scale(m_pImgCtx, (const uint8_t* const*)m_picture->data, m_picture->linesize, 0, m_pCodecCtx->height, m_pFrameRGB->data, m_pFrameRGB->linesize); 

			display_picture(m_pFrameRGB->data[0], m_pCodecCtx->width, m_pCodecCtx->height);
		}
	}

	// Re-feed the unconsumed chunk so it starts the next frame; without
	// this the next frame is missing its first slice and decodes blurry.
	if(paser_len == 0){
		paser_len = av_parser_parse2(m_pCodecParserCtx, m_pCodecCtx, &m_avpkt.data, &m_avpkt.size, (uint8_t *)buf, buf_len,
			AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
	}

	return 0;
}
在視窗中顯示每一幀RGB圖片;這裡過載一個CStatic控制元件來做視訊播放。
void CVideoStatic::init_bm_head()
{
	m_bm_info.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	m_bm_info.bmiHeader.biWidth = m_width;
	m_bm_info.bmiHeader.biHeight = m_height;
	m_bm_info.bmiHeader.biPlanes =1;
	m_bm_info.bmiHeader.biBitCount = 24;
	m_bm_info.bmiHeader.biCompression = BI_RGB;
	m_bm_info.bmiHeader.biSizeImage = 0;
	m_bm_info.bmiHeader.biClrUsed = 0;
	m_bm_info.bmiHeader.biClrImportant = 0;
}


void CVideoStatic::display_pic(unsigned char* data, int width, int height)
{
	CRect  rc;
	HDC hdc = GetDC()->GetSafeHdc();
	GetClientRect(&rc);

	if(m_height != height || m_width != width){
		m_height = height;
		m_width = width;

		MoveWindow(0, 0, width, height, 0);
		Invalidate();
	}

	init_bm_head();

	DrawDibDraw(m_DrawDib,
		hdc,
		rc.left,
		rc.top,
		-1,	// don't stretch
		-1,
		&m_bm_info.bmiHeader, 
		(void*)data, 
		0, 
		0, 
		width, 
		height, 
		0);
}


存在的問題:

1、不能相容FU-A方式封裝的H264流,一些裝置的視訊無法播放。

2、分片組幀應該可以自己實現,不需要依賴av_parser_parse2來做。