程式人生 > 網易純直播SDK使用 視訊回撥

網易純直播SDK使用 視訊回撥

一 合成後視訊回撥

1 視訊回撥

Nlss_SetVideoSamplerCB,接受合成後的預覽的每幀影象rgb32 buffer。

	// Install VideoCallback as the SDK's merged-video sampler callback on the
	// live-streaming client; after this call, every composited preview frame
	// (RGB32 buffer) is delivered to VideoCallback.
	void  LsSession::SetVideoSamplerCB(){
		NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, VideoCallback);
	}
	// Merged-video sampler callback: receives each composited preview frame
	// (RGB32), records whether the frame is entirely black, and pushes the
	// pixel data into the frame manager stamped with the current wall-clock
	// time in milliseconds.
	void VideoCallback(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *sampler)
	{
		if (!sampler || sampler->iDataSize <= 0)
			return;

		// Black-frame detection over the whole pixel area.
		const int pixel_count = sampler->iWidth * sampler->iHeight;
		LssManange::IsBlackVideo = IsBlack((DWORD*)sampler->puaData, pixel_count);

		// Timestamp the frame with "now" in milliseconds.
		timeb time_now;
		ftime(&time_now); // seconds + milliseconds
		__int64 cur_timestamp = time_now.time * 1000 + time_now.millitm;

		video_frame_mng_.AddVideoFrame(true, cur_timestamp,
			(const char*)sampler->puaData, sampler->iDataSize,
			sampler->iWidth, sampler->iHeight, "",
			nim_comp::VideoFrameMng::Ft_ARGB);
	}

視訊資料存放在

	// Video frame storage shared between the SDK sampler callback (producer)
	// and the preview bitmap controls (consumers).
	nim_comp::VideoFrameMng video_frame_mng_; 
	// Accessor used by the UI layer to bind preview controls to the shared
	// frame store (see NLSLiveForm::CreateControl below).
	nim_comp::VideoFrameMng* LssManange::GetVideoFrameMng()
	{
		return &video_frame_mng_;
	}

這是個什麼類呢?

namespace nim_comp
{
	// Describes one block of pixel data (a single video frame); used to pass
	// frame buffers between the capture callback and the preview controls.
	struct PicRegion
	{
		PicRegion()
		{
			pdata_ = NULL;
			//subtype_ = nim::kNIMVideoSubTypeARGB;
			size_max_ = 0;
			size_ = 0;
			// Fix: width_/height_/timestamp_ were previously left
			// uninitialized, so reading them before the first ResetData()
			// was undefined behavior.
			width_ = 0;
			height_ = 0;
			timestamp_ = 0;
		}

		~PicRegion()
		{
			Clear();
		}

		// NOTE(review): this struct owns pdata_ but declares no copy
		// constructor/assignment; copying a PicRegion would double-delete
		// the buffer. Callers in this file only pass it by pointer —
		// keep it that way (or delete the copy operations project-wide).

		// Release the pixel buffer and reset size bookkeeping.
		void Clear()
		{
			if (pdata_)
			{
				delete[] pdata_;
				pdata_ = NULL;
			}
			size_max_ = 0;
			size_ = 0;
		}

		// Copy `size` bytes of pixel data in, growing the internal buffer
		// only when the new frame is larger than any previous one (the
		// buffer is reused across frames to avoid per-frame allocation).
		// Returns the number of bytes stored.
		int ResetData(uint64_t time, const char* data, int size, unsigned int width, unsigned int height/*, nim::NIMVideoSubType subtype*/)
		{
			if (size > size_max_)
			{
				if (pdata_)
				{
					delete[] pdata_;
					pdata_ = NULL;
				}
				pdata_ = new char[size];
				size_max_ = size;
			}
			width_ = width;
			height_ = height;
			timestamp_ = time;
			//subtype_ = subtype;
			size_ = size;
			// Fix: guard against size == 0 with a still-null pdata_
			// (memcpy with a null destination is undefined behavior).
			if (size > 0)
				memcpy(pdata_, data, size);
			return size;
		}

		//nim::NIMVideoSubType subtype_;
		char*		pdata_;         // first byte of the pixel data
		int			size_max_;      // current buffer capacity in bytes
		int			size_;          // bytes of valid data in the buffer
		long        width_;         // pixel width
		long        height_;        // pixel height
		uint64_t	timestamp_;     // timestamp in milliseconds
	};

	// Holds the latest captured (local) frame and the latest frame received
	// from each remote account, converted to I420 on insertion.
	class VideoFrameMng
	{
	public:
		// Pixel layout of frames handed to AddVideoFrame.
		enum FrameType
		{
			Ft_I420 = 0,   // already I420, stored as-is
			Ft_ARGB,       // top-down ARGB, converted to I420
			Ft_ARGB_r,     // bottom-up ARGB, converted with a negative stride
		};
		VideoFrameMng();
		~VideoFrameMng();

		void Clear();
		void AddVideoFrame(bool capture, int64_t time, const char* data, int size, int width, int height, const std::string& json, FrameType frame_type = Ft_ARGB_r);
		bool GetVideoFrame(bool local_show, std::string account, int64_t& time, char* out_data, int& width, int& height, bool mirror = false, bool argb_or_yuv = true);
	
	public:
		nbase::NLock  lock_;                                   // guards both frame stores below
		PicRegion capture_video_pic_;                          // latest local/captured frame
		std::map<std::string, PicRegion*> recv_video_pic_list_; // latest frame per remote account
	};
}
	// Store one incoming frame. Non-I420 input is converted to I420 first.
	// `capture == true` stores into the local-capture slot; otherwise the
	// frame is filed under the account parsed from `json`.
	void VideoFrameMng::AddVideoFrame(bool capture, int64_t time, const char* data, int size, int width, int height, const std::string& json, FrameType frame_type)
	{
		// Parse the optional json payload for the sender account.
		Json::Value valus;
		Json::Reader reader;
		std::string account;
		if (reader.parse(json, valus))
		{
			//ToDo
			//account = valus[nim::kNIMDeviceDataAccount].asString();
		}
		// A remote frame without an account has nowhere to go.
		if (!capture && account.empty())
		{
			return;
		}
		nbase::NAutoLock auto_lock(&lock_);
		//nim::NIMVideoSubType subtype = nim::kNIMVideoSubTypeI420;
		// NOTE(review): the `time` parameter is ignored; frames are stamped
		// with the current wall clock instead — confirm this is intended.
		timeb time_now;
		ftime(&time_now); // seconds
		int64_t cur_timestamp = time_now.time * 1000 + time_now.millitm; // milliseconds
		const char* src_buffer = data;
		std::string ret_data;
		if (frame_type != Ft_I420)
		{
			// Source stride uses the ORIGINAL width (the input buffer's row
			// pitch); width/height are then rounded down to even as I420
			// requires even dimensions.
			int byte_width = width * 4;
			width -= width % 2;
			height -= height % 2;
			int wxh = width * height;
			ret_data.append(wxh * 3 / 2, (char)0);
			// Fix: write through &ret_data[0] instead of casting away the
			// const of c_str(), which is undefined behavior before C++17.
			uint8_t* des_y = (uint8_t*)&ret_data[0];
			uint8_t* des_u = des_y + wxh;
			uint8_t* des_v = des_u + wxh / 4;
			const uint8_t* src_argb = (const uint8_t*)data;
			if (frame_type == Ft_ARGB_r)
			{
				// Bottom-up source: start at the last row and use a negative
				// stride so libyuv walks the image top-down.
				src_argb = (const uint8_t*)data + size - byte_width;
				byte_width = -byte_width;
			}
			libyuv::ARGBToI420(src_argb, byte_width,
				des_y, width,
				des_u, width / 2,
				des_v, width / 2,
				width, height);
			src_buffer = ret_data.c_str();
			size = wxh * 3 / 2;
		}
		if (capture)
		{
			capture_video_pic_.ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
		}
		else
		{
			// Reuse the per-account slot when present; allocate on first frame.
			auto it = recv_video_pic_list_.find(account);
			if (it != recv_video_pic_list_.end())
			{
				it->second->ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
			}
			else
			{
				PicRegion* pic_info = new PicRegion;
				pic_info->ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
				recv_video_pic_list_[account] = pic_info;
			}
		}
	}

2 視訊預覽

到此,我們獲得視訊資料了,視訊資料儲存在VideoFrameMng video_frame_mng_;  物件中

下一步,如何將這個物件與Bitmap控制元件關聯

duilib自定義控制元件,將VideoFrameMng物件作為引數傳入

// Factory hook for duilib's XML layout: maps the custom class name
// "BitmapControl" to a video-preview control bound to the shared frame
// manager; any other class name is left to the default factory.
ui::Control* NLSLiveForm::CreateControl(const std::wstring& pstrClass)
{
	if (pstrClass != _T("BitmapControl"))
		return NULL;
	return new ui::CBitmapControl(nim_nls::LssManange::GetVideoFrameMng());
}

這個BITMAP是怎麼定義的呢?

namespace ui
{
	// duilib control that renders the latest frame from a shared
	// VideoFrameMng directly into the parent window's background bits.
	class CBitmapControl : public ui::Box
	{
	public:
		// `video_frame_mng` is borrowed, not owned; it must outlive the control.
		CBitmapControl(nim_comp::VideoFrameMng* video_frame_mng);
		~CBitmapControl(void);

		// Selects which remote account's frames to show ("" = local capture).
		void SetAccount(std::string account){ account_ = account; }
		std::string GetAccount() { return account_; }
		void Paint(HDC hDC, const UiRect& rcPaint) override;
		// Clear cached frame data.
		void Clear();

		// Pull the newest frame from the frame manager into data_ and
		// invalidate the control; returns true when a frame was fetched.
		bool Refresh(Window* wnd, bool captrue = true, bool mirror = false, bool bCameraIsOpen = false);
		bool RefreshSuccess(){ return is_refresh_success_; };
	    

		// When true the control resizes itself to the video dimensions.
		void SetAutoSize(bool auto_size){ auto_size_ = auto_size; }

		bool IsRefreshTimeout();

		RECT  GetDrawMapMargin();
		RECT  GetPictureMargin();

		// add by yujian
		bool IsBlack(DWORD *pData, long nLen,int nTryTime=100);

	protected:
		std::string account_;       // remote account shown ("" = local capture)
		bool auto_size_;
		int64_t timestamp_;         // timestamp of the frame in data_ (ms)
		std::string data_;          // latest frame pixels (ARGB)
		int width_;                 // frame width in pixels
		int height_;                // frame height in pixels
		int draw_map_x_;
		int draw_map_y_;
		int draw_map_width_;
		int draw_map_height_;
		bool is_refresh_success_;
		RECT DrawMapRect;           // last region actually blitted in Paint
		RECT PictueMapRect;         // full frame rect (0,0,width_,height_)

		Window* parent_wnd_;        // window whose background bits Paint writes to
		nim_comp::VideoFrameMng* video_frame_mng_; // borrowed frame store

		bool m_bCameraIsOpen;
	};

}


	// Construct the preview control bound to a (borrowed) frame manager.
	CBitmapControl::CBitmapControl(nim_comp::VideoFrameMng* video_frame_mng)
	{
		m_bCameraIsOpen = true;
		video_frame_mng_ = video_frame_mng;
		timestamp_ = 0;
		width_ = 0;
		height_ = 0;
		auto_size_ = false;
		is_refresh_success_ = false;
		// Fix: the members below were previously left uninitialized.
		// Paint() dereferences parent_wnd_ (guarded only by a null check),
		// so an indeterminate pointer here was undefined behavior whenever
		// Paint ran before the first Refresh().
		parent_wnd_ = NULL;
		draw_map_x_ = 0;
		draw_map_y_ = 0;
		draw_map_width_ = 0;
		draw_map_height_ = 0;
		DrawMapRect.bottom = 0;
		DrawMapRect.left = 0;
		DrawMapRect.right = 0;
		DrawMapRect.top = 0;
		PictueMapRect.left = 0;
		PictueMapRect.top = 0;
		PictueMapRect.right = 0;
		PictueMapRect.bottom = 0;
	}

將幀資料,轉換到string data中

	// Fetch the newest frame into data_ (sized to the control's area) and
	// invalidate the control for repaint. Returns true when a frame was
	// fetched; the result is also cached in is_refresh_success_.
	bool CBitmapControl::Refresh(Window* wnd, bool captrue, bool mirror, bool bCameraIsOpen)
	{
		m_bCameraIsOpen = bCameraIsOpen;

		int item_w = m_rcItem.right - m_rcItem.left;
		int item_h = m_rcItem.bottom - m_rcItem.top;
		if (auto_size_)
		{
			item_w = GetMaxWidth();
			item_h = GetMaxHeight();
		}
		bool ret = false;
		if (item_w > 0 && item_h > 0)
		{
			parent_wnd_ = wnd;
			data_.resize(item_w * item_h * 4);

			try{
				// item_w/item_h are adjusted to the video's aspect ratio by
				// the callee and return the actual video dimensions.
				// Fix: write through &data_[0] instead of casting away the
				// const of c_str() (undefined behavior before C++17).
				ret = video_frame_mng_->GetVideoFrame(true, (captrue ? "" : account_), timestamp_, &data_[0], item_w, item_h, mirror);
				if (ret)
				{
					width_ = item_w;
					height_ = item_h;
					PictueMapRect.left = 0;
					PictueMapRect.top = 0;
					PictueMapRect.right = width_;
					PictueMapRect.bottom = height_;
					if (auto_size_)
					{
						SetFixedWidth(width_);
						SetFixedHeight(height_);
					}
					Invalidate();
				}
			}
			catch (...){
				// Fix: log BEFORE rethrowing — the original threw first,
				// which made the log statement unreachable; the thrown
				// message also said "DoPaint" instead of "Refresh".
				writelog3(LOG_LEVEL_CRITICAL, "CBitmapControl::Refresh catch exception!");
				throw "CBitmapControl::Refresh";
			}

		}
		is_refresh_success_ = ret;

		return ret;
	}

過載 void Paint(HDC hDC, const UiRect& rcPaint) override;

將data中資料繪製出來

	// Blit the cached frame (data_) into the parent window's bottom-up
	// background bitmap, centered within the control, clipped to both the
	// control rect and the paint rect, then paint child controls.
	void CBitmapControl::Paint(HDC hDC, const UiRect& rcPaint)
	{
		// Re-entrancy guard. NOTE(review): a function-local static is not
		// thread-safe; this relies on Paint only running on the UI thread.
		static bool  bDrawing = false;
		if (bDrawing)
			return;
		bDrawing = true;
		try
		{
			if (!::IntersectRect(&m_rcPaint, &rcPaint, &m_rcItem))
			{
				// Fix: reset the guard before this early return — the
				// original left bDrawing == true forever, permanently
				// disabling painting after the first clipped-out call.
				bDrawing = false;
				return;
			}
			Control::Paint(hDC, rcPaint);
			// Paint the cached video frame, if any.
			if (width_ * height_ > 0)
			{
				// Control geometry.
				int item_w = m_rcItem.right - m_rcItem.left;
				int item_h = m_rcItem.bottom - m_rcItem.top;
				int item_x = m_rcItem.left;
				int item_y = m_rcItem.top;

				int source_w = width_;   // video width
				int source_h = height_;  // video height

				if (source_w > 0 && source_h > 0 && parent_wnd_)
				{
					// Center the video within the control.
					item_x += (item_w - source_w) / 2;
					item_y += (item_h - source_h) / 2;

					// Parent window client size (destination bitmap size).
					UiRect rcClient;
					::GetClientRect(parent_wnd_->GetHWND(), &rcClient);
					int width = rcClient.right - rcClient.left;
					int height = rcClient.bottom - rcClient.top;

					// Destination rect clipped to the paint rect and the
					// control rect; source offsets follow from it.
					int draw_x = max(rcPaint.left, item_x);
					draw_x = max(m_rcItem.left, draw_x);
					int draw_y = max(rcPaint.top, item_y);
					draw_y = max(m_rcItem.top, draw_y);
					int draw_h = min(rcPaint.bottom - draw_y, min(item_y + source_h, m_rcItem.bottom) - draw_y);
					draw_h = max(draw_h, 0);
					int src_x = draw_x - item_x;
					int src_y = draw_y - item_y;
					int src_w = min(rcPaint.right - draw_x, min(item_x + source_w, m_rcItem.right) - draw_x);
					src_w = max(src_w, 0);

					// Row strides in bytes (32 bits per pixel).
					int dest_byte_width = width * 4;
					int src_byte_width = source_w * 4;
					int paint_byte_width = src_w * 4;

					char* dest_data = (char*)parent_wnd_->GetBackgroundBits();

					// The destination DIB is bottom-up: start at the last
					// visible row and walk upwards (dest stride subtracted).
					int bottom = height - draw_y - 1;
					dest_data += bottom * dest_byte_width + draw_x * 4;

					char* src_data = (char*)data_.c_str();
					// NOTE(review): result unused here; IsBlack appears to
					// be a pure check — confirm it has no side effects.
					int nLen = source_h*source_w;
					bool isBlackVideo = IsBlack((DWORD*)src_data, nLen);

					src_data += src_y * src_byte_width + src_x * 4;

					// Copy row by row: source top-down, destination bottom-up.
					for (int i = 0; i < draw_h; ++i)
					{
						memcpy(dest_data, src_data, paint_byte_width);
						dest_data -= dest_byte_width;
						src_data += src_byte_width;
					}
					// Remember the region actually drawn.
					DrawMapRect.left = draw_x;
					DrawMapRect.top = draw_y;
					DrawMapRect.bottom = draw_y + draw_h;
					DrawMapRect.right = draw_x + src_w;
				}
			}

			// Paint child controls.
			for (auto it = m_items.begin(); it != m_items.end(); it++)
			{
				Control* pControl = *it;
				if (!pControl->IsVisible()) continue;
				UiRect controlPos = pControl->GetPos();
				if (!::IntersectRect(&m_rcPaint, &rcPaint, &controlPos)) continue;
				pControl->AlphaPaint(hDC, rcPaint);
			}
		}
		catch (...)
		{
			// Fix: reset the guard before propagating, for the same reason
			// as the early-return fix above.
			bDrawing = false;
			throw "CBitmapControl::DoPaint";
		}
		bDrawing = false;
	}

3 設定自己的回撥函式

因為需要兩個程序共享攝像頭,所以我設定了自己的回撥函式

	// Overload that installs a caller-supplied merged-video sampler callback
	// instead of the built-in VideoCallback (used by VideoCallback_G below
	// to share frames with a second process).
	void LsSession::SetVideoSamplerCB(PFN_NLSS_MERGED_VIDEO_SAMPLER_CB cb)
	{
		//NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, VideoCallback);
		NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, cb);
	}

/**
*  @brief Callback delivering the latest snapshot of the frame composited
*         from all child video streams.
*
*  @param hNLSService live-streaming push instance
*  @param pstSampler  pointer to the sampler struct holding the latest
*                     composited frame
*/
typedef void(*PFN_NLSS_MERGED_VIDEO_SAMPLER_CB)(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *pstSampler);

自己的回撥函式

// Custom merged-video sampler callback: stores the frame locally (as the
// built-in VideoCallback does) and additionally publishes it into a shared
// memory block so a second process can consume the camera feed.
void VideoCallback_G(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *sampler)
{
	if (sampler)
	{
		if (sampler->iDataSize > 0)
		{
			// Track whether the composited frame is entirely black.
			int nLen = sampler->iWidth*sampler->iHeight;
			nim_nls::LssManange::IsBlackVideo = IsBlack((DWORD*)sampler->puaData, nLen);

			// Stamp with the current wall clock (ms) and store into the
			// shared frame manager for the in-process preview controls.
			timeb time_now;
			ftime(&time_now); // seconds
			__int64 cur_timestamp = time_now.time * 1000 + time_now.millitm; // milliseconds
			nim_nls::LssManange::GetVideoFrameMng()->AddVideoFrame(true, cur_timestamp, (const char*)sampler->puaData, sampler->iDataSize, \
				sampler->iWidth, sampler->iHeight, "", nim_comp::VideoFrameMng::Ft_ARGB);

			// Publish to the cross-process shared memory block.
			char* pData = m_memMng.getData();

			INT32 nFps = g_pLiveForm->m_videoFps;
			INT32 nWidth = sampler->iWidth;
			INT32 nHeight = sampler->iHeight;

			// Shared buffer layout: [fps, width, height] header (INT32 each)
			// followed immediately by the frame bytes.
			INT32 Info[] = { nFps, nWidth, nHeight };
			int len = sizeof(Info) / sizeof(Info[0]);

			if (pData)
			{
				// Wait until the reader has consumed the previous frame,
				// then mark the buffer as being written.
				WaitForSingleObject(m_memMng.m_hReadEvent, INFINITE);
				ResetEvent(m_memMng.m_hWriteEvent);

				memcpy((INT32*)pData, Info, sizeof(Info));

				// Fix: use sizeof(Info) instead of the magic offset 12 so
				// the payload offset stays in sync with the header layout.
				memcpy(pData + sizeof(Info), nim_nls::LssManange::GetVideoFrameMng()->capture_video_pic_.pdata_, nim_nls::LssManange::GetVideoFrameMng()->capture_video_pic_.size_);

				SetEvent(m_memMng.m_hWriteEvent);
			}

		}
	}
}
m_LiveStreaming.SetVideoSamplerCB(VideoCallback_G);    

4 自定義點陣圖控制元件

要是想在其它視窗控制元件中,預覽此視訊,可以自定義點陣圖物件,

	// In the target window's CreateControl hook, map the XML class name to a
	// CBitmapControl bound to the shared frame manager.
	if (pstrClass == _T("BitmapControl")) //WIN32 control
	{
		return new ui::CBitmapControl(nim_nls::LssManange::GetVideoFrameMng());
	}

二 單個視訊回撥

// SDK entry points: install a per-child solo-preview sampler callback, and
// switch solo preview on/off for a child video stream.
void    Nlss_ChildVideoSetSoloPreviewCB(_HNLSSCHILDSERVICE hNLSSChild, PFN_NLSS_VIDEOSAMPLER_CB pFunVideoSamplerCB);
void    Nlss_ChildVideoSwitchSoloPreview(_HNLSSCHILDSERVICE hNLSSChild, bool bOn);
	// Look up the child streaming service for `accid`; if one exists,
	// install the per-child solo-preview sampler callback on it.
	void LsSession::OnChildVideoSetSoloPreviewCB(const std::string& accid, PFN_NLSS_CHILD_VIDEO_SAMPLER_CB pFunVideoSamplerCB)
	{
		auto found = nlss_child_services_.find(accid);
		if (found == nlss_child_services_.end())
			return;
		NLS_SDK_GET_FUNC(Nlss_ChildVideoSetSoloPreviewCB)(found->second, pFunVideoSamplerCB);
	}

	// Look up the child streaming service for `accid`; if one exists,
	// toggle its solo preview on or off.
	void  LsSession::OnChildVideoSwitchSoloPreview(const std::string& accid, bool bOn)
	{
		auto found = nlss_child_services_.find(accid);
		if (found == nlss_child_services_.end())
			return;
		NLS_SDK_GET_FUNC(Nlss_ChildVideoSwitchSoloPreview)(found->second, bOn);
	}