
WebRtc libjingle_PeerConnection Layer (1): Displaying Local Video

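The listing below is a single-file demo distilled from the stock peerconnection_client sample (note the commented-out conductor.h include). It creates a PeerConnectionFactory, opens the first available camera through the DeviceManager, wraps the local video track in a small VideoRenderer that converts each frame to a 32-bit ARGB bitmap, and then lets a worker thread poll that buffer roughly 30 times per second; the OpenCV display calls are left commented out.
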
//#include "talk/examples/peerconnection/client/conductor.h"

#include <stdio.h>
#include <stdlib.h>

#include <utility>
#include <vector>


#ifdef WIN32
#include <Windows.h>

#endif

//#include "opencv2\opencv.hpp"

#include "talk/app/webrtc/videosourceinterface.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/media/devices/devicemanager.h"
//#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/media/base/videorenderer.h"
#include "talk/media/base/videoframe.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
#include "webrtc/base/logging.h"


//#pragma comment(lib, "opencv_world300d.lib")

#define DTLS_ON  true
#define DTLS_OFF false

// Names used for an IceCandidate JSON object.
const char kCandidateSdpMidName[] = "sdpMid";
const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex";
const char kCandidateSdpName[] = "candidate";

// Names used for a SessionDescription JSON object.
const char kSessionDescriptionTypeName[] = "type";
const char kSessionDescriptionSdpName[] = "sdp";

const char kAudioLabel[] = "audio_label";
const char kVideoLabel[] = "video_label";
const char kStreamLabel[] = "stream_label";

// A little helper class to make sure we always do proper locking and
// unlocking when working with VideoRenderer buffers.
template <typename T>
class AutoLock {
public:
	explicit AutoLock(T* obj) : obj_(obj) { obj_->Lock(); }
	~AutoLock() { obj_->Unlock(); }
protected:
	T* obj_;
};

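// VideoRenderer registers itself on the local VideoTrack and keeps the most
// recent frame as a 32-bit ARGB bitmap (bmi_ / image_) that other threads can
// read while holding the critical section.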
class VideoRenderer : public webrtc::VideoRendererInterface {
public:
	VideoRenderer(HWND wnd, int width, int height,
		webrtc::VideoTrackInterface* track_to_render);
	virtual ~VideoRenderer();

	void Lock() {
		::EnterCriticalSection(&buffer_lock_);
	}

	void Unlock() {
		::LeaveCriticalSection(&buffer_lock_);
	}

	// VideoRendererInterface implementation
	virtual void SetSize(int width, int height);
	virtual void RenderFrame(const cricket::VideoFrame* frame);

	const BITMAPINFO& bmi() const { return bmi_; }
	const uint8* image() const { return image_.get(); }

protected:
	enum {
		SET_SIZE,
		RENDER_FRAME,
	};

	HWND wnd_;
	BITMAPINFO bmi_;
	rtc::scoped_ptr<uint8[]> image_;
	CRITICAL_SECTION buffer_lock_;
	rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
};

//
// VideoRenderer
//
VideoRenderer::VideoRenderer(
	HWND wnd, int width, int height,
	webrtc::VideoTrackInterface* track_to_render)
	: wnd_(wnd), rendered_track_(track_to_render) {
	::InitializeCriticalSection(&buffer_lock_);
	ZeroMemory(&bmi_, sizeof(bmi_));
	bmi_.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	bmi_.bmiHeader.biPlanes = 1;
	bmi_.bmiHeader.biBitCount = 32;
	bmi_.bmiHeader.biCompression = BI_RGB;
	bmi_.bmiHeader.biWidth = width;
	bmi_.bmiHeader.biHeight = -height;
	bmi_.bmiHeader.biSizeImage = width * height *
		(bmi_.bmiHeader.biBitCount >> 3);
	// Allocate an initial buffer so image() is never NULL before the first frame.
	image_.reset(new uint8[bmi_.bmiHeader.biSizeImage]);
	rendered_track_->AddRenderer(this);
}

VideoRenderer::~VideoRenderer() {
	rendered_track_->RemoveRenderer(this);
	::DeleteCriticalSection(&buffer_lock_);
}

void VideoRenderer::SetSize(int width, int height) {
	AutoLock<VideoRenderer> lock(this);

	// biHeight is stored negated (top-down DIB below), so compare against
	// -height; otherwise this early return never fires and the buffer is
	// reallocated for every frame.
	if (width == bmi_.bmiHeader.biWidth && -height == bmi_.bmiHeader.biHeight) {
		return;
	}

	bmi_.bmiHeader.biWidth = width;
	bmi_.bmiHeader.biHeight = -height;
	bmi_.bmiHeader.biSizeImage = width * height *
		(bmi_.bmiHeader.biBitCount >> 3);
	image_.reset(new uint8[bmi_.bmiHeader.biSizeImage]);
}
void VideoRenderer::RenderFrame(
	const cricket::VideoFrame* video_frame) {
	if (!video_frame)
		return;

	{
		AutoLock<VideoRenderer> lock(this);

		const cricket::VideoFrame* frame =
			video_frame->GetCopyWithRotationApplied();

		SetSize(static_cast<int>(frame->GetWidth()),
			static_cast<int>(frame->GetHeight()));

		ASSERT(image_.get() != NULL);
		frame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
			image_.get(),
			bmi_.bmiHeader.biSizeImage,
			bmi_.bmiHeader.biWidth *
			bmi_.bmiHeader.biBitCount / 8);
	}
	if (wnd_)  // wnd_ is NULL in this console demo; nothing to repaint then
		InvalidateRect(wnd_, NULL, TRUE);
}
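
// RenderFrame() only invalidates wnd_; the actual drawing would normally
// happen in that window's WM_PAINT handler. The function below is a minimal
// sketch (unused in this console demo, where wnd is NULL) of how the ARGB
// buffer could be blitted with GDI, along the lines of the original
// peerconnection_client OnPaint():
void PaintLocalFrame(HWND hwnd, VideoRenderer* renderer) {
	PAINTSTRUCT ps;
	HDC dc = ::BeginPaint(hwnd, &ps);
	if (renderer) {
		AutoLock<VideoRenderer> lock(renderer);
		const BITMAPINFO& bmi = renderer->bmi();
		const uint8* image = renderer->image();
		if (image) {
			int width = bmi.bmiHeader.biWidth;
			int height = abs(bmi.bmiHeader.biHeight);
			RECT rc;
			::GetClientRect(hwnd, &rc);
			::StretchDIBits(dc,
				0, 0, rc.right - rc.left, rc.bottom - rc.top,  // destination
				0, 0, width, height,                           // source
				image, &bmi, DIB_RGB_COLORS, SRCCOPY);
		}
	}
	::EndPaint(hwnd, &ps);
}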



rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> peer_connection_factory_;

rtc::scoped_ptr<VideoRenderer> local_renderer_;

HWND wnd = NULL;

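// Ask the DeviceManager to enumerate video capture devices and return the
// first one that can actually be opened (NULL if none is available).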
cricket::VideoCapturer* OpenVideoCaptureDevice() {
	rtc::scoped_ptr<cricket::DeviceManagerInterface> dev_manager(
		cricket::DeviceManagerFactory::Create());
	if (!dev_manager->Init()) {
		LOG(LS_ERROR) << "Can't create device manager";
		return NULL;
	}
	std::vector<cricket::Device> devs;
	if (!dev_manager->GetVideoCaptureDevices(&devs)) {
		LOG(LS_ERROR) << "Can't enumerate video devices";
		return NULL;
	}
	std::vector<cricket::Device>::iterator dev_it = devs.begin();
	cricket::VideoCapturer* capturer = NULL;
	for (; dev_it != devs.end(); ++dev_it) {
		capturer = dev_manager->CreateVideoCapturer(*dev_it);
		if (capturer != NULL)
			break;
	}
	return capturer;
}

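// Attach a VideoRenderer to the local video track. The 1x1 size is only a
// placeholder; SetSize() grows the buffer when the first real frame arrives.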
void StartLocalRenderer(webrtc::VideoTrackInterface* local_video) {
	local_renderer_.reset(new VideoRenderer(wnd, 1, 1, local_video));
}

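// Create local audio and video tracks from the factory. In the full sample
// they would be bundled into a MediaStream and added to the PeerConnection;
// here the video track is only handed to the local renderer (the audio track
// is created but otherwise unused).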
void AddStreams() {

	rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
		peer_connection_factory_->CreateAudioTrack(
		kAudioLabel, peer_connection_factory_->CreateAudioSource(NULL)));

	rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
		peer_connection_factory_->CreateVideoTrack(
		kVideoLabel,
		peer_connection_factory_->CreateVideoSource(OpenVideoCaptureDevice(),
		NULL)));
	
	StartLocalRenderer(video_track);
}

bool InitializePeerConnection() {
	ASSERT(peer_connection_factory_.get() == NULL);
	ASSERT(peer_connection_.get() == NULL);

	peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();

	if (!peer_connection_factory_.get()) {
		return false;
	}

	AddStreams();
	// No PeerConnection is created in this local-preview demo, so getting
	// this far (factory created, local tracks added) already means success.
	return true;
}


void Show()
{
	VideoRenderer* local_renderer = local_renderer_.get();
	if (local_renderer)
	{
		AutoLock<VideoRenderer> local_lock(local_renderer);
		const BITMAPINFO& bmi = local_renderer->bmi();
		const uint8* image = local_renderer->image();
		//printf("height=%d  width=%d  biSizeImage=%d \n", bmi.bmiHeader.biHeight, bmi.bmiHeader.biWidth, bmi.bmiHeader.biSizeImage);
		//GetBitmapFromScreen("c:\\aa.bmp", (BITMAPINFO)bmi, (uint8*)image);
		int rows = abs(bmi.bmiHeader.biHeight);
		int cols = bmi.bmiHeader.biWidth;
		static int i = 0;
		printf("%d\n", i++);
		//cv::Mat img(rows, cols, CV_8UC4, (void*)image, 0U);
		//cv::imshow("Video Capture", img);
		//cvWaitKey(1);
	}
}
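
// If OpenCV is re-enabled (the opencv2/opencv.hpp include and the
// opencv_world #pragma near the top), the polled ARGB buffer maps directly
// onto an 8-bit, 4-channel cv::Mat. A sketch of that display path, assuming
// OpenCV 3.x, kept disabled here because the OpenCV headers are commented out:
#if 0
void ShowWithOpenCV(VideoRenderer* renderer) {
	AutoLock<VideoRenderer> lock(renderer);
	const BITMAPINFO& bmi = renderer->bmi();
	const uint8* image = renderer->image();
	if (!image)
		return;
	int rows = abs(bmi.bmiHeader.biHeight);
	int cols = bmi.bmiHeader.biWidth;
	// CV_8UC4 matches the 4-bytes-per-pixel buffer written by
	// ConvertToRgbBuffer(); the Mat wraps the buffer without copying it.
	cv::Mat img(rows, cols, CV_8UC4, (void*)image);
	cv::imshow("Video Capture", img);
	cv::waitKey(1);
}
#endif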

DWORD WINAPI ThreadProc(LPVOID lpParam)
{
	while (true)
	{
		Show();
		Sleep(33);  // ~30 fps polling
	}
	return 0;
}

int main()
{
	getchar();  // wait for Enter before starting
	
	//wnd = FindWindow(L"ConsoleWindowClass", NULL);

	if (!InitializePeerConnection()) {
		printf("Failed to initialize the PeerConnection factory\n");
		return 1;
	}
	
	DWORD dwThreadId;
	
	CreateThread(
		NULL,          // default security attributes
		0,             // use default stack size
		ThreadProc,    // thread function
		NULL,          // argument to thread function
		0,             // use default creation flags
		&dwThreadId);  // returns the thread identifier


	getchar();
	return 0;
}