
A code walkthrough of video-engine creation in WebRTC's bundled peerconnection client
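Before diving into the call tree, here is a condensed sketch of how the example client drives the three stages this walkthrough covers: (1) create the factory (which builds the media engine), (2) create the PeerConnection, and (3) add the local audio/video streams. This is abridged from conductor.cc; error reporting and the exact signature of the CreatePeerConnection helper (e.g. a DTLS flag) vary between revisions.

// Condensed sketch of the top-level call site in conductor.cc (not the full function).
bool Conductor::InitializePeerConnection() {
  // Step 1: create the factory, which constructs the media engine described below.
  peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();
  if (!peer_connection_factory_) {
    DeletePeerConnection();
    return false;
  }
  // Step 2: create the PeerConnection itself (see section 2); the helper's
  // parameters are abridged here.
  if (!CreatePeerConnection()) {
    DeletePeerConnection();
    return false;
  }
  // Step 3: attach the local audio/video tracks (see section 3).
  AddStreams();
  return peer_connection_.get() != NULL;
}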


src\webrtc\examples\peerconnection\client\conductor.cc
bool Conductor::InitializePeerConnection()
1 webrtc::CreatePeerConnectionFactory();
src\talk\app\webrtc\peerconnectionfactory.cc
1.1 new rtc::RefCountedObject<PeerConnectionFactory>()
1.2 bool PeerConnectionFactory::Initialize()
1.2.1 cricket::MediaEngineInterface* media_engine = PeerConnectionFactory::CreateMediaEngine_w()
src\talk\media\webrtc\webrtcmediaengine.cc
1.2.1.1
MediaEngineInterface* WebRtcMediaEngineFactory::Create(
    webrtc::AudioDeviceModule* adm,
    WebRtcVideoEncoderFactory* encoder_factory,
    WebRtcVideoDecoderFactory* decoder_factory)
{
  return CreateWebRtcMediaEngine(adm, encoder_factory, decoder_factory);
}
1.2.1.2
cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
    webrtc::AudioDeviceModule* adm,
    WebRtcVideoEncoderFactory* encoder_factory,
    WebRtcVideoDecoderFactory* decoder_factory)
{
  return new cricket::WebRtcMediaEngine2(adm, encoder_factory, decoder_factory);
}
1.2.1.3
class WebRtcMediaEngine2
    : public CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine2>
{
 public:
  WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
                     WebRtcVideoEncoderFactory* encoder_factory,
                     WebRtcVideoDecoderFactory* decoder_factory);
};
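The constructor body (not shown above) simply forwards the injected device module and codec factories to the two halves of the composite. A hedged sketch follows; the setter names are as I recall them from this revision of the tree and may differ slightly:

WebRtcMediaEngine2::WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
                                       WebRtcVideoEncoderFactory* encoder_factory,
                                       WebRtcVideoDecoderFactory* decoder_factory)
{
  // Sketch: the ADM goes to the voice engine, the external codec factories to
  // the video engine; voice_/video_ are the CompositeMediaEngine members.
  voice_.SetAudioDeviceModule(adm);
  video_.SetExternalEncoderFactory(encoder_factory);
  video_.SetExternalDecoderFactory(decoder_factory);
}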

1.2.1.4
src\talk\media\webrtc\webrtcvideoengine2.cc
WebRtcVideoEngine2::WebRtcVideoEngine2()
    : initialized_(false),
      external_decoder_factory_(NULL),
      external_encoder_factory_(NULL)
{
  video_codecs_ = GetSupportedCodecs(); // Get the list of video codecs (built-in defaults plus externally injected ones; at most eight external codecs).
}
std::vector<VideoCodec> WebRtcVideoEngine2::GetSupportedCodecs() const
{
  // Read the default video codec list: VP8 (by default capped at 640 wide, 480 high, 15 fps),
  // VP9 (if built-in support), H264 (if built-in support), plus rtx, red and ulpfec.
  std::vector<VideoCodec> supported_codecs = DefaultVideoCodecList();
  // ... codecs from external_encoder_factory_ are appended after the defaults ...
  return supported_codecs;
}
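To make that comment concrete, here is a rough sketch of what the default list ends up containing. The function name, payload-type numbers and the exact VideoCodec constructor arguments are illustrative assumptions, not the real DefaultVideoCodecList() implementation:

// Illustrative sketch only: VP8 capped at 640x480 / 15 fps per the comment above,
// VP9/H264 only when the build supports them, then the auxiliary payloads.
std::vector<cricket::VideoCodec> DefaultVideoCodecListSketch() {
  std::vector<cricket::VideoCodec> codecs;
  // VideoCodec(payload id, name, max width, max height, max fps, preference).
  codecs.push_back(cricket::VideoCodec(100, "VP8", 640, 480, 15, 0));
  // codecs.push_back(cricket::VideoCodec(101, "VP9", ...));  // only if internally supported
  // codecs.push_back(cricket::VideoCodec(107, "H264", ...)); // only if internally supported
  codecs.push_back(cricket::VideoCodec(96, "rtx", 0, 0, 0, 0));
  codecs.push_back(cricket::VideoCodec(116, "red", 0, 0, 0, 0));
  codecs.push_back(cricket::VideoCodec(117, "ulpfec", 0, 0, 0, 0));
  return codecs;
}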
1.2.2 channel_manager_.reset(new cricket::ChannelManager(media_engine, worker_thread_));
1.2.2.1 ConstructDataEngine{new HybridDataEngine(new RtpDataEngine(), new SctpDataEngine())}
1.2.2.2 new CaptureManager()
1.2.2.3 audio_options_ = media_engine_->GetAudioOptions();

1.2.3 channel_manager_->Init()
1.2.3.1 ChannelManager::InitMediaEngine_w calls media_engine_->Init(worker_thread_)
template <class VOICE, class VIDEO>
class CompositeMediaEngine : public MediaEngineInterface
{
  virtual bool Init(rtc::Thread* worker_thread)
  {
    if (!voice_.Init(worker_thread))
      return false;
    video_.Init();
    return true;
  }
};
1.2.3.2 SetAudioOptions(audio_options_)
1.2.3.3 SetOutputVolume(audio_output_volume_)
1.2.3.4 SetDefaultVideoEncoderConfig(default_video_encoder_config_)
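The reason 1.2.3.1 receives worker_thread_ is that ChannelManager::Init() marshals the initialization onto the worker thread. A hedged sketch of that flow (the real function also guards against re-initialization and a missing worker thread):

bool ChannelManager::Init() {
  // Run InitMediaEngine_w on the worker thread and block for the result.
  initialized_ = worker_thread_->Invoke<bool>(
      rtc::Bind(&ChannelManager::InitMediaEngine_w, this));
  // ... then SetAudioOptions / SetOutputVolume / SetDefaultVideoEncoderConfig
  // as listed in 1.2.3.2 - 1.2.3.4 ...
  return initialized_;
}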

2 peer_connection_ = peer_connection_factory_->CreatePeerConnection()
2.1 PeerConnection::Initialize
2.1.1 ParseIceServers(configuration.servers, &stun_config, &turn_config)
2.1.2 port_allocator_->SetIceServers(cricket_stuns, cricket_turns);
2.1.3 media_controller_.reset(factory_->CreateMediaController());
2.1.4 remote_stream_factory_.reset(new RemoteMediaStreamFactory)
2.1.5 session_.reset(new WebRtcSession)
2.1.6 stats_.reset(new StatsCollector(this));
2.1.7 session_->Initialize()
2.1.8 session_->RegisterIceObserver(this);

2.1.9 session_->SignalState.connect(this, &PeerConnection::OnSessionStateChange);
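On the client side this is driven from the Conductor's CreatePeerConnection helper, roughly as sketched below. The five-argument overload and its NULL defaults match this era of the API but vary between revisions; the STUN URI is the example's default server:

// Sketch of the call site that feeds step 2.1.1: RTCConfiguration carries the
// STUN/TURN servers that ParseIceServers() later splits into stun_config / turn_config.
webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer server;
server.uri = "stun:stun.l.google.com:19302";
config.servers.push_back(server);
peer_connection_ = peer_connection_factory_->CreatePeerConnection(
    config,
    NULL,   // MediaConstraintsInterface
    NULL,   // PortAllocatorFactoryInterface (use the default)
    NULL,   // DtlsIdentityStoreInterface (use the default)
    this);  // PeerConnectionObserver: Conductor implements the callbacks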
Conductor::OnSuccess(webrtc::SessionDescriptionInterface* desc)
{
  peer_connection_->SetLocalDescription(DummySetSessionDescriptionObserver::Create(), desc);
}
PeerConnection::SetLocalDescription()
{
  session_->SetLocalDescription(desc, &error);
}
WebRtcSession::SetLocalDescription()
{
  if (action == kOffer && !CreateChannels(local_desc_->description())) ...
}
bool WebRtcSession::CreateChannels(const SessionDescription* desc)
{
  CreateVoiceChannel(voice);
  CreateVideoChannel(video) -> ChannelManager::CreateVideoChannel -> ChannelManager::CreateVideoChannel_w
  CreateDataChannel(data);
}
WebRtcSession::SetRemoteDescription follows the same flow as SetLocalDescription.
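For context, the offer that ends up in Conductor::OnSuccess() above is requested like this (condensed from conductor.cc; message boxes and assertions omitted):

void Conductor::ConnectToPeer(int peer_id) {
  if (InitializePeerConnection()) {
    peer_id_ = peer_id;
    // The generated SDP is delivered asynchronously to Conductor::OnSuccess(desc),
    // because Conductor implements CreateSessionDescriptionObserver.
    peer_connection_->CreateOffer(this, NULL);
  }
}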

VideoChannel* ChannelManager::CreateVideoChannel_w()
{
  VideoMediaChannel* media_channel =
      media_engine_->CreateVideoChannel(...);  // the channel created here is a WebRtcVideoChannel2
}

class WebRtcVideoChannel2 : public rtc::MessageHandler, public VideoMediaChannel, ...

WebRtcVideoChannel2* WebRtcVideoEngine2::CreateChannel(webrtc::Call* call, const VideoOptions& options)
{
  return new WebRtcVideoChannel2(call, options, video_codecs_,
                                 external_encoder_factory_, external_decoder_factory_);
}
VideoChannel* video_channel = new VideoChannel
video_channel->Init()
BaseChannel::Init()
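Putting those pieces together, CreateVideoChannel_w looks roughly like this. This is a hedged sketch: the real parameter list includes the session, content name and an RTCP flag, all abridged here:

VideoChannel* ChannelManager::CreateVideoChannel_w(/* parameters abridged */) {
  // Engine-level channel: a WebRtcVideoChannel2.
  VideoMediaChannel* media_channel = media_engine_->CreateVideoChannel(call, options);
  if (!media_channel)
    return NULL;
  // Session-level wrapper; BaseChannel::Init() sets up the transport channels.
  VideoChannel* video_channel = new VideoChannel(/* worker thread, media_channel, ... */);
  if (!video_channel->Init()) {
    delete video_channel;
    return NULL;
  }
  video_channels_.push_back(video_channel);
  return video_channel;
}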

3 Conductor::AddStreams()
3.1 peer_connection_factory_->CreateAudioTrack
3.2 peer_connection_factory_->CreateAudioSource
3.3 peer_connection_factory_->CreateVideoTrack
3.4 peer_connection_factory_->CreateVideoSource(OpenVideoCaptureDevice())
3.5 peer_connection_factory_->CreateLocalMediaStream(kStreamLabel)
3.6 stream->AddTrack(audio_track);

3.7 stream->AddTrack(video_track);

3.8 peer_connection_->AddStream(stream)
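Steps 3.1 through 3.8 correspond to this block in conductor.cc (condensed; the bookkeeping of already-added streams is omitted, and kAudioLabel/kVideoLabel/kStreamLabel are constants defined at the top of that file):

void Conductor::AddStreams() {
  rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
      peer_connection_factory_->CreateAudioTrack(
          kAudioLabel, peer_connection_factory_->CreateAudioSource(NULL)));
  rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
      peer_connection_factory_->CreateVideoTrack(
          kVideoLabel, peer_connection_factory_->CreateVideoSource(
                           OpenVideoCaptureDevice(), NULL)));
  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
      peer_connection_factory_->CreateLocalMediaStream(kStreamLabel);
  stream->AddTrack(audio_track);
  stream->AddTrack(video_track);
  if (!peer_connection_->AddStream(stream)) {
    LOG(LS_ERROR) << "Adding stream to PeerConnection failed";
  }
}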

3.4.1
rtc::scoped_ptr<cricket::DeviceManagerInterface> dev_manager(DeviceManagerFactory::Create());
DeviceManagerInterface* DeviceManagerFactory::Create() {
  return new Win32DeviceManager();
}
class Win32DeviceManager : public DeviceManager

dev_manager->Init()

dev_manager->GetVideoCaptureDevices(&devs)

capturer = dev_manager->CreateVideoCapturer(*dev_it) // may be a fake capturer that reads frames from a file
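These calls all come from Conductor::OpenVideoCaptureDevice(), which simply takes the first device for which a capturer can be created (condensed from the example; error logging omitted):

cricket::VideoCapturer* Conductor::OpenVideoCaptureDevice() {
  rtc::scoped_ptr<cricket::DeviceManagerInterface> dev_manager(
      cricket::DeviceManagerFactory::Create());
  if (!dev_manager->Init())
    return NULL;
  std::vector<cricket::Device> devs;
  if (!dev_manager->GetVideoCaptureDevices(&devs))
    return NULL;
  cricket::VideoCapturer* capturer = NULL;
  for (std::vector<cricket::Device>::iterator dev_it = devs.begin();
       capturer == NULL && dev_it != devs.end(); ++dev_it) {
    capturer = dev_manager->CreateVideoCapturer(*dev_it);
  }
  return capturer;
}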

DeviceManager::DeviceManager()
{
  SetVideoDeviceCapturerFactory(new WebRtcVideoDeviceCapturerFactory()); // assigns video_device_capturer_factory_
}
VideoCapturer* DeviceManager::CreateVideoCapturer(const Device& device)
{
  // video_device_capturer_factory_ is the member set in the constructor above:
  //   rtc::scoped_ptr<VideoDeviceCapturerFactory> video_device_capturer_factory_;
  capturer = video_device_capturer_factory_->Create(device);
}
VideoCapturer* WebRtcVideoDeviceCapturerFactory::Create(const Device& device)
{
  rtc::scoped_ptr<WebRtcVideoCapturer> capturer(new WebRtcVideoCapturer());
  if (!capturer->Init(device))
    return NULL;
  return capturer.release();
}
src\talk\media\webrtc\webrtcvideocapturer.cc
WebRtcVideoCapturer::WebRtcVideoCapturer()
    : factory_(new WebRtcVcmFactory),
      module_(nullptr),
      captured_frames_(0),
      start_thread_(nullptr),
      async_invoker_(nullptr)
{
  set_frame_factory(new WebRtcVideoFrameFactory());
}
bool WebRtcVideoCapturer::Init(const Device& device)
{
  webrtc::VideoCaptureModule::DeviceInfo* info = factory_->CreateDeviceInfo(0);
  int num_cams = info->NumberOfDevices();
  std::vector<VideoFormat> supported;
  int32_t num_caps = info->NumberOfCapabilities(vcm_id);
  module_ = factory_->Create(0, vcm_id);
  SetId(device.id);
  SetSupportedFormats(supported);
}
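The `supported` vector is filled by iterating the device's reported capabilities. A sketch of that loop follows; the CapabilityToFormat conversion helper and the exact GetCapability signature are assumptions about this revision:

// Sketch: each webrtc::VideoCaptureCapability reported by DeviceInfo is mapped
// to a cricket::VideoFormat and collected into `supported`.
for (int32_t i = 0; i < num_caps; ++i) {
  webrtc::VideoCaptureCapability cap;
  if (info->GetCapability(vcm_id, i, cap) != -1) {
    VideoFormat format;
    if (CapabilityToFormat(cap, &format))  // assumed helper name
      supported.push_back(format);
  }
}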
src\talk\media\webrtc\webrtcvideocapturer.cc
class WebRtcVcmFactory : public WebRtcVcmFactoryInterface
{
  virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo(int id)
  {
    return webrtc::VideoCaptureFactory::CreateDeviceInfo(id);
  }
};
src\webrtc\modules\video_capture\video_capture_factory.cc
VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
    const int32_t id)
{
  return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
}
src\webrtc\modules\video_capture\windows\video_capture_factory_windows.cc
// static

VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
    const int32_t id)
{
  return DeviceInfoDS::Create(id);
}

VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id, const char* device_id) {
  // TODO(tommi): Use Media Foundation implementation for Vista and up.
  RefCountImpl<VideoCaptureDS>* capture = new RefCountImpl<VideoCaptureDS>(id);
  if (capture->Init(id, device_id) != 0) {
    delete capture;
    capture = NULL;
  }
  return capture;
}
src\webrtc\modules\video_capture\windows\device_info_ds.h
class DeviceInfoDS : public DeviceInfoImpl {
};

3.4.2 new rtc::RefCountedObject<VideoSource>(channel_manager, capturer)
3.4.3 source->Initialize(constraints);
3.4.3.1 std::vector<cricket::VideoFormat> formats = channel_manager_->GetSupportedFormats(video_capturer_.get());
3.4.3.2 channel_manager_->StartVideoCapture(video_capturer_.get(), format_)
3.4.3.2.1 RegisterVideoCapturer(video_capturer)
3.4.3.2.2 StartWithBestCaptureFormat{video_capturer->StartCapturing}
bool CaptureManager::RegisterVideoCapturer(VideoCapturer* video_capturer)
{
  VideoCapturerState* capture_state = VideoCapturerState::Create(video_capturer);
}
// static
VideoCapturerState* VideoCapturerState::Create(VideoCapturer* video_capturer)
{
  CaptureRenderAdapter* adapter = CaptureRenderAdapter::Create(video_capturer);
  return new VideoCapturerState(adapter);
}
CaptureRenderAdapter* CaptureRenderAdapter::Create(
    VideoCapturer* video_capturer)
{
  CaptureRenderAdapter* return_value = new CaptureRenderAdapter(video_capturer);
  return_value->Init();  // Can't fail.
  return return_value;
}
void CaptureRenderAdapter::Init()
{
  video_capturer_->SignalVideoFrame.connect(this, &CaptureRenderAdapter::OnVideoFrame);
}
void CaptureRenderAdapter::OnVideoFrame(VideoCapturer* capturer,
                                        const VideoFrame* video_frame)
{
  for (VideoRenderers::iterator iter = video_renderers_.begin();
       iter != video_renderers_.end(); ++iter)
  {
    VideoRenderer* video_renderer = iter->renderer;
    video_renderer->RenderFrame(video_frame);
  }
}
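The SignalVideoFrame.connect call above is the standard sigslot idiom (webrtc/base/sigslot.h): the capturer owns a signal, the adapter derives from sigslot::has_slots<> and registers a member function, and emitting the signal invokes every connected slot. A minimal self-contained illustration, with made-up FakeCapturer/FakeAdapter types standing in for the real classes:

#include "webrtc/base/sigslot.h"

struct FakeCapturer {
  // signal2: two arguments, mirroring SignalVideoFrame(capturer, frame).
  sigslot::signal2<FakeCapturer*, const void*> SignalVideoFrame;
};

struct FakeAdapter : public sigslot::has_slots<> {
  void Attach(FakeCapturer* c) {
    c->SignalVideoFrame.connect(this, &FakeAdapter::OnVideoFrame);
  }
  void OnVideoFrame(FakeCapturer* /*capturer*/, const void* /*frame*/) {
    // In CaptureRenderAdapter this is where the frame is fanned out to the renderers.
  }
};

// Usage: capturer.SignalVideoFrame(&capturer, frame_ptr) calls OnVideoFrame on every
// attached adapter; connections are dropped automatically when either side is destroyed.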

bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp)
{
new WebRtcVideoSendStream
}
bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
bool default_stream)
{
new WebRtcVideoReceiveStream
}
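Both methods boil down to inserting an engine-side stream object keyed by the stream's SSRC. A hedged sketch for the send side (member name send_streams_ and the constructor arguments are abridged guesses about this revision):

bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) {
  uint32_t ssrc = sp.first_ssrc();
  // Sketch: one WebRtcVideoSendStream per sending SSRC, owned by the channel.
  send_streams_[ssrc] = new WebRtcVideoSendStream(/* call_, sp, send codec/options,
      external_encoder_factory_, ... */);
  return true;
}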
