
How to Capture Camera Video in .NET: Approaches and Comparison

  As the Windows operating system has evolved, the APIs for capturing video have evolved with it: Microsoft has provided three generations of interfaces, namely VFW, DirectShow and Media Foundation. VFW has long since been superseded by DirectShow, and the newest one, Media Foundation, is supported on Windows Vista and Windows 7. Unfortunately, these interfaces are COM-based and highly flexible, which makes them inconvenient to use directly from .NET.


  .NET Wrappers

  Plenty of generous people have contributed open-source projects here: DirectShow.net wraps DirectShow, and MediaFoundation.net wraps Media Foundation; both can be found on http://sourceforge.net. These wrappers map almost one-to-one onto the original COM interfaces, so they can be used for video capture, but they are still not particularly convenient to work with.
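  To get a feel for why using DirectShow.net directly is still cumbersome, here is a minimal sketch (an illustration only, assuming the DirectShowLib namespace that the DirectShow.net project provides) that merely enumerates the video input devices; a real capture graph would still require wiring up FilterGraph, ICaptureGraphBuilder2 and the other COM interfaces by hand.

  // Sketch only - assumes DirectShowLib from the DirectShow.net project on SourceForge.
  using DirectShowLib;

  // Enumerate every DirectShow video input device (webcams, capture cards, ...).
  DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
  foreach (DsDevice device in devices)
  {
      Console.WriteLine(device.Name);  // friendly device name
  }
  // Capturing frames would additionally require building a filter graph, adding the
  // device's source filter by its moniker, calling ICaptureGraphBuilder2.RenderStream,
  // and driving the graph through IMediaControl.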

  After a good deal of Google searching, I think the following libraries wrap video capture well: DirectX.Capture, OpenCv, EmguCv and AForge.

  DirectX.Capture

  DirectX.Capture is a project published on CodeProject. It makes it very easy to capture video and audio, preview them in a window, and save the result to a file. A DirectX.Capture example looks like this:


  DirectX.Capture
  Capture capture = new Capture( Filters.VideoInputDevices[0],
                                 Filters.AudioInputDevices[1] );
  capture.Filename = @"C:\MyVideo.avi";  // verbatim string: "C:\M..." would be an invalid escape sequence
  capture.Start();
  //...
  capture.Stop();

  However, it provides no way to grab an individual frame. If all you need is to preview and save video, it works very well.
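  If previewing in a window is all you need, the same Capture object can render directly into a control. A small sketch, based on the CodeProject sample (the PreviewWindow property may differ slightly between versions of the library, and panelPreview is a hypothetical control name):

  // Sketch - preview in a Windows Forms control while optionally recording to a file.
  Capture capture = new Capture(Filters.VideoInputDevices[0], Filters.AudioInputDevices[0]);
  capture.PreviewWindow = panelPreview;   // panelPreview: any Control on the form (hypothetical name)
  capture.Filename = @"C:\MyVideo.avi";   // optional: record to file at the same time
  capture.Start();
  //...
  capture.Stop();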

  OpenCv

  OpenCv wraps the video-capture parts of both VFW and DirectShow nicely. It makes it easy to grab the contents of a specific frame, and it can also save the result to a video file (a sketch of file output follows the example below). An OpenCv example looks like this:

 


 OpenCv
  IntPtr ptrCapture = CvInvoke.cvCreateCameraCapture(param.deviceInfo.Index);
  while (!stop)
  {
      IntPtr ptrImage = CvInvoke.cvQueryFrame(ptrCapture);  // grab the next frame
      lock (lockObject)
      {
          stop = stopCapture;
      }
  }
  CvInvoke.cvReleaseCapture(ref ptrCapture);
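  Writing the captured frames to a video file goes through the same OpenCv C API. The outline below uses EmguCv's CvInvoke layer; the exact cvCreateVideoWriter/cvWriteFrame signatures depend on the EmguCv version you use, so treat it as a sketch rather than finished code.

  // Sketch - record roughly 10 seconds of video to an AVI file via the OpenCv C API.
  IntPtr ptrCapture = CvInvoke.cvCreateCameraCapture(0);
  int fourcc = 'X' | ('V' << 8) | ('I' << 16) | ('D' << 24);         // equivalent to CV_FOURCC('X','V','I','D')
  IntPtr ptrWriter = CvInvoke.cvCreateVideoWriter(@"C:\capture.avi", fourcc, 25, new Size(640, 480), true);
  for (int i = 0; i < 250; i++)                                      // 250 frames at 25 fps
  {
      IntPtr ptrImage = CvInvoke.cvQueryFrame(ptrCapture);           // buffer owned by the capture, do not release it
      CvInvoke.cvWriteFrame(ptrWriter, ptrImage);
  }
  CvInvoke.cvReleaseVideoWriter(ref ptrWriter);
  CvInvoke.cvReleaseCapture(ref ptrCapture);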

  OpenCv, however, does not wrap audio capture at all, so if you need to record audio at the same time, it cannot do the job.

  It is worth noting that OpenCv has wrapped DirectShow since version 1.1, which contradicts the widely repeated claim online that OpenCv captures video through VFW and is therefore inefficient. See the appendix of this article for the evidence that OpenCv uses DirectShow.

  EmguCv

  EmguCv is a .NET wrapper around OpenCv. It inherits OpenCv's speed while being easier to use. Example code with EmguCv follows, with a short frame-processing sketch after it:


  EmguCv
  Capture capture = new Capture(param.deviceInfo.Index);
  while (!stop)
  {
      pbCapture.Image = capture.QueryFrame().Bitmap;  // grab a frame and show it in the PictureBox
      lock (lockObject)
      {
          stop = stopCapture;
      }
  }
  capture.Dispose();
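  Because EmguCv hands each frame back as a strongly typed Image<Bgr, byte>, the frame can be processed before it is displayed. A small sketch that converts the frame to grayscale first:

  // Sketch - grab one frame, convert it to grayscale and show it in the PictureBox.
  Image<Bgr, byte> frame = capture.QueryFrame();
  Image<Gray, byte> gray = frame.Convert<Gray, byte>();   // colour-space conversion
  pbCapture.Image = gray.Bitmap;
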
  AForge

  AForge is a pure .NET open-source image-processing library. Its video-capture classes are also built on DirectShow, but they are easier to use and offer more features, and its samples and documentation feel closer to Microsoft's own class libraries. The snippet below shows the basic event-driven capture; where the MonikerString comes from is shown right after it.


 AForge
  captureAForge = new VideoCaptureDevice(cameraDevice.MonikerString);
  captureAForge.NewFrame += new NewFrameEventHandler(captureAForge_NewFrame);
  captureAForge.Start();
  //...
  captureAForge.SignalToStop();
  private void captureAForge_NewFrame(object sender, NewFrameEventArgs eventArgs)
  {
  pbCapture.Image = (Bitmap)eventArgs.Frame.Clone();
  }
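  The MonikerString handed to VideoCaptureDevice comes from enumerating the DirectShow video input devices, exactly as the complete source at the end of this article does:

  // Enumerate the cameras and open the first one (same pattern as in the full source below).
  FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
  VideoCaptureDevice captureAForge = new VideoCaptureDevice(videoDevices[0].MonikerString);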


  Comparison

  Now that they have all been introduced, let's compare them. They are all built on DirectShow, so their performance is nearly identical. In fact, I personally think the camera hardware and its driver support have a bigger impact on performance: my camera has no dedicated Windows 7 driver and can only use the default driver Microsoft supplies, so it performs noticeably worse than under Windows XP.

  A few points are worth highlighting:

  (1) Only DirectX.Capture can capture audio;

  (2) Only DirectX.Capture cannot grab an individual frame;

  (3) EmguCv's free edition is GPL-licensed (a commercial license must be purchased for commercial use), while the other libraries have very permissive licenses;

  (4) AForge has better samples and documentation, and more features.

  Appendix: OpenCv also uses DirectShow to capture video


By analysing the OpenCv 2.0 source code, I concluded that OpenCv uses DirectShow to capture video. The evidence is as follows:


  DirectShow In OpenCv
  (1)
  //_highgui.h line:100
  #if (_MSC_VER >= 1400 || defined __GNUC__) && !defined WIN64 && !defined _WIN64
  #define HAVE_VIDEOINPUT 1
  #endif
  (2)
  //cvcap_dshow.cpp line:44
  #ifdef HAVE_VIDEOINPUT
  #include "videoinput.h"
  /********************* Capturing video from camera via VFW *********************/
  class CvCaptureCAM_DShow : public CvCapture
  (3)
  //cvapp.cpp line:102
  CV_IMPL CvCapture * cvCreateCameraCapture (int index)
  {
  //.....
  //line:140
  switch (domains[i])
  {
  #ifdef HAVE_VIDEOINPUT
  case CV_CAP_DSHOW:
  capture = cvCreateCameraCapture_DShow (index);
  if (capture)
  return capture;
  break;
  #endif

  Complete Source Code for This Article


 using System;
  using System.Collections.Generic;
  using System.ComponentModel;
  using System.Data;
  using System.Drawing;
  using System.Linq;
  using System.Text;
  using System.Windows.Forms;
  using System.Diagnostics;
  using System.Runtime.InteropServices;
  using AForge.Video;
  using AForge.Video.DirectShow;
  using Emgu.CV;
  using Emgu.CV.CvEnum;
  using Emgu.CV.Structure;
  using Emgu.CV.UI;
  using System.Threading;
  namespace ImageProcessLearn
  {
  public partial class FormCameraCapture : Form
  {
  private int framesCaptured; // number of frames captured so far
  private int frameCount;   // total number of frames to capture
  private Stopwatch sw;    // stopwatch timer
  private VideoCaptureDevice captureAForge = null;  // AForge video capture object
  private bool stopCapture;              // whether the capture should stop
  private object lockObject = new object();
  public FormCameraCapture()
  {
  InitializeComponent();
  sw = new Stopwatch();
  }
  // On form load, get the list of video capture devices
  private void FormCameraCapture_Load(object sender, EventArgs e)
  {
  FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
  if (videoDevices != null && videoDevices.Count > 0)
  {
  int idx = 0;
  foreach (FilterInfo device in videoDevices)
  {
  cmbCaptureDevice.Items.Add(new DeviceInfo(device.Name, device.MonikerString, idx, FilterCategory.VideoInputDevice));
  idx++;
  }
  cmbCaptureDevice.SelectedIndex = 0;
  }
  }
  // When the video device changes, repopulate the capability list for that device
  private void cmbCaptureDevice_SelectedIndexChanged(object sender, EventArgs e)
  {
  if (cmbCaptureDevice.SelectedItem != null)
  {
  // Remember the previously selected device capability
  Size oldFrameSize = new Size(0, 0);
  int oldMaxFrameRate = 0;
  if (cmbDeviceCapability.SelectedItem != null)
  {
  oldFrameSize = ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize;
  oldMaxFrameRate = ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).MaxFrameRate;
  }
  // Clear the capability list
  cmbDeviceCapability.Items.Clear();
  // Add the new device's capabilities
  int oldCapIndex = -1;  // new index of the previously selected capability
  VideoCaptureDevice video = new VideoCaptureDevice(((DeviceInfo)cmbCaptureDevice.SelectedItem).MonikerString);
  for (int i = 0; i < video.VideoCapabilities.Length; i++)
  {
  VideoCapabilities cap = video.VideoCapabilities[i];
  DeviceCapabilityInfo capInfo = new DeviceCapabilityInfo(cap.FrameSize, cap.MaxFrameRate);
  cmbDeviceCapability.Items.Add(capInfo);
  if (oldFrameSize == capInfo.FrameSize && oldMaxFrameRate == capInfo.MaxFrameRate)
  oldCapIndex = i;
  }
  // Reselect the previous capability, or pick a new one
  if (oldCapIndex == -1)
  oldCapIndex = 0;
  cmbDeviceCapability.SelectedIndex = oldCapIndex;
  }
  }
  // When the selected device capability changes
  private void cmbDeviceCapability_SelectedIndexChanged(object sender, EventArgs e)
  {
  if (int.Parse(txtRate.Text) >= ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).MaxFrameRate)
  txtRate.Text = ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).MaxFrameRate.ToString();
  }
  // Performance test: measure how long it takes to grab the specified number of frames and convert them to images, then compute the FPS
  private void btnPerformTest_Click(object sender, EventArgs e)
  {
  int frameCount = int.Parse(txtFrameCount.Text);
  if (frameCount <= 0)
  frameCount = 300;
  DeviceInfo device = (DeviceInfo)cmbCaptureDevice.SelectedItem;
  btnPerformTest.Enabled = false;
  btnStart.Enabled = false;
  txtResult.Text += PerformTestWithAForge(device.MonikerString, frameCount);
  txtResult.Text += PerformTestWithEmguCv(device.Index, frameCount);
  txtResult.Text += PerformTestWithOpenCv(device.Index, frameCount);
  btnPerformTest.Enabled = true;
  btnStart.Enabled = true;
  }



  // AForge performance test
  private string PerformTestWithAForge(string deviceMonikerString, int frameCount)
  {
  VideoCaptureDevice video = new VideoCaptureDevice(deviceMonikerString);
  video.NewFrame += new NewFrameEventHandler(PerformTest_NewFrame);
  video.DesiredFrameSize = ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize;
  video.DesiredFrameRate = int.Parse(txtRate.Text);
  framesCaptured = 0;
  this.frameCount = frameCount;
  video.Start();
  sw.Reset();
  sw.Start();
  video.WaitForStop();
  double time = sw.Elapsed.TotalMilliseconds;
  return string.Format("AForge performance test - frames: {0}, elapsed: {1:F05} ms, FPS: {2:F02}, settings ({3})\r\n", frameCount, time, 1000d * frameCount / time, GetSettings());
  }
  void PerformTest_NewFrame(object sender, NewFrameEventArgs eventArgs)
  {
  framesCaptured++;
  if (framesCaptured > frameCount)
  {
  sw.Stop();
  VideoCaptureDevice video = sender as VideoCaptureDevice;
  video.SignalToStop();
  }
  }
  // EmguCv performance test
  private string PerformTestWithEmguCv(int deviceIndex, int frameCount)
  {
  Capture video = new Capture(deviceIndex);
  video.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Width);
  video.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Height);
  video.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS, double.Parse(txtRate.Text));
  sw.Reset();
  sw.Start();
  for (int i = 0; i < frameCount; i++)
  video.QueryFrame();
  sw.Stop();
  video.Dispose();
  double time = sw.Elapsed.TotalMilliseconds;
  return string.Format("EmguCv performance test - frames: {0}, elapsed: {1:F05} ms, FPS: {2:F02}, settings ({3})\r\n", frameCount, time, 1000d * frameCount / time, GetSettings());
  }
  // OpenCv performance test
  private string PerformTestWithOpenCv(int deviceIndex, int frameCount)
  {
  IntPtr ptrVideo = CvInvoke.cvCreateCameraCapture(deviceIndex);
  CvInvoke.cvSetCaptureProperty(ptrVideo, CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Width);
  CvInvoke.cvSetCaptureProperty(ptrVideo, CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Height);
  CvInvoke.cvSetCaptureProperty(ptrVideo, CAP_PROP.CV_CAP_PROP_FPS, double.Parse(txtRate.Text));
  sw.Reset();
  sw.Start();
  for (int i = 0; i < frameCount; i++)
  CvInvoke.cvQueryFrame(ptrVideo);
  sw.Stop();
  CvInvoke.cvReleaseCapture(ref ptrVideo);
  double time = sw.Elapsed.TotalMilliseconds;
  return string.Format("OpenCv performance test - frames: {0}, elapsed: {1:F05} ms, FPS: {2:F02}, settings ({3})\r\n", frameCount, time, 1000d * frameCount / time, GetSettings());
  }
  // Build a string describing the current settings
  private string GetSettings()
  {
  return string.Format("camera: {0}, size: {1}x{2}, FPS: {3}", ((DeviceInfo)cmbCaptureDevice.SelectedItem).Name,
  ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Width,
  ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize.Height,
  txtRate.Text);
  }
  // Start capturing video
  private void btnStart_Click(object sender, EventArgs e)
  {
  // Read the settings
  DeviceInfo cameraDevice = (DeviceInfo)cmbCaptureDevice.SelectedItem;
  Size frameSize = ((DeviceCapabilityInfo)cmbDeviceCapability.SelectedItem).FrameSize;
  int rate = int.Parse(txtRate.Text);
  ThreadParam param = new ThreadParam(cameraDevice, new DeviceCapabilityInfo(frameSize, rate));
  if (rbAForge.Checked)
  {
  captureAForge = new VideoCaptureDevice(cameraDevice.MonikerString);
  captureAForge.DesiredFrameSize = frameSize;
  captureAForge.DesiredFrameRate = rate;
  captureAForge.NewFrame += new NewFrameEventHandler(captureAForge_NewFrame);
  txtResult.Text += string.Format("Started capturing video (method: AForge, start time: {0})......\r\n", DateTime.Now.ToLongTimeString());
  framesCaptured = 0;
  sw.Reset();
  sw.Start();
  captureAForge.Start();
  }
  else if (rbEmguCv.Checked)
  {
  stopCapture = false;
  Thread captureThread = new Thread(new ParameterizedThreadStart(CaptureWithEmguCv));
  captureThread.Start(param);
  }
  else if (rbOpenCv.Checked)
  {
  stopCapture = false;
  Thread captureThread = new Thread(new ParameterizedThreadStart(CaptureWithOpenCv));
  captureThread.Start(param);
  }
  btnStart.Enabled = false;
  btnStop.Enabled = true;
  btnPerformTest.Enabled = false;
  }
  private void captureAForge_NewFrame(object sender, NewFrameEventArgs eventArgs)
  {
  pbCapture.Image = (Bitmap)eventArgs.Frame.Clone();
  lock (lockObject)
  {
  framesCaptured++;
  }
  }



  // EmguCv video capture
  private void CaptureWithEmguCv(object objParam)
  {
  bool stop = false;
  int framesCaptured = 0;
  Stopwatch sw = new Stopwatch();
  txtResult.Invoke(new AddResultDelegate(AddResultMethod), string.Format("Started capturing video (method: EmguCv, start time: {0})......\r\n", DateTime.Now.ToLongTimeString()));
  ThreadParam param = (ThreadParam)objParam;
  Capture capture = new Capture(param.deviceInfo.Index);
  capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, param.deviceCapability.FrameSize.Width);
  capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, param.deviceCapability.FrameSize.Height);
  capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS, param.deviceCapability.MaxFrameRate);
  sw.Start();
  while (!stop)
  {
  pbCapture.Image = capture.QueryFrame().Bitmap;
  framesCaptured++;
  lock (lockObject)
  {
  stop = stopCapture;
  }
  }
  sw.Stop();
  txtResult.Invoke(new AddResultDelegate(AddResultMethod), string.Format("Capture finished (method: EmguCv, end time: {0}, elapsed: {1:F05} ms, frames: {2}, FPS: {3:F02})\r\n",
  DateTime.Now.ToLongTimeString(), sw.Elapsed.TotalMilliseconds, framesCaptured, framesCaptured / sw.Elapsed.TotalSeconds));
  capture.Dispose();
  }
  // OpenCv video capture
  private void CaptureWithOpenCv(object objParam)
  {
  bool stop = false;
  int framesCaptured = 0;
  Stopwatch sw = new Stopwatch();
  txtResult.Invoke(new AddResultDelegate(AddResultMethod), string.Format("Started capturing video (method: OpenCv, start time: {0})......\r\n", DateTime.Now.ToLongTimeString()));
  ThreadParam param = (ThreadParam)objParam;
  IntPtr ptrCapture = CvInvoke.cvCreateCameraCapture(param.deviceInfo.Index);
  CvInvoke.cvSetCaptureProperty(ptrCapture, CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, param.deviceCapability.FrameSize.Width);
  CvInvoke.cvSetCaptureProperty(ptrCapture, CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, param.deviceCapability.FrameSize.Height);
  CvInvoke.cvSetCaptureProperty(ptrCapture, CAP_PROP.CV_CAP_PROP_FPS, param.deviceCapability.MaxFrameRate);
  sw.Start();
  while (!stop)
  {
  IntPtr ptrImage = CvInvoke.cvQueryFrame(ptrCapture);
  MIplImage iplImage = (MIplImage)Marshal.PtrToStructure(ptrImage, typeof(MIplImage));
  Image<Bgr, byte> image = new Image<Bgr, byte>(iplImage.width, iplImage.height, iplImage.widthStep, iplImage.imageData);  // wrap the IplImage buffer in an EmguCv image
  pbCapture.Image = image.Bitmap;
  //pbCapture.Image = ImageConverter.IplImagePointerToBitmap(ptrImage);
  framesCaptured++;
  lock (lockObject)
  {
  stop = stopCapture;
  }
  }


sw.Stop();
  txtResult.Invoke(new AddResultDelegate(AddResultMethod), string.Format("Capture finished (method: OpenCv, end time: {0}, elapsed: {1:F05} ms, frames: {2}, FPS: {3:F02})\r\n",
  DateTime.Now.ToLongTimeString(), sw.Elapsed.TotalMilliseconds, framesCaptured, framesCaptured / sw.Elapsed.TotalSeconds));
  CvInvoke.cvReleaseCapture(ref ptrCapture);
  }
  // Stop capturing video
  private void btnStop_Click(object sender, EventArgs e)
  {
  if (captureAForge != null)
  {
  sw.Stop();
  if (captureAForge.IsRunning)
  captureAForge.SignalToStop();
  captureAForge = null;
  txtResult.Text += string.Format("Capture finished (method: AForge, end time: {0}, elapsed: {1:F05} ms, frames: {2}, FPS: {3:F02})\r\n",
  DateTime.Now.ToLongTimeString(), sw.Elapsed.TotalMilliseconds, framesCaptured, framesCaptured / sw.Elapsed.TotalSeconds);
  }
  lock (lockObject)
  {
  stopCapture = true;
  }
  btnStart.Enabled = true;
  btnStop.Enabled = false;
  btnPerformTest.Enabled = true;
  }
  // Delegate and method used to update the result text from worker threads
  public delegate void AddResultDelegate(string result);
  public void AddResultMethod(string result)
  {
  txtResult.Text += result;
  }
  }
  // Device information
  public struct DeviceInfo
  {
  public string Name;
  public string MonikerString;
  public int Index;
  Guid Category;
  public DeviceInfo(string name, string monikerString, int index) :
  this(name, monikerString, index, Guid.Empty)
  {
  }
  public DeviceInfo(string name, string monikerString, int index, Guid category)
  {
  Name = name;
  MonikerString = monikerString;
  Index = index;
  Category = category;
  }
  public override string ToString()
  {
  return Name;
  }
  }
  // Device capability
  public struct DeviceCapabilityInfo
  {
  public Size FrameSize;
  public int MaxFrameRate;
  public DeviceCapabilityInfo(Size frameSize, int maxFrameRate)
  {
  FrameSize = frameSize;
  MaxFrameRate = maxFrameRate;
  }
  public override string ToString()
  {
  return string.Format("{0}x{1} {2}fps", FrameSize.Width, FrameSize.Height, MaxFrameRate);
  }
  }
  // Parameters passed to the capture worker threads
  public struct ThreadParam
  {
  public DeviceInfo deviceInfo;
  public DeviceCapabilityInfo deviceCapability;
  public ThreadParam(DeviceInfo deviceInfo, DeviceCapabilityInfo deviceCapability)
  {
  this.deviceInfo = deviceInfo;
  this.deviceCapability = deviceCapability;
  }
  }
  }