/// <summary>
/// Set the video info on a pin
/// </summary>
/// <param name="pin">Target pin</param>
/// <param name="vih">Video info</param>
public static void SetVideoInfo(IPin pin, VIDEOINFOHEADER vih)
{
    var mt = GetFormat(pin);
    Marshal.StructureToPtr(vih, mt.pbFormat, true);
    SetFormat(pin, mt);
}
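// Usage sketch (an assumption, not part of the listings): SetVideoInfo is typically paired with
// GetFormat — read the current media type, patch its VIDEOINFOHEADER, and write it back before
// the pin is connected. GetFormat, SetVideoInfo and FindPin are assumed to live in the DSLab.Axi
// helper class, as they do elsewhere in these listings; ResizeOutputPin is a hypothetical caller.
private static void ResizeOutputPin(IBaseFilter capture, int width, int height)
{
    // First output pin of the capture filter.
    IPin pin = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);

    // Current media type and its VIDEOINFOHEADER.
    AM_MEDIA_TYPE mt = DSLab.Axi.GetFormat(pin);
    var vih = (VIDEOINFOHEADER)Marshal.PtrToStructure(mt.pbFormat, typeof(VIDEOINFOHEADER));

    // Patch the frame size and push the format back to the pin.
    vih.bmiHeader.biWidth = width;
    vih.bmiHeader.biHeight = height;
    DSLab.Axi.SetVideoInfo(pin, vih);
}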
/// <summary>
/// Conversion to Bitmap
/// </summary>
/// <param name="vih"></param>
/// <returns>
/// </returns>
public System.Drawing.Bitmap ToImage(VIDEOINFOHEADER vih)
{
    if (this.Address == IntPtr.Zero)
        throw new System.InvalidOperationException("Address is Zero.");

    IntPtr address = this.Address;
    int width = vih.bmiHeader.biWidth;
    int height = System.Math.Abs(vih.bmiHeader.biHeight);
    int bpp = vih.bmiHeader.biBitCount;

    // Stride: one row of pixels, rounded up to a multiple of 4 bytes.
    int stride = ((width * (bpp / 8) + 3) / 4) * 4;

    // A positive biHeight means a bottom-up DIB: start at the last row and use a negative stride.
    if (0 < vih.bmiHeader.biHeight)
    {
        address = new IntPtr(address.ToInt64() + ((height - 1) * stride));
        stride = stride * -1;
    }

    PixelFormat format;
    switch (bpp)
    {
        case 24: format = PixelFormat.Format24bppRgb; break;
        case 32: format = PixelFormat.Format32bppRgb; break;
        default:
            throw new System.NotSupportedException("vih.bmiHeader.biBitCount must be one of the following. (24, 32)");
    }

    var dst = new System.Drawing.Bitmap(width, height, format);

    #region Copy the pixel data:
    BitmapData bmpData = null;
    try
    {
        bmpData = new BitmapData();
        bmpData.Width = width;
        bmpData.Height = height;
        bmpData.Stride = stride;
        bmpData.PixelFormat = format;
        bmpData.Scan0 = address;

        // UserInputBuffer: the pixel data is copied from the caller-supplied buffer into the bitmap.
        dst.LockBits(
            new Rectangle(0, 0, dst.Width, dst.Height),
            ImageLockMode.WriteOnly | ImageLockMode.UserInputBuffer,
            dst.PixelFormat,
            bmpData
            );
    }
    finally
    {
        dst.UnlockBits(bmpData);
    }
    #endregion

    return dst;
}
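// Usage sketch (an assumption about the surrounding buffer class, which is not named in this
// listing): once a frame has been captured into Address, convert it and save it to disk.
// 'vih' is the VIDEOINFOHEADER captured at setup time, e.g. via Axi.GetVideoInfo on the
// sample grabber; SaveToFile is a hypothetical helper.
public void SaveToFile(VIDEOINFOHEADER vih, string filename)
{
    using (System.Drawing.Bitmap bitmap = this.ToImage(vih))
    {
        bitmap.Save(filename, System.Drawing.Imaging.ImageFormat.Png);
    }
}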
/// <summary>
/// Debug output
/// </summary>
/// <param name="graph"></param>
private void DebugPrint(IGraphBuilder graph)
{
    IEnumFilters filters = null;
    IBaseFilter filter = null;
    int fetched = 0;
    int hr = graph.EnumFilters(ref filters);

    while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
    {
        if (fetched == 0) break;

        FILTER_INFO info = new FILTER_INFO();
        hr = filter.QueryFilterInfo(info);
        Console.WriteLine("{0}", info.achName);

        // Release the filter.
        Marshal.ReleaseComObject(filter);
        filter = null;
    }

    // Release the enumerator.
    Marshal.ReleaseComObject(filters);

    // Connected media type of the sample grabber filter.
    // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
    // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
    AM_MEDIA_TYPE media_type = new AM_MEDIA_TYPE();
    SampleGrabber.GetConnectedMediaType(media_type);
    Debug.WriteLine("{0}:{1}", "majortype", media_type.majortype);
    Debug.WriteLine("{0}:{1}", "subtype", media_type.subtype);
    Debug.WriteLine("{0}:{1}", "formattype", media_type.formattype);
    Debug.WriteLine("{0}:{1}", "bFixedSizeSamples", media_type.bFixedSizeSamples);
    Debug.WriteLine("{0}:{1}", "bTemporalCompression", media_type.bTemporalCompression);
    Debug.WriteLine("{0}:{1}", "cbFormat", media_type.cbFormat);
    Debug.WriteLine("{0}:{1}", "lSampleSize", media_type.lSampleSize);
    Debug.WriteLine("{0}:{1}", "pbFormat", media_type.pbFormat);
    Debug.WriteLine("{0}:{1}", "pUnk", media_type.pUnk);

    // If it is a video format, also dump the frame size.
    if (GUID.Compare(media_type.formattype.ToString(), GUID.FORMAT_VideoInfo))
    {
        VIDEOINFOHEADER vinfo = (VIDEOINFOHEADER)Marshal.PtrToStructure(media_type.pbFormat, typeof(VIDEOINFOHEADER));
        Debug.WriteLine("{0}:{1}", "Width", vinfo.bmiHeader.biWidth);
        Debug.WriteLine("{0}:{1}", "Height", vinfo.bmiHeader.biHeight);
        Debug.WriteLine("{0}:{1}", "BitCount", vinfo.bmiHeader.biBitCount);
        Debug.WriteLine("{0}:{1}", "Size", vinfo.bmiHeader.biSize);
        Debug.WriteLine("{0}:{1}", "ImageSize", vinfo.bmiHeader.biSizeImage);
        Debug.WriteLine("{0}:{1}", "ClrImportant", vinfo.bmiHeader.biClrImportant);
        Debug.WriteLine("{0}:{1}", "ClrUsed", vinfo.bmiHeader.biClrUsed);
        Debug.WriteLine("{0}:{1}", "Compression", vinfo.bmiHeader.biCompression);
        Debug.WriteLine("{0}:{1}", "Planes", vinfo.bmiHeader.biPlanes);
        Debug.WriteLine("{0}:{1}", "XPelsPerMeter", vinfo.bmiHeader.biXPelsPerMeter);
        Debug.WriteLine("{0}:{1}", "YPelsPerMeter", vinfo.bmiHeader.biYPelsPerMeter);
    }
}
/// <summary>
/// Create the graph
/// </summary>
public virtual void Setup()
{
    this.Dispose();
    try
    {
        // Filter graph.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region Add filters.
        // File source.
        IBaseFilter capture = null;
        GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
        if (capture == null) { throw new System.IO.IOException(); }

#if false
        // DMO wrapper filter.
        // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
        IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
        if (dmo != null)
        {
            //// Mpeg4 Decoder DMO
            //// F371728A-6052-4D47-827C-D039335DFE0A
            //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
            //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]
            var idmo = (IDMOWrapperFilter)dmo;
            idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
            idmo = null;
            this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
        }
#endif

#if false
        // Avi Splitter
        IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
        if (splitter == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

        // Avi Decompressor
        IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
        if (decompressor == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
#endif

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

        // Renderer.
        IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
        if (renderer == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(renderer, "Renderer");
        #endregion

        #region Get the pins.
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
        #endregion

        #region Connect the pins.
        GraphBuilder.Connect(capture_out, grabber_in);
        GraphBuilder.Connect(grabber_out, renderer_in);
        #endregion

        #region Store: interfaces.
        CaptureFilter = capture;
        CaptureOutPin = capture_out;
        SampleGrabber = (ISampleGrabber)grabber;
        SampleGrabberInPin = grabber_in;
        SampleGrabberOutPin = grabber_out;
        Renderer = renderer;
        RendererInPin = renderer_in;
        #endregion

        #region Store: frame size.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region Extract interfaces:
        {
            DSLab.IGraphBuilder graph = this.GraphBuilder;
            DSLab.IEnumFilters filters = null;
            DSLab.IBaseFilter filter = null;
            int fetched = 0;
            int hr = graph.EnumFilters(ref filters);

            while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
            {
                if (fetched == 0) { break; }

                if (filter is DSLab.IMediaSeeking)
                {
                    // For seek operations.
                    Seeking = (DSLab.IMediaSeeking)filter;
                }
                else
                {
                    // Release the filter.
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
            }

            // Release the enumerator.
            Marshal.ReleaseComObject(filters);
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary>
/// Connect the player
/// </summary>
/// <param name="filename"></param>
private void Player_Connect(string filename)
{
    #region Create the graph builder:
    {
        Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
        if (Graph == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");

        Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
        if (Builder == null)
            throw new System.IO.IOException("Failed to create a CaptureGraphBuilder2.");
        Builder.SetFiltergraph(Graph);
    }
    #endregion

    #region Video input: create the source filter.
    {
        Graph.AddSourceFilter(filename, "VideoSource", ref VideoSource);
        if (VideoSource == null)
            throw new System.IO.IOException("Failed to create a VideoSource.");
    }
    #endregion

    #region Video capture: create the sample grabber.
    {
        VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (VideoGrabber == null)
            throw new System.IO.IOException("Failed to create a VideoGrabber.");
        Graph.AddFilter(VideoGrabber, "VideoGrabber");

        // Input format of the sample grabber filter.
        // SetMediaType specifies the required media type.
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * It is not necessary to set every member of AM_MEDIA_TYPE.
        // * By default, the sample grabber has no preferred media type.
        // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)VideoGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Video);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
            mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Disable sample copying.
            grabber.SetOneShot(false);                  // Disable one-shot mode.
            //grabber.SetCallback(VideoGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(VideoGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region Audio capture: create the sample grabber.
    {
        AudioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (AudioGrabber == null)
            throw new System.IO.IOException("Failed to create an AudioGrabber.");
        Graph.AddFilter(AudioGrabber, "AudioGrabber");

        // Input format of the sample grabber filter.
        // (See the notes on SetMediaType in the video capture section above.)
        {
            var grabber = (ISampleGrabber)AudioGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
            mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Disable sample copying.
            grabber.SetOneShot(false);                  // Disable one-shot mode.
            //grabber.SetCallback(AudioGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(AudioGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region Video output: create the renderer.
    {
        VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (VideoRenderer == null)
            throw new System.IO.IOException("Failed to create a VideoRenderer.");
        Graph.AddFilter(VideoRenderer, "VideoRenderer");
    }
    #endregion

    #region Audio output: create the renderer.
    {
        AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (AudioRenderer == null)
            throw new System.IO.IOException("Failed to create an AudioRenderer.");
        Graph.AddFilter(AudioRenderer, "AudioRenderer");
    }
    #endregion

    #region Connect the filters:
    if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
    {
        #region Initialization for AVI files:
        unsafe
        {
            HRESULT hr;

            // Add the AVI splitter:
            Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
            if (Splitter == null)
                throw new System.IO.IOException("Failed to create a Splitter.");
            Graph.AddFilter(Splitter, "Splitter");

            // Connect the filters: (AVI splitter)
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
            if (hr < HRESULT.S_OK) throw new CxDSException(hr);

            // Connect the filters: (video stream)
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK) throw new CxDSException(hr);

            // Connect the filters: (audio stream) * some files have no audio track.
            try
            {
                var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
            }
            catch (System.Exception ex)
            {
                Debug.WriteLine(ex.StackTrace);
            }
        }
        #endregion
    }
    else if (
        filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
        filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
    {
        #region Initialization for WMV files:
        unsafe
        {
            HRESULT hr;

            // Connect the filters: (video stream)
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK) throw new CxDSException(hr);

            // Connect the filters: (audio stream)
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
            if (hr < HRESULT.S_OK) throw new CxDSException(hr);
        }
        #endregion
    }
    #endregion

    // For synchronization: register the sample grabber event:
    VideoGrabberCB.Enable = true;
    VideoGrabberCB.Notify += VideoGrabberCB_Notify;

    VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
}
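// Usage sketch (an assumption about the surrounding player class): connect to a media file and
// report the decoded frame size. VideoInfoHeader is the field filled in at the end of
// Player_Connect; starting and stopping the graph (IMediaControl) is omitted here, and Open is
// a hypothetical wrapper method.
public void Open(string filename)
{
    Player_Connect(filename);

    int width = VideoInfoHeader.bmiHeader.biWidth;
    int height = System.Math.Abs(VideoInfoHeader.bmiHeader.biHeight);
    Debug.WriteLine("Connected: {0} ({1}x{2})", filename, width, height);
}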
/// <summary>
/// Create the video capture filter
/// </summary>
/// <param name="param">Image grabber parameters</param>
/// <returns>
/// Returns the created instance.
/// </returns>
internal virtual IBaseFilter CreateVideoCapture(CxDSCameraParam param)
{
    IBaseFilter capture = DSLab.Axi.CreateFilter(GUID.CLSID_VideoInputDeviceCategory, param.FilterInfo.Name, param.FilterInfo.Index);
    if (capture != null)
    {
        // Set the video output format.
        // If Width,Height are 0,0 the default format is used.
        // Otherwise, the VIDEOINFOHEADER is rewritten and SetFormat is called.
        //
        // http://msdn.microsoft.com/ja-jp/library/cc353344.aspx
        //
        if (0 < param.FormatInfo.VideoSize.Width && 0 < param.FormatInfo.VideoSize.Height)
        {
            // Output pin.
            IPin pin = null;
            if (param.PinInfo.Name != "")
                pin = DSLab.Axi.FindPin(capture, param.PinInfo.Name);
            else
                pin = DSLab.Axi.FindPin(capture, 0, param.PinInfo.Direction);

            #region Method 1) Enumerate with IAMStreamConfig.GetNumberOfCapabilities and SetFormat the matching AM_MEDIA_TYPE.
            if (pin is IAMStreamConfig)
            {
                IAMStreamConfig config = pin as IAMStreamConfig;
                int count = 0;
                int size = 0;
                config.GetNumberOfCapabilities(ref count, ref size);
                if (size == Marshal.SizeOf(new VIDEO_STREAM_CONFIG_CAPS()))
                {
                    for (int i = 0; i < count; i++)
                    {
                        AM_MEDIA_TYPE media_type = new AM_MEDIA_TYPE();
                        VIDEOINFOHEADER video_info = new VIDEOINFOHEADER();
                        IntPtr addr = IntPtr.Zero;
                        try
                        {
                            addr = Marshal.AllocCoTaskMem(size);
                            int status = config.GetStreamCaps(i, ref media_type, addr);
                            if (status == (int)HRESULT.S_OK &&
                                DS.GUID.Compare(media_type.majortype.ToString(), DS.GUID.MEDIATYPE_Video) &&
                                // Function.GuidCompare(media_type.subtype.ToString(), DS.GUID.MEDIASUBTYPE_RGB24) &&
                                DS.GUID.Compare(media_type.formattype.ToString(), DS.GUID.FORMAT_VideoInfo) &&
                                media_type.cbFormat >= Marshal.SizeOf(video_info) &&
                                media_type.pbFormat != IntPtr.Zero
                                )
                            {
                                video_info = (VIDEOINFOHEADER)Marshal.PtrToStructure(media_type.pbFormat, typeof(VIDEOINFOHEADER));

                                // --- Check the video input size.
                                if (video_info.bmiHeader.biWidth == param.FormatInfo.VideoSize.Width &&
                                    video_info.bmiHeader.biHeight == param.FormatInfo.VideoSize.Height)
                                {
                                    config.SetFormat(media_type);
                                    return capture;
                                }
                            }
                        }
                        finally
                        {
                            if (addr != IntPtr.Zero)
                                Marshal.FreeCoTaskMem(addr);
                            Axi.DeleteMediaType(ref media_type);
                        }
                    }
                }
            }
            #endregion

            #region Method 2) Rewrite Width,Height in the VIDEOINFOHEADER and call SetFormat.
            //
            // This works with most cameras, but fails with LifeCam (x86).
            //
            {
                AM_MEDIA_TYPE media_type = Axi.GetFormat(pin);
                VIDEOINFOHEADER video_info = (VIDEOINFOHEADER)Marshal.PtrToStructure(media_type.pbFormat, typeof(VIDEOINFOHEADER));

                // --- Video input size.
                video_info.bmiHeader.biWidth = param.FormatInfo.VideoSize.Width;
                video_info.bmiHeader.biHeight = param.FormatInfo.VideoSize.Height;

                // 2013.09.18 (Wed): Tried this because LifeCam (x86) raises an error, but it had no effect.
                //video_info.bmiHeader.biBitCount = (short)BppIn;

                // 2013.09.18 (Wed): Changing lSampleSize makes LifeCam (x86) raise an error.
                // --- Sample size.
                //int horz = System.Math.Abs(param.FilterFormatInfo.VideoSize.Width);
                //int vert = System.Math.Abs(param.FilterFormatInfo.VideoSize.Height);
                //int bpp = BppIn;
                //media_type.lSampleSize = FVIL.Data.CFviImage.CalcHorzByte(horz, bpp) * (uint)vert;

                Marshal.StructureToPtr(video_info, media_type.pbFormat, true);
                Axi.SetFormat(pin, media_type);
            }
            #endregion
        }
    }
    return capture;
}
/// <summary>
/// Connect the camera
/// </summary>
/// <param name="filterInfo"></param>
/// <param name="pinno"></param>
/// <param name="frameSize"></param>
private void Camera_Connect(CxFilterInfo filterInfo, int pinno, Size frameSize)
{
    #region Create the graph builder:
    {
        Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
        if (Graph == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");

        Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
        if (Builder == null)
            throw new System.IO.IOException("Failed to create a CaptureGraphBuilder2.");
        Builder.SetFiltergraph(Graph);
    }
    #endregion

    #region Video input: create the source filter.
    {
        VideoSource = Axi.CreateFilter(GUID.CLSID_VideoInputDeviceCategory, filterInfo.CLSID, filterInfo.Index);
        if (VideoSource == null)
            throw new System.IO.IOException("Failed to create a VideoSource.");
        Graph.AddFilter(VideoSource, "VideoSource");

        // Set the frame size.
        // Note: this must be done before the pins are connected.
        IPin pin = Axi.FindPin(VideoSource, pinno, PIN_DIRECTION.PINDIR_OUTPUT);
        Axi.SetFormatSize(pin, frameSize.Width, frameSize.Height);
    }
    #endregion

    #region Video capture: create the sample grabber.
    {
        VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (VideoGrabber == null)
            throw new System.IO.IOException("Failed to create a VideoGrabber.");
        Graph.AddFilter(VideoGrabber, "VideoGrabber");

        // Input format of the sample grabber filter.
        // SetMediaType specifies the required media type.
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * It is not necessary to set every member of AM_MEDIA_TYPE.
        // * By default, the sample grabber has no preferred media type.
        // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)VideoGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Video);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
            mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Disable sample copying.
            grabber.SetOneShot(false);                  // Disable one-shot mode.
            //grabber.SetCallback(VideoGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(VideoGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region Video output: create the renderer.
    {
        VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (VideoRenderer == null)
            throw new System.IO.IOException("Failed to create a VideoRenderer.");
        Graph.AddFilter(VideoRenderer, "VideoRenderer");
    }
    #endregion

    #region Connect the filters:
    unsafe
    {
        var mediatype = new Guid(GUID.MEDIATYPE_Video);
        var hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype), VideoSource, VideoGrabber, VideoRenderer);
        if (hr < HRESULT.S_OK) throw new CxDSException(hr);
    }
    #endregion

    // For synchronization: register the sample grabber event:
    VideoGrabberCB.Enable = true;
    VideoGrabberCB.Notify += VideoGrabberCB_Notify;

    VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);

    // Extract the camera control interface.
    CameraControl = Axi.GetInterface<IAMCameraControl>(this.Graph);
}
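// Usage sketch (an assumption about the surrounding camera class): connect to a given video
// input device at VGA resolution. 'filterInfo' would normally come from a device enumeration
// helper such as the GetFilterList shown in the other listings; ConnectCamera is a hypothetical
// wrapper method.
public void ConnectCamera(CxFilterInfo filterInfo)
{
    // Output pin 0, requested frame size 640x480.
    Camera_Connect(filterInfo, 0, new Size(640, 480));
}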
/// <summary>
/// Create the graph
/// </summary>
/// <param name="output_file">Output file</param>
public virtual void Setup(string output_file)
{
    this.Dispose();
    try
    {
        CxDSCameraParam param = this.Param;

        // Graph builder.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region Add filters.
        // Video capture filter.
        IBaseFilter capture = CreateVideoCapture(param);
        if (capture == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(capture, "CaptureFilter");
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        this.CaptureFilter = capture;
        this.CaptureOutPin = capture_out;

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null) { throw new System.IO.IOException(); }
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
        this.SampleGrabber = (ISampleGrabber)grabber;
        #endregion

        #region Capture graph builder:
        {
            int hr = 0;
            CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
            hr = CaptureBuilder.SetFiltergraph(GraphBuilder);

            if (string.IsNullOrEmpty(output_file))
            {
                // Renderer.
                IBaseFilter renderer = null;
                renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null) { throw new System.IO.IOException(); }
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                this.Renderer = renderer;

#if true
                // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);
#else
                // Get the pins.
                IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                // Connect the pins.
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);

                // Store the pins.
                //SampleGrabberInPin = grabber_in;
                //SampleGrabberOutPin = grabber_out;
                //RendererInPin = renderer_in;
#endif
            }
            else
            {
                IBaseFilter mux = null;
                IFileSinkFilter sync = null;
                hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                this.Mux = mux;
                this.Sync = sync;
            }
        }
        #endregion

        #region Store: frame size.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region Store: device name.
        try
        {
            if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
            {
                this.DeviceName = param.FilterInfo.Name;
            }
            else
            {
                int filter_index = param.FilterInfo.Index;
                List<DSLab.CxDSFilterInfo> filters = DSLab.Axi.GetFilterList(DSLab.GUID.CLSID_VideoInputDeviceCategory);
                if (0 <= filter_index && filter_index < filters.Count)
                {
                    this.DeviceName = filters[filter_index].Name;
                }
            }
        }
        catch (System.Exception)
        {
            this.DeviceName = "";
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        this.Dispose();
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary>
/// Set the format size
/// </summary>
/// <param name="pin">Pin</param>
/// <param name="width">Width [0, 1~]</param>
/// <param name="height">Height [0, 1~]</param>
/// <remarks>
/// If width,height are 0,0 the default format is used.
/// Otherwise, the VIDEOINFOHEADER is rewritten and SetFormat is called.
/// http://msdn.microsoft.com/ja-jp/library/cc353344.aspx
/// </remarks>
public static void SetFormatSize(IPin pin, int width, int height)
{
    if (width <= 0 || height <= 0) { return; }

    #region Method 1) Enumerate with IAMStreamConfig.GetNumberOfCapabilities and SetFormat the matching AM_MEDIA_TYPE.
    if (pin is IAMStreamConfig)
    {
        var config = (IAMStreamConfig)pin;
        int count = 0;
        int size = 0;
        config.GetNumberOfCapabilities(ref count, ref size);
        if (size == Marshal.SizeOf(new VIDEO_STREAM_CONFIG_CAPS()))
        {
            for (int i = 0; i < count; i++)
            {
                var mt = new AM_MEDIA_TYPE();
                var vih = new VIDEOINFOHEADER();
                IntPtr addr = IntPtr.Zero;
                try
                {
                    addr = Marshal.AllocCoTaskMem(size);
                    int status = config.GetStreamCaps(i, ref mt, addr);
                    if (status == (int)HRESULT.S_OK &&
                        GUID.Compare(mt.majortype.ToString(), GUID.MEDIATYPE_Video) &&
                        GUID.Compare(mt.formattype.ToString(), GUID.FORMAT_VideoInfo) &&
                        mt.cbFormat >= Marshal.SizeOf(vih) &&
                        mt.pbFormat != IntPtr.Zero
                        )
                    {
                        vih = (VIDEOINFOHEADER)Marshal.PtrToStructure(mt.pbFormat, typeof(VIDEOINFOHEADER));

                        // --- Check the video input size.
                        if (vih.bmiHeader.biWidth == width && vih.bmiHeader.biHeight == height)
                        {
                            config.SetFormat(mt);
                            return;
                        }
                    }
                }
                finally
                {
                    if (addr != IntPtr.Zero) { Marshal.FreeCoTaskMem(addr); }
                    Axi.FreeMediaType(ref mt);
                }
            }
        }
    }
    #endregion

    #region Method 2) Rewrite Width,Height in the VIDEOINFOHEADER and call SetFormat.
    //
    // This works with most cameras, but fails with LifeCam (x86).
    //
    {
        AM_MEDIA_TYPE mt = Axi.GetFormat(pin);
        VIDEOINFOHEADER video_info = (VIDEOINFOHEADER)Marshal.PtrToStructure(mt.pbFormat, typeof(VIDEOINFOHEADER));

        // --- Video input size.
        video_info.bmiHeader.biWidth = width;
        video_info.bmiHeader.biHeight = height;

        // 2013.09.18 (Wed): Tried this because LifeCam (x86) raises an error, but it had no effect.
        //video_info.bmiHeader.biBitCount = (short)BppIn;

        // 2013.09.18 (Wed): Changing lSampleSize makes LifeCam (x86) raise an error.
        // --- Sample size.
        //int horz = System.Math.Abs(param.FilterFormatInfo.VideoSize.Width);
        //int vert = System.Math.Abs(param.FilterFormatInfo.VideoSize.Height);
        //int bpp = BppIn;
        //mt.lSampleSize = FVIL.Data.CFviImage.CalcHorzByte(horz, bpp) * (uint)vert;

        Marshal.StructureToPtr(video_info, mt.pbFormat, true);
        Axi.SetFormat(pin, mt);
    }
    #endregion
}
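// Usage sketch: request a 1280x720 output format on the first output pin of a capture filter
// before the graph is connected. FindPin is the helper used throughout these listings, and
// SetFormatSize is assumed to live in the same Axi helper class; RequestHD is a hypothetical caller.
public static void RequestHD(IBaseFilter capture)
{
    IPin pin = Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
    Axi.SetFormatSize(pin, 1280, 720);
}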
/// <summary>
/// Get the list of formats
/// </summary>
/// <param name="pin">Target pin</param>
/// <returns>
/// Returns a collection of the retrieved format information.
/// </returns>
public static List<CxFormatInfo> GetFormatList(IPin pin)
{
    var result = new List<CxFormatInfo>();
    if (pin == null) { return result; }

    var config = pin as IAMStreamConfig;
    if (config == null) { return result; }

    IntPtr dataptr = IntPtr.Zero;
    try
    {
        int count = 0;
        int size = 0;
        config.GetNumberOfCapabilities(ref count, ref size);
        dataptr = Marshal.AllocHGlobal(size);

        for (int i = 0; i < count; i++)
        {
            AM_MEDIA_TYPE mt = null;
            try
            {
                config.GetStreamCaps(i, ref mt, dataptr);

                // Get the basic information.
                var info = new CxFormatInfo();
                info.MediaType = GUID.Normalize(mt.majortype.ToString());
                info.MediaSubType = GUID.Normalize(mt.subtype.ToString());
                info.FormatType = GUID.Normalize(mt.formattype.ToString());

                // If it is a video format, also store the frame size.
                if (GUID.Compare(info.FormatType, GUID.FORMAT_VideoInfo))
                {
                    var vih = (VIDEOINFOHEADER)Marshal.PtrToStructure(mt.pbFormat, typeof(VIDEOINFOHEADER));
                    info.VideoSize = new Size(vih.bmiHeader.biWidth, vih.bmiHeader.biHeight);
                }

                // Add to the collection.
                result.Add(info);
            }
            finally
            {
                if (mt != null)
                {
                    Axi.FreeMediaType(ref mt);
                }
            }
        }
    }
    finally
    {
        if (dataptr != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(dataptr);
        }
    }
    return result;
}
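// Usage sketch: dump every format advertised by a capture filter's first output pin. GetFormatList
// and the CxFormatInfo members used here are the ones defined in the listing above; GetFormatList
// is assumed to live in the Axi helper class, and PrintFormats is a hypothetical caller.
public static void PrintFormats(IBaseFilter capture)
{
    IPin pin = Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
    foreach (CxFormatInfo info in Axi.GetFormatList(pin))
    {
        Debug.WriteLine("{0} {1}x{2}", info.MediaSubType, info.VideoSize.Width, info.VideoSize.Height);
    }
}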