////////////////////////////////////////////////////////////////////////// Private functions
/// <summary>
/// Tries to open a video file; if a video stream is found, the out parameters become valid instances.
/// </summary>
/// <param name="videoFile">Absolute path of the video file</param>
/// <param name="mediaDetClass">Receives the MediaDetClass instance positioned on the video stream, or null on failure</param>
/// <param name="aMMediaType">Receives the media type of the video stream, or a default struct on failure</param>
/// <returns>true if a video stream was found; false if there is none (or the video format is unsupported)</returns>
private static bool OpenVideoStream(string videoFile, out MediaDetClass mediaDetClass, out _AMMediaType aMMediaType)
{
    MediaDetClass mediaDet = new MediaDetClass();
    // Load the video file
    mediaDet.Filename = videoFile;
    // Number of output streams (only streams of type MEDIATYPE_Video and MEDIATYPE_Audio are counted)
    int streamsNumber = mediaDet.OutputStreams;
    // Walk the streams looking for the video stream
    for (int i = 0; i < streamsNumber; i++)
    {
        mediaDet.CurrentStream = i;
        _AMMediaType mediaType = mediaDet.StreamMediaType;
        // When a video stream is found, hand the objects out to the caller
        if (mediaType.majortype == MayorTypes.MEDIATYPE_Video)
        {
            mediaDetClass = mediaDet;
            aMMediaType = mediaType;
            return(true);
        }
    }
    // No video stream found: null out the objects.
    // NOTE(review): the MediaDetClass COM object is not released on this path — consider Marshal.ReleaseComObject.
    mediaDetClass = null;
    aMMediaType = new _AMMediaType();
    return(false);
}
/// <summary>Loads a video from a file into a MediaDet.</summary>
/// <param name="filename">The path to the file to be loaded.</param>
/// <param name="mediaType">The media type of the video loaded.</param>
/// <returns>The MediaDet configured with the loaded video; CurrentStream is left on the video stream.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the file contains no video stream.</exception>
private static MediaDetClass LoadVideo(string filename, out _AMMediaType mediaType)
{
    // Initialize the MediaDet with the video file
    MediaDetClass mediaDet = new MediaDetClass();
    mediaDet.Filename = filename;
    mediaType = new _AMMediaType();

    // Loop through each of the streams in the video searching for the actual video stream
    int numberOfStreams = mediaDet.OutputStreams;
    for (int i = 0; i < numberOfStreams; i++)
    {
        // Return when we find the video stream, leaving the MediaDet set to
        // use that stream.
        mediaDet.CurrentStream = i;
        if (mediaDet.StreamType == MEDIATYPE_Video)
        {
            mediaType = mediaDet.StreamMediaType;
            return(mediaDet);
        }
    }

    // No video stream found. Clean up and error out.
    Marshal.ReleaseComObject(mediaDet);
    throw new ArgumentOutOfRangeException("filename", "No video stream found.");
}
/// <summary>Gets the size of a frame based on a video's media type.</summary>
/// <param name="mediaType">The media type of the video; pbFormat must point at a VIDEOINFOHEADER.</param>
/// <returns>The size of a frame in the video.</returns>
private static Size GetFrameSize(_AMMediaType mediaType)
{
    // Marshal the unmanaged format block into its managed counterpart,
    // then read the frame dimensions from the bitmap info header.
    object rawHeader = Marshal.PtrToStructure(mediaType.pbFormat, typeof(VIDEOINFOHEADER));
    VIDEOINFOHEADER header = (VIDEOINFOHEADER)rawHeader;
    return new Size(header.bmiHeader.biWidth, header.bmiHeader.biHeight);
}
/// <summary>
/// Builds the capture graph objects: the filter graph manager, the capture graph builder,
/// the sample grabber, the source filter (selected capture device), and the device's
/// IAMStreamConfig interface. Fixes the sample grabber's media type to RGB24 video.
/// </summary>
private void InitCaptureInterface()
{
    // release com object (useless here but can't hurt)
    Cleanup(true);
    this.fmc = new FilgraphManagerClass();

    // create the cg object and add the filter graph to it
    Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2);
    this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t);
    t = Type.GetTypeFromCLSID(CLSID_SampleGrabber);
    this.isg = (ISampleGrabber)Activator.CreateInstance(t);

    // source filter (the capture device)
    this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex];
    // sample grabber filter
    this.sgf = (IBaseFilter)this.isg;

    // Look up IAMStreamConfig on the device's capture pin so the output format can be set later.
    object o = null;
    this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o);
    this.iamsc = (IAMStreamConfig)o;

    // set sample grabber media type
    this.SGMediaType = new _AMMediaType();
    this.SGMediaType.majortype = MEDIATYPE_Video;
    this.SGMediaType.subtype = MEDIASUBTYPE_RGB24;
    this.SGMediaType.formattype = FORMAT_VideoInfo;
    this.isg.SetMediaType(ref SGMediaType);
    this.isg.SetOneShot(0);        // 0 = FALSE: keep grabbing rather than stopping at one sample
    this.isg.SetBufferSamples(1);  // 1 = TRUE: buffer samples so they can be read back
}
/// <summary>
/// Applies the output size currently selected in the size combo box to the stream
/// config interface and resizes the window's client area to match.
/// </summary>
/// <returns>true when the format was applied; false when no stream config exists or SetFormat failed.</returns>
private bool SetOutPutSize()
{
    Size selectedSize = (Size)this.OutPutSizeList[this.cbxSize.SelectedIndex];
    _AMMediaType selectedType = (_AMMediaType)this.MediaTypeList[this.cbxSize.SelectedIndex];

    if (this.iamsc == null)
    {
        return false;
    }

    try
    {
        iamsc.SetFormat(ref selectedType);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Fail to set output size: " + ex.Message);
        return false;
    }

    // Enforce a minimum window size so the UI stays usable for tiny formats.
    if (selectedSize.Width < 320)
    {
        selectedSize.Width = 320;
        selectedSize.Height = 240;
    }
    this.ClientSize = new Size(selectedSize.Width, selectedSize.Height + this.btnCapture.Height + vertical_space);
    return true;
}
/// <summary>
/// Constructs the _AMMediaType (adds pbFormat to it), sets it on the pin, then frees it.
/// </summary>
/// <param name="iSC">Stream config interface of the pin to configure.</param>
/// <param name="mt">Media type that must not yet carry an allocated format block.</param>
/// <param name="formatBlock">Managed format structure to marshal into pbFormat.</param>
public static void SetMediaType(IAMStreamConfig iSC, _AMMediaType mt, object formatBlock)
{
    // The caller must hand in a media type without an allocated format block.
    System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);

    // Construct allocates pbFormat; the ref overload sets the format and frees it again.
    _AMMediaType populated = MediaType.Construct(mt, formatBlock);
    SetMediaType(iSC, ref populated);
}
/// <summary>
/// Picks the compressor output pin's (single) stream-config media type, forces its
/// format type to FORMAT_VideoInfo, and allocates a fresh pbFormat block sized for
/// the compressor's video info header (cVI). The caller is expected to fill and apply it.
/// </summary>
private void InitializeCompressorMediaType()
{
    ArrayList mts = new ArrayList();
    ArrayList ihs = new ArrayList();
    ArrayList sccs = new ArrayList();
    Pin.GetStreamConfigCaps((IAMStreamConfig)cOutputPin, out mts, out ihs, out sccs);

    // Diagnostic dump of every media type / capability pair.
    for (int i = 0; i < mts.Count; i++)
    {
        Console.WriteLine(MediaType.Dump((_AMMediaType)mts[i]));
        Console.WriteLine(Pin.DebugStreamConfigCaps(sccs[i]));
    }

    // There's only one
    cMT = (_AMMediaType)mts[0];
    cMT.formattype = MediaType.FormatType.FORMAT_VideoInfo;

    // MediaTypes are local to method, so free them all
    // then reallocate just the one we want
    // (cMT is a struct copy, so freeing mts[0] also frees the block cMT pointed at —
    // hence the fresh allocation below.)
    for (int i = 0; i < mts.Count; i++)
    {
        _AMMediaType mt = (_AMMediaType)mts[i];
        MediaType.Free(ref mt);
    }
    cMT.cbFormat = (uint)Marshal.SizeOf(cVI);
    cMT.pbFormat = Marshal.AllocCoTaskMem((int)cMT.cbFormat);
}
/// <summary>
/// Frees the pbFormat pointer of every _AMMediaType in the array.
/// </summary>
/// <param name="mts">Array whose elements are cleared in place (pbFormat freed, fields zeroed).</param>
public static void Free(_AMMediaType[] mts)
{
    // Index explicitly rather than foreach: Free(ref ...) must write the cleared
    // pbFormat/cbFormat fields back into the array elements.
    for (int index = 0; index < mts.Length; index++)
    {
        Free(ref mts[index]);
    }
}
/// <summary>
/// Put compatible formats supported by the selected hardware device into the
/// compression format ComboBox. We assume the compressor's static properties have already been
/// pulled in from the registry.
/// </summary>
/// <exception cref="ApplicationException">Thrown when no device format is compatible with the Opus encoder.</exception>
private void RestoreCompressionFormat()
{
    _AMMediaType[] mts = Pin.GetMediaTypes(ac.CaptureGraph.Source.OutputPin);
    int defaultIndex = 0;
    // Note: GetMediaTypes appears to return the selected MT in element 0, which is a
    // duplicate of a MT found elsewhere in the array. That's why we are ignoring
    // element 0 here.
    for (int j = 1; j < mts.Length; j++)
    {
        _AMMediaType mt = mts[j];
        WAVEFORMATEX wfex = (WAVEFORMATEX)MediaType.FormatType.MarshalData(mt);
        if (OpusAudioCompressor.WorksWithOpus(wfex))
        {
            // Add the format; remember it as the default when it matches the
            // compressor's currently configured frequency/channels/depth.
            int i = cbCompressionFormat.Items.Add(new CompressorFmt(wfex));
            if ((OpusAudioCompressor.Frequency == wfex.SamplesPerSec) &&
                (OpusAudioCompressor.Channels == wfex.Channels) &&
                (OpusAudioCompressor.Depth == wfex.BitsPerSample))
            {
                defaultIndex = i;
            }
        }
    }
    if (cbCompressionFormat.Items.Count == 0)
    {
        throw new ApplicationException("No audio formats supported by the device are compatible with the Opus Encoder.");
    }
    cbCompressionFormat.SelectedIndex = defaultIndex;
}
/// <summary>
/// tries to open a video file. If successful it makes available MediaDetClass and _AMMediaType instances of the current file
/// </summary>
/// <param name="videoFile">Path to the video file</param>
/// <param name="mediaDet">Receives the media detector positioned on the video stream, or null on failure</param>
/// <param name="aMMediaType">Receives the media type of the video stream, or a default struct on failure</param>
/// <returns>true for success, false for failure (no video stream, file not supported, ...)</returns>
public static bool openVideoStream(string videoFile, out IMediaDet mediaDet, out _AMMediaType aMMediaType)
{
    mediaDet = new MediaDetClass();
    //loads file
    mediaDet.Filename = videoFile;
    //gets # of streams
    int streamsNumber = mediaDet.OutputStreams;
    //finds a video stream
    _AMMediaType mediaType;
    for (int i = 0; i < streamsNumber; i++)
    {
        mediaDet.CurrentStream = i;
        mediaType = mediaDet.StreamMediaType;
        if (mediaType.majortype == JockerSoft.Media.MayorTypes.MEDIATYPE_Video)
        {
            //video stream found; mediaDet stays positioned on it
            aMMediaType = mediaType;
            return true;
        }
    }
    //no video stream found; release the COM detector before nulling the out param
    Marshal.ReleaseComObject(mediaDet);
    mediaDet = null;
    aMMediaType = new _AMMediaType();
    return false;
}
/// <summary>
/// Tries to open a video file. If successful it makes available MediaDetClass and _AMMediaType instances of the current file.
/// </summary>
/// <param name="videoFile">Path to the video file</param>
/// <param name="mediaDetClass">Receives the MediaDetClass instance positioned on the video stream, or null on failure</param>
/// <param name="aMMediaType">Receives the media type of the video stream, or a default struct on failure</param>
/// <returns>true for success, false for failure (no video stream, file not supported, ...)</returns>
private bool openVideoStream(string videoFile, out MediaDetClass mediaDetClass, out _AMMediaType aMMediaType)
{
    MediaDetClass mediaDet = new MediaDetClass();
    //loads file
    mediaDet.Filename = videoFile;
    //gets # of streams
    int streamsNumber = mediaDet.OutputStreams;
    //finds a video stream and grabs a frame
    for (int i = 0; i < streamsNumber; i++)
    {
        mediaDet.CurrentStream = i;
        _AMMediaType mediaType = mediaDet.StreamMediaType;
        if (mediaType.majortype == Tz888.Common.MayorTypes.MEDIATYPE_Video)
        {
            mediaDetClass = mediaDet;
            aMMediaType = mediaType;
            return(true);
        }
    }
    // No video stream found: null out the detector.
    // NOTE(review): the MediaDetClass COM object is not released on this path.
    mediaDetClass = null;
    aMMediaType = new _AMMediaType();
    return(false);
}
/// <summary>
/// Gets iSC's available _AMMediaTypes, without freeing pbFormat
/// Caller should call MediaType.Free(_AMMediaType[]) when done
/// </summary>
/// <param name="pin">The pin whose preferred media types are enumerated.</param>
/// <returns>All media types offered by the pin; each element's pbFormat is still allocated.</returns>
public static _AMMediaType[] GetMediaTypes(IPin pin)
{
    IEnumMediaTypes iEnum;
    pin.EnumMediaTypes(out iEnum);
    ArrayList alMTs = new ArrayList();
    try
    {
        IntPtr[] ptrs = new IntPtr[1];
        uint fetched;
        iEnum.Next(1, ptrs, out fetched);
        while (fetched == 1)
        {
            // Copy the unmanaged AM_MEDIA_TYPE into a managed struct, then free the
            // outer allocation; pbFormat inside it is deliberately left alive for the caller.
            _AMMediaType mt = (_AMMediaType)Marshal.PtrToStructure(ptrs[0], typeof(_AMMediaType));
            alMTs.Add(mt);
            Marshal.FreeCoTaskMem(ptrs[0]);
            ptrs[0] = IntPtr.Zero;
            iEnum.Next(1, ptrs, out fetched);
        }
    }
    finally
    {
        // Fix: the COM enumerator was previously leaked; release it deterministically.
        Marshal.ReleaseComObject(iEnum);
    }
    _AMMediaType[] mts = new _AMMediaType[alMTs.Count];
    alMTs.CopyTo(mts);
    return(mts);
}
/// <summary>
/// Marshals mt's pbFormat block into the managed structure indicated by mt.formattype.
/// Returns null when there is no format block or the format type is unrecognized.
/// </summary>
public static object MarshalData(_AMMediaType mt)
{
    // Nothing to marshal without an allocated format block.
    if (mt.cbFormat == 0 || mt.pbFormat == IntPtr.Zero)
    {
        return null;
    }

    // Resolve the managed structure type that corresponds to the format GUID.
    Type managedType = null;
    if (mt.formattype == MediaType.FormatType.FORMAT_VideoInfo)
    {
        managedType = typeof(VIDEOINFOHEADER);
    }
    else if (mt.formattype == MediaType.FormatType.FORMAT_VideoInfo2)
    {
        managedType = typeof(VIDEOINFOHEADER2);
    }
    else if (mt.formattype == MediaType.FormatType.FORMAT_MPEGVideo)
    {
        managedType = typeof(MPEG1VIDEOINFO);
    }
    else if (mt.formattype == MediaType.FormatType.FORMAT_MPEG2Video)
    {
        managedType = typeof(MPEG2VIDEOINFO);
    }
    else if (mt.formattype == MediaType.FormatType.FORMAT_DvInfo)
    {
        managedType = typeof(DVINFO);
    }
    else if (mt.formattype == MediaType.FormatType.FORMAT_WaveFormatEx)
    {
        managedType = typeof(WAVEFORMATEX);
    }

    if (managedType == null)
    {
        return null;
    }

    // A single PtrToStructure call covers every recognized type.
    return Marshal.PtrToStructure(mt.pbFormat, managedType);
}
/// <summary>
/// Gets iSC's available _AMMediaTypes, without freeing pbFormat
/// Caller should call MediaType.Free(_AMMediaType[]) when done
/// </summary>
/// <param name="pin">The pin whose preferred media types are enumerated.</param>
/// <returns>All media types offered by the pin; each element's pbFormat is still allocated.</returns>
public static _AMMediaType[] GetMediaTypes(IPin pin)
{
    IEnumMediaTypes iEnum;
    pin.EnumMediaTypes(out iEnum);
    ArrayList alMTs = new ArrayList();
    try
    {
        IntPtr[] ptrs = new IntPtr[1];
        uint fetched;
        iEnum.Next(1, ptrs, out fetched);
        while(fetched == 1)
        {
            // Copy the unmanaged AM_MEDIA_TYPE into a managed struct, then free the
            // outer allocation; pbFormat inside it is deliberately left alive for the caller.
            _AMMediaType mt = (_AMMediaType)Marshal.PtrToStructure(ptrs[0], typeof(_AMMediaType));
            alMTs.Add(mt);
            Marshal.FreeCoTaskMem(ptrs[0]);
            ptrs[0] = IntPtr.Zero;
            iEnum.Next(1, ptrs, out fetched);
        }
    }
    finally
    {
        // Fix: the COM enumerator was previously leaked; release it deterministically.
        Marshal.ReleaseComObject(iEnum);
    }
    _AMMediaType[] mts = new _AMMediaType[alMTs.Count];
    alMTs.CopyTo(mts);
    return mts;
}
/// <summary>
/// Frees the pbFormat pointer of the _AMMediaType and zeroes its fields.
/// </summary>
/// <param name="mt">Media type cleared in place; safe to call when nothing is allocated.</param>
public static void Free(ref _AMMediaType mt)
{
    // Nothing allocated — nothing to do.
    if (mt.pbFormat == IntPtr.Zero)
    {
        return;
    }
    Marshal.FreeCoTaskMem(mt.pbFormat);
    mt.pbFormat = IntPtr.Zero;
    mt.cbFormat = 0;
}
/// <summary>
/// Frees the pbFormat pointer of the _AMMediaType and zeroes its fields.
/// </summary>
/// <param name="mt">Media type cleared in place; safe to call when nothing is allocated.</param>
public static void Free(ref _AMMediaType mt)
{
    // Snapshot the pointer, clear the struct fields, then release the memory.
    IntPtr formatPtr = mt.pbFormat;
    if (formatPtr != IntPtr.Zero)
    {
        mt.pbFormat = IntPtr.Zero;
        mt.cbFormat = 0;
        Marshal.FreeCoTaskMem(formatPtr);
    }
}
/// <summary>
/// Gets iSC's current _AMMediaType, and frees pbFormat
/// </summary>
/// <param name="iSC">Stream config interface to query.</param>
/// <param name="mt">Receives the managed media type; its pbFormat is already freed and zeroed.</param>
/// <param name="formatBlock">Receives the marshaled format block, or null for unrecognized format types.</param>
public static void GetMediaType(IAMStreamConfig iSC, out _AMMediaType mt, out object formatBlock)
{
    IntPtr pmt = IntPtr.Zero;
    iSC.GetFormat(out pmt);

    // Frees pmt and mt.pbFormat
    MediaType.MarshalData(ref pmt, out mt, out formatBlock);
    System.Diagnostics.Debug.Assert(pmt == IntPtr.Zero);
    System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);
}
/// <summary>
/// Sets the _AMMediaType on the pin, then frees it
/// </summary>
/// <remarks>
/// The finally block guarantees the unmanaged pbFormat block is released even
/// when IAMStreamConfig.SetFormat throws (e.g. the pin rejects the format).
/// </remarks>
public static void SetMediaType(IAMStreamConfig iSC, ref _AMMediaType mt)
{
    try
    {
        SetMediaType(iSC, mt);
    }
    finally
    {
        MediaType.Free(ref mt);
    }
}
/// <summary>
/// Takes an _AMMediaType and a format block and reconstructs the pbFormat pointer.
/// </summary>
/// <param name="mt">Media type that must not yet carry an allocated format block.</param>
/// <param name="formatBlock">Managed structure to marshal into the new unmanaged block.</param>
/// <returns>The media type with pbFormat/cbFormat populated.</returns>
public static _AMMediaType Construct(_AMMediaType mt, object formatBlock)
{
    System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);

    // Allocate an unmanaged block sized for the format structure and copy it in.
    int blockSize = Marshal.SizeOf(formatBlock);
    IntPtr unmanaged = Marshal.AllocCoTaskMem(blockSize);
    Marshal.StructureToPtr(formatBlock, unmanaged, false);

    mt.cbFormat = (uint)blockSize;
    mt.pbFormat = unmanaged;
    return mt;
}
/// <summary>
/// Takes an _AMMediaType and a format block and reconstructs the pbFormat pointer.
/// </summary>
/// <param name="mt">Media type that must not yet carry an allocated format block.</param>
/// <param name="formatBlock">Managed structure to marshal into the new unmanaged block.</param>
/// <returns>The media type with pbFormat/cbFormat populated.</returns>
public static _AMMediaType Construct(_AMMediaType mt, object formatBlock)
{
    System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);

    // Record the size, allocate a matching unmanaged block, then marshal the structure.
    mt.cbFormat = (uint)Marshal.SizeOf(formatBlock);
    mt.pbFormat = Marshal.AllocCoTaskMem((int)mt.cbFormat);
    Marshal.StructureToPtr(formatBlock, mt.pbFormat, false);
    return(mt);
}
/// <summary>
/// Loads the provided file into a Windows Media ASF Filter for reading
/// If the filter already exists, remove it from the graph, because...
///
/// The WM ASF Reader only allows you to Load 1 file per instance. So re-create each time
/// through. See IFileSourceFilter documentation.
/// </summary>
/// <param name="fileName">Path of the ASF/WMV file to load.</param>
private void CreateWMASFReader(string fileName)
{
    if (wmASFReader != null)
    {
        // NOTE(review): the removed filter is not explicitly released here; the RCW
        // keeps it alive until garbage collection — confirm whether that matters.
        iGB.RemoveFilter(wmASFReader);
    }

    wmASFReader = Filter.CreateBaseFilterByName("WM ASF Reader");
    iGB.AddFilter(wmASFReader, "WM ASF Reader");

    // Load with an empty media type — presumably the reader derives it from the file;
    // see IFileSourceFilter::Load.
    _AMMediaType wmvmt = new _AMMediaType();
    ((IFileSourceFilter)wmASFReader).Load(fileName, ref wmvmt);
}
/// <summary>
/// Restore the video stream's last settings from the registry
/// </summary>
private void RestoreVideoSettings()
{
    // Read media type from registry
    byte[] bytes = (byte[])AVReg.ReadValue(DeviceKey(), AVReg.MediaType);
    if (bytes != null)
    {
        // Deserialize the _AMMediaType struct out of the shared AVReg memory stream.
        AVReg.ms.Position = 0;
        AVReg.ms.Write(bytes, 0, bytes.Length);
        AVReg.ms.Position = 0;
        _AMMediaType mt = (_AMMediaType)AVReg.bf.Deserialize(AVReg.ms);

        // Read format block from registry
        if (mt.cbFormat != 0)
        {
            bytes = (byte[])AVReg.ReadValue(DeviceKey(), AVReg.FormatBlock);
            Debug.Assert(bytes.Length == mt.cbFormat);

            // Reconstruct pbFormat: only cbFormat survived serialization, so allocate
            // a fresh unmanaged block and copy the saved format bytes back into it.
            mt.pbFormat = Marshal.AllocCoTaskMem((int)mt.cbFormat);
            Marshal.Copy(bytes, 0, mt.pbFormat, (int)mt.cbFormat);

            Log("Restoring stream settings...");
            Log(MediaType.Dump(mt));

            try
            {
                // Set and free
                cg.Source.SetMediaType(ref mt);
            }
            catch (COMException ex)
            {
                Log(DShowError._AMGetErrorText(ex.ErrorCode));
                Log(ex.ToString());
            }
            catch (Exception ex)
            {
                Log(ex.ToString());
            }
        }
    }
}
/// <summary>Dispose of the instance: frame buffer, media type format block, and media detector.</summary>
public void Dispose()
{
    if (_mediaDetector != null)
    {
        // Free the frame buffer
        _frameBuffer.Dispose();
        _frameBuffer = null;

        // Free the media type's format block.
        // Fix: the original passed the *size* (cbFormat) to FreeCoTaskMem as if it were
        // a pointer (new IntPtr(_mediaType.cbFormat)), which frees a bogus address and
        // leaks the real allocation; the allocated pointer is pbFormat.
        if (_mediaType.pbFormat != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(_mediaType.pbFormat);
        }
        _mediaType = new _AMMediaType();

        // Release the media detector
        Marshal.ReleaseComObject(_mediaDetector);
        _mediaDetector = null;
    }
}
/// <summary>
/// Save the stream's current settings to the registry.
/// </summary>
protected void SaveStreamSettings()
{
    _AMMediaType mt = cg.Source.GetMediaType();

    // Copy the pbFormat block into a byte array
    if (mt.pbFormat != IntPtr.Zero && mt.cbFormat > 0)
    {
        byte[] pbFormat = new byte[mt.cbFormat];
        Marshal.Copy(mt.pbFormat, pbFormat, 0, (int)mt.cbFormat);

        // Release the unmanaged block now that we own a managed copy,
        // and null the pointer so serialization below stores no dangling address.
        Marshal.FreeCoTaskMem(mt.pbFormat);
        mt.pbFormat = IntPtr.Zero;

        // Don't adjust cbFormat, will use on restore
        AVReg.WriteValue(DeviceKey(), AVReg.FormatBlock, pbFormat);
    }

    // Serialize the (pointer-free) struct itself into the registry.
    AVReg.ms.Position = 0;
    AVReg.bf.Serialize(AVReg.ms, mt);
    AVReg.WriteValue(DeviceKey(), AVReg.MediaType, AVReg.ms.ToArray());
}
/// <summary>
/// Turns the _AMMediaType into a string representation (header fields plus the
/// decoded format block, when recognized).
/// </summary>
public static string Dump(_AMMediaType mt)
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    sb.Append("\r\nMedia Type\r\n");
    sb.AppendFormat("\tbFixedSizeSamples : {0}\r\n", mt.bFixedSizeSamples);
    sb.AppendFormat("\tbTemporalCompression : {0}\r\n", mt.bTemporalCompression);
    sb.AppendFormat("\tcbFormat : {0}\r\n", mt.cbFormat);
    sb.AppendFormat("\tformattype : {0}\r\n", FormatType.GuidToString(mt.formattype));
    sb.AppendFormat("\tlSampleSize : {0}\r\n", mt.lSampleSize);
    sb.AppendFormat("\tmajortype : {0}\r\n", MajorType.GuidToString(mt.majortype));
    sb.AppendFormat("\tpbFormat : {0}\r\n", ((int)mt.pbFormat));
    sb.AppendFormat("\tsubtype : {0}", SubType.GuidToString(mt.subtype));

    // Append the decoded format block when the format type is one we understand.
    object formatBlock = FormatType.MarshalData(mt);
    if (formatBlock != null)
    {
        sb.Append(FormatType.Dump(formatBlock));
    }
    return sb.ToString();
}
/// <summary>
/// Gets the first media type (and format block) of the DV splitter's audio output pin.
/// </summary>
/// <param name="mt">Receives the first audio media type, or a default struct when none exist.</param>
/// <param name="formatBlock">Receives the matching format block, or null when none exist.</param>
/// <exception cref="ApplicationException">Thrown when the DV Splitter cannot be added.</exception>
public void GetAudioMediaType(out _AMMediaType mt, out object formatBlock)
{
    //GetMediaType does not work with this pin because it cannot be cast to IAMStreamConfig.
    if (!AddDVSplitter())
    {
        throw new ApplicationException("Failed to add DV Splitter");
    }

    _AMMediaType[] types;
    object[] blocks;
    Pin.GetMediaTypes(this.splitterAudioOut, out types, out blocks);

    //Is it safe to assume one audio media type?
    if (types.Length == 0)
    {
        mt = new _AMMediaType();
        formatBlock = null;
        return;
    }
    mt = types[0];
    formatBlock = blocks[0];
}
/// <summary>
/// Turns the _AMMediaType into a culture-invariant string representation
/// (header fields plus the decoded format block, when recognized).
/// </summary>
public static string Dump(_AMMediaType mt)
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    sb.Append("\r\nMedia Type\r\n");
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tbFixedSizeSamples : {0}\r\n", mt.bFixedSizeSamples);
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tbTemporalCompression : {0}\r\n", mt.bTemporalCompression);
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tcbFormat : {0}\r\n", mt.cbFormat);
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tformattype : {0}\r\n", FormatType.GuidToString(mt.formattype));
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tlSampleSize : {0}\r\n", mt.lSampleSize);
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tmajortype : {0}\r\n", MajorType.GuidToString(mt.majortype));
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tpbFormat : {0}\r\n", ((int)mt.pbFormat));
    sb.AppendFormat(CultureInfo.InvariantCulture, "\tsubtype : {0}", SubType.GuidToString(mt.subtype));

    // Append the decoded format block when the format type is one we understand.
    object formatBlock = FormatType.MarshalData(mt);
    if (formatBlock != null)
    {
        sb.Append(FormatType.Dump(formatBlock));
    }
    return(sb.ToString());
}
/// <summary>
/// Reads the frame dimensions from a video media type's VIDEOINFOHEADER format block.
/// </summary>
/// <param name="mediaType">Media type whose pbFormat points at a VIDEOINFOHEADER.</param>
/// <returns>The frame width and height.</returns>
public static Size getVideoSize(_AMMediaType mediaType)
{
    WinStructs.VIDEOINFOHEADER header =
        (WinStructs.VIDEOINFOHEADER)Marshal.PtrToStructure(mediaType.pbFormat, typeof(WinStructs.VIDEOINFOHEADER));
    int width = header.bmiHeader.biWidth;
    int height = header.bmiHeader.biHeight;
    return new Size(width, height);
}
/// <summary>
/// Configures the video compressor after it is connected to the source filter:
/// selects the media type matching vcqi's subtype, sets the bit rate, and applies
/// key-frame rate / quality where the compressor supports it.
/// </summary>
/// <param name="vcqi">The target subtype, bit rate, key frame rate and quality.</param>
private void ConfigureCompressor(VideoCompressorQualityInfo vcqi)
{
    // This method is called after the compressor is connected to the
    // source filter, so that the media types and format blocks contain
    // useable information.
    _AMMediaType[] mts;
    object[] fbs;
    GetMediaTypes(out mts, out fbs);
    for(int i = 0; i < mts.Length; i++)
    {
        if(mts[i].subtype == vcqi.MediaSubType)
        {
            mt = mts[i];
            vih = (VIDEOINFOHEADER)fbs[i];
            break;
        }
    }

    // Fix: bail out when no media type matched. Previously only a Debug.Assert guarded
    // this case, so release builds fell through and configured the compressor with a
    // stale/default media type (this file's other ConfigureCompressor overload already
    // has this guard).
    if (mt.subtype != vcqi.MediaSubType)
    {
        return;
    }
    Debug.Assert(mt.subtype == vcqi.MediaSubType);

    // Configure the bit rate - .Net makes a copy of fb
    vih.BitRate = vcqi.BitRate;

    // Update the structure in memory with what we have
    mt = MediaType.Construct(mt, vih);

    // Allow compressor specific configuration
    // e.g. WM9+ requires extra configuration, others may as well
    CompressorSpecificConfiguration(vcqi);

    // Use the structure in the compressor - this will free the format
    // block when it is done
    SetMediaType(ref mt);

    // Check for other video compression settings
    IAMVideoCompression iVC = OutputPin as IAMVideoCompression;
    if(iVC != null)
    {
        // WMV9 and WMVAdv don't work well if you modify them this way
        if (FriendlyName != "WMVideo8 Encoder DMO" && FriendlyName != "WMVideo9 Encoder DMO")
        {
            iVC.put_KeyFrameRate(vcqi.KeyFrameRate);
            iVC.put_Quality(vcqi.Quality);
        }
    }
    CompressorDiagnostics("After setting media type");
}
/// <summary>
/// Build a graph with sampleGrabber. Render it, and get the media type.
/// </summary>
/// <param name="payload">Payload type; must be dynamicVideo or dynamicAudio.</param>
/// <param name="newStream">RTP stream used as the graph's source; its SSRC must be non-zero.</param>
/// <returns>true on success; false on invalid inputs or any graph-building failure (details in errorMsg).</returns>
public bool Build(PayloadType payload, RtpStream newStream)
{
    this.stream = newStream;
    this.ssrc = newStream.SSRC;
    //Required as of RC3:
    this.stream.IsUsingNextFrame = true;

    // Reject a zero SSRC or any payload other than dynamic video/audio.
    if ((ssrc == 0) || !((payload == PayloadType.dynamicVideo) || (payload == PayloadType.dynamicAudio)))
    {
        errorMsg = "Invalid inputs to build method.";
        return(false);
    }

    fgm = new FilgraphManagerClass();
    MSR.LST.MDShow.IBaseFilter bfSource = null;
    IGraphBuilder iGB = (IGraphBuilder)fgm;

    //if (false) // rotnum = FilterGraph.AddToRot(iGB);
    //AddToRot(iGB);

    try
    {
        // Create and initialize the RTP source filter, add it to the graph,
        // and grab its output pin for rendering below.
        bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(this.stream);
        iGB.AddFilter(bfSource, "RtpSource");
        MSR.LST.MDShow.IPin sourceOutput = Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0);

        //Add SampleGrabber filter
        MSR.LST.MDShow.IBaseFilter bfGrabber = SampleGrabberClass.CreateInstance();
        iGB.AddFilter(bfGrabber, "Grabber");
        UW.CSE.MDShow.ISampleGrabber sgGrabber = (UW.CSE.MDShow.ISampleGrabber)bfGrabber;

        //Set mediatype
        UW.CSE.MDShow._AMMediaType mt = new UW.CSE.MDShow._AMMediaType();
        if (payload == PayloadType.dynamicVideo)
        {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Video;
            //PRI2: RGB24 seems to work for all video? We have used YUY2 in the past, but that won't work
            // for screen streaming. Probably could use more testing
            //mt.subtype = MediaType.SubType.MEDIASUBTYPE_YUY2;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_RGB24;
        }
        else
        {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Audio;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_PCM; //MEDIASUBTYPE_PCM;
        }
        sgGrabber.SetMediaType(ref mt);

        //Add samplegrabber callback
        //0 is sampleCB, 1 is bufferCB. Only bufferCB is actually returning data so far.
        sgGrabber.SetCallback(callBack, 1);
        sgGrabber.SetOneShot(0);
        sgGrabber.SetBufferSamples(0);
        iGB.Render(sourceOutput);

        // Capture the media type the grabber actually connected with.
        UW.CSE.MDShow._AMMediaType uwmt = new UW.CSE.MDShow._AMMediaType();
        sgGrabber.GetConnectedMediaType(ref uwmt);
        connectedMT = copy_AMMediaType(uwmt);
    }
    catch (Exception e)
    {
        errorMsg = e.Message;
        Debug.WriteLine("Exception while building graph: " + e.ToString());
        eventLog.WriteEntry("Exception while building graph: " + e.ToString(), EventLogEntryType.Error, 1001);
        return(false);
    }
    return(true);
}
/// <summary>
/// Marshals mt's pbFormat block into the managed structure indicated by mt.formattype.
/// Returns null when there is no format block or the format type is unrecognized.
/// </summary>
public static object MarshalData(_AMMediaType mt)
{
    // No allocated format block means there is nothing to marshal.
    if (mt.cbFormat == 0 || mt.pbFormat == IntPtr.Zero)
    {
        return null;
    }

    // Pick the managed counterpart for the format GUID, then marshal once.
    Guid ft = mt.formattype;
    Type managed =
        ft == MediaType.FormatType.FORMAT_VideoInfo    ? typeof(VIDEOINFOHEADER)  :
        ft == MediaType.FormatType.FORMAT_VideoInfo2   ? typeof(VIDEOINFOHEADER2) :
        ft == MediaType.FormatType.FORMAT_MPEGVideo    ? typeof(MPEG1VIDEOINFO)   :
        ft == MediaType.FormatType.FORMAT_MPEG2Video   ? typeof(MPEG2VIDEOINFO)   :
        ft == MediaType.FormatType.FORMAT_DvInfo       ? typeof(DVINFO)           :
        ft == MediaType.FormatType.FORMAT_WaveFormatEx ? typeof(WAVEFORMATEX)     :
        null;

    return managed == null ? null : Marshal.PtrToStructure(mt.pbFormat, managed);
}
/// <summary>
/// Picks the compressor output pin's (single) stream-config media type, forces its
/// format type to FORMAT_VideoInfo, and allocates a fresh pbFormat block sized for
/// a VIDEOINFOHEADER. The caller is expected to fill and apply it.
/// </summary>
private void InitializeCompressorMediaType()
{
    ArrayList mts = new ArrayList();
    ArrayList ihs = new ArrayList();
    ArrayList sccs = new ArrayList();
    Pin.GetStreamConfigCaps((IAMStreamConfig)cOutputPin, out mts, out ihs, out sccs);

    // Diagnostic dump of every media type / capability pair.
    for(int i = 0; i < mts.Count; i++)
    {
        Console.WriteLine(MediaType.Dump((_AMMediaType)mts[i]));
        Console.WriteLine(Pin.DebugStreamConfigCaps(sccs[i]));
    }

    // There's only one
    cMT = (_AMMediaType)mts[0];
    cMT.formattype = MediaType.FormatType.FORMAT_VideoInfo;

    // MediaTypes are local to method, so free them all
    // then reallocate just the one we want
    // (cMT is a struct copy, so freeing mts[0] also frees the block cMT pointed at —
    // hence the fresh allocation below.)
    for(int i = 0; i < mts.Count; i++)
    {
        _AMMediaType mt = (_AMMediaType)mts[i];
        MediaType.Free(ref mt);
    }
    cMT.cbFormat = (uint)Marshal.SizeOf(typeof(VIDEOINFOHEADER));
    cMT.pbFormat = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VIDEOINFOHEADER)));
}
/// <summary>
/// The override returns media types on capture output pin, not the decoder output.
/// Attempts to get the MediaType of the decoder output fail because the IAMStreamConfig interface is not supported.
/// </summary>
/// <param name="mt">Receives the capture pin's current media type (pbFormat already freed).</param>
/// <param name="formatBlock">Receives the marshaled format block.</param>
public override void GetMediaType(out _AMMediaType mt, out object formatBlock)
{
    Pin.GetMediaType((IAMStreamConfig)captureOutput, out mt, out formatBlock);
}
/// <summary>
/// Converts the unmanaged IntPtr to the _AMMediaType into the _AMMediaType struct.
/// Frees pmt but not the _AMMediaType's pbFormat pointer; it is left in tact and will need
/// to be freed later.
/// </summary>
/// <param name="pmt">Pointer to an unmanaged AM_MEDIA_TYPE; freed and zeroed on return.</param>
/// <param name="mt">Receives the managed copy of the structure (pbFormat still allocated, now owned by the caller).</param>
public static void MarshalData(ref IntPtr pmt, out _AMMediaType mt)
{
    mt = (_AMMediaType)Marshal.PtrToStructure(pmt, typeof(_AMMediaType));
    Marshal.FreeCoTaskMem(pmt);
    pmt = IntPtr.Zero;
}
/// <summary>
/// Gets the pin's available _AMMediaTypes together with their marshaled format
/// blocks, freeing each pbFormat along the way.
/// </summary>
/// <param name="pin">The pin to enumerate.</param>
/// <param name="mediaTypes">Receives the media types (pbFormat freed/zeroed in each).</param>
/// <param name="formatBlocks">Receives the format block matching each media type.</param>
public static void GetMediaTypes(IPin pin, out _AMMediaType[] mediaTypes, out object[] formatBlocks)
{
    mediaTypes = GetMediaTypes(pin);
    int count = mediaTypes.Length;
    formatBlocks = new object[count];
    for (int idx = 0; idx < count; idx++)
    {
        object block;
        MediaType.MarshalData(ref mediaTypes[idx], out block); // frees pbFormat
        formatBlocks[idx] = block;
    }
}
/// <summary>
/// Converts the unmanaged IntPtr to the _AMMediaType into the _AMMediaType struct and the
/// format block. Frees pmt and the _AMMediaType's pbFormat pointer.
/// Note: If there was extra data at the end of the format block, it is lost
/// </summary>
/// <param name="pmt">Pointer to the unmanaged AM_MEDIA_TYPE; freed and zeroed.</param>
/// <param name="mt">Receives the managed struct with pbFormat freed and zeroed.</param>
/// <param name="formatBlock">Receives the marshaled format block, or null for unrecognized format types.</param>
public static void MarshalData(ref IntPtr pmt, out _AMMediaType mt, out object formatBlock)
{
    MarshalData(ref pmt, out mt);         // copy the struct, free pmt
    MarshalData(ref mt, out formatBlock); // extract the format block, free pbFormat
}
/// <summary>
/// Returns the media type and format block for the row currently selected in the formats list view.
/// </summary>
/// <param name="mt">Receives the selected media type.</param>
/// <param name="fb">Receives the matching format block.</param>
public void GetMediaType(out _AMMediaType mt, out object fb)
{
    // Look the selected index up once and use it for both parallel arrays.
    int selected = lvFormats.SelectedIndices[0];
    mt = mts[selected];
    fb = fbs[selected];
}
/// <summary>
/// Extracts the format block (pbFormat) from the _AMMediaType.
/// Note: if there is extra data at the end of pbFormat, it is lost
/// </summary>
/// <param name="mt">Media type whose pbFormat is marshaled and then freed (pbFormat/cbFormat zeroed).</param>
/// <param name="formatBlock">Receives the managed format structure, or null for unrecognized format types.</param>
public static void MarshalData(ref _AMMediaType mt, out object formatBlock)
{
    formatBlock = FormatType.MarshalData(mt);
    // Release the unmanaged block now that a managed copy exists.
    Free(ref mt);
}
/// <summary>
/// Sets the _AMMediaType on the pin, but doesn't free it
/// </summary>
/// <param name="iSC">Stream config interface of the pin to configure.</param>
/// <param name="mt">Media type to apply; must carry an allocated format block (e.g. from Construct).</param>
public static void SetMediaType(IAMStreamConfig iSC, _AMMediaType mt)
{
    // A populated format block is required before handing the type to the pin.
    System.Diagnostics.Debug.Assert(mt.pbFormat != IntPtr.Zero && mt.cbFormat != 0);
    iSC.SetFormat(ref mt);
}
/// <summary>
/// Reads the frame dimensions from a video media type's VIDEOINFOHEADER format block.
/// </summary>
/// <param name="mediaType">Media type whose pbFormat points at a VIDEOINFOHEADER.</param>
/// <returns>The frame width and height.</returns>
private static Size getVideoSize(_AMMediaType mediaType)
{
    object raw = Marshal.PtrToStructure(mediaType.pbFormat, typeof(WinStructs.VIDEOINFOHEADER));
    WinStructs.VIDEOINFOHEADER vih = (WinStructs.VIDEOINFOHEADER)raw;
    return new Size(vih.bmiHeader.biWidth, vih.bmiHeader.biHeight);
}
/// <summary>
/// Tries to open a video file. On success the out parameters hold the media detector
/// (positioned on the video stream) and that stream's media type.
/// </summary>
/// <param name="videoFile">Path to the video file</param>
/// <param name="mediaDet">Receives the detector, or null on failure</param>
/// <param name="aMMediaType">Receives the video media type, or a default struct on failure</param>
/// <returns>true for success, false for failure (no video stream, file not supported, ...)</returns>
private static bool openVideoStream(string videoFile, out IMediaDet mediaDet, out _AMMediaType aMMediaType)
{
    mediaDet = new MediaDetClass();
    // Load the file into the detector.
    mediaDet.Filename = videoFile;

    // Scan every output stream until the video stream turns up.
    int total = mediaDet.OutputStreams;
    int index = 0;
    while (index < total)
    {
        mediaDet.CurrentStream = index;
        _AMMediaType current = mediaDet.StreamMediaType;
        if (current.majortype == JockerSoft.Media.MayorTypes.MEDIATYPE_Video)
        {
            // Video stream found; the detector stays positioned on it.
            aMMediaType = current;
            return true;
        }
        index++;
    }

    // No video stream: release the COM detector and report failure.
    Marshal.ReleaseComObject(mediaDet);
    mediaDet = null;
    aMMediaType = new _AMMediaType();
    return false;
}
/// <summary>
/// Configures the video compressor after it is connected to the source filter:
/// selects the media type matching vcqi's subtype, sets the bit rate, and applies
/// key-frame rate / quality where the compressor supports it. Non-matching or
/// non-standard compressors are tolerated (logged, not fatal).
/// </summary>
/// <param name="vcqi">The target subtype, bit rate, key frame rate and quality.</param>
private void ConfigureCompressor(VideoCompressorQualityInfo vcqi)
{
    // This method is called after the compressor is connected to the
    // source filter, so that the media types and format blocks contain
    // useable information.
    _AMMediaType[] mts;
    object[] fbs;
    GetMediaTypes(out mts, out fbs);
    for(int i = 0; i < mts.Length; i++)
    {
        if(mts[i].subtype == vcqi.MediaSubType)
        {
            mt = mts[i];
            vih = (VIDEOINFOHEADER)fbs[i];
            break;
        }
    }

    if (mt.subtype != vcqi.MediaSubType)
    {
        //If we are using non-standard codecs, there may not be a match.
        //PRI2: Some compressors will likely need to be configured using their own custom tools or dialogs, or will require special case subclasses.
        return;
    }
    Debug.Assert(mt.subtype == vcqi.MediaSubType);

    // Configure the bit rate - .Net makes a copy of fb
    vih.BitRate = vcqi.BitRate;

    // Update the structure in memory with what we have
    mt = MediaType.Construct(mt, vih);

    // Allow compressor specific configuration
    // e.g. WM9+ requires extra configuration, others may as well
    CompressorSpecificConfiguration(vcqi);

    // Use the structure in the compressor - this will free the format
    // block when it is done.
    try
    {
        //This was observed to fail for some non-standard compressors.
        SetMediaType(ref mt);
    }
    catch (Exception ex)
    {
        Trace.WriteLine("Failed to set video compressor MediaType: " + ex.ToString());
    }

    // Check for other video compression settings
    IAMVideoCompression iVC = OutputPin as IAMVideoCompression;
    if(iVC != null)
    {
        // WMV9 and WMVAdv don't work well if you modify them this way
        if (FriendlyName != "WMVideo8 Encoder DMO" && FriendlyName != "WMVideo9 Encoder DMO")
        {
            try
            {
                iVC.put_KeyFrameRate(vcqi.KeyFrameRate);
                iVC.put_Quality(vcqi.Quality);
            }
            catch(Exception ex)
            {
                Trace.WriteLine("Failed to set video compressor quality: " + ex.ToString());
            }
        }
    }
    CompressorDiagnostics("After setting media type");
}
/// <summary>
/// Tries to open a video file. On success the out parameters hold the media detector
/// (positioned on the video stream) and that stream's media type.
/// </summary>
/// <param name="videoFile">Path to the video file.</param>
/// <param name="mediaDetClass">Receives the detector, or null on failure.</param>
/// <param name="aMMediaType">Receives the video media type, or a default struct on failure.</param>
/// <returns>true when a video stream was found; false otherwise.</returns>
private static bool openVideoStream(string videoFile, out MediaDetClass mediaDetClass, out _AMMediaType aMMediaType)
{
    MediaDetClass mediaDet = new MediaDetClass();
    mediaDet.Filename = videoFile;
    int streamsNumber = mediaDet.OutputStreams;
    for (int i = 0; i < streamsNumber; i++)
    {
        mediaDet.CurrentStream = i;
        _AMMediaType mediaType = mediaDet.StreamMediaType;
        if (mediaType.majortype == JockerSoft.Media.MayorTypes.MEDIATYPE_Video)
        {
            mediaDetClass = mediaDet;
            aMMediaType = mediaType;
            return true;
        }
    }
    // Fix: release the COM object on the failure path (it was previously left to the GC),
    // matching the sibling openVideoStream overloads which do release it.
    System.Runtime.InteropServices.Marshal.ReleaseComObject(mediaDet);
    mediaDetClass = null;
    aMMediaType = new _AMMediaType();
    return false;
}
/// <summary>
/// Gets all media types and format blocks available on the DV splitter's audio output pin.
/// </summary>
/// <param name="mts">Receives the media types.</param>
/// <param name="fbs">Receives the corresponding format blocks.</param>
/// <exception cref="ApplicationException">Thrown when the DV Splitter cannot be added.</exception>
public void GetAudioMediaTypes(out _AMMediaType[] mts, out object[] fbs)
{
    if (!AddDVSplitter())
    {
        throw new ApplicationException("Failed to add DV Splitter");
    }
    Pin.GetMediaTypes(this.splitterAudioOut, out mts, out fbs);
}
/// <summary>
/// Gets the Output pin's current _AMMediaType, and frees pbFormat
/// </summary>
/// <param name="mt">Receives the current media type (pbFormat freed and zeroed).</param>
/// <param name="formatBlock">Receives the marshaled format block.</param>
public virtual void GetMediaType(out _AMMediaType mt, out object formatBlock)
{
    // Requires the output pin to support IAMStreamConfig; the cast throws otherwise.
    Pin.GetMediaType((IAMStreamConfig)OutputPin, out mt, out formatBlock);
}
/// <summary>
/// Gets the Output pin's available _AMMediaTypes, and frees the pbFormats
/// </summary>
/// <param name="mts">Receives the available media types (pbFormat freed in each).</param>
/// <param name="formatBlocks">Receives the format block matching each media type.</param>
public void GetMediaTypes(out _AMMediaType[] mts, out object[] formatBlocks)
{
    Pin.GetMediaTypes(OutputPin, out mts, out formatBlocks);
}
/// <summary>
/// Sets the media type on the Output pin, then frees it
/// </summary>
/// <param name="mt">Media type to apply; its pbFormat is freed by the callee even when setting fails.</param>
public void SetMediaType(ref _AMMediaType mt)
{
    // Requires the output pin to support IAMStreamConfig; the cast throws otherwise.
    Pin.SetMediaType((IAMStreamConfig)OutputPin, ref mt);
}
/// <summary>
/// Gets the first media type (and format block) of the DV splitter's video output pin.
/// </summary>
/// <param name="mt">Receives the first video media type, or a default struct when none exist.</param>
/// <param name="formatBlock">Receives the matching format block, or null when none exist.</param>
/// <exception cref="ApplicationException">Thrown when the DV Splitter cannot be added.</exception>
public void GetVideoMediaType(out _AMMediaType mt, out object formatBlock)
{
    //GetMediaType does not work with this pin because it cannot be cast to IAMStreamConfig.
    if (!AddDVSplitter())
    {
        throw new ApplicationException("Failed to add DV Splitter");
    }

    _AMMediaType[] foundTypes;
    object[] foundBlocks;
    Pin.GetMediaTypes(this.splitterVideoOut, out foundTypes, out foundBlocks);

    //Is it safe to assume only one video media type?
    bool any = foundTypes.Length > 0;
    mt = any ? foundTypes[0] : new _AMMediaType();
    formatBlock = any ? foundBlocks[0] : null;
}
/// <summary>
/// Sets the media type on the Output pin, then frees it
/// </summary>
/// <param name="mt">Media type without an allocated format block (the callee constructs it).</param>
/// <param name="formatBlock">Format structure to marshal into pbFormat before setting.</param>
public void SetMediaType(_AMMediaType mt, object formatBlock)
{
    // Requires the output pin to support IAMStreamConfig; the cast throws otherwise.
    Pin.SetMediaType((IAMStreamConfig)OutputPin, mt, formatBlock);
}