/// <summary>
/// Create an audio media type.
/// </summary>
/// <returns>The constructed media type, with its major type set to audio.</returns>
public static AMMediaType GetAudioMediaType()
{
    var mediaType = new AMMediaType { majorType = MediaType.Audio };
    return mediaType;
}
/// <summary>
/// Adds the Elecard NWSource-Plus stream source filter to the graph, loads the
/// given URL on it, and connects the source to the main infinite tee.
/// </summary>
/// <param name="url">URL of the network stream to load.</param>
protected override void AddStreamSourceFilter(string url)
{
    Log.Log.WriteFile("dvbip:Add NWSource-Plus");
    _filterStreamSource = FilterGraphTools.AddFilterFromClsid(_graphBuilder,
                                                              typeof (ElecardNWSourcePlus).GUID,
                                                              "Elecard NWSource-Plus");

    // Describe the incoming data as an MPEG-2 transport stream with no
    // extra format block.
    var streamType = new AMMediaType
    {
        majorType = MediaType.Stream,
        subType = MediaSubType.Mpeg2Transport,
        unkPtr = IntPtr.Zero,
        sampleSize = 0,
        temporalCompression = false,
        fixedSizeSamples = true,
        formatType = FormatType.None,
        formatSize = 0,
        formatPtr = IntPtr.Zero
    };
    ((IFileSourceFilter)_filterStreamSource).Load(url, streamType);

    //connect the [stream source] -> [inf tee]
    Log.Log.WriteFile("dvb: Render [source]->[inftee]");
    int hr = _capBuilder.RenderStream(null, null, _filterStreamSource, null, _infTeeMain);
    if (hr != 0)
    {
        Log.Log.Error("dvb:Add source returns:0x{0:X}", hr);
        throw new TvException("Unable to add source filter");
    }
}
/// <summary> Use capture with selected media caps</summary>
/// <param name="iDeviceNum">Zero-based index of the video capture device to use.</param>
/// <param name="media">Media type (capture caps) to configure on the device.</param>
public Capture(int iDeviceNum, AMMediaType media)
{
    // Get the collection of video devices
    DsDevice[] capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // BUGFIX: also reject a negative index, which previously slipped past the
    // check and failed later with an IndexOutOfRangeException.
    if (iDeviceNum < 0 || iDeviceNum >= capDevices.Length)
    {
        throw new Exception("No video capture devices found at that index!");
    }

    try
    {
        // Set up the capture graph
        SetupGraph(capDevices[iDeviceNum], media);

        // tell the callback to ignore new images
        m_PictureReady = new ManualResetEvent(false);
        m_bGotOne = true;
        m_bRunning = false;

        timer1.Interval = 1000 / 15; // 15 fps
        timer1.Tick += new EventHandler(timer1_Tick);
        timer1.Start();
    }
    catch
    {
        // Release any partially-built graph before propagating the failure.
        Dispose();
        throw;
    }
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="mType">Media type of the new group. NOTE: ownership is taken —
/// it is freed here (DsUtils.FreeAMMediaType) after being applied, so callers
/// must not free or reuse it.</param>
/// <param name="pTimeline">Timeline to use for the group</param>
/// <param name="fps">FPS for the group</param>
public MediaGroup(AMMediaType mType, IAMTimeline pTimeline, double fps)
{
    int hr;
    IAMTimelineObj pGroupObj;

    m_Length = 0;
    m_Files = new ArrayList();
    m_FPS = fps;
    m_pTimeline = pTimeline;

    // make the root group/composition
    hr = m_pTimeline.CreateEmptyNode(out pGroupObj, TimelineMajorType.Group);
    DESError.ThrowExceptionForHR(hr);

    try
    {
        m_pGroup = (IAMTimelineGroup)pGroupObj;

        // Set the media type we just created
        hr = m_pGroup.SetMediaType(mType);
        DESError.ThrowExceptionForHR(hr);
        // The media type has been copied into the group; free the caller's copy.
        DsUtils.FreeAMMediaType(mType);

        // add the video group to the timeline
        hr = m_pTimeline.AddGroup(pGroupObj);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // NOTE(review): pGroupObj is deliberately NOT released here — the same
        // COM object is kept alive as m_pGroup; confirm it is released in Dispose.
        //Marshal.ReleaseComObject(pGroupObj);
    }
    //Marshal.ReleaseComObject(pTrack1Obj); // Released as m_VideoTrack in dispose
}
/// <summary>
/// Builds an uncompressed video media type (VIDEOINFOHEADER format) for the
/// given bit depth and frame size.
/// </summary>
/// <param name="bitCount">Bits per pixel; also selects the media subtype.</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <returns>The constructed media type; its formatPtr owns CoTaskMem the caller must free.</returns>
public static AMMediaType GetVideoMediaType(short bitCount, int width, int height)
{
    Guid mediaSubType = GetMediaSubTypeForBitCount(bitCount);

    var mediaType = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = mediaSubType,
        formatType = FormatType.VideoInfo,
        fixedSizeSamples = true,
        formatSize = Marshal.SizeOf(typeof (VideoInfoHeader))
    };

    // The HEADER macro returns the BITMAPINFO within the VIDEOINFOHEADER
    var videoInfo = new VideoInfoHeader();
    videoInfo.BmiHeader = new BitmapInfoHeader();
    videoInfo.BmiHeader.Size = Marshal.SizeOf(typeof (BitmapInfoHeader));
    videoInfo.BmiHeader.Compression = 0;
    videoInfo.BmiHeader.BitCount = bitCount;
    videoInfo.BmiHeader.Width = width;
    videoInfo.BmiHeader.Height = height;
    videoInfo.BmiHeader.Planes = 1;

    // One full frame: width * height * bytes-per-pixel.
    int sampleSize = videoInfo.BmiHeader.Width * videoInfo.BmiHeader.Height * (videoInfo.BmiHeader.BitCount / 8);
    videoInfo.BmiHeader.ImageSize = sampleSize;
    mediaType.sampleSize = sampleSize;

    // Marshal the managed header into the unmanaged format block.
    mediaType.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(videoInfo));
    Marshal.StructureToPtr(videoInfo, mediaType.formatPtr, false);

    return mediaType;
}
// Save the size parameters for use in SnapShot
private void SaveSizeInfo(DirectShowLib.ISampleGrabber sampGrabber)
{
    int hr;

    // Get the media type from the SampleGrabber
    DirectShowLib.AMMediaType media = new DirectShowLib.AMMediaType();
    hr = sampGrabber.GetConnectedMediaType(media);
    // BUGFIX: the HRESULT of GetConnectedMediaType was previously overwritten
    // (and silently ignored) by the SetMediaType call below; check both.
    DsError.ThrowExceptionForHR(hr);
    hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Get the struct
        DirectShowLib.VideoInfoHeader videoInfoHeader = new DirectShowLib.VideoInfoHeader();
        Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

        // Grab the size info
        m_videoWidth = videoInfoHeader.BmiHeader.Width;
        m_videoHeight = videoInfoHeader.BmiHeader.Height;
        m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
        // NOTE(review): image size assumes 3 bytes/pixel (24bpp) regardless of
        // the actual BitCount above — TODO confirm this is intended.
        m_ImageSize = m_videoWidth * m_videoHeight * 3;
    }
    finally
    {
        DirectShowLib.DsUtils.FreeAMMediaType(media);
        media = null;
    }
}
/// <summary>
/// Extracts width, height, and bit depth from the VIDEOINFOHEADER format
/// block of the given media type.
/// </summary>
/// <param name="media">Media type whose formatPtr points at a VIDEOINFOHEADER.</param>
private ResolutionInfo(AMMediaType media)
{
    var header = new VideoInfoHeader();
    Marshal.PtrToStructure(media.formatPtr, header);

    Width = header.BmiHeader.Width;
    Height = header.BmiHeader.Height;
    Bpp = header.BmiHeader.BitCount;
}
/// <summary>
/// Bundles the parameters describing one capture resolution/format option.
/// </summary>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <param name="fps">Frame rate value.</param>
/// <param name="standard">Video standard name.</param>
/// <param name="media">Associated media type; retained by this instance.</param>
public GCSBitmapInfo(int width, int height, long fps, string standard, AMMediaType media)
{
    Width = width;
    Height = height;
    Fps = fps;
    Standard = standard;
    Media = media;
}
/// <summary>
/// Builds a renderer that writes the timeline's audio to a WAV file, then
/// marks the renderer as initialized.
/// </summary>
/// <param name="timeline">Timeline whose audio is rendered.</param>
/// <param name="outputFile">Path of the WAV file to write.</param>
/// <param name="audioCompressor">Audio compressor filter; may be null.</param>
/// <param name="mediaType">Media type to apply to the compressor; may be null.</param>
/// <param name="audioParticipants">Callback participants for the audio stream.</param>
public WavFileRenderer(ITimeline timeline, string outputFile, IBaseFilter audioCompressor,
                       AMMediaType mediaType, ICallbackParticipant[] audioParticipants)
    : base(timeline)
{
    RenderToWavDest(outputFile, audioCompressor, mediaType, audioParticipants);
    ChangeState(RendererState.Initialized);
}
/// <summary>
/// Wires the timeline's audio groups through an optional compressor into a
/// WavDest filter and a file writer, producing a WAV file on disk.
/// </summary>
/// <param name="outputFile">Destination WAV file path; must not be null.</param>
/// <param name="audioCompressor">Optional compressor; added to Cleanup when non-null.</param>
/// <param name="mediaType">Optional format applied to the compressor after the graph is built.</param>
/// <param name="audioParticipants">Callback participants passed through to RenderGroups.</param>
private void RenderToWavDest(
    string outputFile,
    IBaseFilter audioCompressor,
    AMMediaType mediaType,
    ICallbackParticipant[] audioParticipants)
{
    // Track the compressor so it is released with the renderer's other resources.
    if (audioCompressor != null) Cleanup.Add(audioCompressor);

    int hr;

    if (FirstAudioGroup == null)
    {
        throw new SplicerException(Resources.ErrorNoAudioStreamToRender);
    }

    if (outputFile == null)
    {
        throw new SplicerException(Resources.ErrorInvalidOutputFileName);
    }

    // Contains useful routines for creating the graph
    var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    Cleanup.Add(graphBuilder);

    try
    {
        hr = graphBuilder.SetFiltergraph(Graph);
        DESError.ThrowExceptionForHR(hr);

        // Destination chain: [groups] -> wavDest -> fileSink.
        IBaseFilter wavDestFilter = StandardFilters.RenderWavDestination(Cleanup, Graph);
        IBaseFilter fileSink = StandardFilters.RenderFileDestination(Cleanup, Graph, outputFile);

        try
        {
            RenderGroups(graphBuilder, audioCompressor, null, wavDestFilter, audioParticipants, null);

            FilterGraphTools.ConnectFilters(Graph, wavDestFilter, fileSink, true);

            // if supplied, apply the media type to the filter
            if (mediaType != null)
            {
                FilterGraphTools.SetFilterFormat(mediaType, audioCompressor);
            }

            DisableClock();
        }
        finally
        {
            // Local COM references only; the Cleanup list owns the rest.
            if (wavDestFilter != null) Marshal.ReleaseComObject(wavDestFilter);
            if (fileSink != null) Marshal.ReleaseComObject(fileSink);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(graphBuilder);
    }
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="timeline">Timeline to use for the group</param>
/// <param name="type">The type of group this is</param>
/// <param name="mediaType">Media type of the new group</param>
/// <param name="name">Name of the group</param>
/// <param name="fps">Fps for the group</param>
public Group(ITimeline timeline, GroupType type, AMMediaType mediaType, string name, double fps)
    : base(timeline, name, -1)
{
    // Validate the arguments up front.
    if (timeline == null) throw new ArgumentNullException("timeline");
    if (mediaType == null) throw new ArgumentNullException("mediaType");
    if (fps <= 0) throw new SplicerException(Resources.ErrorFramesPerSecondMustBeGreaterThenZero);

    _timeline = timeline;
    _type = type;
    _fps = fps;

    // Insert the group into the underlying DES timeline and keep its
    // composition interface for later track manipulation.
    _group = TimelineBuilder.InsertGroup(_timeline.DesTimeline, mediaType, name);
    TimelineComposition = (IAMTimelineComp) _group;
}
/// <summary>
/// Releases the COM filter reference and frees the held media type.
/// Safe to call repeatedly: each field is nulled after release.
/// </summary>
/// <param name="disposing">Standard dispose-pattern flag.
/// NOTE(review): currently unused — both resources are released regardless of
/// whether this runs from Dispose() or a finalizer; confirm that is intended.</param>
private void Dispose(bool disposing)
{
    if (_filter != null)
    {
        Marshal.ReleaseComObject(_filter);
        _filter = null;
    }
    if (_mediaType != null)
    {
        // Frees the unmanaged format block owned by the media type.
        DsUtils.FreeAMMediaType(_mediaType);
        _mediaType = null;
    }
}
/// <summary>
/// Builds the filter graph with a sample grabber configured for RGB24 video,
/// then starts the timing display.
/// </summary>
public MainForm()
{
    InitializeComponent();

    graphbuilder = (IGraphBuilder)new FilterGraph();
    samplegrabber = (ISampleGrabber)new SampleGrabber();
    graphbuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");

    // Ask the grabber for RGB24 video frames.
    mt = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    samplegrabber.SetMediaType(mt);

    PrintSeconds();
}
/// <summary>
/// Configures the sample grabber to deliver RGB24 video.
/// </summary>
/// <param name="sb">Sample grabber to configure.</param>
private static void ConfigSampleGrabber(ISampleGrabber sb)
{
    // set the media type
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    // that's the call to the ISampleGrabber interface
    int hr = sb.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
    // BUGFIX: the HRESULT was previously discarded; surface failures the same
    // way the other grabber-configuration helpers do.
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the sample grabber for RGB24 video and registers this instance
/// as its callback.
/// </summary>
/// <param name="sampleGrabber">Sample grabber to configure.</param>
private void SetupSampleGrabber(ISampleGrabber sampleGrabber)
{
    // Request RGB24 video frames from the grabber.
    var rgb24 = new DirectShowLib.AMMediaType();
    rgb24.majorType = MediaType.Video;
    rgb24.subType = MediaSubType.RGB24;
    rgb24.formatType = FormatType.VideoInfo;

    int hr = sampleGrabber.SetMediaType(rgb24);
    DsUtils.FreeAMMediaType(rgb24);
    DsError.ThrowExceptionForHR(hr);

    hr = sampleGrabber.SetCallback(this, 0);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds the capture graph, configures the sample grabber for RGB24 video
/// with this form as its callback, and populates the device list.
/// </summary>
public CaptureForm()
{
    InitializeComponent();

    graph_builder = (IGraphBuilder)new FilterGraph();
    media_control = (IMediaControl)graph_builder;
    events = (IMediaEventEx)graph_builder;
    grabber = (ISampleGrabber)new SampleGrabber();

    // Ask the grabber for RGB24 video.
    var media_type = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24
    };
    grabber.SetMediaType( media_type );
    grabber.SetCallback( this, 1 );

    cbDevices.Items.AddRange( GetDevices( FilterCategory.VideoInputDevice ) );
}
/// <summary>
/// Configures the sample grabber for RGB24 video and registers this instance
/// as the grabber callback.
/// </summary>
/// <param name="sampGrabber">Sample grabber to configure.</param>
private void ConfigureSampleGrabber(DirectShowLib.ISampleGrabber sampGrabber)
{
    // Set the media type to Video/RBG24
    var media = new DirectShowLib.AMMediaType
    {
        majorType = DirectShowLib.MediaType.Video,
        subType = DirectShowLib.MediaSubType.RGB24,
        formatType = DirectShowLib.FormatType.VideoInfo
    };

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    DirectShowLib.DsUtils.FreeAMMediaType(media);
    media = null;

    // Configure the samplegrabber
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
//
// retrieve the bitmap stride (the offset from one row of pixel to the next)
//
private int GetStride(int videoWidth)
{
    var media = new AMMediaType();

    // GetConnectedMediaType retrieve the media type for a sample
    var hr = sampleGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
        {
            throw new Exception("Format type incorrect");
        }

        // save the stride
        var videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        return videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
    }
    finally
    {
        // BUGFIX: previously the media type leaked when the format check threw;
        // free it on every path.
        DsUtils.FreeAMMediaType(media);
    }
}
/// <summary>
/// Wraps a WaveEx media type and exposes the fields of its WAVEFORMATEX block.
/// </summary>
/// <param name="type">Media type whose format block is a WAVEFORMATEX; retained by this instance.</param>
public WavFormatInfo(AMMediaType type)
{
    if (type == null) throw new ArgumentNullException("type");

    _mediaType = type;

    // Only WaveEx-formatted media types carry a WAVEFORMATEX block.
    if (type.formatType != FormatType.WaveEx)
    {
        throw new SplicerException(Resources.UnsupportedAMMEdiaType);
    }

    // Marshal the unmanaged format block into a managed structure, then copy
    // each field of interest.
    var formatEx = Marshal.PtrToStructure(type.formatPtr, typeof (WaveFormatEx)) as WaveFormatEx;

    _size = formatEx.cbSize;
    _averageBytesPerSecond = formatEx.nAvgBytesPerSec;
    _blockAlign = formatEx.nBlockAlign;
    _channels = formatEx.nChannels;
    _samplesPerSecond = formatEx.nSamplesPerSec;
    _bitsPerSample = formatEx.wBitsPerSample;
    _formatTag = formatEx.wFormatTag;

    // Derived, display-friendly figures.
    _khz = _samplesPerSecond / 1000;
    _kbps = (_averageBytesPerSecond * 8) / 1000;
}
/// <summary>
/// Locates the dump filter in the graph and points its file sink at the
/// given file name.
/// </summary>
/// <param name="filename">Destination file for the dumped stream.</param>
private void SetSaveFile(string filename)
{
    fileDump = FilterGraphTools.FindFilterByName(graphBuilder, dumpFilterName);
    if (object.Equals(fileDump, null))
        throw new System.Exception("Couldn't find dump filter in filter graph: " + dumpFilterName);

    sink = fileDump as IFileSinkFilter;

    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.Mpeg2Transport;
    media.formatType = FormatType.VideoInfo;

    int hr = sink.SetFileName(filename, media);
    // BUGFIX: the media type was never freed; release it before surfacing
    // any failure from SetFileName.
    DsUtils.FreeAMMediaType(media);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Determines whether the first media type a pin advertises is video.
/// </summary>
/// <param name="pin">Pin to inspect; must not be null.</param>
/// <returns>True when the pin's first media type has the video major type;
/// false otherwise (including when the pin exposes no media types).</returns>
public static bool IsVideo(IPin pin)
{
    if (pin == null) throw new ArgumentNullException("pin");

    int hr;
    bool bRet = false;
    var pmt = new AMMediaType[1];
    IEnumMediaTypes ppEnum;

    // Walk the MediaTypes for the pin
    hr = pin.EnumMediaTypes(out ppEnum);
    DESError.ThrowExceptionForHR(hr);
    try
    {
        // Just read the first one
        hr = ppEnum.Next(1, pmt, IntPtr.Zero);
        DESError.ThrowExceptionForHR(hr);
        // BUGFIX: Next returns S_FALSE (not an error) when there are no media
        // types, leaving pmt[0] null; guard before dereferencing it.
        if (pmt[0] != null)
        {
            bRet = pmt[0].majorType == MediaType.Video;
        }
    }
    finally
    {
        // BUGFIX: free the media type even when an exception is thrown above.
        if (pmt[0] != null)
        {
            DsUtils.FreeAMMediaType(pmt[0]);
        }
        Marshal.ReleaseComObject(ppEnum);
    }
    return bRet;
}
/// <summary>
/// Finds the first pin on a filter with the given direction that advertises
/// a media type with the given major type.
/// </summary>
/// <param name="filter">Filter whose pins are searched; must not be null.</param>
/// <param name="direction">Required pin direction.</param>
/// <param name="majorType">Major media type to match.</param>
/// <returns>The matching pin (caller owns the COM reference), or null when none matches.</returns>
public static IPin FindPinForMajorType(IBaseFilter filter, PinDirection direction, Guid majorType)
{
    if (filter == null) throw new ArgumentNullException("filter");

    int hr = 0;
    IEnumPins pinsEnum = null;
    try
    {
        hr = filter.EnumPins(out pinsEnum);
        DsError.ThrowExceptionForHR(hr);

        var pins = new IPin[1];
        int numberFetched = 1;
        while (numberFetched > 0)
        {
            IntPtr pcFetched = Marshal.AllocCoTaskMem(4);
            try
            {
                hr = pinsEnum.Next(1, pins, pcFetched);
                DsError.ThrowExceptionForHR(hr);
                numberFetched = Marshal.ReadInt32(pcFetched);
            }
            finally
            {
                Marshal.FreeCoTaskMem(pcFetched);
            }

            if (numberFetched > 0)
            {
                PinDirection currentPinDirection;
                hr = pins[0].QueryDirection(out currentPinDirection);
                DsError.ThrowExceptionForHR(hr);

                if (currentPinDirection != direction)
                {
                    // BUGFIX: release pins we skip instead of leaking them.
                    Marshal.ReleaseComObject(pins[0]);
                    continue;
                }

                IEnumMediaTypes mediaTypesEnum = null;
                try
                {
                    var mediaTypes = new AMMediaType[1];
                    hr = pins[0].EnumMediaTypes(out mediaTypesEnum);
                    DsError.ThrowExceptionForHR(hr);

                    int numberFetched2 = 1;
                    while (numberFetched2 > 0)
                    {
                        // BUGFIX: allocate real memory for the fetched count —
                        // the original passed IntPtr.Zero and then crashed on
                        // Marshal.ReadInt32(IntPtr.Zero).
                        IntPtr fetched2 = Marshal.AllocCoTaskMem(4);
                        try
                        {
                            hr = mediaTypesEnum.Next(1, mediaTypes, fetched2);
                            DsError.ThrowExceptionForHR(hr);
                            numberFetched2 = Marshal.ReadInt32(fetched2);
                        }
                        finally
                        {
                            Marshal.FreeCoTaskMem(fetched2);
                        }

                        if (numberFetched2 > 0)
                        {
                            bool matches = mediaTypes[0].majorType == majorType;
                            // BUGFIX: free each enumerated media type.
                            DsUtils.FreeAMMediaType(mediaTypes[0]);
                            if (matches)
                            {
                                // success, return the pin (caller takes ownership)
                                return pins[0];
                            }
                        }
                    }
                }
                finally
                {
                    if (mediaTypesEnum != null) Marshal.ReleaseComObject(mediaTypesEnum);
                }

                // BUGFIX: release the pin exactly once, after scanning all its
                // media types — the original released it inside the inner loop
                // on every iteration, over-releasing and risking returning a
                // released pin.
                Marshal.ReleaseComObject(pins[0]);
            }
        }
    }
    finally
    {
        if (pinsEnum != null) Marshal.ReleaseComObject(pinsEnum);
    }
    return null;
}
/// <summary>
/// Applies the given stream format to every output pin of a filter via
/// IAMStreamConfig.SetFormat.
/// </summary>
/// <param name="streamFormat">Format to apply.</param>
/// <param name="filter">Filter whose output pins are configured; must not be null.</param>
public static void SetFilterFormat(AMMediaType streamFormat, IBaseFilter filter)
{
    if (filter == null) throw new ArgumentNullException("filter");

    int hr;
    IEnumPins pinsEnum = null;
    try
    {
        hr = filter.EnumPins(out pinsEnum);
        DsError.ThrowExceptionForHR(hr);

        if (pinsEnum == null)
            throw new SplicerException(Resources.ErrorPinsEnumeratorIsNull);

        var pins = new IPin[1];
        while (true)
        {
            try
            {
                int fetched = 0;
                IntPtr pcFetched = Marshal.AllocCoTaskMem(4);
                try
                {
                    hr = pinsEnum.Next(pins.Length, pins, pcFetched);
                    DsError.ThrowExceptionForHR(hr);
                    fetched = Marshal.ReadInt32(pcFetched);
                }
                finally
                {
                    Marshal.FreeCoTaskMem(pcFetched);
                }

                if (fetched != 1) break; // enumeration exhausted

                IPin pin = pins[0];

                PinInfo pinInfo;
                hr = pin.QueryPinInfo(out pinInfo);
                DsError.ThrowExceptionForHR(hr);
                // BUGFIX: QueryPinInfo returns a referenced filter inside
                // PinInfo; free it or the owning filter leaks.
                bool isOutput = pinInfo.dir == PinDirection.Output;
                DsUtils.FreePinInfo(pinInfo);

                if (!isOutput) continue;

                var streamConfig = (IAMStreamConfig) pin;
                hr = streamConfig.SetFormat(streamFormat);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (pins[0] != null) Marshal.ReleaseComObject(pins[0]);
                pins[0] = null;
            }
        }
    }
    finally
    {
        if (pinsEnum != null) Marshal.ReleaseComObject(pinsEnum);
    }
}
/// <summary>
/// IEnumMediaTypes.Next — hands out the pin's single media type on the first
/// fetch, then reports the enumeration as exhausted.
/// </summary>
/// <param name="cMediaTypes">Number of media types requested.</param>
/// <param name="ppMediaTypes">Array that receives at most one copied media type.</param>
/// <param name="pcFetched">Optional pointer that receives the fetched count; may be IntPtr.Zero.</param>
/// <returns>S_OK when exactly the one requested type was delivered; S_FALSE otherwise.</returns>
public unsafe int Next(int cMediaTypes, AMMediaType[] ppMediaTypes, IntPtr pcFetched)
{
    // NOTE: index++ only executes when cMediaTypes > 0 (short-circuit), so the
    // "already enumerated" state advances only on real fetch attempts.
    if (cMediaTypes > 0 && index++ == 0 && ppMediaTypes != null)
    {
        if (pcFetched != IntPtr.Zero)
            *((int*)pcFetched) = 1;
        // Hand out a copy so the caller can free it independently.
        ppMediaTypes[0] = new AMMediaType();
        CopyMediaTypes(ppMediaTypes[0], pin.mediaType);
        // COM convention: S_OK only when the full requested count was returned.
        return cMediaTypes == 1 ? S_OK : S_FALSE;
    }
    else
    {
        if (pcFetched != IntPtr.Zero)
            *((int*)pcFetched) = 0;
        return S_FALSE;
    }
}
/// <summary>
/// IPin.ReceiveConnection — inbound connection attempts are always refused
/// by this pin.
/// </summary>
int IPin.ReceiveConnection(IPin pReceivePin, AMMediaType pmt)
{
    return E_FAIL;
}
/// <summary>
/// IPin.QueryAccept — accepts only media types whose major type is Stream.
/// </summary>
/// <returns>E_POINTER for null, S_OK for stream types, S_FALSE otherwise.</returns>
int IPin.QueryAccept(AMMediaType pmt)
{
    if (pmt == null)
        return E_POINTER;

    return pmt.majorType == MediaType.Stream ? S_OK : S_FALSE;
}
/// <summary>
/// IPin.ConnectionMediaType — copies this pin's media type into the caller's
/// structure when connected.
/// </summary>
int IPin.ConnectionMediaType(AMMediaType pmt)
{
    // Order matters: report "not connected" before validating the out argument.
    if (connectedPin == null)
        return DsResults.E_NotConnected;
    if (pmt == null)
        return E_POINTER;

    CopyMediaTypes(pmt, mediaType);
    return S_OK;
}
/// <summary>
/// Reads the connected media type from the sample grabber and caches the
/// video width, height, stride, and average time per frame.
/// </summary>
/// <param name="sampGrabber">Connected sample grabber to query.</param>
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    int hr;

    // Get the media type from the SampleGrabber
    AMMediaType media = new AMMediaType();
    hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Grab the size info
        VideoInfoHeader videoInfoHeader =
            (VideoInfoHeader) Marshal.PtrToStructure(media.formatPtr, typeof (VideoInfoHeader));
        m_videoWidth = videoInfoHeader.BmiHeader.Width;
        m_videoHeight = videoInfoHeader.BmiHeader.Height;
        m_stride = m_videoWidth*(videoInfoHeader.BmiHeader.BitCount/8);
        m_avgtimeperframe = videoInfoHeader.AvgTimePerFrame;
    }
    finally
    {
        // BUGFIX: previously the media type leaked when the format check threw;
        // free it on every path.
        DsUtils.FreeAMMediaType(media);
        media = null;
    }
}
/// <summary>
/// Returns the frame rate of the first video stream in the file, or 30 when
/// no video stream is found.
/// </summary>
/// <param name="filename">Media file to inspect.</param>
private double GetFrameRate(string filename)
{
    IMediaDet md = new MediaDet() as IMediaDet;
    try
    {
        Guid streamType;
        int hr, nStreams;

        md.put_Filename(filename);
        md.get_OutputStreams(out nStreams);

        for (int i = 0; i < nStreams; i++)
        {
            hr = md.put_CurrentStream(i);
            DsError.ThrowExceptionForHR(hr);
            hr = md.get_StreamType(out streamType);
            DsError.ThrowExceptionForHR(hr);

            if (streamType == MediaType.Video)
            {
                // BUGFIX: the current stream is already i (set above); the
                // original reset it to stream 0 and could therefore read the
                // frame rate of the wrong stream.
                double frate = 30;
                md.get_FrameRate(out frate);
                return frate;
            }
        }
        // No video stream found: fall back to a default of 30 fps.
        return 30;
    }
    finally
    {
        // BUGFIX: release the MediaDet COM object (previously leaked).
        Marshal.ReleaseComObject(md);
    }
}
/// <summary>
/// Returns the zero-based indices of all audio streams in the file.
/// </summary>
/// <param name="filename">Media file to inspect.</param>
private List<int> GetAudioStreams(string filename)
{
    IMediaDet md = new MediaDet() as IMediaDet;
    List<int> streamList = new List<int>();
    try
    {
        Guid streamType;
        int hr, nStreams;

        md.put_Filename(filename);
        md.get_OutputStreams(out nStreams);

        for (int i = 0; i < nStreams; i++)
        {
            hr = md.put_CurrentStream(i);
            DsError.ThrowExceptionForHR(hr);
            hr = md.get_StreamType(out streamType);
            DsError.ThrowExceptionForHR(hr);
            if (streamType == MediaType.Audio)
                streamList.Add(i);
        }
    }
    finally
    {
        // BUGFIX: release the MediaDet COM object (previously leaked).
        Marshal.ReleaseComObject(md);
    }
    return streamList;
}
/// <summary>
/// Creates the filter's "Output" pin, taking a copy of the media type the
/// source filter detected.
/// </summary>
/// <param name="filter">Owning source filter.</param>
public StreamOutputPin(MpqFileSourceFilter filter)
{
    this.filter = filter;
    name = "Output";

    mediaType = new AMMediaType();
    CopyMediaTypes(mediaType, filter.detectedMediaType);
}
/// <summary>
/// Configures the sample grabber for RGB24 video, disables sample buffering
/// and one-shot mode, and registers this instance as the grabber callback.
/// </summary>
/// <param name="sampGrabber">Sample grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Set the media type to Video/RBG24
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    sampGrabber.SetBufferSamples(false);
    sampGrabber.SetOneShot(false);

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(media);

    // Configure the samplegrabber
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// IPin.Connect — initiates a connection to <paramref name="pReceivePin"/>.
/// First offers any stream-typed media type the receiving pin advertises;
/// failing that, offers this pin's own media type.
/// </summary>
/// <param name="pReceivePin">Downstream pin to connect to.</param>
/// <param name="pmt">Optional media type requested by the caller; only
/// Null or Stream major types are accepted.</param>
int IPin.Connect(IPin pReceivePin, AMMediaType pmt)
{
    IEnumMediaTypes emt;
    if (connectedPin != null)
        return DsResults.E_AlreadyConnected;
    if (pReceivePin == null)
        return E_POINTER;

    // Try each media type the downstream pin proposes; accept the first
    // stream-typed one it will also accept from us.
    if (pReceivePin.EnumMediaTypes(out emt) == S_OK)
    {
        var mediaTypes = new AMMediaType[1];
        while (emt.Next(1, mediaTypes, IntPtr.Zero) == S_OK)
            if (mediaTypes[0].majorType == MediaType.Stream &&
                pReceivePin.ReceiveConnection(this, mediaTypes[0]) == S_OK)
            {
                // NOTE(review): the enumerator is dropped (set to null), not
                // released via Marshal.ReleaseComObject — confirm intended.
                emt = null;
                connectedPin = pReceivePin;
                return S_OK;
            }
        emt = null;
    }

    // Honour the caller's requested type: anything other than Null/Stream
    // is unsupported by this pin.
    if (pmt != null && pmt.majorType != MediaType.Null && pmt.majorType != MediaType.Stream)
        return DsResults.E_TypeNotAccepted;

    // Fall back to offering our own media type directly.
    if (pReceivePin.ReceiveConnection(this, mediaType) == S_OK)
    {
        connectedPin = pReceivePin;
        return S_OK;
    }
    return DsResults.E_NoAcceptableTypes;
}