public void SetAndGetAllAvailableResolution(IPin VideoOutPin)
{
    int hr = 0;
    IAMStreamConfig streamConfig = (IAMStreamConfig)VideoOutPin;
    AMMediaType correctVidFormat = null;

    hr = streamConfig.GetNumberOfCapabilities(out int piCount, out int piSize);
    DsError.ThrowExceptionForHR(hr);

    IntPtr ptr = Marshal.AllocCoTaskMem(piSize);
    try
    {
        for (int i = 0; i < piCount; i++)
        {
            hr = streamConfig.GetStreamCaps(i, out AMMediaType searchmedia, ptr);
            DsError.ThrowExceptionForHR(hr);

            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(searchmedia.formatPtr, v);

            // Keep the capability at the index we want (here the third entry);
            // free the others so their format blocks do not leak.
            if (i == 2)
            {
                correctVidFormat = searchmedia;
            }
            else
            {
                DsUtils.FreeAMMediaType(searchmedia);
            }
        }
    }
    finally
    {
        Marshal.FreeCoTaskMem(ptr);
    }

    hr = streamConfig.SetFormat(correctVidFormat);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(correctVidFormat);

    // Read back the format that was actually set. The format block is a
    // VideoInfoHeader; the BitmapInfoHeader is nested inside it, so it must
    // not be read directly from formatPtr.
    hr = streamConfig.GetFormat(out AMMediaType mediaType);
    DsError.ThrowExceptionForHR(hr);
    VideoInfoHeader vih = new VideoInfoHeader();
    Marshal.PtrToStructure(mediaType.formatPtr, vih);
    x = vih.BmiHeader.Width;
    y = vih.BmiHeader.Height;
    DsUtils.FreeAMMediaType(mediaType);
}
/// <summary> /// Our chance to allocate any storage we may need /// </summary> /// <returns>Returns always S_OK</returns> protected override int InternalAllocateStreamingResources() { // Reinitialize variables for (int i = 0; i < InputPinCount; i++) { InternalDiscontinuity(i); } for (int i = 0; i < InputPinCount; i++) { AMMediaType mediaType = InputType(i); VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader); this.inputStreams[i].StreamWidth = videoInfoHeader.BmiHeader.Width; this.inputStreams[i].StreamHeight = videoInfoHeader.BmiHeader.Height; this.inputStreams[i].StreamBBP = videoInfoHeader.BmiHeader.BitCount / 8; this.inputStreams[i].StreamStride = videoInfoHeader.BmiHeader.Width * this.inputStreams[i].StreamBBP; this.inputStreams[i].BufferTimeStamp = 0; } AMMediaType outputMediaType = OutputType(0); VideoInfoHeader outputVideoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(outputMediaType.formatPtr, outputVideoInfoHeader); this.outputStream.StreamWidth = outputVideoInfoHeader.BmiHeader.Width; this.outputStream.StreamHeight = outputVideoInfoHeader.BmiHeader.Height; this.outputStream.StreamBBP = outputVideoInfoHeader.BmiHeader.BitCount / 8; this.outputStream.StreamStride = outputVideoInfoHeader.BmiHeader.Width * this.outputStream.StreamBBP; this.outputStream.BufferTimeStamp = 0; return(SOK); }
private void SaveSizeInfo(ISampleGrabber sampGrabber) { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = sampGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Grab the size info VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); m_videoWidth = videoInfoHeader.BmiHeader.Width; m_videoHeight = videoInfoHeader.BmiHeader.Height; m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8); m_avgtimeperframe = videoInfoHeader.AvgTimePerFrame; DsUtils.FreeAMMediaType(media); media = null; }
public int GetLatency(out long prtLatency) { #if HAMED_LOG_METHOD_INFO MethodBase method = new StackTrace().GetFrame(0).GetMethod(); Console.WriteLine(this.GetType().FullName + " - " + method.Name + " - " + method.ToString()); #endif prtLatency = UNITS / 30; AMMediaType mt = Pins[0].CurrentMediaType; if (mt.majorType == MediaType.Video) { { VideoInfoHeader _pvi = mt; if (_pvi != null) { prtLatency = _pvi.AvgTimePerFrame; } } { VideoInfoHeader2 _pvi = mt; if (_pvi != null) { prtLatency = _pvi.AvgTimePerFrame; } } } return(NOERROR); }
/// <summary> /// Checks if AMMediaType's frame size is appropriate for desired frame size. /// </summary> /// <param name="media_type">Media type to analyze.</param> /// <param name="RefFrameSize">Desired frame size. Can be null or have 0 for height or width if it's not important.</param> private static bool IsFrameSizeAppropiate ( AMMediaType media_type, FrameSize RefFrameSize ) { // if we were asked to choose frame size if (RefFrameSize == null) { return(true); } VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader); if (RefFrameSize.Width > 0 && videoInfoHeader.BmiHeader.Width != RefFrameSize.Width) { return(false); } if (RefFrameSize.Height > 0 && videoInfoHeader.BmiHeader.Height != RefFrameSize.Height) { return(false); } return(true); }
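// Usage sketch only (not taken from the surrounding project): shows how a check like
// IsFrameSizeAppropiate above is typically combined with IAMStreamConfig.GetStreamCaps
// when selecting a capture format. It assumes DirectShowLib usings, that the FrameSize
// type from the snippet above is available, and a hypothetical helper name.
private static bool TrySelectFormat(IAMStreamConfig streamConfig, FrameSize desiredSize)
{
    int count, size;
    DsError.ThrowExceptionForHR(streamConfig.GetNumberOfCapabilities(out count, out size));
    IntPtr caps = Marshal.AllocCoTaskMem(size);
    try
    {
        for (int i = 0; i < count; i++)
        {
            AMMediaType candidate;
            DsError.ThrowExceptionForHR(streamConfig.GetStreamCaps(i, out candidate, caps));

            bool matches = IsFrameSizeAppropiate(candidate, desiredSize);
            if (matches)
            {
                // Use the first capability whose frame size matches the request.
                DsError.ThrowExceptionForHR(streamConfig.SetFormat(candidate));
            }
            DsUtils.FreeAMMediaType(candidate);
            if (matches)
            {
                return true;
            }
        }
        return false;
    }
    finally
    {
        Marshal.FreeCoTaskMem(caps);
    }
}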
void SetFormat(int formatIndex, int frameRate) { int capsCount, capSize; int hr = VideoConfig.GetNumberOfCapabilities(out capsCount, out capSize); DsError.ThrowExceptionForHR(hr); IntPtr pSC = Marshal.AllocHGlobal(capSize); AMMediaType mt = null; try { VideoInfoHeader vih = new VideoInfoHeader(); hr = VideoConfig.GetStreamCaps(formatIndex, out mt, pSC); DsError.ThrowExceptionForHR(hr); if (frameRate > 0) { Marshal.PtrToStructure(mt.formatPtr, vih); vih.AvgTimePerFrame = (long)(10000000.0 / frameRate); Marshal.StructureToPtr(vih, mt.formatPtr, false); } hr = VideoConfig.SetFormat(mt); DsError.ThrowExceptionForHR(hr); } finally { DsUtils.FreeAMMediaType(mt); Marshal.FreeHGlobal(pSC); } }
private static short GetBitCountForMediaType(AMMediaType media_type) { VideoInfoHeader structure = new VideoInfoHeader(); Marshal.PtrToStructure(media_type.formatPtr, structure); return(structure.BmiHeader.BitCount); }
/// <summary> /// Gets and saves mode (mediatype, format type and etc). /// </summary> public void SaveMode() { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = m_SampleGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Grab the size info VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); m_videoWidth = videoInfoHeader.BmiHeader.Width; m_videoHeight = videoInfoHeader.BmiHeader.Height; m_videoBitCount = videoInfoHeader.BmiHeader.BitCount; m_ImageSize = videoInfoHeader.BmiHeader.ImageSize; DsUtils.FreeAMMediaType(media); media = null; }
/// <summary> /// Get bit count for mediatype /// </summary> /// <param name="media_type">Media type to analyze.</param> private static int GetBitCountForMediaType(AMMediaType media_type) { VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader); return(videoInfoHeader.BmiHeader.BitCount); }
/*
 * public override int OnReceive(ref IMediaSampleImpl _sample)
 * {
 *     Console.WriteLine("OnReceive ");
 *
 *     Output.Deliver(ref _sample);
 *
 *     return S_OK;
 * }
 */
public override int GetMediaType(int iPosition, ref AMMediaType pMediaType)
{
    Console.WriteLine("GetMediaType");
    if (iPosition > 0)
    {
        return(VFW_S_NO_MORE_ITEMS);
    }
    if (pMediaType == null)
    {
        return(E_INVALIDARG);
    }
    if (!Input.IsConnected)
    {
        return(VFW_E_NOT_CONNECTED);
    }

    AMMediaType.Copy(Input.CurrentMediaType, ref pMediaType);

    // Rewrite the copied format block as uncompressed RGB24
    VideoInfoHeader vhi = new VideoInfoHeader();
    Marshal.PtrToStructure(pMediaType.formatPtr, vhi);
    vhi.BmiHeader.Compression = 0;
    vhi.BmiHeader.BitCount = 24;
    vhi.BmiHeader.ImageSize = vhi.BmiHeader.Width * vhi.BmiHeader.Height * 3;

    // Free the format block allocated by Copy before replacing it, otherwise it leaks
    if (pMediaType.formatPtr != IntPtr.Zero)
    {
        Marshal.FreeCoTaskMem(pMediaType.formatPtr);
    }
    pMediaType.formatPtr = Marshal.AllocCoTaskMem(pMediaType.formatSize);
    Marshal.StructureToPtr(vhi, pMediaType.formatPtr, false);

    pMediaType.majorType = MediaType.Video;
    pMediaType.subType = MediaSubType.RGB24;
    pMediaType.formatType = FormatType.VideoInfo;
    pMediaType.sampleSize = vhi.BmiHeader.ImageSize;
    return(NOERROR);
}
private void SetConfigParameters(ICaptureGraphBuilder2 captureGraphBuilder, IBaseFilter captureFilter, int frameRate, int width, int height) { object outObject; int hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out outObject); IAMStreamConfig videoStreamConfig = outObject as IAMStreamConfig; if (videoStreamConfig == null) { throw new Exception("Failed to get IAMStreamConfig"); } AMMediaType outMedia; hr = videoStreamConfig.GetFormat(out outMedia); DsError.ThrowExceptionForHR(hr); VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(outMedia.formatPtr, videoInfoHeader); videoInfoHeader.AvgTimePerFrame = 10000000 / frameRate; videoInfoHeader.BmiHeader.Width = width; videoInfoHeader.BmiHeader.Height = height; Marshal.StructureToPtr(videoInfoHeader, outMedia.formatPtr, false); hr = videoStreamConfig.SetFormat(outMedia); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(outMedia); outMedia = null; }
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue) { if (streamConfig == null) { throw new NotSupportedException(); } this.assertStopped(); this.derenderGraph(); IntPtr zero = IntPtr.Zero; AMMediaType structure = new AMMediaType(); try { object obj2; int format = streamConfig.GetFormat(out zero); if (format != 0) { Marshal.ThrowExceptionForHR(format); } Marshal.PtrToStructure(zero, structure); if (structure.formatType == FormatType.WaveEx) { obj2 = new WaveFormatEx(); } else if (structure.formatType == FormatType.VideoInfo) { obj2 = new VideoInfoHeader(); } else { if (structure.formatType != FormatType.VideoInfo2) { throw new NotSupportedException("This device does not support a recognized format block."); } obj2 = new VideoInfoHeader2(); } Marshal.PtrToStructure(structure.formatPtr, obj2); FieldInfo field = obj2.GetType().GetField(fieldName); if (field == null) { throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block."); } field.SetValue(obj2, newValue); Marshal.StructureToPtr(obj2, structure.formatPtr, false); format = streamConfig.SetFormat(structure); if (format != 0) { Marshal.ThrowExceptionForHR(format); } } finally { DsUtils.FreeAMMediaType(structure); Marshal.FreeCoTaskMem(zero); } this.renderStream = false; this.renderGraph(); this.startPreviewIfNeeded(); return(null); }
public IBaseFilter GetVideo() { IBaseFilter baseDevice; var filterGraph = new FilterGraph() as IFilterGraph2; filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice); IPin pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0); var streamConfig = pin as IAMStreamConfig; AMMediaType media; int iC = 0, iS = 0; streamConfig.GetNumberOfCapabilities(out iC, out iS); IntPtr ptr = Marshal.AllocCoTaskMem(iS); for (int i = 0; i < iC; i++) { streamConfig.GetStreamCaps(i, out media, ptr); VideoInfoHeader v; v = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, v); } Guid iid = typeof(IBaseFilter).GUID; object source; selectedDevice.Mon.BindToObject(null, null, ref iid, out source); return((IBaseFilter)source); }
public static AMMediaType GetVideoMediaType(short bitCount, int width, int height) { Guid mediaSubType = GetMediaSubTypeForBitCount(bitCount); var VideoGroupType = new AMMediaType(); VideoGroupType.majorType = MediaType.Video; VideoGroupType.subType = mediaSubType; VideoGroupType.formatType = FormatType.VideoInfo; VideoGroupType.fixedSizeSamples = true; VideoGroupType.formatSize = Marshal.SizeOf(typeof(VideoInfoHeader)); var vif = new VideoInfoHeader(); vif.BmiHeader = new BitmapInfoHeader(); // The HEADER macro returns the BITMAPINFO within the VIDEOINFOHEADER vif.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader)); vif.BmiHeader.Compression = 0; vif.BmiHeader.BitCount = bitCount; vif.BmiHeader.Width = width; vif.BmiHeader.Height = height; vif.BmiHeader.Planes = 1; int iSampleSize = vif.BmiHeader.Width * vif.BmiHeader.Height * (vif.BmiHeader.BitCount / 8); vif.BmiHeader.ImageSize = iSampleSize; VideoGroupType.sampleSize = iSampleSize; VideoGroupType.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vif)); Marshal.StructureToPtr(vif, VideoGroupType.formatPtr, false); return(VideoGroupType); }
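// Usage sketch only for the GetVideoMediaType helper above: the returned AMMediaType
// owns CoTaskMem for its format block, so free it with DsUtils.FreeAMMediaType once it
// has been handed to the graph or is no longer needed. The values here are placeholders.
AMMediaType rgb24Type = GetVideoMediaType(24, 640, 480);
try
{
    // e.g. pass rgb24Type to ISampleGrabber.SetMediaType or IAMStreamConfig.SetFormat here
}
finally
{
    DsUtils.FreeAMMediaType(rgb24Type);
}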
/// <summary>
/// Sets the number of DVD buttons found in the current DVD video
/// </summary>
/// <param name="buttonCount">The total number of buttons</param>
private void SetDvdButtonCount(int buttonCount)
{
    m_dvdButtonCount = buttonCount;

    if (m_dvdButtonCount == 0)
    {
        m_isOverButton = false;
        InvokeOnOverDvdButton(m_isOverButton);
    }

    var mediaType = new AMMediaType();
    m_dummyRendererPin.ConnectionMediaType(mediaType);

    /* Check to see if it's a video media type */
    if (mediaType.formatType != FormatType.VideoInfo2 &&
        mediaType.formatType != FormatType.VideoInfo)
    {
        DsUtils.FreeAMMediaType(mediaType);
        return;
    }

    var videoInfo = new VideoInfoHeader();

    /* Read the video info header struct from the native pointer.
       VideoInfoHeader and VideoInfoHeader2 both begin with the source and
       target rectangles, so reading TargetRect this way works for either
       format type. */
    Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

    /* Get the target rect */
    m_renderTargetRect = videoInfo.TargetRect.ToRectangle();

    DsUtils.FreeAMMediaType(mediaType);
}
private VideoOutPinConfiguration[] GetVideoOutPins(IBaseFilter filter) { List <VideoOutPinConfiguration> video_out_pins = new List <VideoOutPinConfiguration>(); IEnumPins iterator; IPin[] pins = new IPin[1]; filter.EnumPins(out iterator); while (iterator.Next(1, pins, IntPtr.Zero) == 0) { PinDirection pin_direction; pins[0].QueryDirection(out pin_direction); if (pin_direction == PinDirection.Output) { int caps_count; int caps_size; IAMStreamConfig config = (IAMStreamConfig)pins[0]; config.GetNumberOfCapabilities(out caps_count, out caps_size); AMMediaType type = null; IntPtr buffer = Marshal.AllocCoTaskMem(caps_size); for (int i = 0; i < caps_count; i++) { config.GetStreamCaps(i, out type, buffer); VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(type.formatPtr, typeof(VideoInfoHeader)); if (header.BmiHeader.Width > 0) { video_out_pins.Add(new VideoOutPinConfiguration(filter, pins[0], i, header)); } } Marshal.FreeCoTaskMem(buffer); DsUtils.FreeAMMediaType(type); } } return(video_out_pins.ToArray()); }
/// <summary> /// Checks if AMMediaType's resolution is appropriate for desired resolution. /// </summary> /// <param name="media_type">Media type to analyze.</param> /// <param name="resolution_desired">Desired resolution. Can be null or have 0 for height or width if it's not important.</param> private static bool IsResolutionAppropiate(AMMediaType media_type, Resolution resolution_desired) { // if we were asked to choose resolution if (resolution_desired == null) { return(true); } VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader); if (resolution_desired.Width > 0 && videoInfoHeader.BmiHeader.Width != resolution_desired.Width) { return(false); } if (resolution_desired.Height > 0 && videoInfoHeader.BmiHeader.Height != resolution_desired.Height) { return(false); } return(true); }
private object GetField(AMMediaType mediaType, String fieldName) { object formatStruct; if (mediaType.formatType == FormatType.WaveEx) { formatStruct = new WaveFormatEx(); } else if (mediaType.formatType == FormatType.VideoInfo) { formatStruct = new VideoInfoHeader(); } else if (mediaType.formatType == FormatType.VideoInfo2) { formatStruct = new VideoInfoHeader2(); } else { throw new NotSupportedException("This device does not support a recognized format block."); } // Retrieve the nested structure Marshal.PtrToStructure(mediaType.formatPtr, formatStruct); // Find the required field Type structType = formatStruct.GetType(); FieldInfo fieldInfo = structType.GetField(fieldName); if (fieldInfo != null) { return(fieldInfo.GetValue(formatStruct)); } return(null); }
public CAviDS(string filename, double playSpeed) { builder = new FilterGraph() as IGraphBuilder; grabber = new SampleGrabber() as ISampleGrabber; mediaType = new AMMediaType(); mediaType.majorType = MediaType.Video; mediaType.subType = MediaSubType.RGB32; mediaType.formatType = FormatType.VideoInfo; DsError.ThrowExceptionForHR(grabber.SetMediaType(mediaType)); DsError.ThrowExceptionForHR(builder.AddFilter(grabber as IBaseFilter, "Sample Grabber(DTXMania)")); DsError.ThrowExceptionForHR(builder.RenderFile(filename, null)); CDirectShow.ConnectNullRendererFromSampleGrabber(builder, grabber as IBaseFilter); if (builder is IVideoWindow videoWindow) { videoWindow.put_AutoShow(OABool.False); } DsError.ThrowExceptionForHR(grabber.GetConnectedMediaType(mediaType)); videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader)); nWidth = videoInfo.BmiHeader.Width; nHeight = videoInfo.BmiHeader.Height; seeker = builder as IMediaSeeking; DsError.ThrowExceptionForHR(seeker.GetDuration(out nMediaLength)); DsError.ThrowExceptionForHR(seeker.SetRate(playSpeed / 20.0)); control = builder as IMediaControl; filter = builder as IMediaFilter; grabber.SetBufferSamples(BufferThem: true); Run(); Pause(); bPlaying = false; bPause = false; }
private static Camera_NET.Resolution GetResolutionForMediaType(AMMediaType media_type) { VideoInfoHeader structure = new VideoInfoHeader(); Marshal.PtrToStructure(media_type.formatPtr, structure); return(new Camera_NET.Resolution(structure.BmiHeader.Width, structure.BmiHeader.Height)); }
/// <summary> /// Gets and saves mode (mediatype, format type and etc). /// </summary> public Resolution SaveMode() { lock (_DeltaLock) { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = _SampleGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Grab the size info VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); _ResolutionWidth = videoInfoHeader.BmiHeader.Width; _ResolutionHeight = videoInfoHeader.BmiHeader.Height; _ResolutionBitsPerPixel = videoInfoHeader.BmiHeader.BitCount; _ResolutionSizeBytes = videoInfoHeader.BmiHeader.ImageSize; DsUtils.FreeAMMediaType(media); media = null; return(new Resolution(_ResolutionWidth, _ResolutionHeight)); } }
/// <summary> /// Parametrizes the provider and itself according to the required config. /// You can consider it as converting required -> actual config. /// </summary> /// <param name="config">The required configuration from the UI</param> private void configPane_SetupComplete(InteractiveDSConfig config) { // Copy the provider provider = config.Provider; // Enable stats if necessary stats.Enabled = config.Statistics; // Prepare the buffer BMPbuffer = null; BMPbuffered = config.Buffered; videoInfoHeader = provider.StartProvider(config.Preview, this, config.Owner.Handle); // Setup image storage imageWidth = videoInfoHeader.BmiHeader.Width; imageHeight = videoInfoHeader.BmiHeader.Height; imageStride = imageWidth * 3; imageOffset = (imageHeight - 1) * imageStride; savedArray = new byte[imageWidth * imageHeight * 4]; // Adjust the preview ResizeVideo(config.Owner.Size); // Create the current configuration object configuration = new DSConfig(provider, imageWidth, imageHeight, config.Preview, config.Statistics); // Indicate that the preparations are complete mediaOnline = true; if (MediaOnline != null) { MediaOnline(configuration); } }
public static void GetImageFromMedia(string mediaFile, string outImageFile, double time)
{
    int hr;
    IMediaDet imd = (IMediaDet)new MediaDet();
    try
    {
        // Set the file name
        hr = imd.put_Filename(mediaFile);
        DESError.ThrowExceptionForHR(hr);

        // Read the stream's media type to get the video size
        AMMediaType mediaType = new AMMediaType();
        imd.get_StreamMediaType(mediaType);
        VideoInfoHeader infoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
        int width = infoHeader.BmiHeader.Width;
        int height = infoHeader.BmiHeader.Height;
        DsUtils.FreeAMMediaType(mediaType);

        // Write the frame at the requested time to the output image file
        hr = imd.WriteBitmapBits(time, width, height, outImageFile);
        DESError.ThrowExceptionForHR(hr);
    }
    finally
    {
        Marshal.ReleaseComObject(imd);
    }
}
/// <summary> Read and store the properties </summary> private void SaveSizeInfo(ISampleGrabber sampGrabber) { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = sampGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Grab the size info VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); m_videoWidth = videoInfoHeader.BmiHeader.Width; m_videoHeight = videoInfoHeader.BmiHeader.Height; m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8); //m_bitmapDataArray = new byte[videoInfoHeader.BmiHeader.ImageSize]; m_handle = Marshal.AllocCoTaskMem(m_stride * m_videoHeight); DsUtils.FreeAMMediaType(media); media = null; }
private void buildGraph()
{
    int hr = 0;
    IBaseFilter captureFilter;
    AMMediaType pmt4 = new AMMediaType();
    IFilterGraph2 filtergraph = new FilterGraph() as IFilterGraph2;
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

    hr = pBuilder.SetFiltergraph(m_graph);
    DsError.ThrowExceptionForHR(hr);

    filtergraph.AddSourceFilterForMoniker(m_capDev.Mon, null, m_capDev.Name, out captureFilter);
    hr = m_graph.AddFilter(captureFilter, "CapFilter");
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(typeof(SampleGrabber));
    hr = m_graph.AddFilter(pSampleGrabber, "SampleGrabber");
    DsError.ThrowExceptionForHR(hr);

    hr = ((ISampleGrabber)pSampleGrabber).SetCallback(new StillGrabberCallBack(), 0);

    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    VideoInfoHeader format = new VideoInfoHeader();
    format.SrcRect = new DsRect();
    format.TargetRect = new DsRect();
    format.BmiHeader = new BitmapInfoHeader();
    format.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
    // (the remainder of the original graph construction is not included in this snippet)
}
/// <summary> /// Gets the type of the media. /// </summary> /// <param name="pMediaType">Type of the p media.</param> /// <returns></returns> public override int GetMediaType(ref AMMediaType pMediaType) { pMediaType.majorType = MediaType.Video; pMediaType.subType = MediaSubType.RGB32; pMediaType.formatType = FormatType.VideoInfo; VideoInfoHeader vih = new VideoInfoHeader(); vih.AvgTimePerFrame = m_nAvgTimePerFrame; vih.BmiHeader = new BitmapInfoHeader(); vih.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader)); vih.BmiHeader.Compression = 0; vih.BmiHeader.BitCount = 32; vih.BmiHeader.Width = m_nWidth; vih.BmiHeader.Height = m_nHeight; vih.BmiHeader.Planes = 1; vih.BmiHeader.ImageSize = vih.BmiHeader.Width * vih.BmiHeader.Height * vih.BmiHeader.BitCount / 8; vih.SrcRect = new DsRect(); vih.TargetRect = new DsRect(); AMMediaType.SetFormat(ref pMediaType, ref vih); pMediaType.fixedSizeSamples = true; pMediaType.sampleSize = vih.BmiHeader.ImageSize; return(NOERROR); }
/// <summary>
/// Creates the encoder from an encoding profile and prepares an RGB24 input frame.
/// </summary>
/// <param name="profileFileName">Name of the encoding profile file.</param>
/// <param name="framesPerSecond">Frames per second.</param>
public WmvEncoder(string profileFileName, ulong framesPerSecond)
{
    try
    {
        WMEncoder encoder = new WMEncoder();
        IWMEncProfile2 profile = WMEncProfile.LoadEncodingProfile(encoder, profileFileName);
        WMEncProfile.ReleaseEncoder(ref encoder);

        writer = new WmvWriter();
        writer.SetProfile((IWMProfile)profile.SaveToIWMProfile());

        viHeader = new VideoInfoHeader();
        bool bret = writer.FindVideoInputFormat(0, MediaSubTypes.WMMEDIASUBTYPE_RGB24, ref viHeader, false);
        if (!bret)
        {
            throw new Exception("FindVideoInputFormat error");
        }

        frame = new Bitmap((int)viHeader.bmiHeader.biWidth, (int)viHeader.bmiHeader.biHeight, PixelFormat.Format24bppRgb);
        fps = framesPerSecond;
    }
    catch (Exception)
    {
        // error handle
        throw;
    }
}
// Save the size parameters for use in SnapShot private void SaveSizeInfo(ISampleGrabber sampGrabber) { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = sampGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); try { if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Get the struct VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, videoInfoHeader); // Grab the size info m_videoWidth = videoInfoHeader.BmiHeader.Width; m_videoHeight = videoInfoHeader.BmiHeader.Height; m_stride = videoInfoHeader.BmiHeader.ImageSize / m_videoHeight; m_ImageSize = videoInfoHeader.BmiHeader.ImageSize; } finally { DsUtils.FreeAMMediaType(media); media = null; } }
private static void UpdateVideoPart(IMediaDet mediaDet, MediaProperty mediaProperty) { int hr = 0; AMMediaType mediaType = new AMMediaType(); hr = mediaDet.get_StreamMediaType(mediaType); DsError.ThrowExceptionForHR(hr); mediaProperty.videoSubType = mediaType.subType; double streamLength; hr = mediaDet.get_StreamLength(out streamLength); DsError.ThrowExceptionForHR(hr); mediaProperty.videoLength = TimeSpan.FromSeconds(streamLength); if (mediaType.formatType == FormatType.VideoInfo) { VideoInfoHeader videoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader)); mediaProperty.resolution = new Size(videoHeader.BmiHeader.Width, videoHeader.BmiHeader.Height); mediaProperty.bitsPerPixel = videoHeader.BmiHeader.BitCount; mediaProperty.fourCC = FourCCToString(videoHeader.BmiHeader.Compression); } }
private void SetPinVideoImageSize(string pinName)
{
    int hr;
    IPin pin = DsFindPin.ByDirection(_videoEncoder, PinDirection.Output, 0);
    if (pin != null)
    {
        AppLogger.Message("VideoCaptureDevice: found output pin");
    }

    // get video stream interfaces
    AppLogger.Message("VideoCaptureDevice:get Video stream control interface (IAMStreamConfig)");
    IAMStreamConfig streamConfig = (IAMStreamConfig)pin;

    AMMediaType media;
    hr = streamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);

    // Modify the existing format block in place rather than replacing it with an
    // empty VideoInfoHeader, which would drop the bit depth, compression and
    // frame rate information and leak the original format block.
    VideoInfoHeader v = new VideoInfoHeader();
    Marshal.PtrToStructure(media.formatPtr, v);
    v.BmiHeader.Width = 320;
    v.BmiHeader.Height = 240;
    Marshal.StructureToPtr(v, media.formatPtr, false);

    hr = streamConfig.SetFormat(media);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(media);
}
/// <summary>
/// Retrieves the value of one member of the IAMStreamConfig format block.
/// Helper function for several properties that expose
/// video/audio settings from IAMStreamConfig.GetFormat().
/// IAMStreamConfig.GetFormat() returns an AMMediaType struct.
/// AMMediaType.formatPtr points to a format block structure.
/// This format block structure may be one of several
/// types, the type being determined by AMMediaType.formatType.
/// </summary>
protected object getStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName)
{
    if (streamConfig == null)
        throw new NotSupportedException();
    assertStopped();
    derenderGraph();

    object returnValue = null;
#if DSHOWNET
    IntPtr pmt = IntPtr.Zero;
#endif
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
#if DSHOWNET
        int hr = streamConfig.GetFormat(out pmt);
#else
        int hr = streamConfig.GetFormat(out mediaType);
#endif
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
#if DSHOWNET
        Marshal.PtrToStructure(pmt, mediaType);
#endif

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Extract the field's current value
        returnValue = fieldInfo.GetValue(formatStruct);
    }
    finally
    {
        DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
        Marshal.FreeCoTaskMem(pmt);
#endif
    }

    renderGraph();
    startPreviewIfNeeded();

    return(returnValue);
}
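// Usage sketch only (videoStreamConfig is assumed to be an IAMStreamConfig for a video
// capture pin held elsewhere in this class; it is not defined in the snippet above):
// read the current frame rate and frame size through the helper. The field names are
// those of VideoInfoHeader / BitmapInfoHeader in DirectShowLib.
long avgTimePerFrame = (long)getStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame");
double fps = 10000000.0 / avgTimePerFrame;   // AvgTimePerFrame is in 100 ns units
BitmapInfoHeader bmi = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
Console.WriteLine("Capturing {0}x{1} at {2:F1} fps", bmi.Width, bmi.Height, fps);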
private object GetField(AMMediaType mediaType, String fieldName) { object formatStruct; if ( mediaType.formatType == FormatType.WaveEx ) formatStruct = new WaveFormatEx(); else if ( mediaType.formatType == FormatType.VideoInfo ) formatStruct = new VideoInfoHeader(); else if ( mediaType.formatType == FormatType.VideoInfo2 ) formatStruct = new VideoInfoHeader2(); else throw new NotSupportedException( "This device does not support a recognized format block." ); // Retrieve the nested structure Marshal.PtrToStructure( mediaType.formatPtr, formatStruct ); // Find the required field Type structType = formatStruct.GetType(); FieldInfo fieldInfo = structType.GetField(fieldName); if(fieldInfo != null) { return fieldInfo.GetValue(formatStruct); } return null; }
/// <summary> Read and store the properties </summary> private void SaveSizeInfo(ISampleGrabber sampGrabber) { int hr; // Get the media type from the SampleGrabber AMMediaType media = new AMMediaType(); hr = sampGrabber.GetConnectedMediaType(media); DsError.ThrowExceptionForHR(hr); if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } // Grab the size info videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); m_videoWidth = videoInfoHeader.BmiHeader.Width; m_videoHeight = videoInfoHeader.BmiHeader.Height; m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8); DsUtils.FreeAMMediaType(media); media = null; }
// Set the Framerate, and video size private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight) { int hr; object o; AMMediaType media; // Find the stream config interface hr = capGraph.FindInterface( PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); IAMStreamConfig videoStreamConfig = o as IAMStreamConfig; if (videoStreamConfig == null) { throw new Exception("Failed to get IAMStreamConfig"); } // Get the existing format block hr = videoStreamConfig.GetFormat(out media); DsError.ThrowExceptionForHR(hr); // copy out the videoinfoheader VideoInfoHeader v = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, v); // if overriding the framerate, set the frame rate if (iFrameRate > 0) { v.AvgTimePerFrame = 10000000 / iFrameRate; } // if overriding the width, set the width if (iWidth > 0) { v.BmiHeader.Width = iWidth; } // if overriding the Height, set the Height if (iHeight > 0) { v.BmiHeader.Height = iHeight; } // Copy the media structure back Marshal.StructureToPtr(v, media.formatPtr, false); // Set the new format hr = videoStreamConfig.SetFormat(media); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(media); media = null; }
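// Usage sketch only for SetConfigParms above: capGraph and capFilter are assumed to be
// the ICaptureGraphBuilder2 and capture IBaseFilter built elsewhere in this class.
// Passing 0 leaves that setting unchanged, matching the checks inside the method.
SetConfigParms(capGraph, capFilter, 30, 640, 480);   // 30 fps, 640x480
SetConfigParms(capGraph, capFilter, 0, 1280, 720);   // keep frame rate, change size only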
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
        throw new NotSupportedException();

    object returnValue = null;
    IntPtr pmt = IntPtr.Zero;
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
        int hr = streamConfig.GetFormat(out pmt);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
        Marshal.PtrToStructure(pmt, mediaType);

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Update the value of the field
        fieldInfo.SetValue(formatStruct, newValue);

        // PtrToStructure copies the data so we need to copy it back
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes
        hr = streamConfig.SetFormat(mediaType);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    finally
    {
        //DsUtils.FreeAMMediaType(mediaType);
        Marshal.FreeCoTaskMem(pmt);
    }

    return (returnValue);
}
/// <summary>
/// Set the value of one member of the IAMStreamConfig format block.
/// Helper function for several properties that expose
/// video/audio settings from IAMStreamConfig.GetFormat().
/// IAMStreamConfig.GetFormat() returns an AMMediaType struct.
/// AMMediaType.formatPtr points to a format block structure.
/// This format block structure may be one of several
/// types, the type being determined by AMMediaType.formatType.
/// </summary>
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
        throw new NotSupportedException();
    assertStopped();
    derenderGraph();

    object returnValue = null;
#if DSHOWNET
    IntPtr pmt = IntPtr.Zero;
#endif
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
#if DSHOWNET
        int hr = streamConfig.GetFormat(out pmt);
#else
        int hr = streamConfig.GetFormat(out mediaType);
#endif
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
#if DSHOWNET
        Marshal.PtrToStructure(pmt, mediaType);
#endif

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Update the value of the field
        fieldInfo.SetValue(formatStruct, newValue);

        // Update fields that may depend on specific values of other attributes
        if (mediaType.formatType == FormatType.WaveEx)
        {
            WaveFormatEx waveFmt = formatStruct as WaveFormatEx;
            waveFmt.nBlockAlign = (short)(waveFmt.nChannels * waveFmt.wBitsPerSample / 8);
            waveFmt.nAvgBytesPerSec = waveFmt.nBlockAlign * waveFmt.nSamplesPerSec;
        }

        // PtrToStructure copies the data so we need to copy it back
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes
        hr = streamConfig.SetFormat(mediaType);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    finally
    {
        DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
        Marshal.FreeCoTaskMem(pmt);
#endif
    }

    renderGraph();
    startPreviewIfNeeded();

    return(returnValue);
}
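// Usage sketch only (videoStreamConfig is assumed to be an IAMStreamConfig held by this
// class, and the matching getStreamConfigSetting helper shown earlier is assumed to be
// available in the same class): write settings back through the helper above. Because
// BmiHeader is exposed as a single field, frame size changes are made by reading the
// field, editing it, and writing the whole BitmapInfoHeader back.
setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", (long)(10000000 / 25)); // 25 fps
BitmapInfoHeader bmi = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
bmi.Width = 640;
bmi.Height = 480;
setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmi);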