Example #1
 public bool IsCompatible(VideoFormatHelper.SupportedVideoFormat videoFormat)
 {
     OcrConfiguration config = OcrSettings.Instance[Name];
     return
         config != null &&
         config.Alignment.Width == videoFormat.Width &&
         config.Alignment.Height == videoFormat.Height;
 }
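A minimal usage sketch for the check above, assuming the object exposing IsCompatible is available as ocrEngineManager and the UI has a checkbox named cbxRunOcr (both names are illustrative, not from the source):

 private void UpdateOcrAvailability(VideoFormatHelper.SupportedVideoFormat selectedVideoFormat)
 {
     // Only offer OCR when the named configuration's alignment matches the capture geometry
     bool canRunOcr = ocrEngineManager.IsCompatible(selectedVideoFormat);
     cbxRunOcr.Enabled = canRunOcr;
     if (!canRunOcr)
         cbxRunOcr.Checked = false;
 }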
Example #2
        private void SetupGraphInternal(DsDevice dev, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
        {
            // Capture Source (Capture/Video) --> (Input) Sample Grabber (Output) --> (In) Null Renderer

            IBaseFilter nullRenderer = null;

            try
            {
                // Add the video device
                int hr = filterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
                DsError.ThrowExceptionForHR(hr);

                if (capFilter != null)
                    // Apply the selected format and any frame rate / size overrides
                    SetConfigParms(capBuilder, capFilter, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight);

                IBaseFilter baseGrabFlt = (IBaseFilter)samplGrabber;
                ConfigureSampleGrabber(samplGrabber);

                hr = filterGraph.AddFilter(baseGrabFlt, "OccuRec AVI Video Grabber");
                DsError.ThrowExceptionForHR(hr);

                // Connect the video device output to the sample grabber
                IPin videoCaptureOutputPin = DsHelper.FindPin(capFilter, PinDirection.Output, MediaType.Video, PinCategory.Capture, "Capture");
                IPin grabberInputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
                hr = filterGraph.Connect(videoCaptureOutputPin, grabberInputPin);
                DsError.ThrowExceptionForHR(hr);
                Marshal.ReleaseComObject(videoCaptureOutputPin);
                Marshal.ReleaseComObject(grabberInputPin);

                // Add the null renderer to the graph
                nullRenderer = (IBaseFilter)new NullRenderer();
                hr = filterGraph.AddFilter(nullRenderer, "OccuRec AVI Video Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Connect the sample grabber to the null renderer (so frame samples will be coming through)
                IPin grabberOutputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
                IPin rendererInputPin = DsFindPin.ByDirection(nullRenderer, PinDirection.Input, 0);
                hr = filterGraph.Connect(grabberOutputPin, rendererInputPin);
                DsError.ThrowExceptionForHR(hr);
                Marshal.ReleaseComObject(grabberOutputPin);
                Marshal.ReleaseComObject(rendererInputPin);
            }
            finally
            {
                if (nullRenderer != null)
                    Marshal.ReleaseComObject(nullRenderer);
            }
        }
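Note that if filterGraph.Connect throws in the method above, the pin references obtained just before it are never released. A small sketch of a more defensive connect helper, assuming the same DirectShowLib types; sourceFilter and sinkFilter are placeholder parameter names, not names from the source:

        private void ConnectFirstPins(IFilterGraph2 graph, IBaseFilter sourceFilter, IBaseFilter sinkFilter)
        {
            IPin outputPin = null;
            IPin inputPin = null;
            try
            {
                outputPin = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, 0);
                inputPin = DsFindPin.ByDirection(sinkFilter, PinDirection.Input, 0);

                int hr = graph.Connect(outputPin, inputPin);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Release the pin references even when Connect fails
                if (outputPin != null) Marshal.ReleaseComObject(outputPin);
                if (inputPin != null) Marshal.ReleaseComObject(inputPin);
            }
        }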
Example #3
        private void SetConfigParms(ICaptureGraphBuilder2 capBuilder, IBaseFilter capFilter, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
        {
            object o;
            AMMediaType media;
            IAMStreamConfig videoStreamConfig;
            IAMVideoControl videoControl = capFilter as IAMVideoControl;

            int hr = capBuilder.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            videoStreamConfig = o as IAMStreamConfig;
            try
            {
                if (videoStreamConfig == null)
                {
                    throw new Exception("Failed to get IAMStreamConfig");
                }

                int iCount = 0, iSize = 0;
                hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
                DsError.ThrowExceptionForHR(hr);

                VideoInfoHeader vMatching = null;
                VideoFormatHelper.SupportedVideoFormat entry = null;

                IntPtr taskMemPointer = Marshal.AllocCoTaskMem(iSize);

                AMMediaType pmtConfig = null;
                for (int iFormat = 0; iFormat < iCount; iFormat++)
                {
                    hr = videoStreamConfig.GetStreamCaps(iFormat, out pmtConfig, taskMemPointer);
                    DsError.ThrowExceptionForHR(hr);

                    vMatching = (VideoInfoHeader)Marshal.PtrToStructure(pmtConfig.formatPtr, typeof(VideoInfoHeader));

                    if (vMatching.BmiHeader.BitCount > 0)
                    {
                        entry = new VideoFormatHelper.SupportedVideoFormat()
                        {
                            Width = vMatching.BmiHeader.Width,
                            Height = vMatching.BmiHeader.Height,
                            BitCount = vMatching.BmiHeader.BitCount,
                            FrameRate = 10000000.0 / vMatching.AvgTimePerFrame
                        };

                        if (entry.Matches(selectedFormat))
                        {
                            // WE FOUND IT !!!
                            break;
                        }
                    }

                    vMatching = null;

                    // This format didn't match - free its media type before fetching the next one
                    DsUtils.FreeAMMediaType(pmtConfig);
                    pmtConfig = null;
                }

                if (vMatching != null)
                {
                    hr = videoStreamConfig.SetFormat(pmtConfig);
                    DsError.ThrowExceptionForHR(hr);

                    iFrameRate = 10000000.0f / vMatching.AvgTimePerFrame;
                    iWidth = vMatching.BmiHeader.Width;
                    iHeight = vMatching.BmiHeader.Height;
                }
                else
                {
                    hr = videoStreamConfig.GetFormat(out media);
                    DsError.ThrowExceptionForHR(hr);

                    // Copy out the videoinfoheader
                    VideoInfoHeader v = new VideoInfoHeader();
                    Marshal.PtrToStructure(media.formatPtr, v);

                    if (selectedFormat != null && iWidth == 0 && iHeight == 0)
                    {
                        // Use the config from the selected format
                        iWidth = selectedFormat.Width;
                        iHeight = selectedFormat.Height;
                        iFrameRate = (float) selectedFormat.FrameRate;
                    }

                    // If overriding the framerate, set the frame rate
                    if (iFrameRate > 0)
                    {
                        int newAvgTimePerFrame = (int)Math.Round(10000000 / iFrameRate);
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.AvgTimePerFrame from {0} to {1}", v.AvgTimePerFrame, newAvgTimePerFrame));
                        v.AvgTimePerFrame = newAvgTimePerFrame;
                    }
                    else
                        iFrameRate = 10000000.0f / v.AvgTimePerFrame;

                    // If overriding the width, set the width
                    if (iWidth > 0)
                    {
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Width from {0} to {1}", v.BmiHeader.Width, iWidth));
                        v.BmiHeader.Width = iWidth;
                    }
                    else
                        iWidth = v.BmiHeader.Width;

                    // If overriding the Height, set the Height
                    if (iHeight > 0)
                    {
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Height from {0} to {1}", v.BmiHeader.Height, iHeight));
                        v.BmiHeader.Height = iHeight;
                    }
                    else
                        iHeight = v.BmiHeader.Height;

                    // Copy the media structure back
                    Marshal.StructureToPtr(v, media.formatPtr, false);

                    // Set the new format
                    hr = videoStreamConfig.SetFormat(media);
                    try
                    {
                        DsError.ThrowExceptionForHR(hr);
                    }
                    catch (Exception ex)
                    {
                        // If setting the format failed then log the error but try to continue
                        Trace.WriteLine(ex.GetFullStackTrace());
                    }

                    DsUtils.FreeAMMediaType(media);
                    media = null;
                }

                Marshal.FreeCoTaskMem(taskMemPointer);
                DsUtils.FreeAMMediaType(pmtConfig);
                pmtConfig = null;

                // Fix upsidedown video
                if (videoControl != null)
                {
                    // NOTE: Flipping detection and fixing doesn't seem to work!

                    //IPin pPin = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                    //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);

                    //pPin = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
                    //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(videoStreamConfig);
            }
        }
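The 10,000,000 constant used throughout the method above comes from AvgTimePerFrame being expressed in 100-nanosecond units. A small conversion sketch; the helper names are illustrative only:

        private static float FrameRateFromAvgTimePerFrame(long avgTimePerFrame)
        {
            // e.g. 400000 -> 25.00 fps (PAL), 333667 -> 29.97 fps (NTSC)
            return 10000000.0f / avgTimePerFrame;
        }

        private static long AvgTimePerFrameFromFrameRate(float frameRate)
        {
            // e.g. 25.00 fps -> 400000, 29.97 fps -> 333667
            return (long)Math.Round(10000000.0 / frameRate);
        }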
Example #4
        public void SetupGraph(DsDevice dev, bool runOCR, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
        {
            try
            {
                filterGraph = (IFilterGraph2)new FilterGraph();
                mediaCtrl = filterGraph as IMediaControl;

                capBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                samplGrabber = (ISampleGrabber)new SampleGrabber();

                int hr = capBuilder.SetFiltergraph(filterGraph);
                DsError.ThrowExceptionForHR(hr);

                if (Settings.Default.VideoGraphDebugMode)
                {
                    if (rot != null)
                    {
                        rot.Dispose();
                        rot = null;
                    }
                    rot = new DsROTEntry(filterGraph);
                }

                SetupGraphInternal(dev, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight);

                // Now that sizes are fixed/known, store the sizes
                SaveSizeInfo(samplGrabber);

                crossbar = CrossbarHelper.SetupTunerAndCrossbar(capBuilder, capFilter);

                latestBitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb);
                fullRect = new Rectangle(0, 0, latestBitmap.Width, latestBitmap.Height);

                NativeHelpers.SetupCamera(
                    Settings.Default.CameraModel,
                    iWidth, iHeight,
                    Settings.Default.HorizontalFlip,
                    Settings.Default.VerticalFlip,
                    Settings.Default.IsIntegrating,
                    (float)Settings.Default.MinSignatureDiffRatio,
                    (float)Settings.Default.MinSignatureDiff,
                    Settings.Default.GammaDiff,
                    Settings.Default.ForceNewFrameOnLockedRate,
                    dev.Name,
                    selectedFormat.AsSerialized(),
                    selectedFormat.FrameRate);

                NativeHelpers.SetupAav(Settings.Default.RecordStatusSectionOnly ? AavImageLayout.StatusSectionOnly : Settings.Default.AavImageLayout, Settings.Default.AavCompression);

                ocrEnabled = false;
                string errorMessage;

                if (runOCR)
                {
                    OcrConfiguration ocrConfig = OcrSettings.Instance[Settings.Default.SelectedOcrConfiguration];

                    errorMessage = NativeHelpers.SetupBasicOcrMetrix(ocrConfig);
                    if (errorMessage != null)
                    {
                        if (callbacksObject != null)
                            callbacksObject.OnError(-1, errorMessage);
                    }
                    else
                    {
                        NativeHelpers.SetupOcr(ocrConfig);
                        ocrEnabled = true;
                    }
                }
                else
                {
                    errorMessage = NativeHelpers.SetupTimestampPreservation(false, 0, 0);
                    if (errorMessage != null && callbacksObject != null)
                        callbacksObject.OnError(-1, errorMessage);
                }
            }
            catch
            {
                CloseResources();

                if (callbacksObject != null)
                    callbacksObject.OnError(-1, "Error initialising the camera. The selected video mode may not be supported by the camera.");

                throw;
            }
        }
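A minimal calling sketch, assuming an instance of the class above is available as videoGraph and that device and format selection has already been done elsewhere (selectedDevice and selectedFormat are placeholders):

        float frameRate = 0;
        int width = 0;
        int height = 0;

        videoGraph.SetupGraph(selectedDevice, true, selectedFormat, ref frameRate, ref width, ref height);

        Trace.WriteLine(string.Format("Capturing {0}x{1} at {2:0.00} fps", width, height, frameRate));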
Example #5
 private void UpdateSelectedVideoFormatControls(VideoFormatHelper.SupportedVideoFormat selectedVideoFormat)
 {
     if (selectedVideoFormat != null)
     {
         if (selectedVideoFormat.IsPal())
         {
             rbPAL.Checked = true;
             pnlSimpleFrameRate.Visible = true;
             cbxVideoFormats.Visible = false;
         }
         else if (selectedVideoFormat.IsNtsc())
         {
             rbNTSC.Checked = true;
             pnlSimpleFrameRate.Visible = true;
             cbxVideoFormats.Visible = false;
         }
         else
         {
             pnlSimpleFrameRate.Visible = false;
             cbxVideoFormats.Visible = true;
         }
     }
     else
     {
         pnlSimpleFrameRate.Visible = true;
         cbxVideoFormats.Visible = false;
     }
 }
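The IsPal() and IsNtsc() implementations are not shown here; a plausible sketch, assuming they key off the standard analogue geometries and nominal frame rates exposed by SupportedVideoFormat (an assumption, not the project's actual code):

 public bool IsPal()
 {
     // 720x576 at a nominal 25 fps
     return Width == 720 && Height == 576 && Math.Abs(FrameRate - 25.0) < 0.5;
 }

 public bool IsNtsc()
 {
     // 720x480 at a nominal 29.97 fps
     return Width == 720 && Height == 480 && Math.Abs(FrameRate - 29.97) < 0.5;
 }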