// Example #1
        /// <summary>
        /// Builds and starts the DirectShow playback graph for the given file.
        /// Resets the player state, acquires the graph interfaces, hooks graph event
        /// notifications into the active form, runs the graph and publishes a
        /// GUI_MSG_PLAYBACK_STARTED message on success.
        /// </summary>
        /// <param name="strFile">URL or path of the media to play. When it equals
        /// DUMMY_URL the previously stored file name is kept (MP 1.3 Alpha workaround).</param>
        /// <returns>true when the graph entered the running state; false on any failure
        /// (interfaces, notify window, video size, or graph run).</returns>
        public override bool Play(string strFile)
        {
            // Reset playback state for a fresh start.
            updateTimer = DateTime.Now;
            m_speedRate = 10000;
            m_bVisible  = false;
            m_iVolume   = 100;
            m_state     = PlayState.Init;
            if (strFile != DUMMY_URL)
            {
                m_strCurrentFile = strFile;                       // hack to get around the MP 1.3 Alpha bug with non http URLs
            }
            m_bFullScreen = true;
            m_ar          = GUIGraphicsContext.ARType;
            VideoRendererStatistics.VideoState = VideoRendererStatistics.State.VideoPresent;
            _updateNeeded = true;
            Logger.Instance.Info("AirPlayerVideo: Play '{0}'", m_strCurrentFile);

            m_bStarted = false;
            // Build the filter graph; on failure tear everything down again.
            if (!GetInterfaces())
            {
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }

            AnalyseStreams();
            SelectSubtitles();
            SelectAudioLanguage();
            OnInitialized();

            // Route graph events (WM_GRAPHNOTIFY) to the active form's message loop.
            int hr = mediaEvt.SetNotifyWindow(GUIGraphicsContext.ActiveForm, WM_GRAPHNOTIFY, IntPtr.Zero);

            if (hr < 0)
            {
                Error.SetError("Unable to play movie", "Can not set notifications");
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }
            // Parent the video window to the application window so it renders inline.
            if (videoWin != null)
            {
                videoWin.put_Owner(GUIGraphicsContext.ActiveForm);
                videoWin.put_WindowStyle(
                    (WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipChildren + (int)WindowStyle.ClipSiblings));
                videoWin.put_MessageDrain(GUIGraphicsContext.form.Handle);
            }
            if (basicVideo != null)
            {
                hr = basicVideo.GetVideoSize(out m_iVideoWidth, out m_iVideoHeight);
                if (hr < 0)
                {
                    Error.SetError("Unable to play movie", "Can not find movie width/height");
                    m_strCurrentFile = "";
                    CloseInterfaces();
                    return(false);
                }
            }

            DirectShowUtil.SetARMode(graphBuilder, AspectRatioMode.Stretched);

            try
            {
                // IMediaControl.Run returns S_OK (0) or S_FALSE (1); negative HRESULTs throw here.
                hr = mediaCtrl.Run();
                DsError.ThrowExceptionForHR(hr);
                if (hr == 1) // S_FALSE from IMediaControl::Run means: The graph is preparing to run, but some filters have not completed the transition to a running state.
                {
                    // wait max. 20 seconds for the graph to transition to the running state
                    DateTime    startTime = DateTime.Now;
                    FilterState filterState;
                    do
                    {
                        Thread.Sleep(100);
                        hr = mediaCtrl.GetState(100, out filterState); // check with timeout max. 10 times a second if the state changed
                    }while ((hr != 0) && ((DateTime.Now - startTime).TotalSeconds <= 20));
                    if (hr != 0)                                       // S_OK
                    {
                        DsError.ThrowExceptionForHR(hr);
                        throw new Exception(string.Format("IMediaControl.GetState after 20 seconds: 0x{0} - '{1}'", hr.ToString("X8"), DsError.GetErrorText(hr)));
                    }
                }
            }
            catch (Exception error)
            {
                // The hr check below turns this warning into a full failure path.
                Logger.Instance.Warn("AirPlayerVideo: Unable to play with reason: {0}", error.Message);
            }
            if (hr != 0) // S_OK
            {
                Error.SetError("Unable to play movie", "Unable to start movie");
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }

            // Announce playback start to the GUI and cache the video window geometry.
            GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_PLAYBACK_STARTED, 0, 0, 0, 0, 0, null);

            msg.Label = CurrentFile;
            GUIWindowManager.SendThreadMessage(msg);
            m_state       = PlayState.Playing;
            m_iPositionX  = GUIGraphicsContext.VideoWindow.X;
            m_iPositionY  = GUIGraphicsContext.VideoWindow.Y;
            m_iWidth      = GUIGraphicsContext.VideoWindow.Width;
            m_iHeight     = GUIGraphicsContext.VideoWindow.Height;
            m_ar          = GUIGraphicsContext.ARType;
            _updateNeeded = true;
            SetVideoWindow();
            mediaPos.get_Duration(out m_dDuration);
            Logger.Instance.Info("AirPlayerVideo: Duration {0} sec", m_dDuration.ToString("F"));
            return(true);
        }
// Example #2
        /// <summary>
        /// Try to connect 2 pins in the FilterGraph.
        /// </summary>
        /// <param name="output">The upstream (output) pin.</param>
        /// <param name="input">The downstream (input) pin.</param>
        /// <returns>false if pins cannot be connected</returns>
        bool Graph_BeforePinsConnected(DaggerOutputPin output, DaggerInputPin input)
        {
            IPin outPin = (output as DSOutputPin)._pin;
            IPin inPin  = (input as DSInputPin)._pin;

            IPin existing;
            outPin.ConnectedTo(out existing);

            // The pair is already connected inside the DSGraph, so allow it.
            if (existing == inPin)
            {
                Marshal.ReleaseComObject(existing);
                return true;
            }

            if (existing != null)
            {
                Marshal.ReleaseComObject(existing);
            }

            // The graph must not be running while (re)wiring pins.
            Stop();

            int result = 0;

            if (_connectIntelligent)
            {
                // Let the GraphBuilder insert intermediate filters if required.
                result = _graphBuilder.Connect(outPin, inPin);
                if (result == 0 || result == DsResults.S_PartialRender)
                {
                    // Mirror the FilterGraph changes into the DaggerGraph.
                    dsDaggerUIGraph1.SyncGraphs(null);

                    // sync the pins on the nodes
                    (output.ParentNode as DSFilterNode).SyncPins();
                    (input.ParentNode as DSFilterNode).SyncPins();

                    // SyncGraphs already created the needed connections,
                    // so return false to cancel this connection.
                    return false;
                }
            }
            else
            {
                result = _graph.ConnectDirect(outPin, inPin, null);
                if (result == 0)
                {
                    // sync the pins on the nodes
                    (output.ParentNode as DSFilterNode).SyncPins();
                    (input.ParentNode as DSFilterNode).SyncPins();

                    return true;
                }
            }

            // Connection failed: abort any pending pin-connect operation and report it.
            dsDaggerUIGraph1.StopPinConnect();
            MessageBox.Show(DsError.GetErrorText(result));

            // A failed attempt may still have created new pins, so resync.
            (output.ParentNode as DSFilterNode).SyncPins();
            (input.ParentNode as DSFilterNode).SyncPins();

            return false;
        }
// Example #3
        /// <summary>
        /// Sends the diseq command to a Conexant-based BDA tuner by filling the
        /// driver's DiSEqC message structure in unmanaged memory and submitting it
        /// through the tuner's property set.
        /// </summary>
        /// <param name="channel">The channel.</param>
        /// <param name="parameters">The channels scanning parameters.</param>
        public void SendDiseqCommand(ScanParameters parameters, DVBSChannel channel)
        {
            // Only applicable when a Conexant tuner extension was detected.
            if (_isConexant == false)
            {
                return;
            }

            // Antenna number 0 means no DiSEqC switching is needed for this channel.
            int antennaNr = BandTypeConverter.GetAntennaNr(channel);

            if (antennaNr == 0)
            {
                return;
            }

            //clear the message params before writing in order to avoid corruption of the diseqc message.
            // NOTE(review): 188 appears to be the size of the driver's DiSEqC message
            // struct (field offsets 160..184 are written below) — confirm against the
            // Conexant SDK headers.
            for (int i = 0; i < 188; ++i)
            {
                Marshal.WriteByte(_ptrDiseqc, i, 0x00);
            }
            bool hiBand = BandTypeConverter.IsHiBand(channel, parameters);
            //bit 0	(1)	: 0=low band, 1 = hi band
            //bit 1 (2) : 0=vertical, 1 = horizontal
            //bit 3 (4) : 0=satellite position A, 1=satellite position B
            //bit 4 (8) : 0=switch option A, 1=switch option  B
            // LNB    option  position
            // 1        A         A
            // 2        A         B
            // 3        B         A
            // 4        B         B

            // Horizontal/left-circular polarisation sets bit 1 of the command nibble.
            bool isHorizontal = ((channel.Polarisation == Polarisation.LinearH) ||
                                 (channel.Polarisation == Polarisation.CircularL));
            byte cmd = 0xf0;

            cmd |= (byte)(hiBand ? 1 : 0);
            cmd |= (byte)((isHorizontal) ? 2 : 0);
            cmd |= (byte)((antennaNr - 1) << 2);

            const int len    = 188;
            ulong     diseqc = 0xE0103800; //currently committed switches only. i.e. ports 1-4

            // Fold the option/position nibble into the data byte of the 4-byte command.
            diseqc += cmd;
            //write the diseqc command to memory
            Marshal.WriteByte(_ptrDiseqc, 0, (byte)((diseqc >> 24) & 0xff));                         //framing byte
            Marshal.WriteByte(_ptrDiseqc, 1, (byte)((diseqc >> 16) & 0xff));                         //address byte
            Marshal.WriteByte(_ptrDiseqc, 2, (byte)((diseqc >> 8) & 0xff));                          //command byte
            Marshal.WriteByte(_ptrDiseqc, 3, (byte)(diseqc & 0xff));                                 //data byte (port group 0)
            Marshal.WriteInt32(_ptrDiseqc, 160, 4);                                                  //send_message_length
            Marshal.WriteInt32(_ptrDiseqc, 164, 0);                                                  //receive_message_length
            Marshal.WriteInt32(_ptrDiseqc, 168, 3);                                                  //amplitude_attenuation
            if (antennaNr == 1)                                                                      //for simple diseqc switches (i.e. 22KHz tone burst)
            {
                Marshal.WriteByte(_ptrDiseqc, 172, (int)BurstModulationType.TONE_BURST_UNMODULATED); //
            }
            else
            {
                Marshal.WriteByte(_ptrDiseqc, 172, (int)BurstModulationType.TONE_BURST_MODULATED);
                //default to tone_burst_modulated
            }
            Marshal.WriteByte(_ptrDiseqc, 176, (int)DisEqcVersion.DISEQC_VER_1X); //default
            Marshal.WriteByte(_ptrDiseqc, 180, (int)RxMode.RXMODE_NOREPLY);       //default
            Marshal.WriteByte(_ptrDiseqc, 184, 1);                                //last_message TRUE */

            //check the command
            // Build a hex dump of the command bytes and struct fields for the debug log.
            string txt = "";

            for (int i = 0; i < 4; ++i)
            {
                txt += String.Format("0x{0:X} ", Marshal.ReadByte(_ptrDiseqc, i));
            }
            for (int i = 160; i < 188; i = (i + 4))
            {
                txt += String.Format("0x{0:X} ", Marshal.ReadInt32(_ptrDiseqc, i));
            }
            Log.Log.Debug("Conexant BDA: SendDiseqCommand: {0}", txt);

            // Hand the filled structure to the driver; same buffer is used for in and out.
            int hr = _propertySet.Set(BdaTunerExtentionProperties, (int)BdaTunerExtension.KSPROPERTY_BDA_DISEQC, _ptrDiseqc,
                                      len, _ptrDiseqc, len);

            if (hr != 0)
            {
                Log.Log.Info("Conexant BDA: SendDiseqCommand returned: 0x{0:X} - {1}", hr, DsError.GetErrorText(hr));
            }
        }
// Example #4
        /// <summary>
        /// Connects to the property changed events of the camera settings.
        /// </summary>
        //private void Initialize()
        //{
        //    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
        //    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
        //    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;

        //    //stopwatch = new Stopwatch();
        //}

        /// <summary>
        /// Build the capture graph for grabber.
        /// </summary>
        /// <param name="dev">The index of the new capture device.</param>
        /// <param name="frameRate">The framerate to use.</param>
        /// <param name="width">The width to use.</param>
        /// <param name="height">The height to use.</param>
        /// <returns>True, if succesfull, otherwise false.</returns>
        private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
        {
            int hr;

            fps           = frameRate; // Not measured, only to expose FPS externally
            cameraControl = null;
            capFilter     = null;

            // Get the graphbuilder object
            graphBuilder = (IFilterGraph2) new FilterGraph();
            mediaControl = graphBuilder as IMediaControl;

            try
            {
                // Create the ICaptureGraphBuilder2
                capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Create the SampleGrabber interface
                sampGrabber = (ISampleGrabber) new SampleGrabber();

                // Start building the graph
                // NOTE(review): hr is not checked here (error logging is commented out);
                // a failed SetFiltergraph would only surface later in RenderStream.
                hr = capGraph.SetFiltergraph(graphBuilder);
                //if (hr != 0)
                //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
                //                          DsError.GetErrorText(hr));

#if DEBUG
                // Register the graph in the Running Object Table so GraphEdit can attach.
                this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

                this.capFilter = CreateFilter(
                    FilterCategory.VideoInputDevice,
                    dev.Name);
                if (this.capFilter != null)
                {
                    hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
                    DsError.ThrowExceptionForHR(hr);
                }

                //// Add the video device
                //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
                //if (hr != 0)
                //    ErrorLogger.WriteLine(
                //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
                //        DsError.GetErrorText(hr));

                var baseGrabFlt = (IBaseFilter)sampGrabber;

                ConfigureSampleGrabber(sampGrabber);

                // Add the frame grabber to the graph
                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");

                //if (hr != 0)
                //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
                //                          DsError.GetErrorText(hr));

                // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM

                /*
                 * if (!defaultMode)
                 * {
                 *  m_icc = capFilter as IAMCameraControl;
                 *  CameraControlFlags CamFlags = new CameraControlFlags();
                 *  int pMin, pMax, pStep, pDefault;
                 *
                 *  hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
                 *  m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
                 * }
                 */


                //IBaseFilter smartTee = new SmartTee() as IBaseFilter;

                //// Add the smart tee filter to the graph
                //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
                //Marshal.ThrowExceptionForHR(hr);

                // Connect the video source output to the smart tee
                //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

                // Wire the capture pin of the source filter to the sample grabber.
                hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
                var errorText = DsError.GetErrorText(hr); // kept for debugging; not otherwise used

                cameraControl = capFilter as IAMCameraControl;

                // Set videoProcAmp
                object obj;
                // IID of IBaseFilter, used to bind the device moniker to its filter object.
                var    iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
                DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
                    null,
                    null,
                    ref iid_IBaseFilter,
                    out obj);

                videoProcAmp = obj as IAMVideoProcAmp;

                // If any of the default config items are set
                if (frameRate + height + width > 0)
                {
                    SetConfigParms(capGraph, capFilter, frameRate, width, height);
                }

                // Check for succesful rendering, if this failed the class cannot be used, so dispose the resources and return false.
                if (hr < 0)
                {
                    Cleanup();
                    return(false);
                }
                else
                {
                    // Otherwise update the SampleGrabber.
                    SaveSizeInfo(sampGrabber);
                    hr = sampGrabber.SetBufferSamples(false);

                    if (hr == 0)
                    {
                        hr = sampGrabber.SetOneShot(false);
                        hr = sampGrabber.SetCallback(this, 1);
                    }

                    //if (hr < 0)
                    //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
                }
            }
            catch (Exception ex)
            {
                // NOTE(review): the exception is swallowed without logging ('ex' is unused;
                // the ProcessException call below is commented out) — failures here are
                // only observable via the false return value.
                //ErrorLogger.ProcessException(ex, false);

                Cleanup();
                return(false);
            }

            return(true);
        }
// Example #5
        /// <summary>
        /// Translates a multimedia-streaming HRESULT into a readable description,
        /// deferring to <see cref="DsError.GetErrorText"/> for any code that is not
        /// one of the MsResults-specific values.
        /// </summary>
        /// <param name="hr">The HRESULT to translate.</param>
        /// <returns>A human-readable description of the error code.</returns>
        public static string GetErrorText(int hr)
        {
            switch (hr)
            {
            case MsResults.S_Pending:
                return "Sample update is not yet complete.";

            case MsResults.S_NoUpdate:
                return "Sample was not updated after forced completion.";

            case MsResults.S_EndOfStream:
                return "End of stream. Sample not updated.";

            case MsResults.E_SampleAlloc:
                return "An IMediaStream object could not be removed from an IMultiMediaStream object because it still contains at least one allocated sample.";

            case MsResults.E_PurposeId:
                return "The specified purpose ID can't be used for the call.";

            case MsResults.E_NoStream:
                return "No stream can be found with the specified attributes.";

            case MsResults.E_NoSeeking:
                return "Seeking not supported for this IMultiMediaStream object.";

            case MsResults.E_Incompatible:
                return "The stream formats are not compatible.";

            case MsResults.E_Busy:
                return "The sample is busy.";

            case MsResults.E_NotInit:
                return "The object can't accept the call because its initialize function or equivalent has not been called.";

            case MsResults.E_SourceAlreadyDefined:
                return "Source already defined.";

            case MsResults.E_InvalidStreamType:
                return "The stream type is not valid for this operation.";

            case MsResults.E_NotRunning:
                return "The IMultiMediaStream object is not in running state.";

            default:
                return DsError.GetErrorText(hr);
            }
        }
// Example #6
        /// <summary>
        ///   Start the capture graph. Does nothing when already running or when no
        ///   media control interface is available; on success updates CurrentState.
        /// </summary>
        public virtual void Play()
        {
            // Nothing to do if we are already running or the graph was never built.
            if (this.CurrentState == PlayState.Running || this.mediaControl == null)
            {
                return;
            }

            int hr = this.mediaControl.Run();

            if (hr == 0)
            {
                this.CurrentState = PlayState.Running;
            }
            else
            {
                ErrorLogger.WriteLine("Error while starting to play. Message: " + DsError.GetErrorText(hr));
            }
        }
// Example #7
        /// <summary>
        /// Build the capture graph for grabber.
        /// </summary>
        /// <param name="frameRate">
        /// The framerate to use.
        /// </param>
        /// <param name="width">
        /// The width to use.
        /// </param>
        /// <param name="height">
        /// The height to use.
        /// </param>
        /// <returns>
        /// True, if succesfull, otherwise false.
        /// </returns>
        private bool SetupGraph(int frameRate, int width, int height)
        {
            int hr;

            this.fps = frameRate; // Not measured, only to expose FPS externally

            // Get the graphbuilder object
            this.filterGraph = (IFilterGraph2) new FilterGraph();

            // Create the ICaptureGraphBuilder2
            // (released in the finally block below — it is only needed during setup)
            var captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            this.mediaControl = this.filterGraph as IMediaControl;

            try
            {
                // Create the SampleGrabber interface
                this.sampleGrabber = (ISampleGrabber) new SampleGrabber();

                // Start building the graph
                hr = captureGraphBuilder.SetFiltergraph(this.filterGraph);
                if (hr != 0)
                {
                    ErrorLogger.WriteLine(
                        "Error in capGraph.SetFiltergraph. Could not build graph. Message: " + DsError.GetErrorText(hr));
                }

                // #if DEBUG
                // Register the graph in the Running Object Table so GraphEdit can attach.
                this.rotEntry = new DsROTEntry(this.filterGraph);

                // #endif
                if (this.VideoDeviceFilter != null)
                {
                    hr = this.filterGraph.AddFilter(this.VideoDeviceFilter, "Video input");
                    DsError.ThrowExceptionForHR(hr);
                }
                else
                {
                    // No capture device configured — the graph cannot be built.
                    return(false);
                }

                //// Add the video device
                // hr = this.filterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out this.captureFilter);
                // if (hr != 0)
                // {
                // ErrorLogger.WriteLine(
                // "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: "
                // + DsError.GetErrorText(hr));
                // }
                var baseGrabFlt = (IBaseFilter)this.sampleGrabber;

                this.ConfigureSampleGrabber(this.sampleGrabber);

                // Add the frame grabber to the graph
                hr = this.filterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr != 0)
                {
                    ErrorLogger.WriteLine(
                        "Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + DsError.GetErrorText(hr));
                }

                // If any of the default config items are set
                if (frameRate + height + width > 0)
                {
                    this.SetConfigParms(captureGraphBuilder, this.VideoDeviceFilter, frameRate, width, height);
                }

                // Connect the video device output to the sample grabber.
                hr = captureGraphBuilder.RenderStream(null, null, this.VideoDeviceFilter, null, baseGrabFlt);
                string error = DsError.GetErrorText(hr); // kept for debugging; not otherwise used

                // Check for succesful rendering, if this failed the class cannot be used, so dispose the resources and return false.
                if (hr < 0)
                {
                    this.Dispose();
                    return(false);
                }
                else
                {
                    // Otherwise update the SampleGrabber.
                    this.SaveSizeInfo(this.sampleGrabber);

                    // Grab frames via callback only; no internal buffering, continuous mode.
                    hr = this.sampleGrabber.SetBufferSamples(false);

                    if (hr == 0)
                    {
                        hr = this.sampleGrabber.SetOneShot(false);
                    }

                    if (hr == 0)
                    {
                        hr = this.sampleGrabber.SetCallback(this, 1);
                    }

                    if (hr < 0)
                    {
                        ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
                    }
                }
            }
            catch (Exception ex)
            {
                ErrorLogger.ProcessException(ex, false);

                this.Dispose();
                return(false);
            }
            finally
            {
                // The builder is only needed while constructing the graph.
                Marshal.ReleaseComObject(captureGraphBuilder);
            }

            return(true);
        }
// Example #8
        /// <summary>
        /// Saves the video properties of the SampleGrabber into member fields
        /// and creates a file mapping for the captured frames.
        /// </summary>
        /// <param name="sampGrabber">The <see cref="ISampleGrabber"/>
        /// from which to retreive the sample information.</param>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);

            if (hr != 0)
            {
                ErrorLogger.WriteLine("Could not SaveSizeInfo in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            // NOTE(review): both error branches only log and fall through; if formatPtr
            // is IntPtr.Zero the PtrToStructure below reads from a null pointer —
            // confirm callers only invoke this after a successful connection.
            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                ErrorLogger.WriteLine("Error in Camera.Capture. Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            this.videoWidth  = videoInfoHeader.BmiHeader.Width;
            this.videoHeight = videoInfoHeader.BmiHeader.Height;
            // Bytes per row, derived from the negotiated bit depth.
            // NOTE(review): the Bitmap/BitmapSource below are created as 32bpp; if the
            // grabber negotiates 24bpp this stride disagrees with the pixel format —
            // confirm the sample grabber is configured for RGB32.
            this.stride      = this.videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            this.bufferLength = this.videoWidth * this.videoHeight * 4; // 4 bytes per pixel (32bpp RGB)

            // create memory section and map for the OpenCV Image.
            // 0x04 = PAGE_READWRITE, 0xF001F = FILE_MAP_ALL_ACCESS.
            this.section    = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, (uint)this.bufferLength, null);
            this.map        = MapViewOfFile(this.section, 0xF001F, 0, 0, (uint)this.bufferLength);
            // Both the GDI+ bitmap and the WPF InteropBitmap are backed by the same
            // mapped memory, so frames written by the grabber appear in both.
            this.videoImage = new System.Drawing.Bitmap(
                this.videoWidth,
                this.videoHeight,
                this.stride,
                System.Drawing.Imaging.PixelFormat.Format32bppRgb,
                this.map);

            this.BitmapSource = Imaging.CreateBitmapSourceFromMemorySection(
                this.section, this.videoWidth, this.videoHeight, PixelFormats.Bgr32, this.videoWidth * 4, 0) as InteropBitmap;

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
// Example #9
        public bool BuildGraph()
        {
            if (_class.Capture.CurrentDevice <= -1 ||
                _class.Capture.CurrentDevice >= _class.Var.VideoCaptureDevice.Count)
            {
                return(true);
            }

            var strVideoDevice = _class.Var.VideoCaptureDevice[_class.Capture.CurrentDevice];
            var strShortName   = FindCaptureName(strVideoDevice);

            if (_class.Var.VideoResolutionIndex > _class.Resolution.List.Count)
            {
                _class.Var.VideoResolutionIndex = 0;
            }

            if (_class.Audio.Output > _class.Audio.Devices.Count)
            {
                _class.Audio.Output = -1;
                _class.Audio.Find();
            }

            _class.Debug.Log("[2] VCD: " + strVideoDevice);
            _class.Debug.Log("[2] VCDID: " + strShortName);
            _class.Debug.Log("[2] RES: " + _class.Resolution.List[_class.Var.VideoResolutionIndex]);
            _class.Debug.Log("[2] AOD: " + _class.Audio.Devices[_class.Audio.Output]);

            _class.Var.VideoDevice = strVideoDevice;
            _class.Var.AudioDevice = _class.Audio.Devices[_class.Audio.Output];

            //Filter lists definitions
            var strCrossVideoOut = "";
            var strCrossAudioOut = "";
            var strAvIin         = "";
            var strAvIout        = "";
            var strVideoIn       = "";
            var strPreviewIn     = "";
            var strPreviewOut    = "";
            var strTempOut       = "";

            //graph builder
            _class.Debug.Log("");
            _class.Debug.Log("[0] Create new graph");
            // ReSharper disable once SuspiciousTypeConversion.Global
            var pBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            var hr       = pBuilder.SetFiltergraph(_class.Graph.CaptureGraph);

            _class.Debug.Log("[2] [OK] " + DsError.GetErrorText(hr));
            _class.Debug.Log("");

            //_class.Graph.VideoWindow = (IVideoWindow)_class.Graph.CaptureGraph;            //Open the window

            //Primary Capture Device
            var pCaptureDevice = _class.GraphFilter.Set(FilterCategory.VideoInputDevice, strVideoDevice, out strTempOut);

            _class.Debug.Log("");
            if (pCaptureDevice == null)
            {
                _class.Debug.Log("[ERR] Cant create capture device. Graph cannot continue");
                return(false);
            }

            _class.Graph.CaptureDevice = pCaptureDevice;

            //Video capture in/output
            _class.GraphPin.ListPin(pCaptureDevice);
            var strCaptureVideoOut = _class.GraphPin.AssumePinOut("Capture", "Video");

            if (strCaptureVideoOut.Length == 0)
            {
                strCaptureVideoOut = _class.GraphPin.AssumePinOut("Capturar", "vídeo");                                     //Alias for Deen0x spanish card
            }
            var strCaptureAudioOut = _class.GraphPin.AssumePinOut("Audio");

            var strCaptureVideoIn = _class.GraphPin.AssumePinIn("Video");

            if (strCaptureVideoIn.Length == 0)
            {
                strCaptureVideoIn = _class.GraphPin.AssumePinIn("Capturar", "vídeo");
            }

            var strCaptureAudioIn = _class.GraphPin.AssumePinIn("Audio");

            _class.Debug.Log("[0]");
            _class.Debug.Log("<Video Out>" + strCaptureVideoOut);
            _class.Debug.Log("<Audio Out>" + strCaptureAudioOut);
            _class.Debug.Log("<Video In>" + strCaptureVideoIn);
            _class.Debug.Log("<Audio In>" + strCaptureAudioIn);
            _class.Debug.Log("");

            // ReSharper disable once SuspiciousTypeConversion.Global
            _class.Graph.IamAvd = pCaptureDevice as IAMAnalogVideoDecoder;

            //Create user crossbar if needed
            if (_class.Var.UseCrossbar)
            {
                if (_class.GraphCrossbar.createCrossbar(ref strCrossAudioOut, ref strCrossVideoOut, strCaptureVideoIn, strCaptureAudioIn, strShortName, pCaptureDevice))
                {
                    _class.GraphCrossbar.checkCrossbar();
                }
            }

            _class.Debug.Log("");

            //Set resolution
            _class.Debug.Log("[0] Checking capture resolution");

            if (_class.Var.VideoResolutionIndex == 0 || _class.System.IsAutoSetCaptureResolution)
            {
                _class.GraphResolution.Get();
            }

            if (_class.Var.VideoResolutionIndex > 0)
            {
                _class.GraphResolution.Set(pCaptureDevice, strCaptureVideoOut);
            }
            else
            {
                _class.Debug.Log("[0] [WARN] Cant find capture resolution - no input or unknown resolution type");
            }

            var pRen      = pCaptureDevice;
            var strPinOut = strCaptureVideoOut;
            var strDevice = strVideoDevice;

            //if (_class.Var.UseSampleGrabber)
            //    _class.SampleGrabber.createSampleGrabber(ref strPreviewIn, ref strPreviewOut, ref strDevice, ref strPinOut, ref pRen);

            if (_class.Var.CreateSmartTee)
            {
                _class.SmartTee.createSmartTee(ref strPreviewIn, ref strPreviewOut, ref strDevice, ref strPinOut, ref pRen);

                _class.Graph.CaptureFeed   = pRen;
                _class.Graph.CaptureFeedIn = strPreviewIn;
            }

            IBaseFilter smartTeeBase = null;

            if (_class.System.IsVr)
            {
                smartTeeBase = pRen;
            }

            if (_class.Var.CreateAviRender)
            {
                _class.AviRender.Create(ref strAvIin, ref strAvIout, ref strDevice, ref strPinOut, ref pRen);
            }

            //Video renderer
            _class.Debug.Log("");
            _class.Debug.Log("[0]***   Create Video Renderer");
            Guid CLSID_ActiveVideo = new Guid("{B87BEB7B-8D29-423F-AE4D-6582C10175AC}");

            IBaseFilter pVideoRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_ActiveVideo));

            hr = _class.Graph.CaptureGraph.AddFilter(pVideoRenderer, "Video Renderer");
            if (hr == 0)
            {
                _class.Debug.Log("[1] [OK] Created video renderer");
            }
            else
            {
                _class.Debug.Log("[1] [FAIL] Cant create video renderer");
                _class.Debug.Log("-> " + DsError.GetErrorText(hr));
            }

            _class.Debug.Log("");
            _class.Debug.Log("***   Listing Video Renderer pins");
            _class.GraphPin.ListPin(pVideoRenderer);
            strVideoIn = _class.GraphPin.AssumePinIn("Input");
            _class.Debug.Log("<Video>" + strVideoIn);
            _class.Debug.Log("");

            _class.Debug.Log("***   Connect AVI Decompressor (" + strPinOut + ") to Video Renderer (" + strVideoIn + ")");
            hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pRen, strPinOut), _class.GraphPin.GetPin(pVideoRenderer, strVideoIn), null);
            if (hr == 0)
            {
                _class.Debug.Log("[OK] Connected AVI to video renderer");
            }
            else
            {
                _class.Debug.Log("[FAIL] Can't connect AVI to video renderer");
                _class.Debug.Log("-> " + DsError.GetErrorText(hr));
            }

            _class.Graph.VideoDef    = pVideoRenderer as IBasicVideo;
            _class.Graph.VideoWindow = pVideoRenderer as IVideoWindow;

            if (_class.System.IsVr)
            {
                _class.Debug.Log("Create VR View");

                pRen      = smartTeeBase;
                strPinOut = "Capture";

                IBaseFilter pAVIDecompressor2 = (IBaseFilter) new AVIDec();
                hr = _class.Graph.CaptureGraph.AddFilter(pAVIDecompressor2, "AVI Decompressor VR");
                _class.Debug.Log("-> " + DsError.GetErrorText(hr));

                hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pRen, strPinOut), _class.GraphPin.GetPin(pAVIDecompressor2, "XForm In"), null);
                _class.Debug.Log("-> " + DsError.GetErrorText(hr));

                IBaseFilter pVideoRenderer2 = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_ActiveVideo));
                hr = _class.Graph.CaptureGraph.AddFilter(pVideoRenderer2, "Video Renderer VR");
                hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pAVIDecompressor2, "XForm Out"), _class.GraphPin.GetPin(pVideoRenderer2, "VMR Input0"), null);

                _class.Graph.VideoWindowVr = pVideoRenderer2 as IVideoWindow;
            }

            //Audio device
            if (_class.Audio.Output > -1 && _class.Audio.Output < _class.Audio.Devices.Count)
            {
                _class.Var.DeviceId = 0;               //Dont need multiple devices, set back to 0

                _class.Debug.Log("[0]");
                _class.Debug.Log("***   Create " + _class.Audio.Devices[_class.Audio.Output] + " audio device");
                IBaseFilter pAudio = null;

                pAudio = _class.GraphFilter.Set(FilterCategory.AudioRendererCategory, _class.Audio.Devices[_class.Audio.Output], out strTempOut);
                hr     = _class.Graph.CaptureGraph.AddFilter(pAudio, "Audio Device");
                _class.Debug.Log("-> " + DsError.GetErrorText(hr));

                if (pAudio != null)
                {
                    _class.Debug.Log("[1]");
                    _class.Debug.Log("***   Listing " + _class.Audio.Devices[_class.Audio.Output] + " pins");

                    _class.GraphPin.ListPin(pAudio);
                    var strAudioIn = _class.GraphPin.AssumePinIn("Audio");
                    _class.Debug.Log("<Audio>" + strAudioIn);
                    _class.Debug.Log("");

                    //connect Capture Device and Audio Device
                    _class.Debug.Log("***   Connect " + strVideoDevice + " (" + strCaptureAudioOut + ") to " + _class.Audio.Devices[_class.Audio.Output] + " [Audio] (" + strAudioIn + ")");
                    hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pCaptureDevice, strCaptureAudioOut), _class.GraphPin.GetPin(pAudio, strAudioIn), null);
                    _class.Debug.Log("-> " + DsError.GetErrorText(hr));
                }
            }

            return(true);
        }
Exemplo n.º 10
0
        /// <summary>
        ///   Stops the capture filter and the frame timer, then delegates to the
        ///   base class to stop the rest of the pipeline.
        /// </summary>
        public override void Stop()
        {
            try
            {
                // Stopping the capture filter before the media control works around
                // an intermittent shutdown problem observed with some devices.
                if (this.VideoDeviceFilter != null)
                {
                    int stopResult = this.VideoDeviceFilter.Stop();
                    if (stopResult != 0)
                    {
                        ErrorLogger.WriteLine("Error while stopping capture filter. Message: " + DsError.GetErrorText(stopResult));
                    }

                    // Halt the timer that drives frame grabbing, if one exists.
                    if (this.frameTimer != null)
                    {
                        this.frameTimer.Stop();
                    }
                }
            }
            catch (Exception ex)
            {
                // A teardown failure is logged but must not prevent base cleanup.
                ErrorLogger.ProcessException(ex, false);
            }

            base.Stop();
        }
Exemplo n.º 11
0
        /// <summary>
        /// Builds and runs the preview graph: attaches the capture graph builder to
        /// the filter graph, adds the first video capture device, renders its preview
        /// pin into the video window, and starts the graph.
        /// </summary>
        private void CaptureVideo()
        {
            int         hr           = 0;
            IBaseFilter sourceFilter = null;

            try
            {
                GetInterfaces();

                // Specifies filter graph "graphbuilder" for the capture graph builder "captureGraphBuilder" to use.
                hr = this.CaptureGraphBuilder.SetFiltergraph(this.GraphBuilder);
                Debug.WriteLine("Attach the filter graph to the capture graph : " + DsError.GetErrorText(hr));
                DsError.ThrowExceptionForHR(hr);

                sourceFilter = FindCaptureDevice();

                hr = this.GraphBuilder.AddFilter(sourceFilter, "Video Capture");
                Debug.WriteLine("Add capture filter to our graph : " + DsError.GetErrorText(hr));
                DsError.ThrowExceptionForHR(hr);

                hr = this.CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, null, null);
                Debug.WriteLine("Render the preview pin on the video capture filter : " + DsError.GetErrorText(hr));
                DsError.ThrowExceptionForHR(hr);

                SetupVideoWindow();

                // Register the graph in the Running Object Table so GraphEdit can attach to it.
                rot = new DsROTEntry(this.GraphBuilder);

                hr = this.MediaControl.Run();
                Debug.WriteLine("Start previewing video data : " + DsError.GetErrorText(hr));
                DsError.ThrowExceptionForHR(hr);

                this.CurrentState = PlayState.Running;
                Debug.WriteLine("The currentstate : " + this.CurrentState.ToString());
            }
            catch (Exception ex)
            {
                MessageBox.Show("An unrecoverable error has occurred.With error : " + ex.ToString());
            }
            finally
            {
                // Release the local COM reference on all paths (the graph keeps its own
                // reference once the filter is added). The original released it only on
                // the success path, leaking the filter whenever a call above threw.
                if (sourceFilter != null)
                {
                    Marshal.ReleaseComObject(sourceFilter);
                }
            }
        }
Exemplo n.º 12
0
        /// <summary>
        /// Enumerates the video input device category and returns the first capture
        /// device found, bound as an <c>IBaseFilter</c>.
        /// </summary>
        /// <returns>The base filter of the first video capture device.</returns>
        /// <exception cref="ApplicationException">
        /// Thrown when no capture device is installed or the device cannot be accessed.
        /// </exception>
        private IBaseFilter FindCaptureDevice()
        {
            Debug.WriteLine("Start the Sub FindCaptureDevice");
            int hr = 0;
            UCOMIEnumMoniker classEnum = null;

            UCOMIMoniker[] moniker = new UCOMIMoniker[1];
            object         source  = null;
            ICreateDevEnum devEnum = (ICreateDevEnum) new CreateDevEnum();

            try
            {
                hr = devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, CDef.None);
                Debug.WriteLine("Create an enumerator for the video capture devices : " + DsError.GetErrorText(hr));
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Release the enumerator factory on all paths (the original leaked it
                // when CreateClassEnumerator failed).
                Marshal.ReleaseComObject(devEnum);
            }

            if (classEnum == null)
            {
                // The category exists but contains no devices.
                // Original message contained literal "\\r\\n" escape text; use real newlines.
                throw new ApplicationException("No video capture device was detected.\r\n\r\n" + "This sample requires a video capture device, such as a USB WebCam,\r\n" + "to be installed and working properly.  The sample will now close.");
            }

            try
            {
                int celt = 0;

                if (classEnum.Next(moniker.Length, moniker, out celt) == 0)
                {
                    // Bind the first device moniker to its filter object.
                    Guid iid = typeof(IBaseFilter).GUID;
                    moniker[0].BindToObject(null, null, ref iid, out source);
                }
                else
                {
                    throw new ApplicationException("Unable to access video capture device!");
                }
            }
            finally
            {
                // Release COM references on all paths (the original leaked both when
                // an exception was thrown above).
                if (moniker[0] != null)
                {
                    Marshal.ReleaseComObject(moniker[0]);
                }
                Marshal.ReleaseComObject(classEnum);
            }

            return((IBaseFilter)source);
        }
Exemplo n.º 13
0
        /// <summary>
        /// Tears down any existing graph, builds a new capture graph for the
        /// currently selected device, and runs it.
        /// </summary>
        public void RunGraph()
        {
            Class.Debug.Log("[0] Build capture graph");

            bool deviceIndexValid = Class.Capture.CurrentDevice > -1 &&
                                    Class.Capture.CurrentDevice < Class.Var.VideoCaptureDevice.Count;
            if (!deviceIndexValid)
            {
                Class.Debug.Log("[ERR] Unknown capture device");
                return;
            }

            Class.Var.IsBuildingGraph = true;
            Class.Debug.Log("Using : " + Class.Var.VideoCaptureDevice[Class.Capture.CurrentDevice]);
            Class.Debug.Log("");

            // Halt any running graph before rebuilding.
            if (Class.Graph.MediaControl != null)
            {
                Class.Graph.MediaControl.StopWhenReady();
            }

            // Populate the resolution table on first use.
            if (Class.Resolution.Type.Count == 0)
            {
                Class.Resolution.Find();
            }

            Class.Graph.ClearGraph();
            Class.Graph.CaptureGraph = new FilterGraph() as IGraphBuilder;

            if (Class.GraphBuild.BuildGraph())
            {
                // Route video either to the preview window or to the main display.
                if (Class.Var.ShowPreviewWindow)
                {
                    setupPreviewWindow();
                }
                else
                {
                    Class.Display.Setup();
                }

                Class.Graph.MediaControl = Class.Graph.CaptureGraph as IMediaControl;
                Class.Graph.MediaEvent   = Class.Graph.CaptureGraph as IMediaEvent;

                Class.Debug.Log("");
                Class.Debug.Log("Run compiled graph");
                if (Class.Graph.MediaControl != null)
                {
                    int hr = Class.Graph.MediaControl.Run();
                    Class.Debug.Log("[2] " + DsError.GetErrorText(hr));
                }

                BoolActiveVideo = true;
            }

            Class.Var.IsBuildingGraph = false;

            // Fetch crossbar routing information on first run.
            if (Class.Graph.XBar != null && Class.Var.CrossbarInput.Count == 0)
            {
                Class.Crossbar.Output();
            }

            if (Class.Var.IsRestartGraph)
            {
                _intRestartGraph = 3;
            }
        }
Exemplo n.º 14
0
        /// <summary>
        /// Returns the <see cref="CameraInfo"/> for the given <see cref="DsDevice"/>.
        /// </summary>
        /// <param name="dev">A <see cref="DsDevice"/> to parse name and capabilities for.</param>
        /// <returns>The <see cref="CameraInfo"/> for the given device, or <c>null</c> when the
        /// device cannot be added, is already in use, or its capabilities cannot be read.</returns>
        private CameraInfo Caps(DsDevice dev)
        {
            var camerainfo = new CameraInfo();

            // Temporary probe graph used only to interrogate the device.
            var graphBuilder = (IFilterGraph2) new FilterGraph();
            var capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            IBaseFilter capFilter = null;

            try
            {
                int hr = capGraph.SetFiltergraph(graphBuilder);
                DsError.ThrowExceptionForHR(hr);

                // Add the video device
                hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
                if (hr != 0)
                {
                    Console.WriteLine("Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + DsError.GetErrorText(hr));
                    return(null);
                }

                // Rendering the capture stream fails when the device is already in use.
                DsGuid cat = PinCategory.Capture;
                hr = capGraph.RenderStream(cat, MediaType.Video, capFilter, null, null);
                if (hr != 0)
                {
                    return(null);
                }

                object o = null;
                hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                            typeof(IAMStreamConfig).GUID, out o);
                DsError.ThrowExceptionForHR(hr);

                var videoStreamConfig = o as IAMStreamConfig;

                int iCount = 0;
                int iSize  = 0;

                try
                {
                    if (videoStreamConfig != null)
                    {
                        videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
                    }
                }
                catch (Exception)
                {
                    // Best-effort probe: a device whose capabilities cannot be read is skipped.
                    return(null);
                }

                camerainfo.Name             = dev.Name;
                camerainfo.DirectshowDevice = dev;

                // Scratch buffer for VideoStreamConfigCaps; freed in the finally below
                // (the original leaked it when an exception escaped this region).
                pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
                try
                {
                    for (int i = 0; i < iCount; i++)
                    {
                        AMMediaType curMedType = null;
                        try
                        {
                            // videoStreamConfig is non-null here, otherwise iCount is 0
                            // and this loop never runs.
                            hr = videoStreamConfig.GetStreamCaps(i, out curMedType, pscc);
                            Marshal.ThrowExceptionForHR(hr);
                            var scc = (VideoStreamConfigCaps)Marshal.PtrToStructure(pscc, typeof(VideoStreamConfigCaps));

                            // MinFrameInterval is in 100 ns units, so this is the
                            // fastest frame rate the mode supports.
                            var CSF = new CamSizeFPS();
                            CSF.FPS    = (int)(10000000 / scc.MinFrameInterval);
                            CSF.Height = scc.InputSize.Height;
                            CSF.Width  = scc.InputSize.Width;

                            if (!InSizeFpsList(camerainfo.SupportedSizesAndFPS, CSF))
                            {
                                if (ParametersOK(CSF))
                                {
                                    camerainfo.SupportedSizesAndFPS.Add(CSF);
                                }
                            }
                        }
                        catch (Exception)
                        {
                            // Ignore individual capability entries that fail to parse.
                        }
                        finally
                        {
                            // Free the media type returned by GetStreamCaps
                            // (leaked every iteration in the original).
                            if (curMedType != null)
                            {
                                DsUtils.FreeAMMediaType(curMedType);
                            }
                        }
                    }
                }
                finally
                {
                    Marshal.FreeCoTaskMem(pscc);
                }
            }
            finally
            {
                if (graphBuilder != null)
                {
                    Marshal.ReleaseComObject(graphBuilder);
                }
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                }
                if (capGraph != null)
                {
                    Marshal.ReleaseComObject(capGraph);
                }
            }

            return(camerainfo);
        }
Exemplo n.º 15
0
        /// <summary>
        ///   Builds the playback graph for <c>VideoFilename</c>: file source →
        ///   sample grabber → renderer. Then queries the control/seeking/event
        ///   interfaces, starts the media-event thread, and updates the grabber's
        ///   size information.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">
        ///   Thrown when the source filter exposes no output pin the sample
        ///   grabber can be connected to.
        /// </exception>
        private void BuildGraph()
        {
            if (this.VideoFilename == string.Empty)
            {
                return;
            }

            this.filterGraph = (IFilterGraph2) new FilterGraph();

            // Register in the Running Object Table so GraphEdit can inspect the graph.
            this.rotEntry = new DsROTEntry(this.filterGraph);

            // Let DirectShow pick the appropriate source filter for the file.
            IBaseFilter sourceFilter;

            this.filterGraph.AddSourceFilter(this.VideoFilename, "File Source", out sourceFilter);

            // Create the SampleGrabber interface
            this.sampleGrabber = (ISampleGrabber) new SampleGrabber();
            var baseGrabFlt = (IBaseFilter)this.sampleGrabber;

            this.ConfigureSampleGrabber(this.sampleGrabber);

            // Add the frame grabber to the graph
            int hr = this.filterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");

            if (hr != 0)
            {
                ErrorLogger.WriteLine(
                    "Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + DsError.GetErrorText(hr));
            }

            IPin sampleGrabberIn  = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
            IPin sampleGrabberOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
            IPin sourceOut;

            // Iterate through source output pins, to find video output pin to be connected to
            // the sample grabber
            int i = 0;

            do
            {
                sourceOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
                if (sourceOut == null)
                {
                    // Two-argument constructor: the original passed the text as the
                    // paramName argument, producing a garbled exception message.
                    throw new ArgumentOutOfRangeException("i", "Found no compatible video source output pin");
                }

                hr = this.filterGraph.Connect(sourceOut, sampleGrabberIn);
                i++;
            }while (hr < 0);

            DsError.ThrowExceptionForHR(hr);
            hr = this.filterGraph.Render(sampleGrabberOut);
            DsError.ThrowExceptionForHR(hr);

            // QueryInterface for the DirectShow control interfaces.
            this.mediaControl = (IMediaControl)this.filterGraph;

            this.mediaSeeking  = (IMediaSeeking)this.filterGraph;
            this.mediaPosition = (IMediaPosition)this.filterGraph;
            this.mediaEvent    = (IMediaEvent)this.filterGraph;

            // Query for video interfaces, which may not be relevant for audio files
            this.videoWindow = this.filterGraph as IVideoWindow;
            hr = this.videoWindow.put_AutoShow(OABool.False);
            DsError.ThrowExceptionForHR(hr);

            this.basicVideo = this.filterGraph as IBasicVideo;

            // Query for audio interfaces, which may not be relevant for video-only files
            this.basicAudio = this.filterGraph as IBasicAudio;

            // Get the event handle the graph will use to signal
            // when events occur
            IntPtr hEvent;

            hr = this.mediaEvent.GetEventHandle(out hEvent);
            DsError.ThrowExceptionForHR(hr);

            // Reset event loop exit flag
            this.shouldExitEventLoop = false;

            // Create a new thread to wait for events
            this.eventThread      = new Thread(this.EventWait);
            this.eventThread.Name = "Media Event Thread";
            this.eventThread.Start();

            this.GetFrameStepInterface();

            // Update the SampleGrabber.
            this.SaveSizeInfo(this.sampleGrabber);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Set the Framerate, and video size
        /// </summary>
        /// <param name="capGraph">The <see cref="ICaptureGraphBuilder2"/> interface.</param>
        /// <param name="capFilter">The <see cref="IBaseFilter"/> of the capture device.</param>
        /// <param name="frameRate">The new framerate to be used.</param>
        /// <param name="width">The new video width to be used.</param>
        /// <param name="height">The new video height to be used.</param>
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int frameRate, int width, int height)
        {
            int         hr;
            object      o;
            AMMediaType media;

            // Find the stream config interface.
            // NOTE(review): this intentionally matches the original in using the
            // this.capGraph field rather than the capGraph parameter — confirm
            // callers always pass that same instance.
            hr = this.capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            this.videoControl      = capFilter as IAMVideoControl;
            this.videoStreamConfig = o as IAMStreamConfig;

            if (this.videoStreamConfig == null)
            {
                ErrorLogger.WriteLine("Error in Capture.SetConfigParams(). Failed to get IAMStreamConfig");
                // Nothing below can work without IAMStreamConfig; the original fell
                // through and dereferenced null here.
                return;
            }

            // Get the existing format block
            hr = this.videoStreamConfig.GetFormat(out media);

            if (hr != 0)
            {
                ErrorLogger.WriteLine("Could not SetConfigParms in Camera.Capture. Message: " + DsError.GetErrorText(hr));
                // media is not valid when GetFormat fails; bail out instead of
                // reading an invalid format pointer.
                return;
            }

            // copy out the videoinfoheader
            VideoInfoHeader v = new VideoInfoHeader();

            Marshal.PtrToStructure(media.formatPtr, v);

            // Apply overrides only for values the caller actually specified (> 0).
            if (frameRate > 0)
            {
                // AvgTimePerFrame is expressed in 100-nanosecond units.
                v.AvgTimePerFrame = 10000000 / frameRate;
            }

            if (width > 0)
            {
                v.BmiHeader.Width = width;
            }

            if (height > 0)
            {
                v.BmiHeader.Height = height;
            }

            // Copy the media structure back
            Marshal.StructureToPtr(v, media.formatPtr, true);

            // Set the new format
            hr = this.videoStreamConfig.SetFormat(media);
            if (hr != 0)
            {
                ErrorLogger.WriteLine("Error while setting new camera format (videoStreamConfig) in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            DsUtils.FreeAMMediaType(media);
        }
Exemplo n.º 17
0
        /// <summary>
        /// Saves the video properties of the SampleGrabber into member fields
        /// and creates a file mapping for the captured frames.
        /// </summary>
        /// <param name="sampGrabber">The <see cref="ISampleGrabber"/>
        /// from which to retreive the sample information.</param>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            var media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);

            if (hr != 0)
            {
                // NOTE(review): errors are logged but execution continues; the
                // PtrToStructure below will then read an invalid format pointer.
                ErrorLogger.WriteLine("Could not SaveSizeInfo in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                ErrorLogger.WriteLine("Error in Camera.Capture. Unknown Grabber Media Format");
            }

            // Grab the size info
            var videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            // NOTE(review): BmiHeader.Height can be negative for top-down DIBs;
            // this code assumes it is positive — confirm against the grabber setup.
            videoWidth  = videoInfoHeader.BmiHeader.Width;
            videoHeight = videoInfoHeader.BmiHeader.Height;
            // Bytes per row, derived from the connected format's bit depth.
            stride      = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            bufferLength = videoWidth * videoHeight * 3; // RGB24 = 3 bytes

            // create memory section and map for the OpenCV Image.
            // 0x04 = PAGE_READWRITE; 0xF001F = FILE_MAP_ALL_ACCESS; IntPtr(-1) backs
            // the mapping with the page file rather than a real file.
            section    = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, (uint)bufferLength, null);
            map        = MapViewOfFile(section, 0xF001F, 0, 0, (uint)bufferLength);
            // The Emgu image aliases the mapped memory directly — no copy is made.
            videoImage = new Image <Bgr, byte>(videoWidth, videoHeight, stride, map);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Exemplo n.º 18
0
        /// <summary>
        /// Build the capture graph for grabber.
        /// </summary>
        /// <param name="dev">The capture device to build the graph around.</param>
        /// <param name="frameRate">The framerate to use.</param>
        /// <param name="width">The width to use.</param>
        /// <param name="height">The height to use.</param>
        /// <returns>True, if succesfull, otherwise false.</returns>
        private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
        {
            int hr;

            this.fps = frameRate; // Not measured, only to expose FPS externally

            this.cameraControl = null;
            this.capFilter     = null;

            // Get the graphbuilder object
            this.graphBuilder = (IFilterGraph2)new FilterGraph();
            this.mediaControl = this.graphBuilder as IMediaControl;

            try
            {
                // Create the ICaptureGraphBuilder2
                this.capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                // Create the SampleGrabber interface
                this.sampGrabber = (ISampleGrabber)new SampleGrabber();

                // Start building the graph
                hr = this.capGraph.SetFiltergraph(this.graphBuilder);
                if (hr != 0)
                {
                    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " + DsError.GetErrorText(hr));
                }

#if DEBUG
                // Register the graph in the Running Object Table so GraphEdit can attach while debugging.
                this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

                // Add the video device
                hr = this.graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out this.capFilter);
                if (hr != 0)
                {
                    ErrorLogger.WriteLine("Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + DsError.GetErrorText(hr));
                }

                IBaseFilter baseGrabFlt = (IBaseFilter)this.sampGrabber;

                this.ConfigureSampleGrabber(this.sampGrabber);

                // Add the frame grabber to the graph
                hr = this.graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr != 0)
                {
                    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + DsError.GetErrorText(hr));
                }

                this.cameraControl = this.capFilter as IAMCameraControl;

                // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM

                /*
                 * if (!defaultMode)
                 * {
                 *  m_icc = capFilter as IAMCameraControl;
                 *  CameraControlFlags CamFlags = new CameraControlFlags();
                 *  int pMin, pMax, pStep, pDefault;
                 *
                 *  hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
                 *  m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
                 * }
                 */

                // Set videoProcAmp.
                // BUGFIX: bind to the moniker of the device that was passed in, not
                // unconditionally to the first enumerated camera; otherwise the
                // proc-amp controls target the wrong device whenever dev is not Cameras[0].
                object obj;
                Guid   iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
                dev.Mon.BindToObject(
                    null,
                    null,
                    ref iid_IBaseFilter,
                    out obj);

                this.videoProcAmp = obj as IAMVideoProcAmp;

                // If any of the default config items are set
                if (frameRate + height + width > 0)
                {
                    this.SetConfigParms(this.capGraph, this.capFilter, frameRate, width, height);
                }

                hr = this.capGraph.RenderStream(PinCategory.Capture, MediaType.Video, this.capFilter, null, baseGrabFlt);

                // Check for succesful rendering, if this failed the class cannot be used, so dispose the resources and return false.
                if (hr < 0)
                {
                    string error = DsError.GetErrorText(hr);
                    MessageBox.Show(error);
                    this.Dispose();
                    return false;
                }
                else
                {
                    // Otherwise update the SampleGrabber.
                    this.SaveSizeInfo(this.sampGrabber);

                    hr = this.sampGrabber.SetBufferSamples(false);

                    if (hr == 0)
                    {
                        hr = this.sampGrabber.SetOneShot(false);
                    }

                    if (hr == 0)
                    {
                        // Deliver frames via the ISampleGrabberCB.BufferCB callback (1 = buffer callback).
                        hr = this.sampGrabber.SetCallback(this, 1);
                    }

                    if (hr < 0)
                    {
                        ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
                    }
                }
            }
            catch (Exception ex)
            {
                ErrorLogger.ProcessException(ex, false);

                this.Dispose();
                return false;
            }

            return true;
        }
Exemplo n.º 19
0
        //The following is called for building the PREVIEW graph
        #region PREVIEW ONLY
        /// <summary>
        /// Builds the DirectShow preview graph: capture device -> sample grabber ->
        /// VMR9 renderer (letterboxed), then logs all filters in the graph and
        /// hands off to SetupVideoWindow().
        /// </summary>
        public void Init()
        {
            Debug.WriteLine("VIDEO FOR PREVIEW");
            pGraph = graph;
            int hr = 0;

            // Attach the capture graph builder to the filter graph.
            hr = pGraphBuilder.SetFiltergraph(pGraph);
            DsError.ThrowExceptionForHR(hr);

            // Locate the USB capture device (helper defined elsewhere in this class).
            pUSB = FindCaptureDevice();

            hr = pGraph.AddFilter(pUSB, "WebCamControl Video");
            DsError.ThrowExceptionForHR(hr);

            hr = pGraph.AddFilter(pSampleGrabber as IBaseFilter, "SampleGrabber");
            checkHR(hr, "Can't add SampleGrabber to graph");
            i_grabber = (ISampleGrabber)pSampleGrabber;
            // Buffer samples so the latest frame can be read back via GetCurrentBuffer.
            i_grabber.SetBufferSamples(true);

            // Force letterbox aspect-ratio handling on the VMR9 renderer.
            IVMRAspectRatioControl9 ratioControl9 = (IVMRAspectRatioControl9)renderFilter;

            hr = ratioControl9.SetAspectRatioMode(VMRAspectRatioMode.LetterBox);
            DsError.ThrowExceptionForHR(hr);

            hr = pGraph.AddFilter(renderFilter, "My Render Filter");
            DsError.ThrowExceptionForHR(hr);
            //SetFormat();

            // Render the preview pin through the sample grabber into the renderer.
            hr = pGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, pUSB, (IBaseFilter)pSampleGrabber, renderFilter);
            DsError.ThrowExceptionForHR(hr);
            Debug.WriteLine(DsError.GetErrorText(hr) + " is error in rendering");

            // Enumerate and log every filter now present in the graph (debug aid).
            IEnumFilters enumFilters = null;

            IBaseFilter[] baseFilters = { null };
            // NOTE(review): 'fetched' stays IntPtr.Zero; IEnumFilters.Next is being
            // told not to report the fetched count — confirm this overload allows it.
            IntPtr        fetched     = IntPtr.Zero;

            hr = pGraph.EnumFilters(out enumFilters);
            int r = 0;

            while (r == 0)
            {
                try
                {
                    r = enumFilters.Next(baseFilters.Length, baseFilters, fetched);
                    // NOTE(review): 'hr' here is the stale result of EnumFilters above,
                    // not of Next(); the check was presumably meant to use 'r'.
                    DsError.ThrowExceptionForHR(hr);
                    baseFilters[0].QueryFilterInfo(out FilterInfo filterInfo);
                    Debug.WriteLine(filterInfo.achName + " -filtername");
                }
                catch
                {
                    // Any failure (including Next returning no filter) ends the loop.
                    r = 1;
                    continue;
                }
            }

            // NOTE(review): all COM objects — including pGraph and renderFilter —
            // are released here BEFORE SetupVideoWindow() runs; verify that
            // SetupVideoWindow does not rely on these references still being alive.
            SafeReleaseComObject(pUSB);
            SafeReleaseComObject(pAVIMux);
            SafeReleaseComObject(pGraph);
            SafeReleaseComObject(pGraphBuilder);
            SafeReleaseComObject(pSampleGrabber);
            //SafeReleaseComObject(sampleGrabber);
            SafeReleaseComObject(pSmartTee);
            SafeReleaseComObject(renderFilter);
            SafeReleaseComObject(nullRender);
            SafeReleaseComObject(mediaEventEx);
            SafeReleaseComObject(ratioControl9);
            SafeReleaseComObject(streamConfig);
            SafeReleaseComObject(mediaControl);
            //SafeReleaseComObject(format);
            //SafeReleaseComObject(i_grabber);
            SafeReleaseComObject(graph);

            SetupVideoWindow();
        }
Exemplo n.º 20
0
        /// <summary>
        /// Attempts to connect this pin to <paramref name="pReceivePinEx"/> by offering
        /// each cached media type until the receiving pin accepts one and the
        /// connection attempt succeeds.
        /// </summary>
        /// <param name="pReceivePinEx">The downstream pin to connect to.</param>
        /// <returns>true if a connection was established; otherwise false.</returns>
        public bool ConnectEx(PinEx pReceivePinEx)
        {
            // Always start from a disconnected state.
            Disconnect();

            // Lazily populate the media type list on first use.
            if (this.MediaTypes.Count == 0)
            {
                InitAMMediaType();
            }
            Console.WriteLine(string.Format("InitAMMediaType():{0}", this.MediaTypes.Count));

            foreach (var mt in this.MediaTypes)
            {
                var hr = pReceivePinEx.Pin.QueryAccept(mt);
                Console.WriteLine(string.Format("pReceivePinEx.Pin.QueryAccept(mt):{0}  {1}", hr, DsError.GetErrorText(hr)));
                if (hr != 0)
                {
                    continue;
                }

                // hr == 0 is guaranteed past the guard above; the original's
                // redundant re-check of hr has been removed.
                if (ConnectEx(pReceivePinEx, mt) == 0)
                {
                    return true;
                }
            }
            return false;
        }
Exemplo n.º 21
0
        /// <summary>
        /// Opens <paramref name="strFile"/> in a DirectShow graph and starts playback:
        /// builds the graph, loads subtitles and post-processing, wires graph event
        /// notifications into the GUI message loop, runs the graph, and publishes the
        /// playback-started message. Returns false (and tears the graph down) on any
        /// failure along the way.
        /// </summary>
        /// <param name="strFile">URL or local path of the media to play.</param>
        /// <returns>true if playback started; otherwise false.</returns>
        public override bool Play(string strFile)
        {
            updateTimer = DateTime.Now;
            m_speedRate = 10000;
            m_bVisible  = false;
            m_iVolume   = 100;
            m_state     = PlayState.Init;
            if (strFile != "http://localhost/OnlineVideo.mp4")
            {
                m_strCurrentFile = strFile;                                                // hack to get around the MP 1.3 Alpha bug with non http URLs
            }
            m_bFullScreen = true;
            m_ar          = GUIGraphicsContext.ARType;
            VideoRendererStatistics.VideoState = VideoRendererStatistics.State.VideoPresent;
            _updateNeeded = true;
            Log.Instance.Info("OnlineVideosPlayer: Play '{0}'", m_strCurrentFile);

            m_bStarted = false;
            // Build the DirectShow graph; bail out and clean up if that fails.
            if (!GetInterfaces())
            {
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }

            // if we are playing a local file set the cache file so refresh rate adaption can happen
            Uri    uri      = new Uri(m_strCurrentFile);
            string protocol = uri.Scheme.Substring(0, Math.Min(uri.Scheme.Length, 4));

            if (protocol == "file")
            {
                cacheFile = m_strCurrentFile;
            }

            AdaptRefreshRateFromCacheFile();

            // Load subtitles; fall back to a no-op engine when loading fails.
            ISubEngine engine = SubEngine.GetInstance(true);

            if (!engine.LoadSubtitles(graphBuilder, string.IsNullOrEmpty(SubtitleFile) ? m_strCurrentFile : SubtitleFile))
            {
                SubEngine.engine = new SubEngine.DummyEngine();
            }
            else
            {
                engine.Enable = true;
            }

            // Load the post-processing chain; fall back to a no-op engine when it fails.
            IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);

            if (!postengine.LoadPostProcessing(graphBuilder))
            {
                PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
            }
            AnalyseStreams();
            SelectSubtitles();
            SelectAudioLanguage();
            OnInitialized();

            // Route graph events (WM_GRAPHNOTIFY) to the active GUI window.
            int hr = mediaEvt.SetNotifyWindow(GUIGraphicsContext.ActiveForm, WM_GRAPHNOTIFY, IntPtr.Zero);

            if (hr < 0)
            {
                Error.SetError("Unable to play movie", "Can not set notifications");
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }
            if (videoWin != null)
            {
                // Make the video window a clipped child and drain its messages into the main form.
                videoWin.put_WindowStyle((WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipChildren + (int)WindowStyle.ClipSiblings));
                videoWin.put_MessageDrain(GUIGraphicsContext.form.Handle);
            }

            DirectShowUtil.SetARMode(graphBuilder, AspectRatioMode.Stretched);

            try
            {
                if (protocol == "file")
                {
                    // Local files: let the VMR9 helper start the media control.
                    if (Vmr9 != null)
                    {
                        Vmr9.StartMediaCtrl(mediaCtrl);
                    }
                }
                else
                {
                    hr = mediaCtrl.Run();
                    DsError.ThrowExceptionForHR(hr);
                    if (hr == 1)
                    // S_FALSE from IMediaControl::Run means: The graph is preparing to run, but some filters have not completed the transition to a running state.
                    {
                        // wait max. 20 seconds for the graph to transition to the running state
                        DateTime    startTime = DateTime.Now;
                        FilterState filterState;
                        do
                        {
                            Thread.Sleep(100);
                            hr = mediaCtrl.GetState(100, out filterState);
                            // check with timeout max. 10 times a second if the state changed
                        } while ((hr != 0) && ((DateTime.Now - startTime).TotalSeconds <= 20));
                        if (hr != 0) // S_OK
                        {
                            DsError.ThrowExceptionForHR(hr);
                            throw new Exception(string.Format("IMediaControl.GetState after 20 seconds: 0x{0} - '{1}'",
                                                              hr.ToString("X8"), DsError.GetErrorText(hr)));
                        }
                    }
                }
            }
            catch (Exception error)
            {
                Log.Instance.Warn("OnlineVideosPlayer: Unable to play with reason: {0}", error.Message);
            }
            // hr still reflects the last graph operation; non-zero means startup failed.
            if (hr != 0) // S_OK
            {
                Error.SetError("Unable to play movie", "Unable to start movie");
                m_strCurrentFile = "";
                CloseInterfaces();
                return(false);
            }

            if (basicVideo != null)
            {
                basicVideo.GetVideoSize(out m_iVideoWidth, out m_iVideoHeight);
            }

            if (GoFullscreen)
            {
                GUIWindowManager.ActivateWindow(GUIOnlineVideoFullscreen.WINDOW_FULLSCREEN_ONLINEVIDEO);
            }
            // Announce playback start to the rest of the GUI.
            GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_PLAYBACK_STARTED, 0, 0, 0, 0, 0, null);

            msg.Label = CurrentFile;
            GUIWindowManager.SendThreadMessage(msg);
            m_state       = PlayState.Playing;
            m_iPositionX  = GUIGraphicsContext.VideoWindow.X;
            m_iPositionY  = GUIGraphicsContext.VideoWindow.Y;
            m_iWidth      = GUIGraphicsContext.VideoWindow.Width;
            m_iHeight     = GUIGraphicsContext.VideoWindow.Height;
            m_ar          = GUIGraphicsContext.ARType;
            _updateNeeded = true;
            SetVideoWindow();
            mediaPos.get_Duration(out m_dDuration);
            Log.Instance.Info("OnlineVideosPlayer: Duration {0} sec", m_dDuration.ToString("F"));

            return(true);
        }
Exemplo n.º 22
0
        /// <summary>
        /// Configure the sample grabber with default Video RGB32 mode.
        /// </summary>
        /// <param name="sampGrabber">
        /// The <see cref="ISampleGrabber"/> to be configured.
        /// </param>
        protected void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video/RGB32 (the summary previously said RGB24,
            // but the subtype actually set below is RGB32).
            media           = new AMMediaType();
            media.majorType = MediaType.Video;

            // media.subType=MediaSubType.UYVY;
            media.subType    = MediaSubType.RGB32;
            media.formatType = FormatType.VideoInfo;

            // BUGFIX: configure the grabber that was passed in; the original ignored
            // the parameter and always operated on the 'sampleGrabber' field.
            hr = sampGrabber.SetMediaType(media);

            if (hr != 0)
            {
                ErrorLogger.WriteLine(
                    "Could not ConfigureSampleGrabber in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber: deliver frames via BufferCB (1 = buffer callback).
            hr = sampGrabber.SetCallback(this, 1);

            if (hr != 0)
            {
                ErrorLogger.WriteLine(
                    "Could not set callback method for sampleGrabber in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }
        }
Exemplo n.º 23
0
        /// <summary>
        /// Sends the DiSEqC command.
        /// </summary>
        /// <param name="diSEqC">The raw DiSEqC message bytes to send.</param>
        /// <returns>true if succeeded, otherwise false</returns>
        public bool SendDiSEqCCommand(byte[] diSEqC)
        {
            // Total size of the native command structure handed to the driver.
            const int len = 188;

            // Copy the message payload to the start of the unmanaged buffer.
            for (int i = 0; i < diSEqC.Length; ++i)
            {
                Marshal.WriteByte(_ptrDiseqc, i, diSEqC[i]);
            }

            // Fill in the fixed-offset header fields of the driver structure.
            Marshal.WriteInt32(_ptrDiseqc, 160, diSEqC.Length);                                 //send_message_length
            Marshal.WriteInt32(_ptrDiseqc, 164, 0);                                             //receive_message_length
            Marshal.WriteInt32(_ptrDiseqc, 168, 3);                                             //amplitude_attenuation
            // BUGFIX: Marshal.WriteByte takes a byte; the enum values were cast to
            // int, which has no implicit conversion to byte (CS1503). Cast to byte.
            Marshal.WriteByte(_ptrDiseqc, 172, (byte)BurstModulationType.TONE_BURST_MODULATED); //tone_burst_modulated
            Marshal.WriteByte(_ptrDiseqc, 176, (byte)DisEqcVersion.DISEQC_VER_1X);
            Marshal.WriteByte(_ptrDiseqc, 180, (byte)RxMode.RXMODE_NOREPLY);
            Marshal.WriteByte(_ptrDiseqc, 184, 1); //last_message TRUE

            //check the command
            string txt = "";

            for (int i = 0; i < diSEqC.Length; ++i)
            {
                txt += String.Format("0x{0:X} ", Marshal.ReadByte(_ptrDiseqc, i));
            }
            for (int i = 160; i < 188; i = (i + 4))
            {
                txt += String.Format("0x{0:X} ", Marshal.ReadInt32(_ptrDiseqc, i));
            }
            Log.Log.Debug("Conexant BDA: SendDiseqCCommand: {0}", txt);

            // Hand the buffer to the tuner's property set; the same buffer serves
            // as both instance and property data.
            int hr = _propertySet.Set(BdaTunerExtentionProperties, (int)BdaTunerExtension.KSPROPERTY_BDA_DISEQC, _ptrDiseqc,
                                      len, _ptrDiseqc, len);

            if (hr != 0)
            {
                Log.Log.Info("Conexant BDA: SendDiseqCCommand returned: 0x{0:X} - {1}", hr, DsError.GetErrorText(hr));
            }
            return(hr == 0);
        }