Capabilities of a video device, such as frame size and frame rate.
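
These snippets come from several projects built on AForge.Video.DirectShow, so member names vary between library versions (older builds expose FrameRate and DesiredFrameSize, newer builds AverageFrameRate and VideoResolution). As a minimal sketch of the surrounding workflow, assuming the AForge.Video.DirectShow assembly is referenced and at least one capture device is present, a device's capabilities can be listed roughly like this (FilterInfoCollection and FilterCategory come from the same library):

using System;
using AForge.Video.DirectShow;

class ListVideoCapabilities
{
    static void Main()
    {
        // Enumerate all DirectShow video input devices.
        FilterInfoCollection devices = new FilterInfoCollection(FilterCategory.VideoInputDevice);

        foreach (FilterInfo info in devices)
        {
            Console.WriteLine(info.Name);

            // Open the device by its moniker and read the capabilities of its capture pin.
            VideoCaptureDevice device = new VideoCaptureDevice(info.MonikerString);

            foreach (VideoCapabilities cap in device.VideoCapabilities)
            {
                // AverageFrameRate exists in newer AForge builds; older builds use FrameRate.
                Console.WriteLine("  {0}x{1} @ {2} fps, {3} bpp",
                    cap.FrameSize.Width, cap.FrameSize.Height,
                    cap.AverageFrameRate, cap.BitCount);
            }
        }
    }
}

Each VideoCapabilities entry corresponds to one media type reported by the device's stream configuration pin; the examples below show how these entries are built from IAMStreamConfig, compared, and applied back to a device.
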
Example #1
        // Ok button clicked
        private void okButton_Click(object sender, EventArgs e)
        {
            videoDeviceMoniker = videoDevice.Source;

            // set video size
            if (videoCapabilitiesDictionary.Count != 0)
            {
                VideoCapabilities caps = videoCapabilitiesDictionary[(string)videoResolutionsCombo.SelectedItem];

                videoDevice.DesiredFrameSize = caps.FrameSize;
                videoDevice.DesiredFrameRate = caps.FrameRate;

                captureSize = caps.FrameSize;
            }

            if (configureSnapshots)
            {
                // set snapshots size
                if (snapshotCapabilitiesDictionary.Count != 0)
                {
                    VideoCapabilities caps = snapshotCapabilitiesDictionary[(string)snapshotResolutionsCombo.SelectedItem];

                    videoDevice.ProvideSnapshots    = true;
                    videoDevice.DesiredSnapshotSize = caps.FrameSize;

                    snapshotSize = caps.FrameSize;
                }
            }

            if (availableVideoInputs.Length != 0)
            {
                videoInput = availableVideoInputs[videoInputsCombo.SelectedIndex];
                videoDevice.CrossbarVideoInput = videoInput;
            }
        }
Example #2
 private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
 {
     if (resolution != null)
     {
         int                   count            = 0;
         int                   size             = 0;
         AMMediaType           mediaType        = null;
         VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();
         streamConfig.GetNumberOfCapabilities(out count, out size);
         for (int i = 0; i < count; i++)
         {
             try
             {
                 VideoCapabilities b = new VideoCapabilities(streamConfig, i);
                 if (resolution == b && streamConfig.GetStreamCaps(i, out mediaType, streamConfigCaps) == 0)
                 {
                     break;
                 }
             }
             catch
             {
             }
         }
         if (mediaType != null)
         {
             streamConfig.SetFormat(mediaType);
             mediaType.Dispose();
         }
     }
 }
Example #3
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException("videoStreamConfig");
            }

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);

            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if (count <= 0)
            {
                throw new NotSupportedException("This video device does not report capabilities.");
            }

            if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
            {
                throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
            }

            // group capabilities with similar parameters
            Dictionary <uint, VideoCapabilities> videocapsList = new Dictionary <uint, VideoCapabilities>();

            for (int i = 0; i < count; i++)
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities(videoStreamConfig, i);

                    // pack the frame size into a single key: height in the high 16 bits, width in the low 16 bits
                    uint key = (((uint)vc.FrameSize.Height) << 16) |
                                ((uint)vc.FrameSize.Width);

                    if (!videocapsList.ContainsKey(key))
                    {
                        videocapsList.Add(key, vc);
                    }
                    else
                    {
                        if (vc.BitCount > videocapsList[key].BitCount)
                        {
                            videocapsList[key] = vc;
                        }
                    }
                }
                catch
                {
                }
            }

            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo(videocaps, 0);

            return(videocaps);
        }
Example #4
        // Token: 0x06000040 RID: 64 RVA: 0x000033D0 File Offset: 0x000015D0
        private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
        {
            if (resolution == null)
            {
                return;
            }
            int                   num              = 0;
            int                   num2             = 0;
            AMMediaType           ammediaType      = null;
            VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();

            streamConfig.GetNumberOfCapabilities(out num, out num2);
            for (int i = 0; i < num; i++)
            {
                try
                {
                    VideoCapabilities b = new VideoCapabilities(streamConfig, i);
                    if (resolution == b && streamConfig.GetStreamCaps(i, out ammediaType, streamConfigCaps) == 0)
                    {
                        break;
                    }
                }
                catch
                {
                }
            }
            if (ammediaType != null)
            {
                streamConfig.SetFormat(ammediaType);
                ammediaType.Dispose();
            }
        }
Example #5
        /// <summary>
        /// Check if two video capabilities are equal.
        /// </summary>
        ///
        /// <param name="vc2">Second video capability to compare with.</param>
        ///
        /// <returns>Returns true if both video capabilities are equal or false otherwise.</returns>
        ///
        public bool Equals(VideoCapabilities vc2)
        {
            if ((object)vc2 == null)
            {
                return(false);
            }

            return((FrameSize == vc2.FrameSize) && (BitCount == vc2.BitCount));
        }
Example #6
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig( IAMStreamConfig videoStreamConfig )
        {
            if ( videoStreamConfig == null )
                throw new ArgumentNullException( "videoStreamConfig" );

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities( out count, out size );

            if ( hr != 0 )
                Marshal.ThrowExceptionForHR( hr );

            if ( count <= 0 )
                throw new NotSupportedException( "This video device does not report capabilities." );

            if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) )
                throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." );

            // group capabilities with similar parameters
            Dictionary<ulong, VideoCapabilities> videocapsList = new Dictionary<ulong, VideoCapabilities>();

            for (int i = 0; i < count; i++)
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities(videoStreamConfig, i);

                    // cast to ulong before shifting so the 48- and 32-bit shifts are not masked to 5 bits
                    ulong key = (((ulong)vc.AverageFrameRate) << 48) |
                                (((ulong)vc.FrameSize.Height) << 32) |
                                (((ulong)vc.FrameSize.Width) << 16);

                    if (!videocapsList.ContainsKey(key))
                    {
                        videocapsList.Add(key, vc);
                    }
                    else
                    {
                        if (vc.BitCount > videocapsList[key].BitCount)
                        {
                            videocapsList[key] = vc;
                        }
                    }
                }
                catch
                {
                }
            }


            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo( videocaps, 0 );

            return videocaps;
        }
Example #7
        /// <summary>
        /// Configure device and report frame format that will be used during streaming.
        /// This method must return a proper ImageDescriptor so we can pre-allocate buffers.
        /// </summary>
        public ImageDescriptor Prepare()
        {
            ConfigureDevice();

            AForge.Video.DirectShow.VideoCapabilities cap = null;
            if (device.VideoResolution == null)
            {
                // This device was never connected to in Kinovea, use the first media type.
                AForge.Video.DirectShow.VideoCapabilities[] caps = device.VideoCapabilities;
                if (caps.Length == 0)
                {
                    log.ErrorFormat("Cannot get any media type for the device.");
                    return(ImageDescriptor.Invalid);
                }

                cap = caps[0];

                device.SetMediaTypeAndFramerate(cap.Index, (float)cap.AverageFrameRate);
                log.DebugFormat("Device set to default configuration: Index:{0}. ({1}x{2} @ {3:0.###} fps ({4})).",
                                cap.Index, cap.FrameSize.Width, cap.FrameSize.Height, cap.AverageFrameRate, cap.Compression);
            }
            else
            {
                cap = device.VideoResolution;
            }

            int width  = cap.FrameSize.Width;
            int height = cap.FrameSize.Height;

            resultingFramerate = cap.AverageFrameRate;

            ImageFormat format = ImageFormat.RGB24;

            switch (cap.Compression)
            {
            case "RGB24":
            default:
                format = ImageFormat.RGB24;
                break;

            case "MJPG":
                format = ImageFormat.JPEG;
                break;
            }

            int  bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format);
            bool topDown    = false;

            return(new ImageDescriptor(format, width, height, topDown, bufferSize));
        }
Example #8
        // Token: 0x06000026 RID: 38 RVA: 0x0000260C File Offset: 0x0000080C
        internal static VideoCapabilities[] FromStreamConfig(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException("videoStreamConfig");
            }
            int num;
            int num2;
            int numberOfCapabilities = videoStreamConfig.GetNumberOfCapabilities(out num, out num2);

            if (numberOfCapabilities != 0)
            {
                Marshal.ThrowExceptionForHR(numberOfCapabilities);
            }
            if (num <= 0)
            {
                throw new NotSupportedException("This video device does not report capabilities.");
            }
            if (num2 > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
            {
                throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
            }
            Dictionary <uint, VideoCapabilities> dictionary = new Dictionary <uint, VideoCapabilities>();

            for (int i = 0; i < num; i++)
            {
                try
                {
                    VideoCapabilities videoCapabilities = new VideoCapabilities(videoStreamConfig, i);
                    uint key = (uint)(videoCapabilities.FrameSize.Height | videoCapabilities.FrameSize.Width << 16);
                    if (!dictionary.ContainsKey(key))
                    {
                        dictionary.Add(key, videoCapabilities);
                    }
                    else if (videoCapabilities.BitCount > dictionary[key].BitCount)
                    {
                        dictionary[key] = videoCapabilities;
                    }
                }
                catch
                {
                }
            }
            VideoCapabilities[] array = new VideoCapabilities[dictionary.Count];
            dictionary.Values.CopyTo(array, 0);
            return(array);
        }
Example #9
        /// <summary>
        /// Configure device and report frame format that will be used during streaming.
        /// This method must return a proper ImageDescriptor so we can pre-allocate buffers.
        /// </summary>
        public ImageDescriptor Prepare()
        {
            ConfigureDevice();

            AForge.Video.DirectShow.VideoCapabilities cap = null;
            if (device.VideoResolution == null)
            {
                // This device was never connected to in Kinovea, use the first media type.
                AForge.Video.DirectShow.VideoCapabilities[] caps = device.VideoCapabilities;
                if (caps.Length == 0)
                {
                    return(ImageDescriptor.Invalid);
                }

                cap = caps[0];
            }
            else
            {
                cap = device.VideoResolution;
            }

            int width  = cap.FrameSize.Width;
            int height = cap.FrameSize.Height;

            ImageFormat format = ImageFormat.RGB24;

            switch (cap.Compression)
            {
            case "RGB24":
            default:
                format = ImageFormat.RGB24;
                break;

            case "MJPG":
                format = ImageFormat.JPEG;
                break;
            }

            int  bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format);
            bool topDown    = false;

            return(new ImageDescriptor(format, width, height, topDown, bufferSize));
        }
Example #10
        // Token: 0x06000041 RID: 65 RVA: 0x00003458 File Offset: 0x00001658
        private void GetPinCapabilitiesAndConfigureSizeAndRate(ICaptureGraphBuilder2 graphBuilder, IBaseFilter baseFilter, Guid pinCategory, VideoCapabilities resolutionToSet, ref VideoCapabilities[] capabilities)
        {
            object obj;

            graphBuilder.FindInterface(pinCategory, MediaType.Video1, baseFilter, typeof(IAMStreamConfig).GUID, out obj);
            if (obj != null)
            {
                IAMStreamConfig iamstreamConfig = null;
                try
                {
                    iamstreamConfig = (IAMStreamConfig)obj;
                }
                catch (InvalidCastException)
                {
                }
                if (iamstreamConfig != null)
                {
                    if (capabilities == null)
                    {
                        try
                        {
                            capabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig(iamstreamConfig);
                        }
                        catch
                        {
                        }
                    }
                    if (resolutionToSet != null)
                    {
                        this.SetResolution(iamstreamConfig, resolutionToSet);
                    }
                }
            }
            if (capabilities == null)
            {
                capabilities = new VideoCapabilities[0];
            }
        }
Example #11
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig( IAMStreamConfig videoStreamConfig )
        {
            if ( videoStreamConfig == null )
                throw new ArgumentNullException( "videoStreamConfig" );

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities( out count, out size );

            if ( hr != 0 )
                Marshal.ThrowExceptionForHR( hr );

            if ( count <= 0 )
                throw new NotSupportedException( "This video device does not report capabilities." );

            if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) )
                throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." );

            Dictionary<uint, VideoCapabilities> videocapsList = new Dictionary<uint, VideoCapabilities>( );

            for ( int i = 0; i < count; i++ )
            {
                // vidcaps[i] = new VideoCapabilities( videoStreamConfig, i );
                VideoCapabilities vc = new VideoCapabilities( videoStreamConfig, i );

                uint key = ( ( (uint) vc.FrameSize.Height ) << 16 ) | (uint) vc.FrameSize.Width;

                if ( !videocapsList.ContainsKey( key ) )
                {
                    videocapsList.Add( key, vc );
                }
            }

            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo( videocaps, 0 );

            return videocaps;
        }
Example #12
        /// <summary>
        /// Configure the device according to what is saved in the preferences for it.
        /// </summary>
        private void ConfigureDevice()
        {
            SpecificInfo info = summary.Specific as SpecificInfo;

            if (info == null || info.MediaTypeIndex < 0)
            {
                log.DebugFormat("No configuration saved in preferences for this device.");
                return;
            }

            // Initialize device configuration (Extract and cache media types on the output pin).
            // Double check we have an existing index and set the format.
            AForge.Video.DirectShow.VideoCapabilities[] capabilities = device.VideoCapabilities;
            AForge.Video.DirectShow.VideoCapabilities   match        = capabilities.FirstOrDefault(c => c.Index == info.MediaTypeIndex);
            if (match == null)
            {
                log.ErrorFormat("Could not match the saved media type.");
                return;
            }

            device.SetMediaTypeAndFramerate(info.MediaTypeIndex, info.SelectedFramerate);

            log.DebugFormat("Device set to saved configuration: Index:{0}. ({1}×{2} @ {3:0.###} fps ({4})).",
                            info.MediaTypeIndex, match.FrameSize.Width, match.FrameSize.Height, info.SelectedFramerate, match.Compression);

            // Reload camera properties in case the firmware "forgot" them.
            // This means changes done in other software will be overwritten.
            try
            {
                CameraPropertyManager.Write(device, info.CameraProperties);
            }
            catch
            {
                log.ErrorFormat("An error occured while reloading camera properties.");
            }
        }
Example #13
        // Configure specified pin and collect its capabilities if required
        private void GetPinCapabilitiesAndConfigureSizeAndRate( ICaptureGraphBuilder2 graphBuilder, IBaseFilter baseFilter,
            Guid pinCategory, Size size, int frameRate, ref VideoCapabilities[] capabilities )
        {
            object streamConfigObject;
            graphBuilder.FindInterface( pinCategory, MediaType.Video, baseFilter, typeof( IAMStreamConfig ).GUID, out streamConfigObject );

            if ( streamConfigObject != null )
            {
                IAMStreamConfig streamConfig = null;

                try
                {
                    streamConfig = (IAMStreamConfig) streamConfigObject;
                }
                catch ( InvalidCastException )
                {
                }

                if ( streamConfig != null )
                {
                    if ( capabilities == null )
                    {
                        try
                        {
                            // get all video capabilities
                            capabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig( streamConfig );
                        }
                        catch
                        {
                        }
                    }

                    // check if it is required to change capture settings
                    if ( ( frameRate != 0 ) || ( ( size.Width != 0 ) && ( size.Height != 0 ) ) )
                    {
                        SetFrameSizeAndRate( streamConfig, size, frameRate );
                    }
                }
            }

            // if failed resolving capabilities, then just create empty capabilities array,
            // so we don't try again
            if ( capabilities == null )
            {
                capabilities = new VideoCapabilities[0];
            }
        }
Example #14
        // Set resolution for the specified stream configuration
        private void SetResolution( IAMStreamConfig streamConfig, VideoCapabilities resolution )
        {
            if ( resolution == null )
            {
                return;
            }

            // iterate through device's capabilities to find mediaType for desired resolution
            int capabilitiesCount = 0, capabilitySize = 0;
            AMMediaType newMediaType = null;
            VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

            streamConfig.GetNumberOfCapabilities( out capabilitiesCount, out capabilitySize );

            for ( int i = 0; i < capabilitiesCount; i++ )
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities( streamConfig, i );

                    if ( resolution == vc )
                    {
                        if ( streamConfig.GetStreamCaps( i, out newMediaType, caps ) == 0 )
                        {
                            break;
                        }
                    }
                }
                catch
                {
                }
            }

            // set the new format
            if ( newMediaType != null )
            {
                streamConfig.SetFormat( newMediaType );
                newMediaType.Dispose( );
            }
        }
Example #15
        public void ResetItems(VideoCapabilities[] vidCaps)
        {
            if (this.items == null)
            {
                this.items = new List<CameraResolution>();
            }
            else
            {
                this.items.Clear();
            }

            if (vidCaps == null || vidCaps.Length == 0)
            {
                return;
            }

            foreach (VideoCapabilities vidCap in vidCaps)
            {
                this.items.Add(new CameraResolution(vidCap));
            }
        }
Example #16
        /// <summary>
        /// Check if two video capabilities are equal.
        /// </summary>
        /// 
        /// <param name="vc2">Second video capability to compare with.</param>
        /// 
        /// <returns>Returns true if both video capabilities are equal or false otherwise.</returns>
        /// 
        public bool Equals( VideoCapabilities vc2 )
        {
            if ( (object) vc2 == null )
            {
                return false;
            }

            return ( ( FrameSize == vc2.FrameSize ) && ( BitCount == vc2.BitCount ) );
        }
Example #17
 public CameraResolution(VideoCapabilities vidCap)
 {
     this.id = Guid.NewGuid();
     this.frameSize = vidCap.FrameSize;
     this.frameRate = vidCap.AverageFrameRate;
 }
Example #18
 // Token: 0x06000029 RID: 41 RVA: 0x000028BA File Offset: 0x00000ABA
 public bool Equals(VideoCapabilities vc2)
 {
     return(vc2 != null && this.FrameSize == vc2.FrameSize && this.BitCount == vc2.BitCount);
 }
Example #19
        public void Start(VideoCapabilities Capability)
        {
            FrameNumber = 0;
            videoDevice.VideoResolution = Capability;
            videoDevice.Start();

            State = CameraState.CAMERA_STARTED;
        }
Example #20
        private void OnCurrentDeviceChanged(FilterInfo filterInfo)
        {
            if (_currentDevice != null)
            {
                _currentDevice.Stop();
                _currentDevice.NewFrame -= DeviceOnNewFrame;
            }
            _currentDevice = new VideoCaptureDevice(filterInfo.MonikerString);

            CurrentDeviceCapabilities = new ObservableCollection<VideoCapabilities>(_currentDevice.VideoCapabilities);
            SelectedDeviceCapabilities = CurrentDeviceCapabilities.FirstOrDefault();

            _currentDevice.NewFrame += DeviceOnNewFrame;
        }
Example #21
        // Configure specified pin and collect its capabilities if required
        private void GetPinCapabilitiesAndConfigureSizeAndRate( ICaptureGraphBuilder2 graphBuilder, IBaseFilter baseFilter,
            Guid pinCategory, VideoCapabilities resolutionToSet, ref VideoCapabilities[] capabilities )
        {
            object streamConfigObject;
            graphBuilder.FindInterface( pinCategory, MediaType.Video, baseFilter, typeof( IAMStreamConfig ).GUID, out streamConfigObject );

            if ( streamConfigObject != null )
            {
                IAMStreamConfig streamConfig = null;

                try
                {
                    streamConfig = (IAMStreamConfig) streamConfigObject;
                }
                catch ( InvalidCastException )
                {
                }

                if ( streamConfig != null )
                {
                    if ( capabilities == null )
                    {
                        try
                        {
                            // get all video capabilities
                            capabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig( streamConfig );
                        }
                        catch
                        {
                        }
                    }

                    // check if it is required to change capture settings
                    if ( resolutionToSet != null )
                    {
                        SetResolution( streamConfig, resolutionToSet );
                    }
                }

                Marshal.ReleaseComObject( streamConfigObject );
            }

            // if failed resolving capabilities, then just create empty capabilities array,
            // so we don't try again
            if ( capabilities == null )
            {
                capabilities = new VideoCapabilities[0];
            }
        }
Example #22
        public void Start()
        {
                // Mark the control as busy while the camera is (re)initialized
            isBusy = true;

            try
            {

                // Webcam stuff
                int idx = -1;
                List<MediaInformation> media = WebcamDevice.GetVideoDevices.ToList<MediaInformation>();
                VideoCapabilities[] cap = null;

                if (media.Count > 0)
                {
                    cam.VideoSourceId = media[0].UsbId;
                    cap = (cam.VideoSourcePlayer.VideoSource as VideoCaptureDevice).VideoCapabilities;
                    for (int i = 0; i < cap.Length; i++)
                        if (idx == -1 || (Math.Abs(cap[idx].FrameSize.Height - 600) > Math.Abs(cap[i].FrameSize.Height - 600)))
                            idx = i;
                }

                if (idx < 0)
                {
                    hasCamera = false;
                    vres = null;

                    cam.Visibility = Visibility.Visible;
                    cropArea.Visibility = Visibility.Hidden;
                    cropArea.Width = cam.Width;
                    imgSnap.Width = cam.Width;

                    ControlCenter.Instance.imgPop_snapH = cam.Height;
                    ControlCenter.Instance.imgPop_snapW = cam.Width;

                    cbRetake.Visibility = Visibility.Hidden;

                    rB1_0.Visibility = Visibility.Hidden;
                    rB1_1.Visibility = Visibility.Hidden;
                    rB1_2.Visibility = Visibility.Hidden;
                    rB2_0.Visibility = Visibility.Visible;
                    rB2_1.Visibility = Visibility.Visible;
                    rB2_2.Visibility = Visibility.Visible;

                    labB1.Foreground = new SolidColorBrush(Color.FromArgb(255, 204, 204, 204));
                    labB2.Foreground = new SolidColorBrush(Color.FromArgb(255, 204, 204, 204));
                    imgB1.Source = new BitmapImage(new Uri("pack://application:,,,/Media/camera-inactive-btn.png"));
                    imgB2.Source = new BitmapImage(new Uri("pack://application:,,,/Media/crop-inactive-btn.png"));
                }
                else
                {
                    hasCamera = true;
                    vres = cap[idx];

                    cam.Visibility = Visibility.Visible;
                    cropArea.Visibility = Visibility.Hidden;
                    cropArea.Width = cam.Width;
                    imgSnap.Width = cam.Width;

                    ControlCenter.Instance.imgPop_snapH = cam.Height;
                    ControlCenter.Instance.imgPop_snapW = cam.Width;

                    cbRetake.Visibility = Visibility.Hidden;

                    rB1_0.Visibility = Visibility.Hidden;
                    rB1_1.Visibility = Visibility.Hidden;
                    rB1_2.Visibility = Visibility.Hidden;
                    rB2_0.Visibility = Visibility.Visible;
                    rB2_1.Visibility = Visibility.Visible;
                    rB2_2.Visibility = Visibility.Visible;

                    labB1.Foreground = new SolidColorBrush(Color.FromArgb(255, 0, 0, 0));
                    labB2.Foreground = new SolidColorBrush(Color.FromArgb(255, 204, 204, 204));
                    imgB1.Source = new BitmapImage(new Uri("pack://application:,,,/Media/camera-active-btn.png"));
                    imgB2.Source = new BitmapImage(new Uri("pack://application:,,,/Media/crop-inactive-btn.png"));
                }

                // Calculate width and resize accordingly
                //double vw = (cam.Height / vres.FrameSize.Height) * vres.FrameSize.Width;
                //cam.Width = vw;

            }
            catch (Exception ex)
            {
                LogError(ex);
            }

            isBusy = false;
        }
Example #23
 private void OnSelectedDeviceCapabilitiesChanged(VideoCapabilities capabilities)
 {
     if (capabilities != null)
     {
         if (_currentDevice.IsRunning)
         {
             _currentDevice.SignalToStop();
         }
         _currentDevice.VideoResolution = capabilities;
         ThreadPool.QueueUserWorkItem(state =>
                 {
                     _currentDevice.WaitForStop();
                     _currentDevice.Start();
                 }
             );
     }
 }