Inheritance: IVideoEncodingProperties, IMediaEncodingProperties
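A minimal sketch, not taken from the samples below, of the two common ways a VideoEncodingProperties instance is obtained: built from its static factory methods, or read back from a capture device. The local names (h264, nv12, mediaCapture) are illustrative assumptions.
 // Create property sets from scratch: a compressed H.264 profile and an uncompressed NV12 one.
 VideoEncodingProperties h264 = VideoEncodingProperties.CreateH264();
 h264.Width = 1280;
 h264.Height = 720;
 VideoEncodingProperties nv12 = VideoEncodingProperties.CreateUncompressed("NV12", 640, 480);

 // Or read the format a MediaCapture device is currently using for its preview stream.
 var current = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;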
 private int GetFrameRateDistance(VideoEncodingProperties vep)
 {
     if (DesiredFrameRate > 0)
     {
         // Integer division truncates fractional frame rates (e.g. 30000/1001 becomes 29).
         var frameRate = vep.FrameRate.Numerator / vep.FrameRate.Denominator;
         return Math.Abs(DesiredFrameRate - (int)frameRate);
     }
     // -1 signals that no frame rate was requested.
     return -1;
 }
 private int GetSizeDistance(VideoEncodingProperties vep)
 {
     if (DesiredWidth > 0 && DesiredHeight > 0)
     {
         return Math.Abs(DesiredWidth - (int)vep.Width) + Math.Abs(DesiredHeight - (int)vep.Height);
     }
     else if (DesiredWidth <= 0)
     {
         // Only a desired height was specified.
         return Math.Abs(DesiredHeight - (int)vep.Height);
     }
     else if (DesiredHeight <= 0)
     {
         // Only a desired width was specified.
         return Math.Abs(DesiredWidth - (int)vep.Width);
     }
     return -1;
 }
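A hedged sketch, not part of the original snippet, of how the two distance helpers above could be combined to pick the closest matching stream format; the controller parameter and the scoring strategy are illustrative assumptions.
 private VideoEncodingProperties PickClosestFormat(VideoDeviceController controller)
 {
     return controller
         .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
         .OfType<VideoEncodingProperties>()
         // A distance of -1 means "no preference", so count it as a perfect match.
         .OrderBy(vep => Math.Max(GetSizeDistance(vep), 0) + Math.Max(GetFrameRateDistance(vep), 0))
         .FirstOrDefault();
 }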
Example #3
        public void SetEncodingProperties(VideoEncodingProperties encodingProperties, IDirect3DDevice device)
        {
            canvasDevice = CanvasDevice.CreateFromDirect3D11Device(device);
            numColumns = (uint)(encodingProperties.Width / pixelsPerTile);
            numRows = (uint)(encodingProperties.Height / pixelsPerTile);
            transforms = new Transform2DEffect[numColumns, numRows];
            crops = new CropEffect[numColumns, numRows];

            // Build one crop + transform pair per tile of the grid.
            for (uint i = 0; i < numColumns; i++)
            {
                for (uint j = 0; j < numRows; j++)
                {
                    crops[i, j] = new CropEffect();
                    crops[i, j].SourceRectangle = new Rect(i * pixelsPerTile, j * pixelsPerTile, pixelsPerTile, pixelsPerTile);
                    transforms[i, j] = new Transform2DEffect();
                    transforms[i, j].Source = crops[i, j];
                }
            }
        }
 public void SetEncodingProperties(VideoEncodingProperties encodingProperties, IDirect3DDevice device)
 {
     canvasDevice = CanvasDevice.CreateFromDirect3D11Device(device);
 }
        public async Task SetResolution(VideoEncodingProperties resolution)
        {
            var startPreview = IsInPreview;

            if (IsInPreview)
            {
                await StopPreview();
            }

            _model.Resolution = resolution;

            if (startPreview)
            {
                // Short delay between stopping and restarting the preview.
                await Task.Delay(TimeSpan.FromSeconds(0.25));
                await StartPreview();
            }
        }
 public void SetEncodingProperties(VideoEncodingProperties encodingProperties, IDirect3DDevice device)
 {
     _currentEncodingProperties = encodingProperties;
     _canvasDevice = CanvasDevice.CreateFromDirect3D11Device(device, CanvasDebugLevel.Error);
 }
 public VideoCameraResolutionModel()
 {
     IsRemote = true;
     Properties = new VideoEncodingProperties();
 }
        /// <summary>
        /// Initializes a new MediaCapture instance, starts the camera preview and begins the periodic frame-upload timer.
        /// </summary>
        /// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
        private async Task<bool> StartStreamingAsync()
        {
            bool success = true;
            try
            {
                this.mediaCapture = new MediaCapture();

                MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
                settings.StreamingCaptureMode = StreamingCaptureMode.Video;

                await this.mediaCapture.InitializeAsync(settings);
                this.mediaCapture.Failed += MediaCapture_Failed;

                var deviceController = this.mediaCapture.VideoDeviceController;
                this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

                _displayRequest.RequestActive();
                cameraPreview.Source = mediaCapture;

                await mediaCapture.StartPreviewAsync();

                // Ensure the frame-processing semaphore is in the signaled state.
                this.frameProcessingSemaphore.Release();

                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
                this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(UploadStream), timerInterval);
            }
            catch (System.UnauthorizedAccessException)
            {
                success = false;
            }
            catch (Exception)
            {
                success = false;
            }

            return success;
        }
        /// <summary>
        /// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
        /// </summary>
        /// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
        private async Task<bool> StartWebcamStreaming()
        {
            bool successful = true;

            try
            {
                this.mediaCapture = new MediaCapture();
                
                MediaCaptureSettings = new MediaCaptureInitializationSettings();
                MediaCaptureSettings.StreamingCaptureMode = StreamingCaptureMode.AudioAndVideo;

                // whichCameraToInit selects between the back and front webcams.
                if (whichCameraToInit)
                {
                    MediaCaptureSettings.VideoDeviceId = backWebcam.Id;
                }
                else
                {
                    MediaCaptureSettings.VideoDeviceId = frontWebcam.Id;
                }
                await this.mediaCapture.InitializeAsync(MediaCaptureSettings);
                this.mediaCapture.CameraStreamStateChanged += this.MediaCapture_CameraStreamStateChanged;

                // Cache the media properties as we'll need them later.
                var deviceController = this.mediaCapture.VideoDeviceController;
                this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

                // Immediately start streaming to our CaptureElement UI.
                // NOTE: CaptureElement's Source must be set before streaming is started.
                this.CamPreview.Source = this.mediaCapture;
                await this.mediaCapture.StartPreviewAsync();
            }
            catch (System.UnauthorizedAccessException)
            {
                // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
                this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);
                successful = false;
            }
            catch (Exception ex)
            {
                this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                successful = false;
            }

            return successful;
        }
Example #10
        private async Task<IVideoEffectDefinition> CreateEffectDefinitionAsync(VideoEncodingProperties props)
        {
            switch (EffectType.SelectedIndex)
            {
                case 0:
                    return new LumiaEffectDefinition(() =>
                    {
                        return new IFilter[]
                        {
                            new AntiqueFilter(),
                            new FlipFilter(FlipMode.Horizontal)
                        };
                    });

                case 1:
                    IBuffer shaderY = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_Y.cso");
                    IBuffer shaderUV = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_UV.cso");
                    return new ShaderEffectDefinitionNv12(shaderY, shaderUV);

                case 2:
                    IBuffer shader = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_RGB32.cso");
                    return new ShaderEffectDefinitionBgrx8(shader);

                case 3:
                    // Select the largest centered square area in the input video
                    uint inputWidth = props.Width;
                    uint inputHeight = props.Height;
                    uint outputLength = Math.Min(inputWidth, inputHeight);
                    Rect cropArea = new Rect(
                        (float)((inputWidth - outputLength) / 2),
                        (float)((inputHeight - outputLength) / 2),
                        (float)outputLength,
                        (float)outputLength
                        );

                    var definition = new LumiaEffectDefinition(new FilterChainFactory(() =>
                    {
                        var filters = new List<IFilter>();
                        filters.Add(new CropFilter(cropArea));
                        return filters;
                    }));
                    definition.InputWidth = inputWidth;
                    definition.InputHeight = inputHeight;
                    definition.OutputWidth = outputLength;
                    definition.OutputHeight = outputLength;
                    return definition;

                case 4:
                    return new SquareEffectDefinition();

                case 5:
                    var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/traffic.png"));
                    var foreground = new StorageFileImageSource(file);
                    return new LumiaEffectDefinition(() =>
                    {
#pragma warning disable 618
                        var filter = new BlendFilter(foreground);
                        filter.TargetOutputOption = OutputOption.PreserveAspectRatio;
                        filter.TargetArea = new Rect(0, 0, .4, .4);
                        return new IFilter[] { filter };
                    });

                case 6:
                    return new LumiaEffectDefinition(() =>
                    {
                        return new AnimatedWarp();
                    });

                case 7:
                    return new LumiaEffectDefinition(() =>
                    {
                        return new BitmapEffect();
                    });

                case 8:
                    return new LumiaEffectDefinition(() =>
                    {
                        return new BitmapEffect2();
                    });

                case 9:
                    return new CanvasEffectDefinition(() =>
                    {
                        return new CanvasEffect();
                    });

                default:
                    throw new ArgumentException("Invalid effect type");
            }
        }
Example #11
        /// <summary>
        /// Initializes the webcam, applies a 320x240 YUY2 preview format and starts the preview.
        /// </summary>
        private async void WebcamSetting()
        {
            try
            {
                if (capture != null)
                {
                    if (isPreviewing)
                    {
                        // If the capture object is still active, stop the preview first
                        await capture.StopPreviewAsync();
                        isPreviewing = false;
                    }

                    // Clear the capture object
                    capture.Dispose();
                    capture = null;
                }

                var captureInitSettings = new MediaCaptureInitializationSettings();
                captureInitSettings.VideoDeviceId = "";
                captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Video;
                captureInitSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview;

                // Check that a webcam is connected
                var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

                if (devices.Count() == 0)
                {
                    textBox.Text = "No camera is connected.";
                    return;
                }
                else if (devices.Count() == 1)
                {
                    captureInitSettings.VideoDeviceId = devices[0].Id;
                }
                else
                {
                    captureInitSettings.VideoDeviceId = devices[1].Id;
                }

                capture = new MediaCapture();
                await capture.InitializeAsync(captureInitSettings);

                // Uncomment to list the resolutions the webcam supports
                //var resolusions = GetPreviewResolusions(capture);

                
                // Configure the webcam.
                // At 640x480 the preview output becomes corrupted, so use 320x240 instead.
                VideoEncodingProperties vp = new VideoEncodingProperties();
                vp.Height = 240;        // Height
                vp.Width = 320;         // Width
                vp.Subtype = "YUY2";    // Format (subtype)

                await capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vp);

                cap.Source = capture;
                await capture.StartPreviewAsync();
                isPreviewing = true;

            }

            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }

        }
 public void SetEncodingProperties(VideoEncodingProperties backgroundProperties, IDirect3DDevice device)
 {
     _backgroundProperties = backgroundProperties;
     _canvasDevice = CanvasDevice.CreateFromDirect3D11Device(device, CanvasDebugLevel.Error);
 }
        /// <summary>
        /// Initializes the resolutions.
        /// </summary>
        private async Task InitResolutions()
        {
            if (_mediaCapture != null)
            {
                _availablePreviewResolutions =
                    _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo);

#if DEBUG
                foreach (var mediaEncodingProperties in _availablePreviewResolutions)
                {
                    var prop = mediaEncodingProperties as VideoEncodingProperties;
                    Debug.WriteLine("{0} {1}", prop.Width, prop.Height);
                }
#endif

                // Some devices produce black stripes around the picture when the highest 4:3 resolution is used,
                // so for now default to the highest available 16:9 resolution.
                _selectedPreviewResolution = _availablePreviewResolutions
                    .Where(r => ((VideoEncodingProperties)r).GetAspectRatio() == AspectRatio.Ratio16To9)
                    .OrderByDescending(r => ((VideoEncodingProperties)r).Width)
                    .FirstOrDefault() as VideoEncodingProperties;

                // Now set the resolution on the device
                if (_selectedPreviewResolution != null)
                {
                    await
                        _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo,
                            _selectedPreviewResolution);
                }
            }
        }
 public ResolutionModel(VideoEncodingProperties video)
 {
 }
        void 取得裝置資訊()
        {
            string line = "";
            裝置資訊清單.Text = "";
            // Find the highest resolution available
            VideoEncodingProperties resolutionMax = null;
            int max = 0;
            var xa = m_mediaCaptureMgr.VideoDeviceController;
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + "Device information currently in use";
            IMediaEncodingProperties xb = xa.GetMediaStreamProperties(MediaStreamType.VideoPreview);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + xb.Type + " " + "VideoPreview";
            VideoEncodingProperties xc = xb as VideoEncodingProperties;
            MediaEncodingProperties_VideoPreview = xc;
            line = string.Format("W{0} H{1} ID{2} ", xc.Width, xc.Height, xc.ProfileId);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + line;

            xb = xa.GetMediaStreamProperties(MediaStreamType.VideoRecord);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + xb.Type + " " + "VideoRecord";
            xc = xb as VideoEncodingProperties;
            MediaEncodingProperties_VideoRecord = xc;
            line = string.Format("W{0} H{1} ID{2} ", xc.Width, xc.Height, xc.ProfileId);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + line;

            xb = xa.GetMediaStreamProperties(MediaStreamType.Photo);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + xb.Type + " " + "Photo";
            xc = xb as VideoEncodingProperties;
            MediaEncodingProperties_Photo = xc;
            line = string.Format("W{0} H{1} ID{2} ", xc.Width, xc.Height, xc.ProfileId);
            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + line;

            裝置資訊清單.Text = 裝置資訊清單.Text + "\r\n" + "\r\n";

            現有裝置的可用支援(MediaStreamType.VideoPreview);
            現有裝置的可用支援(MediaStreamType.VideoRecord);
            現有裝置的可用支援(MediaStreamType.Photo);

            //await m_mediaCaptureMgr.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, resolutionMax);

        }
Example #16
 public void SetEncodingProperties(VideoEncodingProperties encodingProperties, IDirect3DDevice device)
 {
 }
        /// <summary>
        /// Predicate used to filter only the suitable encoding properties.
        /// </summary>
        /// <param name="properties">Encoding properties.</param>
        /// <returns>Whether the encoding <paramref name="properties"/> can be used.</returns>
        private static bool IsValidFormat(VideoEncodingProperties properties)
        {
            if (properties == null || properties.Width == 0 || properties.Height == 0 || string.IsNullOrEmpty(properties.Subtype))
            {
                return false;
            }

            return CameraController.SupportedFormats.Contains(properties.Subtype, StringComparer.OrdinalIgnoreCase);
        }
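A brief usage sketch for the predicate above (hedged: the _mediaCapture field, the stream type and the ordering are assumptions): filter the formats the device reports down to the ones the pipeline accepts.
            var usableFormats = _mediaCapture.VideoDeviceController
                .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
                .OfType<VideoEncodingProperties>()
                .Where(IsValidFormat)
                .OrderByDescending(p => p.Width * p.Height)
                .ToList();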
        /// <summary>
        /// Configures the pipeline to use the optimal resolutions for VS based on the settings currently in use
        /// </summary>
        /// <returns></returns>
        private async Task SetUpVideoStabilizationRecommendationAsync()
        {
            Debug.WriteLine("Setting up VS recommendation...");

            // Get the recommendation from the effect based on our current input and output configuration
            var recommendation = _videoStabilizationEffect.GetRecommendedStreamConfiguration(_mediaCapture.VideoDeviceController, _encodingProfile.Video);

            // Handle the recommendation for the input into the effect, which can contain a larger resolution than currently configured, so cropping is minimized
            if (recommendation.InputProperties != null)
            {
                // Back up the current input properties from before VS was activated
                _inputPropertiesBackup = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord) as VideoEncodingProperties;

                // Set the recommendation from the effect (a resolution higher than the current one to allow for cropping) on the input
                await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, recommendation.InputProperties);
                Debug.WriteLine("VS recommendation for the MediaStreamProperties (input) has been applied");
            }

            // Handle the recommendations for the output from the effect
            if (recommendation.OutputProperties != null)
            {
                // Back up the current output properties from before VS was activated
                _outputPropertiesBackup = _encodingProfile.Video;

                // Apply the recommended encoding profile for the output, which will result in a video with the same dimensions as configured
                // before VideoStabilization was added if an appropriate padded capture resolution was available. Otherwise, it will be slightly
                // smaller (due to cropping). This prevents upscaling back to the original size, which can result in a loss of quality
                _encodingProfile.Video = recommendation.OutputProperties;
                Debug.WriteLine("VS recommendation for the MediaEncodingProfile (output) has been applied");
            }
        }
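A hedged sketch of how the recommendation helper above is typically called when the stabilization effect is added to the record stream; the field and handler names mirror the surrounding sample, but the exact ordering is an assumption.
            // Create the video stabilization effect and add it to the record stream.
            var stabilizationDefinition = new VideoStabilizationEffectDefinition();
            _videoStabilizationEffect = (VideoStabilizationEffect)await _mediaCapture.AddVideoEffectAsync(stabilizationDefinition, MediaStreamType.VideoRecord);
            _videoStabilizationEffect.EnabledChanged += VideoStabilizationEffect_EnabledChanged;

            // Let the effect reconfigure the input/output resolutions before enabling it.
            await SetUpVideoStabilizationRecommendationAsync();
            _videoStabilizationEffect.Enabled = true;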
Example #19
        private async Task StartPreviewAsync()
        {
            try
            {
                var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

                var    deviceList = devices.ToList();
                var    device     = devices.FirstOrDefault(x => x.Name.Contains(settings.CameraKey));
                string deviceId   = device == null ? "" : device.Id;


                mediaCapture = new MediaCapture();
                await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.ExclusiveControl, VideoDeviceId = deviceId
                });

                var resolutions = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo).ToList();

                var resolution = (Windows.Media.MediaProperties.VideoEncodingProperties)resolutions
                    .Where(x => x.Type == "Video")
                    .OrderByDescending(x => ((Windows.Media.MediaProperties.VideoEncodingProperties)x).Width)
                    .FirstOrDefault();


                // Set the selected resolution on the device
                await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo, resolution);

                mediaCapture2 = new MediaCapture();
                await mediaCapture2.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                mediaCapture3 = new MediaCapture();
                await mediaCapture3.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                mediaCapture4 = new MediaCapture();
                await mediaCapture4.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                displayRequest.RequestActive();
                DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;

                // Create the definition, which will contain some initialization settings
                var definition = new FaceDetectionEffectDefinition();

                // To ensure preview smoothness, do not delay incoming samples
                definition.SynchronousDetectionEnabled = false;

                // In this scenario, choose detection speed over accuracy
                definition.DetectionMode = FaceDetectionMode.HighPerformance;

                // Add the effect to the preview stream
                _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

                // Choose the shortest interval between detection events
                _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(300);

                // Start detecting faces
                _faceDetectionEffect.Enabled = true;

                // Register for face detection events
                _faceDetectionEffect.FaceDetected += _faceDetectionEffect_FaceDetectedAsync;
            }
            catch (Exception)
            {
                // This will be thrown if the user denied access to the camera in privacy settings
                Console.Write("The app was denided access to the camera");
                return;
            }

            try
            {
                captionsControl.MainCapture.Source = mediaCapture;
                speechControl.MainCapture.Source   = mediaCapture2;
                tagsControl.MainCapture.Source     = mediaCapture3;
                captureBottomRight.Source          = mediaCapture4;
                await mediaCapture.StartPreviewAsync();

                await mediaCapture2.StartPreviewAsync();

                await mediaCapture3.StartPreviewAsync();

                await mediaCapture4.StartPreviewAsync();
            }
            catch (Exception)
            {
                //mediaCapture.CaptureDeviceExclusiveControlStatusChanged += MediaCapture_CaptureDeviceExclusiveControlStatusChanged;
            }
        }
        public FaceTrackerProxy(Canvas canvas, MainPage page, CaptureElement capture, MediaCapture mediacapture)
        {
            if (this.faceTracker == null)
            {
                this.faceTracker = FaceTracker.CreateAsync().AsTask().Result;
            }

            rootPage = page;
            VisualizationCanvas = canvas;

            this.VisualizationCanvas.Children.Clear();

            mediaCapture = mediacapture;

            var deviceController = mediaCapture.VideoDeviceController;
            this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            currentState = ScenarioState.Streaming;

            // Ensure the Semaphore is in the signalled state.
            this.frameProcessingSemaphore.Release();

            // Use a 200 millisecond interval for our timer, i.e. 5 frames per second
            TimeSpan timerInterval = TimeSpan.FromMilliseconds(200);
            this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
        }
        /// <summary>
        /// Initializes the scenario
        /// </summary>
        /// <returns></returns>
        private async Task InitializeAsync()
        {
            var streamFilteringCriteria = new
            {
                HorizontalResolution = (uint)480,
                SubType = "YUY2"
            };
            currentState = State.Initializing;
            device = new CaptureDevice();

            PreviewVideo.Visibility = Visibility.Collapsed;
            WebcamPreviewPoster.Visibility = Visibility.Visible;
            PreviewButton.Content = "Start Preview";
            LoopbackClientButton.IsEnabled = false;

            mode = defaultMode;
            LatencyModeToggle.IsOn = (mode == LatencyMode.LowLatency);
            LatencyModeToggle.IsEnabled = false;

            await device.InitializeAsync();
            var setting = await device.SelectPreferredCameraStreamSettingAsync(MediaStreamType.VideoPreview, ((x) =>
            {
                var previewStreamEncodingProperty = x as Windows.Media.MediaProperties.VideoEncodingProperties;

                return (previewStreamEncodingProperty.Width >= streamFilteringCriteria.HorizontalResolution &&
                    previewStreamEncodingProperty.Subtype == streamFilteringCriteria.SubType);
            }));

            previewEncodingProperties = setting as VideoEncodingProperties;

            PreviewSetupCompleted();
        }
        /// <summary>
        /// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
        /// </summary>
        /// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
        private async Task<bool> StartWebcamStreaming()
        {
            bool successful = true;

            try
            {
                this.mediaCapture = new MediaCapture();

                // For this scenario, we only need Video (not microphone) so specify this in the initializer.
                // NOTE: the appxmanifest only declares "webcam" under capabilities and if this is changed to include
                // microphone (default constructor) you must add "microphone" to the manifest or initialization will fail.
                MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
                settings.StreamingCaptureMode = StreamingCaptureMode.Video;
                await this.mediaCapture.InitializeAsync(settings);
                this.mediaCapture.CameraStreamStateChanged += this.MediaCapture_CameraStreamStateChanged;

                // Cache the media properties as we'll need them later.
                var deviceController = this.mediaCapture.VideoDeviceController;
                this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

                // Immediately start streaming to our CaptureElement UI.
                // NOTE: CaptureElement's Source must be set before streaming is started.
                this.CamPreview.Source = this.mediaCapture;
                await this.mediaCapture.StartPreviewAsync();

                // Use a 66 millisecond interval for our timer, i.e. 15 frames per second
                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
                this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
            }
            catch (System.UnauthorizedAccessException)
            {
                // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
                this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);
                successful = false;
            }
            catch (Exception ex)
            {
                this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                successful = false;
            }

            return successful;
        }
        /// <summary>
        ///  Disables and removes the video stabilization effect, and unregisters the event handler for the EnabledChanged event of the effect
        /// </summary>
        /// <returns></returns>
        private async Task CleanUpVideoStabilizationEffectAsync()
        {
            // No work to be done if there is no effect
            if (_videoStabilizationEffect == null) return;

            // Disable the effect
            _videoStabilizationEffect.Enabled = false;

            _videoStabilizationEffect.EnabledChanged -= VideoStabilizationEffect_EnabledChanged;

            // Remove the effect from the record stream
            await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoRecord);

            Debug.WriteLine("VS effect removed from pipeline");

            // If backed up settings (stream properties and encoding profile) exist, restore them and clear the backups
            if (_inputPropertiesBackup != null)
            {
                await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, _inputPropertiesBackup);
                _inputPropertiesBackup = null;
            }

            if (_outputPropertiesBackup != null)
            {
                _encodingProfile.Video = _outputPropertiesBackup;
                _outputPropertiesBackup = null;
            }

            // Clear the member variable that held the effect instance
            _videoStabilizationEffect = null;
        }
 public VideoCameraResolutionModel(VideoEncodingProperties video)
 {
     IsRemote = false;
     Properties = video;
 }
        /// <summary>
        /// Calculates the size and location of the rectangle that contains the preview stream within the preview control, when the scaling mode is Uniform
        /// </summary>
        /// <param name="previewResolution">The resolution at which the preview is running</param>
        /// <param name="previewControl">The control that is displaying the preview using Uniform as the scaling mode</param>
        /// <param name="displayOrientation">The orientation of the display, to account for device rotation and changing of the CaptureElement display ratio compared to the camera stream</param>
        /// <returns></returns>
        public static Rect GetPreviewStreamRectInControl(VideoEncodingProperties previewResolution, CaptureElement previewControl, DisplayOrientations displayOrientation)
        {
            var result = new Rect();

            // In case this function is called before everything is initialized correctly, return an empty result
            if (previewControl == null || previewControl.ActualHeight < 1 || previewControl.ActualWidth < 1 ||
                previewResolution == null || previewResolution.Height == 0 || previewResolution.Width == 0)
            {
                return result;
            }

            var streamWidth = previewResolution.Width;
            var streamHeight = previewResolution.Height;

            // For portrait orientations, the width and height need to be swapped
            if (displayOrientation == DisplayOrientations.Portrait || displayOrientation == DisplayOrientations.PortraitFlipped)
            {
                streamWidth = previewResolution.Height;
                streamHeight = previewResolution.Width;
            }

            // Start by assuming the preview display area in the control spans the entire width and height both (this is corrected in the next if for the necessary dimension)
            result.Width = previewControl.ActualWidth;
            result.Height = previewControl.ActualHeight;

            // If UI is "wider" than preview, letterboxing will be on the sides
            if ((previewControl.ActualWidth / previewControl.ActualHeight > streamWidth / (double)streamHeight))
            {
                var scale = previewControl.ActualHeight / streamHeight;
                var scaledWidth = streamWidth * scale;

                result.X = (previewControl.ActualWidth - scaledWidth) / 2.0;
                result.Width = scaledWidth;
            }
            else // Preview stream is "wider" than UI, so letterboxing will be on the top+bottom
            {
                var scale = previewControl.ActualWidth / streamWidth;
                var scaledHeight = streamHeight * scale;

                result.Y = (previewControl.ActualHeight - scaledHeight) / 2.0;
                result.Height = scaledHeight;
            }

            return result;
        }
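A short usage sketch (hedged: _previewProperties and PreviewControl are assumed fields) showing how the helper above is typically called to find where the preview pixels land within the CaptureElement, for example before positioning overlays.
            var displayOrientation = DisplayInformation.GetForCurrentView().CurrentOrientation;
            Rect previewArea = GetPreviewStreamRectInControl(_previewProperties, PreviewControl, displayOrientation);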
		public async Task SetResolutionAsync( VideoEncodingProperties resolution )
		{
			this.AssertInitialized();

			await this.CaptureManager.VideoDeviceController.SetMediaStreamPropertiesAsync( MediaStreamType.VideoPreview, resolution );
		}