Example #1
        private async void StartMediaCapturePreview_Click(object sender, RoutedEventArgs e)
        {
            StartCaptureElementPreview.IsEnabled = false;

            // Skip if no camera
            var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            if (devices.Count == 0)
            {
                return;
            }

            var capture = new MediaCapture();
            await capture.InitializeAsync(new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Video
            });

            var definition = await CreateEffectDefinitionAsync(
                (VideoEncodingProperties)capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview)
                );

            await capture.AddEffectAsync(MediaStreamType.VideoPreview, definition.ActivatableClassId, definition.Properties);

            CapturePreview.Source = capture;
            await capture.StartPreviewAsync();

            StartCaptureElementPreview.IsEnabled = true;
        }
Example #2
        public async Task<bool> StartRecordingAsync()
        {
            if (Initialized && PreviewStarted && !RecordingStarted)
            {
#if WINDOWS_PHONE_APP
                MediaEncodingProfile recordProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
                recordProfile.Video.FrameRate.Numerator   = _hfrVideoEncodingProperties.FrameRate.Numerator;
                recordProfile.Video.FrameRate.Denominator = _hfrVideoEncodingProperties.FrameRate.Denominator;
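                // Derive the bitrate from resolution and frame rate: roughly 0.25 bits per pixel per frame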
                double factor = (double)(recordProfile.Video.FrameRate.Numerator) / (recordProfile.Video.FrameRate.Denominator * 4);
                recordProfile.Video.Bitrate = (uint)(recordProfile.Video.Width * recordProfile.Video.Height * factor);
                await MediaCapture.StartRecordToStreamAsync(recordProfile, _recordingStream);
#else
                MediaEncodingProfile recordProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                await MediaCapture.StartRecordToStreamAsync(recordProfile, _recordingStream);
#endif

                // Get camera's resolution
                VideoEncodingProperties resolution =
                    (VideoEncodingProperties)_videoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord);
                ResolutionWidth  = (int)resolution.Width;
                ResolutionHeight = (int)resolution.Height;

                Messenger.SettingsChangedFlag = true;
                await MediaCapture.AddEffectAsync(RecordMediaStreamType, BufferTransformActivationId, Properties);

                RecordingStarted = true;
            }

            return RecordingStarted;
        }
Example #3
        private async void StartMediaCaptureRecord_Click(object sender, RoutedEventArgs e)
        {
            StartCaptureElementRecord.IsEnabled = false;

            // Skip if no camera
            var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            if (devices.Count == 0)
            {
                return;
            }

            StorageFile destination = await KnownFolders.VideosLibrary.CreateFileAsync("VideoEffectsTestApp.MediaCapture.mp4", CreationCollisionOption.ReplaceExisting);

            var capture = new MediaCapture();
            await capture.InitializeAsync(new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Video
            });

            var definition = await CreateEffectDefinitionAsync(
                (VideoEncodingProperties)capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord)
                );

            await capture.AddEffectAsync(MediaStreamType.VideoRecord, definition.ActivatableClassId, definition.Properties);

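            // Record three seconds of QVGA MP4 to the Videos library, then stop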
            await capture.StartRecordToStorageFileAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Qvga), destination);

            await Task.Delay(3000);

            await capture.StopRecordAsync();

            StartCaptureElementRecord.IsEnabled = true;
        }
Example #4
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            capture_element.Source = m_camera;
            await m_camera.StartPreviewAsync();

            //StorageFile file = await KnownFolders.DocumentsLibrary.CreateFileAsync("_exposure.jpg", CreationCollisionOption.ReplaceExisting);


            await m_camera.AddEffectAsync(MediaStreamType.VideoPreview, "GrayscaleTransform.GrayscaleEffect", null);


            //m_camera.StartRecordToStorageFileAsync()
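            // Query the exposure compensation range and the shutter-time (ExposureControl) range the driver exposes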
            float maxExp = m_camera.VideoDeviceController.ExposureCompensationControl.Max;
            float minExp = m_camera.VideoDeviceController.ExposureCompensationControl.Min;
            float step   = m_camera.VideoDeviceController.ExposureCompensationControl.Step;

            TimeSpan maxExpControl = m_camera.VideoDeviceController.ExposureControl.Max;
            TimeSpan minExpControl = m_camera.VideoDeviceController.ExposureControl.Min;
            TimeSpan stepExp       = m_camera.VideoDeviceController.ExposureControl.Step;

            TimeSpan value = m_camera.VideoDeviceController.ExposureControl.Value;
            var supportedVideoFormats = new List<string> { "nv12", "rgb32" };

            var availableMediaStreamProperties = m_camera.VideoDeviceController
                .GetAvailableMediaStreamProperties(MediaStreamType.VideoRecord)
                .OfType<Windows.Media.MediaProperties.VideoEncodingProperties>()
                .Where(p => p != null &&
                            !String.IsNullOrEmpty(p.Subtype) &&
                            supportedVideoFormats.Contains(p.Subtype.ToLower()))
                .ToList();

            Windows.Media.MediaProperties.VideoEncodingProperties previewFormat = availableMediaStreamProperties.FirstOrDefault();

            //previewFormat.FrameRate.Numerator = 15;
            //previewFormat.FrameRate.Denominator = 1;

            //availableMediaStreamProperties.All;
            await m_camera.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, previewFormat);

            await m_camera.VideoDeviceController.ExposureControl.SetAutoAsync(false);

            TimeSpan k = new TimeSpan(833);

            await m_camera.VideoDeviceController.ExposureControl.SetValueAsync(minExpControl);

            await this.StreamToFile();

            //m_camera.VideoDeviceController.ExposureCompensationControl.SetValueAsync(0);
            //m_camera.VideoDeviceController.ExposureCompensationControl.SetValueAsync(0.0f);

            //double exp;
            //bool b = m_camera.VideoDeviceController.Exposure.TryGetValue(out exp);

            // m_camera.VideoDeviceController.ExposureControl.SetValueAsync(new TimeSpan(1))
            //m_camera.VideoDeviceController.Exposure.TrySetValue(0.00833);
            //m_camera.VideoDeviceController.ExposureControl.SetAutoAsync(false);
            //m_camera.VideoDeviceController.ExposureControl.SetValueAsync(new TimeSpan(10 * 2000));

            //  m_camera.VideoDeviceController.ExposureControl.SetValueAsync(minExpControl);
        }
Example #5
        private async void setEffects()
        {
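            // The analyzer hands preview frames to AnalyzeBitmap as Yuv420Sp; the 640 argument presumably bounds the analyzed frame size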
            var definition = new LumiaAnalyzerDefinition(ColorMode.Yuv420Sp, 640, AnalyzeBitmap);

            await mediaCapture.AddEffectAsync(
                MediaStreamType.VideoPreview,
                definition.ActivatableClassId,
                definition.Properties
                );
        }
Example #6
        public async Task<string> ScanAsync()
        {
            using (var capture = new MediaCapture())
            {
                await capture.InitializeAsync();

                var definition = new LumiaAnalyzerDefinition(ColorMode.Yuv420Sp, 640, AnalyzeBitmap);
                await capture.AddEffectAsync(MediaStreamType.VideoPreview, definition.ActivatableClassId, definition.Properties);

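                // Return the text from the last analyzer result, if AnalyzeBitmap has produced one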
                return _result?.Text ?? "Failed";
            }
        }
Example #7
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;

            await _capture.InitializeAsync(new MediaCaptureInitializationSettings
            {
                VideoDeviceId        = await GetBackOrDefaulCameraIdAsync(),
                StreamingCaptureMode = StreamingCaptureMode.Video
            });

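            // Load the compiled pixel shaders that are applied to the NV12 luma (Y) and chroma (UV) planes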
            IBuffer shaderY = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_Y.cso");

            IBuffer shaderUV = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_UV.cso");

            var definition = new VideoEffects.ShaderEffectDefinitionNv12(shaderY, shaderUV);

            await _capture.AddEffectAsync(MediaStreamType.VideoRecord, definition.ActivatableClassId, definition.Properties);

            Preview.Source = _capture;
            await _capture.StartPreviewAsync();

            var previewProps = (VideoEncodingProperties)_capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

            TextLog.Text += String.Format("Preview: {0} {1}x{2} {3}fps\n", previewProps.Subtype, previewProps.Width, previewProps.Height, previewProps.FrameRate.Numerator / (float)previewProps.FrameRate.Denominator);

            var recordProps = (VideoEncodingProperties)_capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord);

            TextLog.Text += String.Format("Record: {0} {1}x{2} {3}fps\n", recordProps.Subtype, recordProps.Width, recordProps.Height, recordProps.FrameRate.Numerator / (float)recordProps.FrameRate.Denominator);

            StorageFile file = await KnownFolders.VideosLibrary.CreateFileAsync("VideoEffectRecordCaptureTestApp.mp4", CreationCollisionOption.ReplaceExisting);

            var profile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);

            profile.Audio = null;

            TextLog.Text += "Starting record\n";
            await _capture.StartRecordToStorageFileAsync(profile, file);

            TextLog.Text += "Record started to " + file.Path + "\n";

            for (int i = 0; i < 10; i++)
            {
                await Task.Delay(1000);

                TextLog.Text += i + "s\n";
            }

            TextLog.Text += "Stopping record\n";
            await _capture.StopRecordAsync();

            TextLog.Text += "Record stopped\n";
        }
Example #8
        /// <summary>
        /// Starts/stops the effect.
        /// </summary>
        /// <returns>True, if the effect was set on. False, if it was turned off.</returns>
        public async Task<bool> ToggleEffectAsync()
        {
            if (Started)
            {
                if (EffectSet)
                {
                    await MediaCapture.ClearEffectsAsync(PreviewMediaStreamType);

                    StateManager.State = VideoEffectState.Idle;
                }
                else
                {
                    Messenger.SettingsChangedFlag = true;
                    Messenger.ModeChangedFlag     = true;
                    await MediaCapture.AddEffectAsync(PreviewMediaStreamType, RealtimeTransformActivationId, Properties);

                    StateManager.State = VideoEffectState.Locking;
                }
            }

            return EffectSet;
        }
Example #9
        async void AddEffectCapture(MediaCapture captureMgrReal)
        {
            // <SnippetCaptureAddEffect>
            MediaCapture captureMgr = new MediaCapture();
            await captureMgr.InitializeAsync();

            await captureMgr.AddEffectAsync(
                MediaStreamType.VideoRecord,
                Windows.Media.VideoEffects.VideoStabilization,
                null);

            // </SnippetCaptureAddEffect>

            // For Testing snippet.  Don't include in snippet.
            captureMgrReal        = captureMgr;
            capturePreview.Source = captureMgrReal;
            await captureMgr.StartPreviewAsync();

            // <SnippetCaptureRemoveEffect>
            // captureMgr is of type MediaCapture.
            await captureMgr.ClearEffectsAsync(MediaStreamType.VideoRecord);

            // </SnippetCaptureRemoveEffect>
        }
Example #10
        // Must be called on the UI thread
        private async Task InitializeCaptureAsync()
        {
            if (m_initializing || (m_capture != null))
            {
                return;
            }
            m_initializing = true;

            try
            {
                var settings = new MediaCaptureInitializationSettings
                {
                    VideoDeviceId        = await GetBackOrDefaulCameraIdAsync(),
                    StreamingCaptureMode = StreamingCaptureMode.Video
                };

                var capture = new MediaCapture();
                await capture.InitializeAsync(settings);

                // Select the capture resolution closest to screen resolution
                var formats = capture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview);
                var format  = (VideoEncodingProperties)formats.OrderBy((item) =>
                {
                    var props = (VideoEncodingProperties)item;
                    return Math.Abs(props.Width - this.ActualWidth) + Math.Abs(props.Height - this.ActualHeight);
                }).First();
                await capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, format);

                // Make the preview full screen
                var scale = Math.Min(this.ActualWidth / format.Width, this.ActualHeight / format.Height);
                Preview.Width  = format.Width;
                Preview.Height = format.Height;
                Preview.RenderTransformOrigin = new Point(.5, .5);
                Preview.RenderTransform       = new ScaleTransform {
                    ScaleX = scale, ScaleY = scale
                };
                //BarcodeOutline.Width = format.Width;
                //BarcodeOutline.Height = format.Height;
                //BarcodeOutline.RenderTransformOrigin = new Point(.5, .5);
                //BarcodeOutline.RenderTransform = new ScaleTransform { ScaleX = scale, ScaleY = scale };

                // Enable QR code detection
                var definition = new LumiaAnalyzerDefinition(ColorMode.Yuv420Sp, 640, AnalyzeBitmap);
                await capture.AddEffectAsync(MediaStreamType.VideoPreview, definition.ActivatableClassId, definition.Properties);

                // Start preview
                //m_time.Restart();
                Preview.Source = capture;
                await capture.StartPreviewAsync();

                capture.Failed += capture_Failed;

                m_autoFocus = await ContinuousAutoFocus.StartAsync(capture.VideoDeviceController.FocusControl);

                m_capture = capture;
            }
            catch (Exception e)
            {
                MessageDialog dialog = new MessageDialog(string.Format("Failed to start the camera: {0}", e.Message));
                await dialog.ShowAsync();
            }

            m_initializing = false;
        }
Example #11
        private async void StartAsync(MediaCaptureInitializationSettings settings)
        {
            await Capture.InitializeAsync(settings);

            var veps = Capture.VideoDeviceController
                .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
                .Where(x => x is VideoEncodingProperties)
                .Cast<VideoEncodingProperties>()
                .ToList();

            if (veps.Count > 0)
            {
                // find the closest resolution
                if (DesiredWidth > 0 || DesiredHeight > 0)
                {
                    var closest = new List<VideoEncodingProperties>();

                    var distance = -1;
                    foreach (var vep in veps)
                    {
                        if (distance < 0)
                        {
                            closest.Add(vep);
                            distance = GetSizeDistance(vep);
                        }
                        else
                        {
                            var d = GetSizeDistance(vep);
                            if (d == distance)
                            {
                                closest.Add(vep);
                            }
                            else if (d < distance)
                            {
                                closest = new List<VideoEncodingProperties>();
                                closest.Add(vep);
                                distance = d;
                            }
                        }
                    }

                    veps = closest;
                }

                // find the closest frame rate
                if (DesiredFrameRate > 0)
                {
                    var closest = new List<VideoEncodingProperties>();

                    var distance = -1;
                    foreach (var vep in veps)
                    {
                        if (distance < 0)
                        {
                            closest.Add(vep);
                            distance = GetFrameRateDistance(vep);
                        }
                        else
                        {
                            var d = GetFrameRateDistance(vep);
                            if (d == distance)
                            {
                                closest.Add(vep);
                            }
                            else if (d < distance)
                            {
                                closest = new List<VideoEncodingProperties>();
                                closest.Add(vep);
                                distance = d;
                            }
                        }
                    }

                    veps = closest;
                }

                Log.DebugFormat("Found {0} matching video profile(s).", veps.Count.ToString());

                if (veps.Count > 0)
                {
                    var vep = veps[0];
                    Log.DebugFormat("Using ({0} x {1}, {2} fps, {3}) video profile.", vep.Width.ToString(), vep.Height.ToString(), (vep.FrameRate.Numerator / (double)vep.FrameRate.Denominator).ToString(), vep.Subtype);

                    // Set properties.
                    await Capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vep);
                }
            }

            Preview.Source = Capture;

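            // The property set is handed to the effect; PropertiesChanges observes any values the transform writes back into it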
            var properties = new PropertySet();

            properties.MapChanged += PropertiesChanges;

            await Capture.AddEffectAsync(MediaStreamType.VideoPreview, "VideoCaptureTransform.CaptureEffect", properties);

            await Capture.StartPreviewAsync();
        }
Example #12
        private async Task InitializeCaptureAsync()
        {
            if (isMediaCaptureInitializing || (mediaCapture != null))
            {
                return;
            }
            isMediaCaptureInitializing = true;

            try
            {
                var settings = new MediaCaptureInitializationSettings
                {
                    VideoDeviceId        = await GetBackOrDefaulCameraIdAsync(),
                    StreamingCaptureMode = StreamingCaptureMode.Video
                };

                var newMediaCapture = new MediaCapture();
                await newMediaCapture.InitializeAsync(settings);

                // Select the capture resolution closest to screen resolution
                var formats = newMediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview);
                var format  = (VideoEncodingProperties)formats.OrderBy((item) =>
                {
                    var props = (VideoEncodingProperties)item;
                    return(Math.Abs(props.Width - this.ActualHeight) + Math.Abs(props.Height - this.ActualWidth));
                }).First();

                Debug.WriteLine("{0} x {1}", format.Width, format.Height);

                await newMediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, format);

                // Disable flash control if supported
                if (newMediaCapture.VideoDeviceController.FlashControl.Supported)
                {
                    newMediaCapture.VideoDeviceController.FlashControl.Enabled = false;
                }

                // Prepare bitmap for reports
                bitmapWithBarcode = new WriteableBitmap((int)format.Width, (int)format.Height);

                // Make the preview full screen
                Preview.Width  = this.ActualHeight;
                Preview.Height = this.ActualWidth;

                // Enable QR code detection
                var definition = new LumiaAnalyzerDefinition(ColorMode.Yuv420Sp, Math.Min(format.Width, 800), AnalyzeBitmap);
                await newMediaCapture.AddEffectAsync(MediaStreamType.VideoPreview, definition.ActivatableClassId, definition.Properties);

                // Start preview
                Preview.Source = newMediaCapture;
                await newMediaCapture.StartPreviewAsync();

                newMediaCapture.Failed += OnMediaCaptureFailed;

                autoFocus = await ContinuousAutoFocus.StartAsync(newMediaCapture.VideoDeviceController.FocusControl);

                mediaCapture = newMediaCapture;
            }
            catch (Exception e)
            {
                Debug.WriteLine("Failed to start the camera: {0}", e.Message);
            }

            isMediaCaptureInitializing = false;
        }