Example #1
        protected override async void Start()
        {
            ResourceCache.AutoReloadResources = true;
            base.Start();

            EnableGestureTapped = true;

            _busyIndicatorNode = Scene.CreateChild();
            _busyIndicatorNode.SetScale(0.06f);
            _busyIndicatorNode.CreateComponent<BusyIndicator>();

            _mediaCapture = new MediaCapture();
            await _mediaCapture.InitializeAsync();

            await _mediaCapture.AddVideoEffectAsync(new MrcVideoEffectDefinition(), MediaStreamType.Photo);

            await RegisterCortanaCommands(new Dictionary<string, Action> {
                { "Describe", () => CaptureAndShowResult(false) },
                { "Text describe", () => CaptureAndShowResult(true) },
                { "Enable preview", () => EnablePreview(true) },
                { "Disable preview", () => EnablePreview(false) },
                { "Help", Help }
            });

            ShowBusyIndicator(true);
            await TextToSpeech("Welcome to the Hololens and Flow demo!");

            ShowBusyIndicator(false);

            _withPreview = true;
            _inited      = true;
        }
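The MrcVideoEffectDefinition class used above (and in Example #4) is not included in the snippet. A minimal sketch, assuming the documented mixed-reality-capture effect; the property values shown are illustrative assumptions:

        public class MrcVideoEffectDefinition : IVideoEffectDefinition
        {
            // Activatable class ID of the built-in mixed reality capture effect
            public string ActivatableClassId =>
                "Windows.Media.MixedRealityCapture.MixedRealityCaptureVideoEffect";

            public IPropertySet Properties { get; } = new PropertySet
            {
                { "HologramCompositionEnabled", true },
                { "RecordingIndicatorEnabled", false },
                { "GlobalOpacityCoefficient", 0.9f }
            };
        }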
Example #2
        private async void DisplacementButton_Click(object sender, RoutedEventArgs e)
        {
            DisableButtons();

            await ResetEffectsAsync();

            await mediaCapture.AddVideoEffectAsync(
                new VideoEffectDefinition(
                    typeof(DisplacementEffect).FullName,
                    effectConfiguration
                    ),
                MediaStreamType.VideoPreview
                );

            EnableButtons();
        }
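ResetEffectsAsync is not part of the snippet; assuming it simply removes any previously added effects from the preview stream, a minimal sketch:

        private async Task ResetEffectsAsync()
        {
            // Remove all effects previously added to the preview stream
            await mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);
        }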
Example #3
        private async Task<MediaCapture> StartRecording(string fileName)
        {
            var devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Devices.Enumeration.DeviceClass.VideoCapture);

            var cameraDeviceId = devices.FirstOrDefault()?.Id;

            var captureSettings = new MediaCaptureInitializationSettings
            {
                VideoDeviceId        = cameraDeviceId,
                AudioDeviceId        = string.Empty,
                PhotoCaptureSource   = PhotoCaptureSource.VideoPreview,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };

            var mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(captureSettings);

            var def = new Windows.Media.Effects.VideoEffectDefinition(Windows.Media.VideoEffects.VideoStabilization);
            await mediaCapture.AddVideoEffectAsync(def, MediaStreamType.VideoRecord);

            var profile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Qvga);

            var storageFolder = await KnownFolders.RemovableDevices
                                .GetFolderAsync("E:\\");

            var storageFile = await storageFolder.CreateFileAsync(fileName, CreationCollisionOption.GenerateUniqueName);

            await mediaCapture.StartRecordToStorageFileAsync(profile, storageFile);

            return mediaCapture;
        }
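The snippet only starts recording. A hypothetical counterpart (not in the original) that stops the recording and releases the camera:

        private static async Task StopRecording(MediaCapture mediaCapture)
        {
            // Finish writing the MP4 file, then release the capture device
            await mediaCapture.StopRecordAsync();
            mediaCapture.Dispose();
        }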
Example #4
        protected override async void Start()
        {
            ResourceCache.AutoReloadResources = true;
            base.Start();

            EnableGestureTapped = true;

            busyIndicatorNode = Scene.CreateChild();
            busyIndicatorNode.SetScale(0.06f);
            busyIndicatorNode.CreateComponent<BusyIndicator>();

            mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync();

            await mediaCapture.AddVideoEffectAsync(new MrcVideoEffectDefinition(), MediaStreamType.Photo);

            await RegisterCortanaCommands(new Dictionary<string, Action> {
                { "Describe", () => CaptureAndShowResult(false) },
                { "Read this text", () => CaptureAndShowResult(true) },
                { "Enable preview", () => EnablePreview(true) },
                { "Disable preview", () => EnablePreview(false) },
                { "Help", Help }
            });

            ShowBusyIndicator(true);
            await TextToSpeech("Welcome to the Microsoft Cognitive Services sample for HoloLens and UrhoSharp.");

            ShowBusyIndicator(false);

            inited = true;
        }
Example #5
        /// <summary>
        /// Initializes face detection on the preview stream, from https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/scene-analysis-for-media-capture
        /// </summary>
        /// <returns></returns>
        public async Task InitializeFaceDetection()
        {
            // Load the face service client to do face recognition with Cognitive Services
            if (FaceService == null)
            {
                FaceService = await FaceServiceHelper.CreateNewFaceServiceAsync();
            }

            // Create the definition, which will contain some initialization settings
            var definition = new FaceDetectionEffectDefinition();

            // To ensure preview smoothness, do not delay incoming samples
            definition.SynchronousDetectionEnabled = false;

            // In this scenario, choose detection speed over accuracy
            definition.DetectionMode = FaceDetectionMode.HighPerformance;

            // Add the effect to the preview stream
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // TODO: Change to a sensible interval to save Cognitive Services API calls
            // Choose the shortest interval between detection events
            //_faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            // Currently we detect faces locally every 3 seconds to save API calls
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(3000);

            // Start detecting faces
            _faceDetectionEffect.Enabled = true;

            // Register for face detection events
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            _isDetecting = true;
        }
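The FaceDetectionEffect_FaceDetected handler registered above is not shown. A minimal sketch, assuming the code lives in a page with a Dispatcher:

        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // The event fires on a worker thread, so marshal back to the UI thread
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                // args.ResultFrame.DetectedFaces holds a FaceBox for every detected face
                Debug.WriteLine($"Detected {args.ResultFrame.DetectedFaces.Count} face(s)");
            });
        }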
Example #6
        /// <summary>
        /// Asynchronously starts video mode.
        ///
        /// Activates the web camera with the various settings specified in CameraParameters.
        /// Only one VideoCapture instance can start the video mode at any given time.
        /// After starting the video mode, you listen for new video frame samples via the VideoCapture.FrameSampleAcquired event,
        /// or by calling VideoCapture.RequestNextFrameSample(), which will return the next available sample.
        /// While in video mode, more power is consumed, so make sure you call VideoCapture.StopVideoModeAsync as soon as you can afford the start/stop video mode overhead.
        /// </summary>
        /// <param name="setupParams">Parameters that change how video mode is used.</param>
        /// <param name="onVideoModeStartedCallback">This callback will be invoked once video mode has been activated.</param>
        public async void StartVideoModeAsync(UnityEngine.Windows.WebCam.CameraParameters setupParams, OnVideoModeStartedCallback onVideoModeStartedCallback)
        {
            var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id]; //Returns a MediaFrameSource

            if (mediaFrameSource == null)
            {
                onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
                return;
            }

            var pixelFormat = ConvertCapturePixelFormatToMediaEncodingSubtype(setupParams.pixelFormat);

            _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);

            _frameReader.FrameArrived += HandleFrameArrived;
            await _frameReader.StartAsync();

            VideoEncodingProperties properties = GetVideoEncodingPropertiesForCameraParams(setupParams);

            // Historical context: https://github.com/VulcanTechnologies/HoloLensCameraStream/issues/6
            if (setupParams.rotateImage180Degrees)
            {
                properties.Properties.Add(ROTATION_KEY, 180);
            }

            //	gr: taken from here https://forums.hololens.com/discussion/2009/mixedrealitycapture
            IVideoEffectDefinition ved = new VideoMRCSettings(setupParams.enableHolograms, setupParams.enableVideoStabilization, setupParams.videoStabilizationBufferSize, setupParams.hologramOpacity);
            await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);

            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(STREAM_TYPE, properties);

            onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(0, ResultType.Success, true));
        }
Example #7
        /// <summary>
        /// Initializes the camera and the local face detector
        /// </summary>
        private async Task Init()
        {
            MC = new MediaCapture();
            var cameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            var camera   = cameras.First();
            var settings = new MediaCaptureInitializationSettings()
            {
                VideoDeviceId = camera.Id
            };
            await MC.InitializeAsync(settings);

            ViewFinder.Source = MC;

            // Create face detection
            var def = new FaceDetectionEffectDefinition();

            def.SynchronousDetectionEnabled = false;
            def.DetectionMode                     = FaceDetectionMode.HighPerformance;
            FaceDetector                          = (FaceDetectionEffect)(await MC.AddVideoEffectAsync(def, MediaStreamType.VideoPreview));
            FaceDetector.FaceDetected            += FaceDetectedEvent;
            FaceDetector.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
            FaceDetector.Enabled                  = true;

            await MC.StartPreviewAsync();

            var props = MC.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

            VideoProps = props as VideoEncodingProperties;
        }
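FaceDetectedEvent is not shown. A minimal sketch that counts faces on the UI thread; FacesCountText is a hypothetical control, and VideoProps would be used to scale the face boxes onto the ViewFinder:

        private async void FaceDetectedEvent(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var faces = args.ResultFrame.DetectedFaces;
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                // Each FaceBox is expressed in preview-stream pixels; divide the ViewFinder
                // size by VideoProps.Width/Height to scale the boxes onto the UI
                FacesCountText.Text = $"{faces.Count} face(s)";
            });
        }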
Example #8
        private async void DialController_ButtonClicked(RadialController sender, RadialControllerButtonClickedEventArgs args)
        {
            try
            {
                ShowBusyIndicator("Capturing photo...");

                var cacheFolder = ApplicationData.Current.LocalCacheFolder;
                var file        = await cacheFolder.CreateFileAsync("tempImg.jpg", CreationCollisionOption.ReplaceExisting);

                using (var fileStream = await file.OpenStreamForWriteAsync())
                {
                    if (previewEffect != null)
                    {
                        // We need to make sure the photo stream also has the effect
                        switch (mediaCapture.MediaCaptureSettings.VideoDeviceCharacteristic)
                        {
                        // In these cases, the effect is already applied to the stream that will be used for the photo
                        case VideoDeviceCharacteristic.AllStreamsIdentical:
                        case VideoDeviceCharacteristic.PreviewPhotoStreamsIdentical:
                            break;

                        // However, in these cases, we need to apply the effect to the photo stream
                        case VideoDeviceCharacteristic.AllStreamsIndependent:
                        case VideoDeviceCharacteristic.PreviewRecordStreamsIdentical:
                            await mediaCapture.AddVideoEffectAsync(previewEffect, MediaStreamType.Photo);

                            break;
                        }
                    }

                    await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), fileStream.AsRandomAccessStream());
                }

                this.Frame.Navigate(typeof(EditPhotoPage), file.Path);
            }
            catch (Exception ex)
            {
                await new MessageDialog($"Something went wrong saving the image: {ex}", "Exception").ShowAsync();
            }
            finally
            {
                HideBusyIndicator();
            }
        }
Example #9
        async Task<int> InitMediaCapture()
        {
            mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync();

            // Note: VideoEffectDefinition has no parameterless constructor; the activatable
            // class ID of the effect must be passed in (hypothetical effect type shown below).
            await mediaCapture.AddVideoEffectAsync(new VideoEffectDefinition(typeof(MyVideoEffect).FullName), MediaStreamType.Photo);

            //await mediaCapture.InitializeAsync();
            return 1;
        }
Example #10
        /// <summary>
        /// Initializes the camera.
        /// Will raise `CameraInit*` events.
        /// </summary>
        /// <returns>Task.</returns>
        public async Task InitializeCameraAsync(Size previewControlSize)
        {
            // Set ui-related values.
            this.previewControlSize = previewControlSize;

            // Ensure that the media capture hasn't been initialized yet.
            if (MediaCapture != null)
            {
                return;
            }

            // Get all camera devices.
            var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            // Ensure that exactly one camera has been found.
            if (devices.Count != 1)
            {
                IsFaceDetectionControlAvailable = false;
                CameraInitFailed(this, new MessageEventArgs("No or more than one camera found. No face detection available."));
                return;
            }

            // Create new media capture instance.
            MediaCapture = new MediaCapture();

            // Setup callbacks.
            MediaCapture.Failed += MediaCapture_Failed;

            // Init the actual capturing.
            var settings = new MediaCaptureInitializationSettings {
                VideoDeviceId = devices[0].Id
            };
            await MediaCapture.InitializeAsync(settings);

            // Update the preview properties from the media capture.
            previewProperties = MediaCapture
                                .VideoDeviceController
                                .GetMediaStreamProperties(MediaStreamType.VideoPreview)
                                as VideoEncodingProperties;

            // Setup face detection
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            faceDetectionEffect = (FaceDetectionEffect)await MediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;

            // Operation was successful.
            IsFaceDetectionControlAvailable = true;
            CameraInitSucceeded(this, new MessageEventArgs("Face detection is now available."));
        }
Example #11
 private async Task ApplyVideoEffectAsync()
 {
     if (currentState == RecordingState.Previewing)
     {
         previewEffect = ConstructVideoEffect();
         await mediaCapture.AddVideoEffectAsync(previewEffect, MediaStreamType.VideoPreview);
     }
     else if (currentState == RecordingState.NotInitialized || currentState == RecordingState.Stopped)
     {
         await new MessageDialog("The preview or recording stream is not available.", "Effect not applied").ShowAsync();
     }
 }
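ConstructVideoEffect is not shown. A plausible sketch that builds a definition for one of the custom effects seen elsewhere on this page (the choice of effect and empty property set are assumptions):

 private IVideoEffectDefinition ConstructVideoEffect()
 {
     // Build a definition for the currently selected custom effect
     return new VideoEffectDefinition(typeof(DisplacementEffect).FullName, new PropertySet());
 }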
Example #12
        private async void btnDetectFaces_Click(object sender, RoutedEventArgs e)
        {
            var faceDetectionDefinition = new FaceDetectionEffectDefinition();

            faceDetectionDefinition.SynchronousDetectionEnabled = false;
            faceDetectionDefinition.DetectionMode = FaceDetectionMode.HighPerformance;
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(faceDetectionDefinition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.Enabled = true;
        }
Example #13
        private async void FaceDetect()
        {
            var faceDetectionDefinition = new FaceDetectionEffectDefinition();

            faceDetectionDefinition.DetectionMode = FaceDetectionMode.Balanced;
            faceDetectionDefinition.SynchronousDetectionEnabled = false;
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(faceDetectionDefinition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.Enabled = true;
        }
Example #14
        private async void CoreApplication_LeavingBackground(object sender, LeavingBackgroundEventArgs e)
        {
            var Deferral = e.GetDeferral();

            if (ApplicationData.Current.LocalSettings.Values["LastSelectedCameraSource"] is string LastSelectedCameraSource)
            {
                var MediaFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

                foreach (var FrameSource in from MediaFrameSourceGroup FrameSource in MediaFrameSourceGroups
                         where FrameSource.DisplayName == LastSelectedCameraSource
                         select FrameSource)
                {
                    CaptureControl.Source = Capture = await MediaCaptureProvider.SetFrameSourceAndInitializeCaptureAsync(FrameSource);

                    break;
                }
            }
            else
            {
                CaptureControl.Source = Capture = await MediaCaptureProvider.SetFrameSourceAndInitializeCaptureAsync();
            }

            ApplicationData.Current.LocalSettings.Values["ReturnCosmeticsEffectExcution"] = true;
            VideoEffectDefinition EffectDefinition = new VideoEffectDefinition("CosmeticsEffect.CosmeticsVideoEffect");

            ApplicationData.Current.LocalSettings.Values["ReturnCosmeticsEffectExcution"] = false;
            VideoEffect = await Capture.AddVideoEffectAsync(EffectDefinition, MediaStreamType.VideoPreview);

            CaptureControl.Source = Capture;
            await Capture.StartPreviewAsync();

            VideoEffect.SetProperties(new PropertySet()
            {
                { "LipColor", (CosmeticsControl.SelectedItem as CosmeticsItem).LipColor }
            });
            StayAwake.RequestActive();

            Deferral.Complete();
        }
Example #15
        /// <summary>
        /// Starts the camera preview
        /// </summary>
        /// <param name="IsCapturedHologram"></param>
        public async Task<bool> StartVideoModeAsync(bool IsCapturedHologram)
        {
            // Get the MediaFrameSource.
            // A MediaFrameSource cannot be obtained directly from a MediaFrameSourceGroup;
            // it has to be retrieved via the MediaCapture instance.
            var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id];

            if (mediaFrameSource == null)
            {
                return false;
            }
            // Specify a format that can be converted to a Unity texture
            var pixelFormat = MediaEncodingSubtypes.Bgra8;

            // Create the MediaFrameReader
            _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);

            // Event handler invoked whenever a frame arrives
            _frameReader.FrameArrived += HandleFrameArrived;
            // Start acquiring frames
            var result = await _frameReader.StartAsync();

            // Get the list of video formats the device supports;
            // here we narrow it down to the 896x504 @ 30 fps mode supported by HoloLens
            var allPropertySets = _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
                                  .Select(x => x as VideoEncodingProperties)
                                  .Where(x =>
            {
                if (x == null)
                {
                    return false;
                }
                if (x.FrameRate.Denominator == 0)
                {
                    return false;
                }

                double frameRate = (double)x.FrameRate.Numerator / (double)x.FrameRate.Denominator;

                return x.Width == 896 && x.Height == 504 && (int)Math.Round(frameRate) == 30;
            });
            // Use the selected format to set the resolution and FPS of the captured frames
            VideoEncodingProperties properties = allPropertySets.FirstOrDefault();
            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, properties);

            // Configure Mixed Reality Capture
            IVideoEffectDefinition ved = new MixedRealityCaptureSetting(IsCapturedHologram, false, 0, IsCapturedHologram ? 0.9f : 0.0f);
            await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);

            return true;
        }
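HandleFrameArrived is not shown. A minimal sketch that pulls the latest frame from the reader; the conversion step is left as a comment:

        private void HandleFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // TryAcquireLatestFrame may return null if no new frame is available
            using (var frameReference = sender.TryAcquireLatestFrame())
            {
                var bitmap = frameReference?.VideoMediaFrame?.SoftwareBitmap;
                // Convert the Bgra8 SoftwareBitmap into a Unity texture here
            }
        }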
Example #16
        private async Task CreateFaceDetectionEffectAsync()
        {
            var definition = new FaceDetectionEffectDefinition();

            definition.SynchronousDetectionEnabled = false;
            definition.DetectionMode = FaceDetectionMode.Balanced;

            faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(66); // min. 33

            faceDetectionEffect.Enabled = true;
        }
Example #17
        private async void InitializeFaceDetection()
        {
            var definition = new FaceDetectionEffectDefinition();

            definition.SynchronousDetectionEnabled = false;
            definition.DetectionMode = FaceDetectionMode.HighPerformance;
            _faceDetectionEffect     = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            _faceDetectionEffect.Enabled = true;

            _faceDetectionEffect.FaceDetected += _faceDetectionEffect_FaceDetected;
        }
Example #18
        private async void captureVideo_Click(object sender, RoutedEventArgs e)
        {
            var settings = new MediaCaptureInitializationSettings()
            {
                StreamingCaptureMode = StreamingCaptureMode.Video
            };
            await _mediaCapture.InitializeAsync(settings);

            _mediaCapture.Failed += MediaCapture_Failed;
            captureElement.Source = _mediaCapture;

            var effect = await _mediaCapture.AddVideoEffectAsync(new VideoEffectDefinition(typeof(SaturationVideoEffect).FullName, _effectConfiguration), MediaStreamType.VideoPreview);

            await _mediaCapture.StartPreviewAsync();
        }
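_effectConfiguration is not shown; it is presumably a PropertySet carrying the effect's parameters. A sketch with a hypothetical key:

        private readonly PropertySet _effectConfiguration = new PropertySet
        {
            // Key and value are assumptions; a real SaturationVideoEffect defines its own keys
            { "Saturation", 0.5 }
        };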
Example #19
        private async void FaceDetectionInitialization()
        {
            var definition = new FaceDetectionEffectDefinition()
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            faceDetectionEffect = (FaceDetectionEffect)await capture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            //Chooses the shortest interval between detection events
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.Enabled       = true;
            faceDetectionEffect.FaceDetected += FaceDetected;
        }
Example #20
        /// <summary>
        /// Adds scene analysis to the video preview stream, registers for its event, enables it, and gets the effect instance
        /// </summary>
        /// <returns></returns>
        private async Task CreateSceneAnalysisEffectAsync()
        {
            // Create the definition, which will contain some initialization settings
            var definition = new SceneAnalysisEffectDefinition();

            // Add the effect to the video preview stream
            _sceneAnalysisEffect = (SceneAnalysisEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            Debug.WriteLine("SA effect added to pipeline");

            // Subscribe to notifications about scene information
            _sceneAnalysisEffect.SceneAnalyzed += SceneAnalysisEffect_SceneAnalyzed;

            // Enable HDR analysis
            _sceneAnalysisEffect.HighDynamicRangeAnalyzer.Enabled = true;
        }
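The SceneAnalysisEffect_SceneAnalyzed handler is not shown. A minimal sketch reading the HDR certainty from the analyzed frame:

        private void SceneAnalysisEffect_SceneAnalyzed(SceneAnalysisEffect sender, SceneAnalyzedEventArgs args)
        {
            // Certainty (0..1) that capturing an HDR bracket would improve this scene
            double certainty = args.ResultFrame.HighDynamicRange.Certainty;
            Debug.WriteLine($"HDR certainty: {certainty:P0}");
        }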
Example #21
        /// <summary>
        /// Asynchronously starts video mode.
        ///
        /// Activates the web camera with the various settings specified in CameraParameters.
        /// Only one VideoCapture instance can start the video mode at any given time.
        /// After starting the video mode, you listen for new video frame samples via the VideoCapture.FrameSampleAcquired event,
        /// or by calling VideoCapture.RequestNextFrameSample(), which will return the next available sample.
        /// While in video mode, more power is consumed, so make sure you call VideoCapture.StopVideoModeAsync as soon as you can afford the start/stop video mode overhead.
        /// </summary>
        /// <param name="setupParams">Parameters that change how video mode is used.</param>
        /// <param name="onVideoModeStartedCallback">This callback will be invoked once video mode has been activated.</param>
        public async void StartVideoModeAsync(CameraParameters setupParams, OnVideoModeStartedCallback onVideoModeStartedCallback)
        {
            var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id]; //Returns a MediaFrameSource

            if (mediaFrameSource == null)
            {
                onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
                return;
            }

            bool requires_change =
                mediaFrameSource.CurrentFormat.VideoFormat.Width != setupParams.cameraResolutionWidth ||
                mediaFrameSource.CurrentFormat.VideoFormat.Height != setupParams.cameraResolutionHeight ||
                (int)Math.Round(((double)mediaFrameSource.CurrentFormat.FrameRate.Numerator / mediaFrameSource.CurrentFormat.FrameRate.Denominator)) != setupParams.frameRate;

            if (requires_change)
            {
                await SetFrameType(mediaFrameSource, setupParams.cameraResolutionWidth, setupParams.cameraResolutionHeight, setupParams.frameRate);
            }

            //	gr: taken from here https://forums.hololens.com/discussion/2009/mixedrealitycapture
            IVideoEffectDefinition ved = new VideoMRCSettings(setupParams.enableHolograms, setupParams.enableVideoStabilization, setupParams.videoStabilizationBufferSize, setupParams.hologramOpacity, setupParams.recordingIndicatorVisible);
            await _mediaCapture.AddVideoEffectAsync(ved, STREAM_TYPE);

            if (!_sharedStream)
            {
                VideoEncodingProperties properties = GetVideoEncodingPropertiesForCameraParams(setupParams);

                // Historical context: https://github.com/VulcanTechnologies/HoloLensCameraStream/issues/6

                if (setupParams.rotateImage180Degrees)
                {
                    properties.Properties.Add(ROTATION_KEY, 180);
                }
                // We can't modify the stream properties if we are sharing the stream
                await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(STREAM_TYPE, properties);
            }

            var pixelFormat = ConvertCapturePixelFormatToMediaEncodingSubtype(setupParams.pixelFormat);

            _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);

            _frameReader.FrameArrived += HandleFrameArrived;
            await _frameReader.StartAsync();

            onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(0, ResultType.Success, true));
        }
Example #22
        public CSharpRecognizer(CaptureElement captureElement, TextBlock progressText)
        {
            mMediaCapture = new MediaCapture();

            var devices = DeviceInformation.FindAllAsync(DeviceClass.VideoCapture).AsTask();

            string deviceID;

            if (devices.Result.Count >= 2)
            {
                deviceID = devices.Result[1].Id;
            }
            else
            {
                deviceID = devices.Result[0].Id;
            }

            devices.Wait();

            var settings = new MediaCaptureInitializationSettings()
            {
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // VideoDeviceId = deviceID
            };

            try
            {
                mMediaCapture.InitializeAsync(settings).AsTask().Wait();
            }
            catch (Exception)
            {
                progressText.Text = "No camera is available.";
                return;
            }

            // This doesn't work on the ASUS T100TAM.
            // https://msdn.microsoft.com/en-us/library/windows.media.capture.mediacapture.setpreviewmirroring.aspx
            // Use 'FlowDirection' on the CaptureElement instead
            // mMediaCapture.SetPreviewMirroring(true);

            captureElement.Source = mMediaCapture;
            mMediaCapture.StartPreviewAsync().AsTask().Wait();

            mMediaCapture.AddVideoEffectAsync(
                new VideoEffectDefinition(typeof(AnalysisEffect).FullName, new PropertySet()),
                MediaStreamType.VideoPreview).AsTask().Wait();
        }
Example #23
        private async Task<FaceDetectionEffect> SetupFaceDetection()
        {
            var faceDetectionEffectDefinition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance,
            };
            var faceDetection = await defaultManager.AddVideoEffectAsync(faceDetectionEffectDefinition, MediaStreamType.VideoPreview) as FaceDetectionEffect;

            faceDetection.FaceDetected            += FaceDetection_FaceDetected;
            faceDetection.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            if (FaceDetection != null)
            {
                FaceDetection.Enabled = false;
            }
            return FaceDetection = faceDetection;
        }
Example #24
        public async Task InitAsync()
        {
            if (currentEffect != null)
            {
                currentEffect.Enabled       = false;
                currentEffect.FaceDetected -= FaceDetection_FaceDetected;
            }
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance,
            };

            currentEffect = await defaultManager.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview) as FaceDetectionEffect;

            currentEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(150);
            currentEffect.FaceDetected            += FaceDetection_FaceDetected;
        }
Example #25
        /// <summary>
        /// Adds video stabilization to the video record stream, registers for its event, enables it, and gets the effect instance
        /// </summary>
        /// <returns></returns>
        private async Task CreateVideoStabilizationEffectAsync()
        {
            // No work to be done if there already is an effect
            if (_videoStabilizationEffect != null)
            {
                return;
            }

            // Create the definition, which will contain some initialization settings
            var definition = new VideoStabilizationEffectDefinition();

            // Add the effect to the video record stream
            _videoStabilizationEffect = (VideoStabilizationEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoRecord);

            Debug.WriteLine("VS effect added to pipeline");

            // Subscribe to notifications for changes in the enabled state for the effect
            _videoStabilizationEffect.EnabledChanged += VideoStabilizationEffect_EnabledChanged;

            // Enable the effect
            _videoStabilizationEffect.Enabled = true;

#if (USE_VS_RECOMMENDATION)
            // Configure the pipeline to use the optimal settings for VS
            await SetUpVideoStabilizationRecommendationAsync();

            // At this point, one of two things has happened:
            //
            // a) If a more suitable capture resolution was available:
            //    1. Such resolution will have been set up for video capture ("input")
            //    2. The MediaEncodingProfile ("output") will have been changed to specify dimensions reflecting the amount of cropping
            //       done on said capture resolution (possibly even none if the new resolution offers enough padding)
            // b) If no better suited capture resolution was available:
            //    1. The video capture resolution will not have changed
            //    2. The MediaEncodingProfile will have been changed to specify smaller dimensions than the capture resolution
            //       so that the video isn't scaled back up to the capture resolution after cropping, which could cause a loss in quality
#else
            Debug.WriteLine("Not setting up VS recommendation");

            // Not setting up VS recommendation means that the video will be captured at the desired resolution,
            // then cropped by the VS effect as part of the stabilization process, and then scaled back up to the
            // original capture resolution
#endif
        }
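SetUpVideoStabilizationRecommendationAsync is not shown. A sketch of the documented recommendation pattern; _encodingProfile (a MediaEncodingProfile) is an assumption:

        private async Task SetUpVideoStabilizationRecommendationAsync()
        {
            // Ask the effect which input (capture) and output (encoding) properties it prefers
            var recommendation = _videoStabilizationEffect.GetRecommendedStreamConfiguration(
                _mediaCapture.VideoDeviceController, _encodingProfile.Video);

            if (recommendation.InputProperties != null)
            {
                // Capture at the resolution the effect wants to use as input
                await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(
                    MediaStreamType.VideoRecord, recommendation.InputProperties);
            }

            if (recommendation.OutputProperties != null)
            {
                // Encode at the (smaller) dimensions that account for the stabilization crop
                _encodingProfile.Video = recommendation.OutputProperties;
            }
        }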
Example #26
        private async Task SetEffectWorker(string effect)
        {
            await ResetEffectsAsync();

            blurAmountSlider.Visibility = Visibility.Collapsed;
            effectPropertySet           = new PropertySet();
            string typeName = null;

            switch (effect)
            {
            case displacementEffect:
                typeName = typeof(DisplacementEffect).FullName;
                break;

            case rotatingTilesEffect:
                typeName = typeof(RotatedTilesEffect).FullName;
                break;

            case gaussianBlurEffect:
                typeName = typeof(DynamicBlurVideoEffect).FullName;
                Debug.WriteLine("SliderValue: " + (float)blurAmountSlider.Value);
                effectPropertySet["ColorMatrix"] = new Matrix5x4()
                {
                    M11 = (float)blurAmountSlider.Value, M12 = 0, M13 = 0, M14 = 0,
                    M21 = 0, M22 = (float)blurAmountSlider.Value, M23 = 0, M24 = 0,
                    M31 = 0, M32 = 0, M33 = (float)blurAmountSlider.Value, M34 = 0,
                    M41 = 0, M42 = 0, M43 = 0, M44 = 1,
                    M51 = 0, M52 = 0, M53 = 0, M54 = 0
                };
                blurAmountSlider.Visibility = Visibility.Visible;
                break;
            }

            if (typeName == null)
            {
                return;
            }

            await mediaCapture.AddVideoEffectAsync(new VideoEffectDefinition(typeName, effectPropertySet),
                                                   MediaStreamType.VideoPreview);
        }
Example #27
        /// <summary>
        /// Record button tapped for either starting or stopping video recording
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void RecordButton_Tapped(object sender, TappedRoutedEventArgs e)
        {
            if (_isRecording)
            {
                RecordSymbol.Symbol = Symbol.Target;
                await StopVideoRecordingAsync();

                await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoRecord);

                if (_sphericalProjectionEffect != null)
                {
                    if (_sphericalProjectionEffect.SphericalProjection.IsEnabled)
                    {
                        _mediaPlayerProjection.IsEnabled   = (_mediaPlayerProjection.FrameFormat == SphericalVideoFrameFormat.Equirectangular) || (ToggleForceSpherical.IsChecked == true);
                        _mediaPlayerProjection.FrameFormat = _sphericalProjectionEffect.SphericalProjection.FrameFormat;
                        _mediaPlayerProjection.HorizontalFieldOfViewInDegrees = _sphericalProjectionEffect.SphericalProjection.HorizontalFieldOfViewInDegrees;
                        _mediaPlayerProjection.ProjectionMode  = _sphericalProjectionEffect.SphericalProjection.ProjectionMode;
                        _mediaPlayerProjection.ViewOrientation = _sphericalProjectionEffect.SphericalProjection.ViewOrientation;
                    }
                    _sphericalProjectionEffect = null;
                }
            }
            else
            {
                RecordSymbol.Symbol = Symbol.Stop;
                if (ToggleRecordProjection.IsChecked == true)
                {
                    _sphericalProjectionEffect = new Windows.Media.Effects.VideoTransformEffectDefinition();
                    _sphericalProjectionEffect.SphericalProjection.IsEnabled   = _mediaPlayerProjection.IsEnabled;
                    _sphericalProjectionEffect.SphericalProjection.FrameFormat = _mediaPlayerProjection.FrameFormat;
                    _sphericalProjectionEffect.SphericalProjection.HorizontalFieldOfViewInDegrees = _mediaPlayerProjection.HorizontalFieldOfViewInDegrees;
                    _sphericalProjectionEffect.SphericalProjection.ProjectionMode  = _mediaPlayerProjection.ProjectionMode;
                    _sphericalProjectionEffect.SphericalProjection.ViewOrientation = _mediaPlayerProjection.ViewOrientation;
                    _mediaPlayerProjection.IsEnabled = false;
                    await _mediaCapture.AddVideoEffectAsync(_sphericalProjectionEffect, MediaStreamType.VideoRecord);
                }
                await StartVideoRecordingAsync();
            }
            _isRecording = !_isRecording;
            await EnableDisableCameraControlsOnUI(!_isRecording);
        }
Example #28
        private async Task InitMediaDevice()
        {
            try
            {
                if (!FaceDetector.IsSupported)
                {
                    return;
                }

                var cameraDevice = await CameraActions.FindCameraDeviceByPanelAsync(Panel.Front);

                var settings = new MediaCaptureInitializationSettings {
                    VideoDeviceId = cameraDevice.Id
                };
                _mediaCapture = new MediaCapture();
                await _mediaCapture.InitializeAsync(settings);

                var definition = new FaceDetectionEffectDefinition
                {
                    SynchronousDetectionEnabled = false,
                    DetectionMode = FaceDetectionMode.HighPerformance
                };

                var faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

                faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
                faceDetectionEffect.Enabled       = true;
                faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

                CameraViewer.Source = _mediaCapture;
                await _mediaCapture.StartPreviewAsync();

                _previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
            }
            catch (Exception exception)
            {
                Messages = exception.ToString();
            }
        }
Example #29
        private async Task SetEffectWorker(string effect)
        {
            await ResetEffectsAsync();

            string typeName = null;

            switch (effect)
            {
            case displacementEffect: typeName = typeof(DisplacementEffect).FullName; break;

            case rotatingTilesEffect: typeName = typeof(RotatedTilesEffect).FullName; break;
            }

            if (typeName == null)
            {
                return;
            }

            await mediaCapture.AddVideoEffectAsync(
                new VideoEffectDefinition(typeName, new PropertySet()),
                MediaStreamType.VideoPreview);
        }
Example #30
        private async Task StartPreviewAsync()
        {
            try
            {
                mediaCapture = new MediaCapture();
                var videoEffectDefinition = new VideoEffectDefinition("segm_video_effect_uwp.SegmVideoEffect");

                await mediaCapture.InitializeAsync();

                videoEffect =
                    await mediaCapture.AddVideoEffectAsync(videoEffectDefinition, MediaStreamType.VideoPreview);

                displayRequest.RequestActive();
                DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;
            }
            catch (UnauthorizedAccessException)
            {
                // This will be thrown if the user denied access to the camera in privacy settings
                ShowMessageToUser("The app was denied access to the camera");
                return;
            }

            try
            {
                PreviewControl.Source = mediaCapture;
                await mediaCapture.StartPreviewAsync();

                isPreviewing = true;
            }
            catch (System.IO.FileLoadException)
            {
                mediaCapture.CaptureDeviceExclusiveControlStatusChanged += _mediaCapture_CaptureDeviceExclusiveControlStatusChanged;
                return;
            }
            catch (System.Exception)
            {
                return;
            }
        }
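The exclusive-control handler registered above is not shown. A sketch following the documented pattern for CaptureDeviceExclusiveControlStatusChanged, reusing names from the snippet:

        private async void _mediaCapture_CaptureDeviceExclusiveControlStatusChanged(
            MediaCapture sender, MediaCaptureDeviceExclusiveControlStatusChangedEventArgs args)
        {
            if (args.Status == MediaCaptureDeviceExclusiveControlStatus.SharedReadOnlyAvailable)
            {
                ShowMessageToUser("The camera preview can't be displayed because another app has exclusive access");
            }
            else if (args.Status == MediaCaptureDeviceExclusiveControlStatus.ExclusiveControlAvailable && !isPreviewing)
            {
                // Another app released the camera; try to start the preview again
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                {
                    await StartPreviewAsync();
                });
            }
        }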