/// <summary>
/// 'Initialize Audio and Video' button action function.
/// Dispose existing MediaCapture object and set it up for audio and video.
/// Enable or disable appropriate buttons
/// - DISABLE 'Initialize Audio and Video'
/// - DISABLE 'Start Audio Record'
/// - ENABLE 'Initialize Audio Only'
/// - ENABLE 'Start Video Record'
/// - ENABLE 'Take Photo'
/// </summary>
/// <param name="sender">The button that raised the click event (unused).</param>
/// <param name="e">Routed event data (unused).</param>
private async void initVideo_Click(object sender, RoutedEventArgs e)
{
    // Disable all buttons until initialization completes
    SetInitButtonVisibility(Action.DISABLE);
    SetVideoButtonVisibility(Action.DISABLE);
    SetAudioButtonVisibility(Action.DISABLE);

    try
    {
        if (mediaCapture != null)
        {
            // Cleanup MediaCapture object: preview and recording must be stopped
            // before Dispose(), otherwise the device handle is released mid-stream.
            if (isPreviewing)
            {
                await mediaCapture.StopPreviewAsync();
                captureImage.Source = null;
                playbackElement.Source = null;
                isPreviewing = false;
            }
            if (isRecording)
            {
                await mediaCapture.StopRecordAsync();
                isRecording = false;
                // Reset both record buttons to their idle captions
                recordVideo.Content = "Start Video Record";
                recordAudio.Content = "Start Audio Record";
            }
            mediaCapture.Dispose();
            mediaCapture = null;
        }

        status.Text = "Initializing camera to capture audio and video...";
        // Use default initialization (first available audio + video device)
        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync();

        // Set callbacks for failure and recording limit exceeded
        status.Text = "Device successfully initialized for video recording!";
        mediaCapture.Failed += new MediaCaptureFailedEventHandler(mediaCapture_Failed);
        mediaCapture.RecordLimitationExceeded += new Windows.Media.Capture.RecordLimitationExceededEventHandler(mediaCapture_RecordLimitExceeded);

        // Start Preview
        previewElement.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
        isPreviewing = true;
        status.Text = "Camera preview succeeded";

        // Enable buttons for video and photo capture
        SetVideoButtonVisibility(Action.ENABLE);
        // Enable Audio Only Init button, leave the video init button disabled
        audio_init.IsEnabled = true;
    }
    catch (Exception ex)
    {
        // Surface the failure in the status bar; buttons stay disabled so the
        // user must re-attempt initialization.
        status.Text = "Unable to initialize camera for audio/video mode: " + ex.Message;
    }
}
/// <summary>
/// Initializes the MediaCapture, registers events, gets camera device information for mirroring and rotating, starts preview and unlocks the UI
/// </summary>
/// <returns>A task that completes once initialization (and, on success, the preview) has finished.</returns>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    // Already initialized — nothing to do.
    if (_mediaCapture != null)
    {
        return;
    }

    // Attempt to get the front camera if one is available, but use any camera device if not
    var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Front);
    if (cameraDevice == null)
    {
        Debug.WriteLine("No camera device found!");
        return;
    }

    // Create MediaCapture and register for a notification when video recording
    // has reached the maximum time and when something goes wrong.
    _mediaCapture = new MediaCapture();
    _mediaCapture.RecordLimitationExceeded += MediaCapture_RecordLimitationExceeded;
    _mediaCapture.Failed += MediaCapture_Failed;

    try
    {
        await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id });
        _isInitialized = true;
    }
    catch (UnauthorizedAccessException)
    {
        Debug.WriteLine("The app was denied access to the camera");
    }

    // If initialization failed, leave the UI locked.
    if (!_isInitialized)
    {
        return;
    }

    // Figure out where the camera is located.
    var location = cameraDevice.EnclosureLocation;
    if (location == null || location.Panel == Windows.Devices.Enumeration.Panel.Unknown)
    {
        // No information on the location of the camera, assume it's an external camera, not integrated on the device
        _externalCamera = true;
    }
    else
    {
        // Camera is fixed on the device; only mirror the preview if it is on the front panel.
        _externalCamera = false;
        _mirroringPreview = location.Panel == Windows.Devices.Enumeration.Panel.Front;
    }

    await StartPreviewAsync();
    UpdateCaptureControls();
}
/// <summary>
/// Handler for MediaCapture.RecordLimitationExceeded: stops the in-progress
/// recording when the platform's maximum record duration is reached.
/// </summary>
/// <param name="sender">The MediaCapture instance that raised the event (unused).</param>
private async void MediaCapture_RecordLimitationExceeded(MediaCapture sender) { await StopRecordingAsync(); }
/// <summary>
/// Background-task entry point: loads JSON configuration (creating it from the packaged
/// template on first run), configures the Azure Computer Vision client, the camera and the
/// GPIO pins, then takes a deferral so the task keeps running.
/// Returns early (without a deferral) on any configuration failure, which ends the task.
/// </summary>
/// <param name="taskInstance">The background task instance supplied by the OS.</param>
public void Run(IBackgroundTaskInstance taskInstance)
{
    StorageFolder localFolder = ApplicationData.Current.LocalFolder;

    this.logging.LogEvent("Application starting");

    // Log the Application build, OS version information etc.
    LoggingFields startupInformation = new LoggingFields();
    startupInformation.AddString("Timezone", TimeZoneSettings.CurrentTimeZoneDisplayName);
    startupInformation.AddString("OSVersion", Environment.OSVersion.VersionString);
    startupInformation.AddString("MachineName", Environment.MachineName);

    // This is from the application manifest
    Package package = Package.Current;
    PackageId packageId = package.Id;
    PackageVersion version = packageId.Version;
    // FIX: string.Format around an already-interpolated string was redundant.
    startupInformation.AddString("ApplicationVersion", $"{version.Major}.{version.Minor}.{version.Build}.{version.Revision}");

    try
    {
        // see if the configuration file is present if not copy minimal sample one from application directory
        if (localFolder.TryGetItemAsync(ConfigurationFilename).AsTask().Result == null)
        {
            StorageFile templateConfigurationfile = Package.Current.InstalledLocation.GetFileAsync(ConfigurationFilename).AsTask().Result;
            // FIX: the copy was fire-and-forget, so the ConfigurationBuilder below could
            // try to read the file before it existed. Block until the copy completes.
            templateConfigurationfile.CopyAsync(localFolder, ConfigurationFilename).AsTask().Wait();
        }

        IConfiguration configuration = new ConfigurationBuilder().AddJsonFile(Path.Combine(localFolder.Path, ConfigurationFilename), false, true).Build();

        this.azureCognitiveServicesEndpoint = configuration.GetSection("AzureCognitiveServicesEndpoint").Value;
        startupInformation.AddString("AzureCognitiveServicesEndpoint", this.azureCognitiveServicesEndpoint);

        this.azureCognitiveServicesSubscriptionKey = configuration.GetSection("AzureCognitiveServicesSubscriptionKey").Value;
        startupInformation.AddString("AzureCognitiveServicesSubscriptionKey", this.azureCognitiveServicesSubscriptionKey);

        foreach (string categoryName in configuration.GetSection("ComputerVisionCategoryNames").Value.Split(','))
        {
            this.categoryList.Add(new Category(name: categoryName));
        }

        this.interruptPinNumber = int.Parse(configuration.GetSection("InterruptPinNumber").Value);
        startupInformation.AddInt32("Interrupt pin", this.interruptPinNumber);

        this.interruptTriggerOn = (GpioPinEdge)Enum.Parse(typeof(GpioPinEdge), configuration.GetSection("interruptTriggerOn").Value);
        startupInformation.AddString("Interrupt Trigger on", this.interruptTriggerOn.ToString());

        this.displayPinNumber = int.Parse(configuration.GetSection("DisplayPinNumber").Value);
        // FIX: was logging interruptPinNumber under the "Display pin" key.
        startupInformation.AddInt32("Display pin", this.displayPinNumber);

        this.debounceTimeout = TimeSpan.Parse(configuration.GetSection("debounceTimeout").Value);
        startupInformation.AddTimeSpan("Debounce timeout", this.debounceTimeout);
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("JSON configuration file load or settings retrieval failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    try
    {
        this.computerVisionClient = new ComputerVisionClient(
            new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(this.azureCognitiveServicesSubscriptionKey),
            new System.Net.Http.DelegatingHandler[] { })
        {
            Endpoint = this.azureCognitiveServicesEndpoint,
        };
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("Azure Cognitive Services Computer Vision client configuration failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    try
    {
        // Synchronous wait is acceptable here: Run() is not async and the task
        // cannot proceed without a working camera.
        this.mediaCapture = new MediaCapture();
        this.mediaCapture.InitializeAsync().AsTask().Wait();
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("Camera configuration failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    // Timer starts disabled; it is armed when the display needs to be turned off.
    this.displayOffTimer = new Timer(this.TimerCallback, null, Timeout.Infinite, Timeout.Infinite);

    try
    {
        GpioController gpioController = GpioController.GetDefault();

        this.interruptGpioPin = gpioController.OpenPin(this.interruptPinNumber);
        this.interruptGpioPin.SetDriveMode(GpioPinDriveMode.InputPullUp);
        this.interruptGpioPin.ValueChanged += this.InterruptGpioPin_ValueChanged;

        this.displayGpioPin = gpioController.OpenPin(this.displayPinNumber);
        this.displayGpioPin.SetDriveMode(GpioPinDriveMode.Output);
        this.displayGpioPin.Write(GpioPinValue.Low);
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("Digital input configuration failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    this.logging.LogEvent("Application started", startupInformation);

    // enable task to continue running in background
    this.backgroundTaskDeferral = taskInstance.GetDeferral();
}
/// <summary>
/// Background-task entry point: loads JSON configuration (creating it from the packaged
/// template and exiting on first run), initializes the camera and the interrupt GPIO pin,
/// then takes a deferral so the task keeps running.
/// Returns early (without a deferral) on any configuration failure, which ends the task.
/// </summary>
/// <param name="taskInstance">The background task instance supplied by the OS.</param>
public void Run(IBackgroundTaskInstance taskInstance)
{
    StorageFolder localFolder = ApplicationData.Current.LocalFolder;

    this.logging.LogEvent("Application starting");

    // Log the Application build, OS version information etc.
    LoggingFields startupInformation = new LoggingFields();
    startupInformation.AddString("Timezone", TimeZoneSettings.CurrentTimeZoneDisplayName);
    startupInformation.AddString("OSVersion", Environment.OSVersion.VersionString);
    startupInformation.AddString("MachineName", Environment.MachineName);

    // This is from the application manifest
    Package package = Package.Current;
    PackageId packageId = package.Id;
    PackageVersion version = packageId.Version;
    // FIX: string.Format around an already-interpolated string was redundant.
    startupInformation.AddString("ApplicationVersion", $"{version.Major}.{version.Minor}.{version.Build}.{version.Revision}");

    try
    {
        // see if the configuration file is present if not copy minimal sample one from application directory
        if (localFolder.TryGetItemAsync(ConfigurationFilename).AsTask().Result == null)
        {
            StorageFile templateConfigurationfile = Package.Current.InstalledLocation.GetFileAsync(ConfigurationFilename).AsTask().Result;
            // FIX: the copy was fire-and-forget; since we return immediately afterwards
            // (no deferral taken) the task could end before the template was written.
            templateConfigurationfile.CopyAsync(localFolder, ConfigurationFilename).AsTask().Wait();
            this.logging.LogMessage("JSON configuration file missing, templated created", LoggingLevel.Warning);
            return;
        }

        IConfiguration configuration = new ConfigurationBuilder().AddJsonFile(Path.Combine(localFolder.Path, ConfigurationFilename), false, true).Build();

        this.localStorageimageFilenameLatestFormat = configuration.GetSection("LocalImageFilenameFormatLatest").Value;
        startupInformation.AddString("ImageFilenameLatestFormat", this.localStorageimageFilenameLatestFormat);

        this.localStorageImageFilenameHistoryFormat = configuration.GetSection("LocalImageFilenameFormatHistoric").Value;
        // FIX: was logged under the duplicate key "ImageFilenameLatestFormat".
        startupInformation.AddString("ImageFilenameHistoryFormat", this.localStorageImageFilenameHistoryFormat);

        this.interruptPinNumber = int.Parse(configuration.GetSection("InterruptPinNumber").Value);
        startupInformation.AddInt32("Interrupt pin", this.interruptPinNumber);

        this.interruptTriggerOn = (GpioPinEdge)Enum.Parse(typeof(GpioPinEdge), configuration.GetSection("interruptTriggerOn").Value);
        startupInformation.AddString("Interrupt Trigger on", this.interruptTriggerOn.ToString());

        this.debounceTimeout = TimeSpan.Parse(configuration.GetSection("debounceTimeout").Value);
        startupInformation.AddTimeSpan("Debounce timeout", this.debounceTimeout);
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("JSON configuration file load or settings retrieval failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    try
    {
        // Synchronous wait is acceptable here: Run() is not async and the task
        // cannot proceed without a working camera.
        this.mediaCapture = new MediaCapture();
        this.mediaCapture.InitializeAsync().AsTask().Wait();
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("Camera configuration failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    try
    {
        GpioController gpioController = GpioController.GetDefault();

        this.interruptGpioPin = gpioController.OpenPin(this.interruptPinNumber);
        this.interruptGpioPin.SetDriveMode(GpioPinDriveMode.InputPullUp);
        this.interruptGpioPin.ValueChanged += this.InterruptGpioPin_ValueChanged;
    }
    catch (Exception ex)
    {
        this.logging.LogMessage("Digital input configuration failed " + ex.Message, LoggingLevel.Error);
        return;
    }

    this.logging.LogEvent("Application started", startupInformation);

    // enable task to continue running in background
    this.backgroundTaskDeferral = taskInstance.GetDeferral();
}
/// <summary>
/// MediaCapture.Failed handler: logs the failure code and message, then tears
/// the capture pipeline down via CleanupCameraAsync.
/// </summary>
/// <param name="sender">The MediaCapture instance that failed (unused).</param>
/// <param name="errorEventArgs">Failure code and message.</param>
private async void MediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs)
{
    // Lazy-evaluated log message; interpolation produces the same text as the
    // original string.Format("MediaCapture_Failed: (0x{0:X}) {1}", ...).
    DebugUtil.Log(() => $"MediaCapture_Failed: (0x{errorEventArgs.Code:X}) {errorEventArgs.Message}");
    await CleanupCameraAsync();
}
/// <summary>
/// MediaCapture.Failed handler: writes the failure message to the debug output.
/// </summary>
/// <param name="sender">The MediaCapture instance that failed (unused).</param>
/// <param name="errorEventArgs">Failure details; only the message is logged.</param>
private void _mediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs)
{
    Debug.WriteLine($"_mediaCapture FAIL! {errorEventArgs.Message}");
}
/// <summary>
/// Initializes the MediaCapture, registers events, gets camera device information for mirroring and rotating, and starts preview
/// </summary>
/// <returns>A task that completes once initialization (and, on success, the preview) has finished.</returns>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    // Already initialized — nothing to do.
    if (_mediaCapture != null)
    {
        return;
    }

    // Attempt to get the back camera if one is available, but use any camera device if not
    var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
    if (cameraDevice == null)
    {
        Debug.WriteLine("No camera device found!");
        return;
    }

    // Create MediaCapture and register for a notification when something goes wrong.
    _mediaCapture = new MediaCapture();
    _mediaCapture.Failed += MediaCapture_Failed;

    try
    {
        await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id });
        _isInitialized = true;
    }
    catch (UnauthorizedAccessException)
    {
        Debug.WriteLine("The app was denied access to the camera");
    }

    // Initialization failed — leave the page without a preview.
    if (!_isInitialized)
    {
        return;
    }

    // Figure out where the camera is located.
    var location = cameraDevice.EnclosureLocation;
    if (location == null || location.Panel == Windows.Devices.Enumeration.Panel.Unknown)
    {
        // No information on the location of the camera, assume it's an external camera, not integrated on the device
        _externalCamera = true;
    }
    else
    {
        // Camera is fixed on the device; only mirror the preview if it is on the front panel.
        _externalCamera = false;
        _mirroringPreview = location.Panel == Windows.Devices.Enumeration.Panel.Front;
    }

    await StartPreviewAsync();

    // Capture to the Pictures library when available, otherwise fall back to
    // the local app storage folder.
    var picturesLibrary = await StorageLibrary.GetLibraryAsync(KnownLibraryId.Pictures);
    _captureFolder = picturesLibrary.SaveFolder ?? ApplicationData.Current.LocalFolder;
}
/// <summary>
/// Initializes the video device: finds the best camera, initializes MediaCapture,
/// wires a failure handler that resets the UI, and starts the preview.
/// Updates <c>currentState</c> and the busy indicator throughout; shows dialogs on failure.
/// </summary>
private async Task InitializeVideoAsync()
{
    // Hide the retry button while (re)initializing.
    ReloadVideoStreamButton.Visibility = Visibility.Collapsed;
    ShowBusyIndicator("Initializing...");

    try
    {
        currentState = RecordingState.NotInitialized;
        PreviewMediaElement.Source = null;

        ShowBusyIndicator("starting video device...");
        mediaCapture = new MediaCapture();
        // Shared so other pages/components can reuse the same capture object.
        App.MediaCaptureManager = mediaCapture;

        selectedCamera = await FindBestCameraAsync();
        if (selectedCamera == null)
        {
            await new MessageDialog("There are no cameras connected, please connect a camera and try again.").ShowAsync();
            await DisposeMediaCaptureAsync();
            HideBusyIndicator();
            return;
        }

        await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = selectedCamera.Id });

        // Both a video and an audio device must have been resolved for a healthy capture object.
        if (mediaCapture.MediaCaptureSettings.VideoDeviceId != "" && mediaCapture.MediaCaptureSettings.AudioDeviceId != "")
        {
            ShowBusyIndicator("camera initialized..");

            // On capture failure: inform the user, dispose the pipeline and show the retry button.
            // Must hop to the dispatcher thread because the event can fire on a worker thread.
            mediaCapture.Failed += async(currentCaptureObject, currentFailure) =>
            {
                await TaskUtilities.RunOnDispatcherThreadAsync(async() =>
                {
                    await new MessageDialog(currentFailure.Message, "MediaCaptureFailed Fired").ShowAsync();
                    await DisposeMediaCaptureAsync();
                    ReloadVideoStreamButton.Visibility = Visibility.Visible;
                });
            };
        }
        else
        {
            ShowBusyIndicator("camera error!");
        }

        //------starting preview----------//
        ShowBusyIndicator("starting preview...");
        PreviewMediaElement.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
        currentState = RecordingState.Previewing;
    }
    catch (UnauthorizedAccessException ex)
    {
        // Camera/microphone privacy settings deny access — tell the user how to fix it.
        Debug.WriteLine($"InitializeVideo UnauthorizedAccessException\r\n {ex}");
        ShowBusyIndicator("Unauthorized Access Error");
        await new MessageDialog("-----Unauthorized Access Error!-----\r\n\n" +
            "This can happen for a couple reasons:\r\n" +
            "-You have disabled Camera access to the app\r\n" +
            "-You have disabled Microphone access to the app\r\n\n" +
            "To fix this, go to Settings > Privacy > Camera (or Microphone) and reenable it.").ShowAsync();
        await DisposeMediaCaptureAsync();
    }
    catch (Exception ex)
    {
        ShowBusyIndicator("Initialize Video Error");
        await new MessageDialog("InitializeVideoAsync() Exception\r\n\nError Message: " + ex.Message).ShowAsync();
        currentState = RecordingState.NotInitialized;
        PreviewMediaElement.Source = null;
    }
    finally
    {
        // Always clear the busy indicator, success or failure.
        HideBusyIndicator();
    }
}
/// <summary>
/// This method takes a picture.
/// Right now the parameter is not evaluated.
/// Shows a camera preview, waits until the user triggers capture (signalled via
/// IsStopped()), then tears the preview down and restores the original page content.
/// </summary>
/// <param name="mode">Photo or video mode; selects the toolbar icon shown.</param>
/// <param name="options">Capture options; DefaultCamera selects front/rear device.</param>
/// <returns>The captured file (the <c>file</c> field set elsewhere while awaiting).</returns>
public async Task <StorageFile> CaptureFileAsync(CameraCaptureUIMode mode, StoreCameraMediaOptions options)
{
    // Task that completes when the user has taken the picture; awaited below.
    var t = IsStopped();

    Mode = mode;
    if (Mode == CameraCaptureUIMode.Photo)
    {
        camerButton.Icon = new SymbolIcon(Symbol.Camera);
    }
    else if (Mode == CameraCaptureUIMode.Video)
    {
        camerButton.Icon = new SymbolIcon(Symbol.Video);
    }
    Options = options;

    // Create new MediaCapture
    MyMediaCapture = new MediaCapture();
    var videoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var backCamera = videoDevices.FirstOrDefault(
        item => item.EnclosureLocation != null && item.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back);
    var frontCamera = videoDevices.FirstOrDefault(
        item => item.EnclosureLocation != null && item.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);

    // Pick the requested camera if present; otherwise the settings keep the
    // system default video device (VideoDeviceId left unset).
    var captureSettings = new MediaCaptureInitializationSettings();
    if (options.DefaultCamera == CameraDevice.Front && frontCamera != null)
    {
        captureSettings.VideoDeviceId = frontCamera.Id;
    }
    else if (options.DefaultCamera == CameraDevice.Rear && backCamera != null)
    {
        captureSettings.VideoDeviceId = backCamera.Id;
    }
    await MyMediaCapture.InitializeAsync(captureSettings);

    // Track device orientation; the explicit call applies the current orientation immediately.
    displayInfo.OrientationChanged += DisplayInfo_OrientationChanged;
    DisplayInfo_OrientationChanged(displayInfo, null);

    // Assign to Xaml CaptureElement.Source and start preview
    myCaptureElement.Source = MyMediaCapture;

    // show preview
    await MyMediaCapture.StartPreviewAsync();

    // now wait until stopflag shows that someone took a picture
    await t;

    // picture has been taken
    // stop preview
    await CleanUpAsync();

    // go back
    CurrentWindow.Content = originalFrame;
    mainGrid.Children.Remove(this);

    // NOTE(review): `file` is presumably assigned by the capture-button handler
    // while `t` is awaited — confirm against the rest of the class.
    return(file);
}
/// <summary>
/// Invoked when this page is about to be displayed in a Frame.
/// Starts the camera preview and repeatedly captures photos, decoding each for a QR
/// code until one is found (or, in search mode, until the reference box's code matches).
/// </summary>
/// <param name="e">Event data that describes how this page was reached. The Parameter
/// property is typically used to configure the page; when it carries a Box, the page
/// runs in search mode comparing scanned codes against that box.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    DeviceInformationCollection oCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    switch (oCameras.Count)
    {
        case 0:
            throw new Exception("No cameras found");

        case 1:
            //Only front camera is available
            // NOTE(review): indexing the device collection with a CameraLocation enum value
            // assumes the enumeration order matches the enum — confirm; with a single
            // device only index 0 is valid.
            Camera = oCameras[(int)CameraLocation.Front];
            break;

        default:
            //By default, we want the back camera
            Camera = oCameras[(int)CameraLocation.Back];
            break;
    }

    MediaCaptureInitializationSettings oCameraSettings = new MediaCaptureInitializationSettings();
    oCameraSettings.VideoDeviceId = Camera.Id;
    MediaCapture oCamera = new MediaCapture();
    await oCamera.InitializeAsync(oCameraSettings);

    // resolution variables
    //int iMaxResolution = 0;
    //int iHeight = 0;
    //int iWidth = 0;
    //int iSelectedIndex = 0;
    //IReadOnlyList<IMediaEncodingProperties> oAvailableResolutions = oCamera.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo);
    //// if no settings available, bail
    //if (oAvailableResolutions.Count < 1) return;
    //// list the different format settings
    //for (int i = 0; i < oAvailableResolutions.Count; i++)
    //{
    //    VideoEncodingProperties oProperties = (VideoEncodingProperties)oAvailableResolutions[i];
    //    if (oProperties.Width * oProperties.Height > iMaxResolution)
    //    {
    //        iHeight = (int)oProperties.Height;
    //        iWidth = (int)oProperties.Width;
    //        iMaxResolution = (int)oProperties.Width;
    //        iSelectedIndex = i;
    //    }
    //}
    //// set resolution
    //await oCamera.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, oAvailableResolutions[iSelectedIndex]);

    // begin video preview
    oMediaCapture.Source = oCamera;
    await oMediaCapture.Source.StartPreviewAsync();
    //DrawingCanvas.RenderTransform = oMediaCapture.RenderTransform;
    //DrawingCanvas.RenderTransformOrigin = oMediaCapture.RenderTransformOrigin;

    Result oQR = null;
    m_bTakePictures = true;
    Box oReferenceBox = e.Parameter as Box;
    bool bSearchMode = oReferenceBox != null;

    // Capture-and-decode loop: runs until a QR code is found, or — in search mode —
    // until the decoded code matches the reference box. m_bTakePictures acts as the
    // external cancellation flag.
    while (m_bTakePictures && (oQR == null || bSearchMode))
    {
        InMemoryRandomAccessStream oPhotoStream = new InMemoryRandomAccessStream();
        await oMediaCapture.Source.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), oPhotoStream);

        // First decode pass only establishes the bitmap's pixel dimensions; the
        // bitmap is then re-created at full size and the stream decoded again.
        WriteableBitmap oBitmap = new WriteableBitmap(1, 1);
        oPhotoStream.Seek(0);
        oBitmap.SetSource(oPhotoStream);
        oBitmap = new WriteableBitmap(oBitmap.PixelWidth, oBitmap.PixelHeight);
        oPhotoStream.Seek(0);
        oBitmap.SetSource(oPhotoStream);

        BarcodeReader oReader = new BarcodeReader();
        oReader.Options.TryHarder = true;
        oReader.AutoRotate = true;
        oQR = oReader.Decode(oBitmap);

        if (bSearchMode && (oQR != null))
        {
            // Stay in search mode until the scanned code's hash matches the reference box.
            bSearchMode = oQR.ToString().GetHashCode().ToString() != oReferenceBox.QRCode.QRCode;
            m_bBoxMatchingMode = true;
        }
        else
        {
            m_bBoxMatchingMode = false;
        }
    }

    if (oQR != null)
    {
        // show message
        Message = String.Empty;
        QRCodeImage.Visibility = Visibility.Visible;
        //Message = string.Format("Found QR code!", oQR.ToString());

        // highlight qr area
        //RectangleGeometry oGeometry = new RectangleGeometry();
        //List<ResultPoint> oPoints = new List<ResultPoint>(oQR.ResultPoints);
        //// generate polygon
        //Polygon oSegment = new Polygon();
        //oSegment.Stroke = new SolidColorBrush(Color.FromArgb(125, 255, 0, 0));
        //oSegment.Fill = new SolidColorBrush(Color.FromArgb(125, 255, 0, 0));
        //PointCollection oNewPoints = new PointCollection();
        //foreach (ResultPoint oPoint in oPoints)
        //{
        //    oNewPoints.Add(DrawingCanvas.RenderTransform.TransformPoint(new Point(oPoint.X, oPoint.Y)));
        //}
        //oSegment.Points = oNewPoints;
        //// add polygon to canvas
        //DrawingCanvas.Children.Add(oSegment);

        // find box
        await MoveList.CurrentMove.FindBox(new QRCodeWrapper(oQR.ToString()));

        // start transition timer
        DispatcherTimer oTransitionTimer = new DispatcherTimer();
        oTransitionTimer.Interval = new TimeSpan(0, 0, TRANSITION_TIMER_INTERVAL);
        oTransitionTimer.Tick += oTransitionTimer_Tick;
        oTransitionTimer.Start();
    }
}
/// <summary>
/// Opens the requested camera, selects a color video-record source, picks the highest
/// resolution satisfying MinimumVideoWidth, and starts a frame reader delivering ARGB32
/// frames (each arrival sets <c>evtFrame</c>).
/// </summary>
/// <param name="Name">Device filter: a substring of the device display name, "*" for the
/// first device found, or a numeric index into the discovered device list.</param>
/// <param name="UseGpu">When true, lets the pipeline choose memory automatically (GPU allowed);
/// otherwise forces CPU memory.</param>
/// <exception cref="ApplicationException">No matching device/source/format, or initialization failed.</exception>
public async Task StartAsync(string Name, bool UseGpu = false)
{
    var frameSourceGroups = await AsAsync(MediaFrameSourceGroup.FindAllAsync());
    Log.WriteLine($"Found the following devices: {(frameSourceGroups.Any() ? string.Join(", ", frameSourceGroups.Select(x => x.DisplayName)) : "no imaging devices found")}");

    // Only select colour cameras to filter out IR ones.
    // FIX: use ?. so a group with an empty SourceInfos list no longer throws
    // NullReferenceException inside the predicate.
    var selectedGroup = frameSourceGroups
        .Where(x => x.DisplayName.Contains(Name) && x.SourceInfos.FirstOrDefault()?.SourceKind == MediaFrameSourceKind.Color)
        .OrderBy(x => x.DisplayName)
        .FirstOrDefault();

    if (selectedGroup == null)
    {
        if (Name == "*")
        {
            // Wildcard: take the first camera found.
            selectedGroup = frameSourceGroups.FirstOrDefault();
        }
        // FIX: int.TryParse accepts multi-digit indices (the old single-character
        // membership test silently rejected "10", "11", ...) and avoids
        // exception-based control flow.
        else if (int.TryParse(Name, out int cameraIndex) && cameraIndex >= 0 && cameraIndex < frameSourceGroups.Count)
        {
            selectedGroup = frameSourceGroups[cameraIndex];
        }
    }
    if (selectedGroup == null)
    {
        throw new ApplicationException($"Unable to find frame source from parameter '{Name}'");
    }
    // Logging is best-effort; never let it abort startup.
    try
    {
        Log.WriteLine($"Selected device named '{selectedGroup.DisplayName}', based on '{Name}' filter");
    }
    catch (Exception)
    {
    }

    var colorSourceInfo = selectedGroup.SourceInfos
        .Where(x => x.MediaStreamType == MediaStreamType.VideoRecord && x.SourceKind == MediaFrameSourceKind.Color)
        .FirstOrDefault();
    if (null == colorSourceInfo)
    {
        throw new ApplicationException($"Unable to find color video recording source on '{selectedGroup.DisplayName}' device");
    }

    // NOTE: removed the dead null-check that followed this — `new` never returns null in C#.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = UseGpu ? MediaCaptureMemoryPreference.Auto : MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        Log.WriteLine($"Before async call to initialise {nameof(mediaCapture)} object...");
        await AsAsync(mediaCapture.InitializeAsync(settings));
        Log.WriteLine($"After async call to initialise {nameof(mediaCapture)} object...");
    }
    catch (Exception ex)
    {
        throw new ApplicationException("MediaCapture initialization failed: " + ex.Message, ex);
    }

    // Diagnostic dump of the discovered frame sources.
    // FIX: messages previously read "... is is null" (duplicated verb).
    Console.WriteLine($"{nameof(colorSourceInfo)} is {(colorSourceInfo == null ? "null" : "not null")}");
    Console.WriteLine($"colorSourceInfo.Id ({nameof(colorSourceInfo.Id)}) is {colorSourceInfo.Id}");
    Console.WriteLine($"{nameof(mediaCapture)} is {(mediaCapture == null ? "null" : "not null")}");
    Console.WriteLine($"mediaCapture.FrameSources ({nameof(mediaCapture.FrameSources)}) has {mediaCapture.FrameSources.Count} items");
    foreach (var source in mediaCapture.FrameSources)
    {
        Console.WriteLine($"{source.Key}: {source.Value}");
    }

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    List<MediaFrameFormat> orderedVideoResolutions = colorFrameSource.SupportedFormats.OrderByDescending(x => x.VideoFormat.Width).ToList();
    Log.WriteLine($"Found the following supportedFormats(video resolutions): {string.Join(", ", orderedVideoResolutions.Select(x => $"{x.VideoFormat.Width}x{x.VideoFormat.Height}"))}");

    // List is in descending width order, so this picks the widest format that
    // still satisfies the minimum width requirement.
    var preferredFormat = orderedVideoResolutions.Where(format => format.VideoFormat.Width >= MinimumVideoWidth).FirstOrDefault();
    if (null == preferredFormat)
    {
        throw new ApplicationException($"Our desired minimum video width ({MinimumVideoWidth}) is not supported by the imaging devices found on this machine");
    }
    Log.WriteLine($"Selected: {preferredFormat.VideoFormat.Width}x{preferredFormat.VideoFormat.Height}, based on minimum video width requirement of >= '{MinimumVideoWidth}'");
    await AsAsync(colorFrameSource.SetFormatAsync(preferredFormat));

    mediaFrameReader = await AsAsync(mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32));
    if (null == mediaFrameReader)
    {
        throw new ApplicationException($"Unable to create new mediaframereader");
    }

    // Manual-reset event signalled on every frame arrival; consumers reset it themselves.
    evtFrame = new EventWaitHandle(false, EventResetMode.ManualReset);
    mediaFrameReader.FrameArrived += (s, a) => evtFrame.Set();
    await AsAsync(mediaFrameReader.StartAsync());
    Log.WriteLineVerbose("FrameReader Started");
}
/// <summary>
/// Handler invoked when media processing fails: surfaces the failure message to the user in a dialog.
/// </summary>
/// <param name="sender">The MediaCapture instance that raised the failure (unused).</param>
/// <param name="e">Failure details; only the message text is shown.</param>
private static async void _FailMediaCapture(MediaCapture sender, MediaCaptureFailedEventArgs e)
{
    await new MessageDialog(e.Message).ShowAsync();
}
/// <inheritdoc />
/// <summary>
/// Stops the camera preview stream.
/// </summary>
/// <returns>A task that completes once the preview has stopped.</returns>
public async Task StopPreview() => await MediaCapture.StopPreviewAsync();
/// <summary>
/// Finds a source group exposing both a front- and a back-facing color camera, initializes
/// MediaCapture against it, and plays each camera's frames in its own MediaPlayerElement.
/// </summary>
/// <param name="sender">The button that raised the click (unused).</param>
/// <param name="e">Routed event data (unused).</param>
private async void MediaSourceFromFrameSource_Click(object sender, RoutedEventArgs e)
{
    // <SnippetMediaSourceSelectGroup>
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front
                && info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back
                && info.SourceKind == MediaFrameSourceKind.Color)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        System.Diagnostics.Debug.WriteLine("No source group with front and back-facing camera found.");
        return;
    }

    var selectedGroupIndex = 0; // Select the first eligible group
    MediaFrameSourceGroup selectedGroup = eligibleGroups[selectedGroupIndex].Group;
    // FIX: read the front/back pair from the filtered anonymous-type array rather than
    // from the raw selectedGroup.SourceInfos list, whose ordering is device-defined
    // and not guaranteed to be front-then-back.
    // NOTE(review): either entry may still be null if the group has only one qualifying
    // camera (the Where above requires Any, not All) — the null dereferences below.
    MediaFrameSourceInfo frontSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
    MediaFrameSourceInfo backSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[1];
    // </SnippetMediaSourceSelectGroup>

    // <SnippetMediaSourceInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    // </SnippetMediaSourceInitMediaCapture>

    // <SnippetMediaSourceMediaPlayer>
    var frameMediaSource1 = MediaSource.CreateFromMediaFrameSource(mediaCapture.FrameSources[frontSourceInfo.Id]);
    mediaPlayerElement1.SetMediaPlayer(new Windows.Media.Playback.MediaPlayer());
    mediaPlayerElement1.MediaPlayer.Source = frameMediaSource1;
    mediaPlayerElement1.AutoPlay = true;

    var frameMediaSource2 = MediaSource.CreateFromMediaFrameSource(mediaCapture.FrameSources[backSourceInfo.Id]);
    mediaPlayerElement2.SetMediaPlayer(new Windows.Media.Playback.MediaPlayer());
    mediaPlayerElement2.MediaPlayer.Source = frameMediaSource2;
    mediaPlayerElement2.AutoPlay = true;
    // </SnippetMediaSourceMediaPlayer>
}
// <SnippetRecordLimitationExceededHandler>
/// <summary>
/// Raised when a recording reaches the maximum duration allowed by the system;
/// stops the active recording so the captured content is finalized.
/// </summary>
/// <param name="sender">The MediaCapture instance that raised the event (unused).</param>
private async void MediaCapture_RecordLimitationExceeded(MediaCapture sender)
{
    await _mediaRecording.StopAsync();
    System.Diagnostics.Debug.WriteLine("Record limitation exceeded.");
}
/// <summary>
/// Selects a front-facing color VideoPreview frame source, initializes MediaCapture
/// against it at 1920-wide resolution, and starts a frame reader delivering ARGB32
/// frames to ColorFrameReader_FrameArrived.
/// </summary>
/// <param name="sender">The button that raised the click (unused).</param>
/// <param name="e">Routed event data (unused).</param>
private async void ActionButton_Click(object sender, RoutedEventArgs e)
{
    // <SnippetImageElementSource>
    imageElement.Source = new SoftwareBitmapSource();
    // </SnippetImageElementSource>

    // <SnippetFindAllAsync>
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    // </SnippetFindAllAsync>

    // Color, infrared, and depth
    // <SnippetSelectColor>
    var selectedGroupObjects = frameSourceGroups.Select(group => new
    {
        sourceGroup = group,
        colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
        {
            // On XBox/Kinect, omit the MediaStreamType and EnclosureLocation tests
            return (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
                && sourceInfo.SourceKind == MediaFrameSourceKind.Color
                && sourceInfo.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
        })
    }).Where(t => t.colorSourceInfo != null)
      .FirstOrDefault();

    MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup;
    MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo;

    // Silently bail if no group offers a front-facing color preview source.
    if (selectedGroup == null)
    {
        return;
    }
    // </SnippetSelectColor>

    // <SnippetInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    // </SnippetInitMediaCapture>

    // Require exactly 1920-pixel-wide frames; bail quietly otherwise.
    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return (format.VideoFormat.Width == 1920);
    }).FirstOrDefault();

    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }

    await colorFrameSource.SetFormatAsync(preferredFormat);

    // <SnippetCreateFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
    // </SnippetCreateFrameReader>
}
// </SnippetRecordLimitationExceededHandler> private void MediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) { System.Diagnostics.Debug.WriteLine("MediaCapture.Failed: {0}", errorEventArgs.Message); }
public async Task ChangeLiveStream() { Debug.WriteLine("ChangeLiveStream"); await CleanupCameraAsync(); SaveEnabled = false; // If webcam hasn't been initialized, bail. if ((_devices == null) || (_devices.Count == 0)) { return; } try { // Check that SCI hasn't < 0 // Probably -1 when doesn't find camera, or when list gone? if (_appModel.SelectedCameraIndex < 0) { Debug.WriteLine("selectedCamera < 0"); NotifyUser(false, "Invalid Camera selected, using default"); _appModel.SelectedCameraIndex = 0; return; } var device = _devices.ToList().ElementAt(_appModel.SelectedCameraIndex); // Create MediaCapture and its settings var settings = new MediaCaptureInitializationSettings { VideoDeviceId = device.Id, PhotoCaptureSource = PhotoCaptureSource.Auto, MemoryPreference = UseGpu ? MediaCaptureMemoryPreference.Auto : MediaCaptureMemoryPreference.Cpu, StreamingCaptureMode = StreamingCaptureMode.Video, MediaCategory = MediaCategory.Communications, }; _mediaCapture = new MediaCapture(); await _mediaCapture.InitializeAsync(settings); _displayrequest.RequestActive(); var capture = new CaptureElement(); capture.Source = _mediaCapture; _appModel.OutputCaptureElement = capture; var modelPath = Path.GetFullPath($"./Assets/{_appModel.ModelSource}.onnx"); VideoEffectDefinition videoEffectDefinition = new VideoEffectDefinition(StyleTransferEffectId, new PropertySet() { { "ModelName", modelPath }, { "UseGpu", UseGpu }, { "Notifier", _notifier }, { "NumThreads", NumThreads } }); IMediaExtension videoEffect = await _mediaCapture.AddVideoEffectAsync(videoEffectDefinition, MediaStreamType.VideoPreview); var props = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties; CaptureFPS = props.FrameRate.Numerator / props.FrameRate.Denominator; await _mediaCapture.StartPreviewAsync(); _isPreviewing = true; } catch (Exception ex) { Debug.WriteLine(ex.ToString()); } }
private void CameraMediaCapture_CaptureDeviceExclusiveControlStatusChanged(MediaCapture sender, MediaCaptureDeviceExclusiveControlStatusChangedEventArgs args) { this.Log.Debug(() => $"CameraMediaCapture exclusive controle state changed to {args.Status}"); }
private async void MediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) { Debug.WriteLine("MediaCapture_Failed: (0x{0:X}) {1}", errorEventArgs.Code, errorEventArgs.Message); await CleanupCameraAsync(); }
private void CameraMediaCapture_CameraStreamStateChanged(MediaCapture sender, object args) { this.Log.Debug(() => $"CameraMediaCapture state changed to {sender.CameraStreamState.ToString()}"); }
/// <summary>
/// Creates and configures _mediaCapture on the main view's dispatcher:
/// exclusive-control, CPU-memory, video-only capture tuned for quality, a fixed
/// YUY2 format, and a running frame reader. Also wires a Failed handler that
/// re-runs this initialization for specific error codes.
/// </summary>
public async Task Initialize()
{
    await CoreApplication.MainView.CoreWindow.Dispatcher.RunAndAwaitAsync(CoreDispatcherPriority.Normal, async() =>
    {
        // Build the encoder property set carrying the "ImageQuality" value
        // derived from IMAGE_QUALITY_PERCENT.
        _imageQuality = new BitmapPropertySet();
        var imageQualityValue = new BitmapTypedValue(IMAGE_QUALITY_PERCENT, Windows.Foundation.PropertyType.Single);
        _imageQuality.Add("ImageQuality", imageQualityValue);

        _mediaCapture = new MediaCapture();
        _mediaCapture.Failed += async(MediaCapture mediaCapture, MediaCaptureFailedEventArgs args) =>
        {
            await Logger.Write($"Camera Failed Event: {args.Code}, {args.Message}");
            // NOTE(review): these two numeric codes trigger a full reinitialize —
            // presumably device-lost style failures; confirm what HRESULTs they map to.
            if (args.Code == 2147942414 || args.Code == 3222093442)
            {
                await Logger.Write($"Reinitialize camera.");
                await Initialize();
            }
        };

        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
        var settings = new MediaCaptureInitializationSettings()
        {
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            // With CPU the results always contain SoftwareBitmaps; with GPU they
            // prefer D3DSurface instead.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu,
            // Capture only video, no audio
            StreamingCaptureMode = StreamingCaptureMode.Video
        };
        await _mediaCapture.InitializeAsync(settings);

        // Take the first reported frame source and tune its controller for video quality.
        var mediaFrameSource = _mediaCapture.FrameSources.First().Value;
        var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;
        videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
        videoDeviceController.PrimaryUse = Windows.Media.Devices.CaptureUse.Video;

        // Force minimum backlight compensation and auto exposure; fail loudly if
        // the device refuses either setting.
        if (!videoDeviceController.BacklightCompensation.TrySetValue(videoDeviceController.BacklightCompensation.Capabilities.Min))
        {
            throw new Exception("Could not set min backlight compensation to camera.");
        }
        if (!videoDeviceController.Exposure.TrySetAuto(true))
        {
            throw new Exception("Could not set auto exposure to camera.");
        }

        // Require an exact YUY2 format at the configured resolution; First()
        // throws if the camera does not support it.
        var videoFormat = mediaFrameSource.SupportedFormats.First(sf => sf.VideoFormat.Width == VIDEO_WIDTH && sf.VideoFormat.Height == VIDEO_HEIGHT && sf.Subtype == "YUY2");
        await mediaFrameSource.SetFormatAsync(videoFormat);

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
        await _mediaFrameReader.StartAsync();
    });
}
private void CameraMediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) { this.Log.Error(() => $"CameraMediaCapture Exception: {errorEventArgs.Message}", null, (c) => c.AddVariable("ErrorCode", errorEventArgs.Code.ToString())); }
/// <summary>
/// Initializes the camera (the one selected in the combo box, or the first
/// discovered color camera), picks the best preview resolution, starts the
/// preview, and then runs a polling loop that grabs preview frames and scans
/// them for QR codes until cancellation is requested.
/// </summary>
/// <param name="comboboxItem">Camera chosen by the user, or null to auto-select.</param>
public async Task StartPreviewAsync(QR_Code_Scanner.Business.ComboboxItem comboboxItem)
{
    FrameSourceInformation frameSourceInformation = new FrameSourceInformation();
    try
    {
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings() { StreamingCaptureMode = StreamingCaptureMode.Video };
        if (comboboxItem != null)
        {
            // Use the camera the user explicitly selected.
            settings.VideoDeviceId = comboboxItem.ID;
            frameSourceInformation = comboboxItem.MediaFrameSourceInformation;
        }
        else
        {
            // Lazily discover the color cameras, then default to the first one.
            if (availableColorCameras == null)
            {
                var frameSourceInformations = await GetFrameSourceInformationAsync();
                frameSourceInformation = frameSourceInformations.First();
                availableColorCameras = await GetFrameSourceGroupsAsync(frameSourceInformation);
            }
            settings.VideoDeviceId = availableColorCameras.First().Id;
        }
        qrAnalyzerCancellationTokenSource = new CancellationTokenSource();
        try
        {
            await mediaCapture.InitializeAsync(settings);
        }
        catch (Exception ex)
        {
            // NOTE(review): this call is not awaited, and execution continues
            // with a possibly-uninitialized mediaCapture — confirm intended.
            MessageManager.ShowMessageToUserAsync("Tried to initialize a color camera but failed to do so.");
        }
        List<VideoEncodingProperties> availableResolutions = null;
        try
        {
            availableResolutions = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview).Where(properties => properties is VideoEncodingProperties).Select(properties => (VideoEncodingProperties)properties).ToList();
        }
        catch (Exception ex)
        {
            // NOTE(review): also not awaited; the preview proceeds with the
            // device's default mode when resolutions cannot be enumerated.
            MessageManager.ShowMessageToUserAsync("No resolutions could be detected, trying default mode.");
        }
        // Choose and apply the best supported preview resolution, if any.
        VideoEncodingProperties bestVideoResolution = this.findBestResolution(availableResolutions);
        if (bestVideoResolution != null)
        {
            await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, bestVideoResolution);
        }
        // Keep the display active while the preview runs.
        displayRequest.RequestActive();
    }
    catch (UnauthorizedAccessException)
    {
        // This will be thrown if the user denied access to the camera in privacy settings
        MessageManager.ShowMessageToUserAsync("The app was denied access to the camera");
        return;
    }
    try
    {
        this.ScanForQRcodes = true;
        previewWindowElement.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
        isPreviewing = true;
        var imgProp = new ImageEncodingProperties { Subtype = "BMP", Width = (uint)imgCaptureWidth, Height = (uint)imgCaptureHeight };
        var bcReader = new BarcodeReader();
        // Milliseconds between successive QR-scan attempts.
        var qrCaptureInterval = 200;
        var torch = mediaCapture.VideoDeviceController.TorchControl;
        var exposureCompensationControl = mediaCapture.VideoDeviceController.ExposureCompensationControl;
        if (torch.Supported) { torch.Enabled = false; }
        //if (exposureCompensationControl.Supported) {
        //    var maxSupported = exposureCompensationControl.Max;
        //    var minSupported = exposureCompensationControl.Min;
        //    var middleExposure = (maxSupported + minSupported) / 2;
        //    var quarterExposure = (middleExposure + minSupported) / 2;
        //    await exposureCompensationControl.SetValueAsync(quarterExposure);
        //}
        // Get information about the preview
        var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
        // Poll preview frames for QR codes until cancellation is requested.
        while (!qrAnalyzerCancellationTokenSource.IsCancellationRequested && qrAnalyzerCancellationTokenSource != null && qrAnalyzerCancellationTokenSource.Token != null)
        {
            //try capture qr code here
            if (ScanForQRcodes)
            {
                VideoFrame videoFrameFormatPlaceholder = new VideoFrame(BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height);
                await mediaCapture.GetPreviewFrameAsync(videoFrameFormatPlaceholder);
                await findQRinImageAsync(bcReader, videoFrameFormatPlaceholder);
                videoFrameFormatPlaceholder.Dispose();
                videoFrameFormatPlaceholder = null;
            }
            //await Task.Delay(qrCaptureInterval, qrAnalyzerCancellationTokenSource.Token);
            // The empty continuation swallows the TaskCanceledException raised
            // when the delay is cancelled, so cancellation exits via the loop test.
            var delayTask = Task.Delay(qrCaptureInterval, qrAnalyzerCancellationTokenSource.Token);
            var continuationTask = delayTask.ContinueWith(task => { });
            await continuationTask;
        }
    }
    catch (System.IO.FileLoadException)
    {
        // Another app holds exclusive control of the camera; subscribe so we can
        // react when its status changes.
        mediaCapture.CaptureDeviceExclusiveControlStatusChanged += mediaCapture_CaptureDeviceExclusiveControlStatusChanged;
    }
    catch (System.ObjectDisposedException)
    {
        Debug.WriteLine("object was disposed");
    }
    catch (Exception)
    {
        Debug.WriteLine("another exception occurred.");
    }
}
private async void ActionButton2_Click(object sender, RoutedEventArgs e) { var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); // Color, infrared, and depth var selectedGroupObjects = frameSourceGroups.Select(group => new { sourceGroup = group, colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) => { return(sourceInfo.SourceKind == MediaFrameSourceKind.Color); }) }).Where(t => t.colorSourceInfo != null) .FirstOrDefault(); MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup; MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo; if (selectedGroup == null) { return; } mediaCapture = new MediaCapture(); var settings = new MediaCaptureInitializationSettings() { SourceGroup = selectedGroup, SharingMode = MediaCaptureSharingMode.ExclusiveControl, MemoryPreference = MediaCaptureMemoryPreference.Cpu, StreamingCaptureMode = StreamingCaptureMode.Video }; try { await mediaCapture.InitializeAsync(settings); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message); return; } var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id]; var preferredFormat = colorFrameSource.SupportedFormats.Where(format => { return(format.VideoFormat.Width == 1920); }).FirstOrDefault(); if (preferredFormat == null) { // Our desired format is not supported return; } await colorFrameSource.SetFormatAsync(preferredFormat); mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32); mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived_FrameRenderer; _frameRenderer = new FrameRenderer(imageElement); await mediaFrameReader.StartAsync(); }
// Set up video capture device private async Task InitializeCameraAsync(bool useFrontPanel) { var panel = Windows.Devices.Enumeration.Panel.Back; if (useFrontPanel) { panel = Windows.Devices.Enumeration.Panel.Front; } if (_mediaCapture == null) { // Get available devices for capturing pictures var allVideoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture); // Get the desired camera by panel DeviceInformation cameraDevice = allVideoDevices.FirstOrDefault(x => x.EnclosureLocation != null && x.EnclosureLocation.Panel == panel); // If there is no camera on the specified panel, get any camera cameraDevice = cameraDevice ?? allVideoDevices.FirstOrDefault(); if (cameraDevice == null) { return; } // Create MediaCapture and its settings _mediaCapture = new MediaCapture(); // Register for a notification when video recording has reached the maximum time and when something goes wrong // _mediaCapture.RecordLimitationExceeded += MediaCapture_RecordLimitationExceeded; var mediaInitSettings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id }; // Initialize MediaCapture try { await _mediaCapture.InitializeAsync(mediaInitSettings); _isInitialized = true; } catch (UnauthorizedAccessException) { } catch (Exception ex) { Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString()); } // If initialization succeeded, start the preview if (_isInitialized) { // Figure out where the camera is located if (cameraDevice.EnclosureLocation == null || cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown) { // No information on the location of the camera, assume it's an external camera, not integrated on the device _externalCamera = true; } else { // Camera is fixed on the device _externalCamera = false; } await StartPreviewAsync(); await initializeFaceDetection(); } } }
// </SnippetMultiFrameDeclarations>

/// <summary>
/// Selects the first source group offering color and/or depth frames,
/// initializes MediaCapture for it, and starts a multi-source frame reader
/// delivering correlated color+depth frames, plus a background watchdog that
/// reports correlation failures.
/// </summary>
private async void InitMultiFrame()
{
    // <SnippetSelectColorAndDepth>
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        System.Diagnostics.Debug.WriteLine("No source group with color, depth or infrared found.");
        return;
    }

    var selectedGroupIndex = 0; // Select the first eligible group
    MediaFrameSourceGroup selectedGroup = eligibleGroups[selectedGroupIndex].Group;
    MediaFrameSourceInfo colorSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
    MediaFrameSourceInfo depthSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[1];
    // </SnippetSelectColorAndDepth>

    // <SnippetMultiFrameInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        // FIX: InitializeAsync can throw (e.g. camera access denied); an
        // unhandled exception in this async void method would crash the app.
        // Mirror the error handling used by the other init paths in this file.
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    // </SnippetMultiFrameInitMediaCapture>

    // <SnippetGetColorAndDepthSource>
    MediaFrameSource colorSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Color);
    MediaFrameSource depthSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

    if (colorSource == null || depthSource == null)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture doesn't have the Color and Depth streams");
        return;
    }

    _colorSourceId = colorSource.Info.Id;
    _depthSourceId = depthSource.Info.Id;
    // </SnippetGetColorAndDepthSource>

    // <SnippetInitMultiFrameReader>
    _multiFrameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(
        new[] { colorSource, depthSource });

    _multiFrameReader.FrameArrived += MultiFrameReader_FrameArrived;

    _frameRenderer = new FrameRenderer(imageElement);

    MultiSourceMediaFrameReaderStartStatus startStatus = await _multiFrameReader.StartAsync();

    if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
    {
        throw new InvalidOperationException(
            "Unable to start reader: " + startStatus);
    }

    this.CorrelationFailed += MainPage_CorrelationFailed;
    // FIX: explicitly discard the fire-and-forget watchdog task to document the
    // intent and silence the unobserved-task warning.
    _ = Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
    // </SnippetInitMultiFrameReader>
}
private async void MediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs) { await CleanupCameraAsync(); }
/// <summary> /// Handles the event of exceeding the record limit of the MediaCapture object. Currently not (yet) implemented. /// </summary> /// <param name="currentCaptureObject">The MediaCapture object that failed to capture the still image from the webcam.</param> public void mediaCapture_RecordLimitExceeded(MediaCapture currentCaptureObject) { throw new NotImplementedException(); }