// Page Loaded handler: creates the preview image source, initializes the
// CameraHelper, and routes its preview frame source into a MediaPlayer.
// async void is acceptable here only because this is a top-level event handler.
private async void OnLoaded(object sender, RoutedEventArgs e)
{
    _previewImageSource = new SoftwareBitmapSource();
    PreviewImage.Source = _previewImageSource;
    _cameraHelper = new CameraHelper();
    // Enumerate every available frame source group to populate the combo box.
    IReadOnlyList<MediaFrameSourceGroup> frameSourceGroups = await CameraHelper.GetFrameSourceGroupsAsync();
    CameraHelperResult result = await _cameraHelper.InitializeAndStartCaptureAsync();
    if (result == CameraHelperResult.Success)
    {
        // Subscribe to the video frames as they arrive
        _cameraHelper.FrameArrived += CameraHelper_FrameArrived;
        FrameSourceGroupCombo.ItemsSource = frameSourceGroups;
        FrameSourceGroupCombo.SelectionChanged += FrameSourceGroupCombo_SelectionChanged;
        MediaFrameSource frameSource = _cameraHelper.PreviewFrameSource;
        _mediaPlayer = new MediaPlayer { AutoPlay = true, RealTimePlayback = true };
        _mediaPlayer.Source = MediaSource.CreateFromMediaFrameSource(frameSource);
        MediaPlayerElementControl.SetMediaPlayer(_mediaPlayer);
        // NOTE(review): setting SelectedIndex here fires FrameSourceGroupCombo_SelectionChanged,
        // which re-runs InitializeAndStartCaptureAsync a second time — presumably
        // intentional (re-init on the default group), but confirm.
        FrameSourceGroupCombo.SelectedIndex = 0;
    }
}
/// <summary>
/// Opens the capture device identified by <paramref name="FriendlyName"/> and creates a
/// frame reader for a preview/record video stream matching the requested format.
/// Blocks the calling thread until the async WinRT operations complete, by polling
/// (keeps the method usable from STA threads where Task.Wait can deadlock).
/// </summary>
/// <param name="FriendlyName">Device id passed as MediaCaptureInitializationSettings.VideoDeviceId.</param>
/// <param name="FrameRate">Requested frame rate. NOTE(review): not used for format
/// selection (the original ignored it too) — confirm whether it should filter formats.</param>
/// <param name="Height">Requested frame height in pixels.</param>
/// <param name="Width">Requested frame width in pixels.</param>
/// <param name="Encoding">Requested subtype string, e.g. "MJPG".</param>
/// <exception cref="System.Exception">Initialization failed or no matching stream exists.</exception>
public override void Load(string FriendlyName, int FrameRate, int Height, int Width, string Encoding)
{
    // Release any previously opened device before re-initializing.
    if (NativeDevice != null)
    {
        NativeDevice.Dispose();
    }
    NativeDevice = new MediaCapture();
    var initTask = NativeDevice.InitializeAsync(new MediaCaptureInitializationSettings()
    {
        VideoDeviceId = FriendlyName,
        MemoryPreference = MediaCaptureMemoryPreference.Auto,
        StreamingCaptureMode = StreamingCaptureMode.Video
    });
    while (initTask.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }
    if (initTask.Status == AsyncStatus.Error)
    {
        throw new System.Exception("Access Denied");
    }
    // Detach and dispose any previous reader so FrameArrived is never double-subscribed.
    if (Reader != null)
    {
        Reader.FrameArrived -= NewFrameArrived;
        Reader.Dispose();
    }
    // Pick the first preview/record video source supporting the REQUESTED format.
    // (Bug fix: Height/Width/Encoding were previously ignored in favor of
    // hard-coded 640x480 "MJPG".)
    MediaFrameSource selectedSource = null;
    foreach (MediaFrameSource source in NativeDevice.FrameSources.Values)
    {
        if (source.CurrentFormat.MajorType != "Video")
        {
            continue;
        }
        if (source.Info.MediaStreamType != MediaStreamType.VideoPreview &&
            source.Info.MediaStreamType != MediaStreamType.VideoRecord)
        {
            continue;
        }
        foreach (MediaFrameFormat format in source.SupportedFormats)
        {
            if (format.VideoFormat.Height == Height &&
                format.VideoFormat.Width == Width &&
                format.VideoFormat.MediaFrameFormat.Subtype == Encoding)
            {
                selectedSource = source;
                var setFormatTask = selectedSource.SetFormatAsync(format);
                while (setFormatTask.Status == AsyncStatus.Started)
                {
                    Thread.Sleep(50);
                }
                break; // first matching format wins
            }
        }
        if (selectedSource != null)
        {
            break; // stop scanning once a source has been configured
        }
    }
    if (selectedSource == null)
    {
        // Bug fix: previously CreateFrameReaderAsync was called with null,
        // producing an opaque failure instead of a clear error.
        throw new System.Exception("No video stream found matching the requested format");
    }
    var readerTask = NativeDevice.CreateFrameReaderAsync(selectedSource);
    while (readerTask.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }
    Reader = readerTask.GetResults();
}
// Starts (or restarts) the frame listener on the FIRST available frame source of
// cameraCapture — no filtering by source kind or stream type is applied.
// NOTE(review): this definition appears truncated in this chunk — the try block's
// catch/finally and the method's closing brace are not visible here. Do not assume
// error handling exists beyond what is shown.
private async Task StartFrameListenerAsync()
{
    if (frameListenerRunning)
    {
        Logger.Info("Frame listener already running. Restarting it...");
        await StopFrameListenerAsync();
    }
    try
    {
        if (cameraCapture.FrameSources.Count > 0)
        {
            MediaFrameSource frameSource = cameraCapture.FrameSources.First().Value;
            // NOTE(review): 'count' is assigned but never used — confirm it can be removed.
            int count = cameraCapture.FrameSources.Count;
            if (!(frameSource is null))
            {
                frameReader = await cameraCapture.CreateFrameReaderAsync(frameSource);
                frameReader.FrameArrived += FrameReader_FrameArrived;
                await frameReader.StartAsync();
                frameListenerRunning = true;
            }
            else
            {
                Logger.Info("MediaFrameSource is null.");
                await OnErrorAsync(PreviewError.MEDIA_FRAME_IS_NULL);
            }
        }
        else
        {
            Logger.Info("MediaFrameReader creation failed with: No camera available.");
            await OnErrorAsync(PreviewError.MEDIA_FRAME_NO_CAMERA);
        }
    }
/// <summary>
/// Creates the frame reader for the video-record stream using the target format and
/// registers the <see cref="OnFrameArrived"/> event. The width is padded to be divisible by 64.
/// </summary>
/// <returns>True when the reader was initialized; false on any failure (logged).</returns>
private async Task<bool> CreateFrameReader()
{
    const MediaStreamType streamType = MediaStreamType.VideoRecord;
    var cameraParameters = new CameraParameters(_cameraProfile);
    try
    {
        // Exactly one record-stream source is expected; Single() throws otherwise.
        MediaFrameSource recordSource = _mediaCapture.FrameSources.Values
            .Single(candidate => candidate.Info.MediaStreamType == streamType);
        MediaFrameFormat targetFormat = GetTargetFormat(recordSource, cameraParameters);
        await recordSource.SetFormatAsync(targetFormat);
        _frameReader = await _mediaCapture.CreateFrameReaderAsync(recordSource, targetFormat.Subtype);
        _frameReader.FrameArrived += OnFrameArrived;
        FrameWidth = Convert.ToInt32(targetFormat.VideoFormat.Width);
        FrameHeight = Convert.ToInt32(targetFormat.VideoFormat.Height);
        // Pad the width so downstream buffers stay 64-aligned.
        FrameWidth = PadTo64(FrameWidth);
        _logger.Log($"FrameReader initialized using {FrameWidth} x {FrameHeight}, frame rate: {targetFormat.FrameRate.Numerator} / {targetFormat.FrameRate.Denominator}, color format: {_format}");
    }
    catch (Exception exception)
    {
        _logger.LogError("Frame Reader could not be initialized");
        _logger.LogException(exception);
        return false;
    }
    return true;
}
// Returns true when the source is a Custom-kind source whose current format
// matches the perception major type and the pose-tracking subtype GUID.
static bool DoesCustomSourceSupportPerceptionFormat(MediaFrameSource source)
{
    if (source.Info.SourceKind != MediaFrameSourceKind.Custom)
    {
        return false;
    }
    MediaFrameFormat currentFormat = source.CurrentFormat;
    return currentFormat.MajorType == PerceptionFormat
        && Guid.Parse(currentFormat.Subtype) == PoseTrackingFrame.PoseTrackingSubtype;
}
//--------------------------------------------------------Set-, Get- Methods:---------------------------------------------------------\\
#region --Set-, Get- Methods--

#endregion
//--------------------------------------------------------Misc Methods:---------------------------------------------------------------\\
#region --Misc Methods (Public)--

#endregion

#region --Misc Methods (Private)--
/// <summary>
/// Initializes the camera, binds it to the preview element, and hooks up the
/// frame reader and QR-code result handlers. No-op when already started.
/// </summary>
private async Task startCameraAsync()
{
    if (mediaCapture != null)
    {
        // Camera already started.
        return;
    }
    var settings = new MediaCaptureInitializationSettings()
    {
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
        DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;
    }
    catch (UnauthorizedAccessException e)
    {
        Logger.Error("[QRCodeReaderControl] Failed to start camera: Access denied!", e);
        return;
    }
    try
    {
        cameraPreview_ce.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
        // Bug fix: the condition was inverted ("<= 0"). With sources present the
        // reader was never created, and with none present First() threw.
        if (mediaCapture.FrameSources.Count > 0)
        {
            MediaFrameSource frameSource = mediaCapture.FrameSources.First().Value;
            if (frameSource != null)
            {
                frameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
                // Unsubscribe first so a restart never double-subscribes.
                frameReader.FrameArrived -= FrameReader_FrameArrived;
                frameReader.FrameArrived += FrameReader_FrameArrived;
                QR_CODE_READER.ResultFound -= QR_CODE_READER_ResultFound;
                QR_CODE_READER.ResultFound += QR_CODE_READER_ResultFound;
                // NOTE(review): frameReader.StartAsync() is not called here —
                // presumably started elsewhere; confirm.
            }
            else
            {
                Logger.Error("[QRCodeReaderControl] Failed to start camera! No sources.");
            }
        }
        else
        {
            Logger.Error("[QRCodeReaderControl] Failed to start camera! No sources.");
        }
    }
    catch (Exception e)
    {
        Logger.Error("[QRCodeReaderControl] Failed to start camera!", e);
        return;
    }
}
/// <summary>
/// Starts previewing from the camera: picks a color preview (or record) stream,
/// feeds it into a MediaPlayer, and updates the preview UI.
/// </summary>
private void StartPreview()
{
    Debug.WriteLine("StartPreview");

    // Prefer a color preview stream; fall back to a color record stream.
    _selectedMediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(
            s => s.Value.Info.MediaStreamType == MediaStreamType.VideoPreview
              && s.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value
        ?? _mediaCapture.FrameSources.FirstOrDefault(
            s => s.Value.Info.MediaStreamType == MediaStreamType.VideoRecord
              && s.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;

    // if no preview stream is available, bail
    if (_selectedMediaFrameSource == null)
    {
        return;
    }

    _mediaPlayer = new MediaPlayer();
    _mediaPlayer.RealTimePlayback = true;
    _mediaPlayer.AutoPlay = true;
    _mediaPlayer.Source = MediaSource.CreateFromMediaFrameSource(_selectedMediaFrameSource);
    UIMediaPlayerElement.SetMediaPlayer(_mediaPlayer);

    // Show "WxH@num/den, subtype" for the active format.
    MediaFrameFormat currentFormat = _selectedMediaFrameSource.CurrentFormat;
    UITxtBlockPreviewProperties.Text = string.Format("{0}x{1}@{2}, {3}",
        currentFormat.VideoFormat.Width,
        currentFormat.VideoFormat.Height,
        currentFormat.FrameRate.Numerator + "/" + currentFormat.FrameRate.Denominator,
        currentFormat.Subtype);

    UICameraSelectionControls.Visibility = Visibility.Visible;
    UIMediaPlayerElement.Visibility = Visibility.Visible;
    UIResultImage.Width = UIMediaPlayerElement.Width;
    UIResultImage.Height = UIMediaPlayerElement.Height;
}
// Used an explicit tuple here as I'm in C# 6.0.
// Finds a record stream of the requested kind/resolution, initializes MediaCapture
// on its group, and applies the first supported format matching the request.
// Returns (null, null) items when no matching description exists.
async Task<Tuple<MediaCapture, MediaFrameSource>> GetMediaCaptureForDescriptionAsync(
    MediaFrameSourceKind sourceKind,
    int width,
    int height,
    int frameRate,
    string[] bitmapFormats = null)
{
    MediaCapture capture = null;
    MediaFrameSource source = null;

    var groups = await MediaFrameSourceGroup.FindAllAsync();

    // The description-level frame rates are not fully trusted (both depth streams
    // report 30fps) — the rate is re-checked against SupportedFormats below.
    // NB: LastOrDefault is a deliberate (nasty) hack — it grabs the *LAST* depth
    // stream on the assumption that it is the long-range rather than the
    // short-range one. A robust stream-selection strategy should replace this.
    var description = groups
        .SelectMany(group => group.SourceInfos)
        .LastOrDefault(info =>
            (info.MediaStreamType == MediaStreamType.VideoRecord) &&
            (info.SourceKind == sourceKind) &&
            (info.VideoProfileMediaDescription.Any(
                d => d.Width == width && d.Height == height && d.FrameRate == frameRate)));

    if (description != null)
    {
        capture = new MediaCapture();
        await capture.InitializeAsync(
            new MediaCaptureInitializationSettings()
            {
                // CPU memory so that frames arrive as software bitmaps.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                SourceGroup = description.SourceGroup,
                StreamingCaptureMode = StreamingCaptureMode.Video
            });

        source = capture.FrameSources[description.Id];

        // Match resolution, (integer) frame rate, and optionally a subtype whitelist.
        var chosenFormat = source.SupportedFormats.First(format =>
            (format.VideoFormat.Width == width) &&
            (format.VideoFormat.Height == height) &&
            (format.FrameRate.Numerator / format.FrameRate.Denominator == frameRate) &&
            ((bitmapFormats == null) || bitmapFormats.Contains(format.Subtype.ToLower())));

        await source.SetFormatAsync(chosenFormat);
    }
    return Tuple.Create(capture, source);
}
// Constructor: stores the capture pipeline objects for later use.
public VideoProcessor(MediaCapture mediacapture, MediaFrameReader mfr, MediaFrameSource mfs)
{
    this.mediacapture = mediacapture;
    MF_Reader = mfr;
    MF_Source = mfs;

    Debug.WriteLine("\t --> VideoProcessor constructed !");
}
/// <summary>
/// Finds a depth-capable frame source group, initializes MediaCapture on it,
/// selects the D16 format, and starts a frame reader for depth frames.
/// Logs (Japanese messages, kept verbatim) and returns early on each failure.
/// </summary>
private async Task InitMediaSourceAsync()
{
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        Debug.LogError("cannot found MediaFrameSourceGroup. アプリケーションマニュフェストを確認してください。");
        return;
    }
    MediaFrameSourceGroup sourceGroup =
        allGroups.FirstOrDefault(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));
    if (sourceGroup == null)
    {
        Debug.LogError("深度カメラが見つからないようです。");
        return;
    }
    try
    {
        await InitializeMediaCaptureAsync(sourceGroup);
    }
    catch (Exception exception)
    {
        Debug.LogError("InitializeMediaCaptureAsyncに失敗しました" + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }
    MediaFrameSource source = _mediaCapture.FrameSources.Values
        .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    if (source == null)
    {
        Debug.LogError("sourceが見つかりません。");
        // Bug fix: previously execution fell through here and dereferenced the
        // null source below, throwing NullReferenceException.
        return;
    }
    MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(f =>
        String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));
    if (format == null)
    {
        // No 16-bit depth format available on this source.
        return;
    }
    await source.SetFormatAsync(format);
    _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);
    MediaFrameReaderStartStatus status = await _depthReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.LogError("_depthReader.StartAsyncに失敗しました");
    }
}
// Constructor: keeps the capture pipeline alive and subscribes to new frames
// so m_latestFrame can be refreshed as they arrive.
public VideoFrameProcessor(MediaCapture mediaCapture, MediaFrameReader reader, MediaFrameSource source)
{
    m_mediaFrameSource = source;
    m_mediaFrameReader = reader;
    m_mediaCapture = mediaCapture;

    // Listen for new frames, so we know when to update our m_latestFrame.
    m_mediaFrameReader.FrameArrived += OnFrameArrived;
}
// Navigation entry point: wires the dispatcher/container into the OpenCV interop
// layer, populates the operation combo box, finds the first color camera source,
// and starts a BGRA8 frame reader at a reduced resolution.
// async void is acceptable only because this is a framework event override.
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    App.dispatcher = this.Dispatcher;
    Cv2.InitContainer((object)App.container);
    //_helper.SetContainer(App.container);
    rootPage = MainPage.Current;

    // setting up the combobox, and default operation
    OperationComboBox.ItemsSource = Enum.GetValues(typeof(OperationType));
    OperationComboBox.SelectedIndex = 0;
    currentOperation = OperationType.Blur;

    // Find the sources: keep only groups exposing a color source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var sourceGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
    }).Where(g => g.SourceInfo != null).ToList();
    if (sourceGroups.Count == 0)
    {
        // No camera sources found
        return;
    }
    var selectedSource = sourceGroups.FirstOrDefault();

    // Initialize MediaCapture
    try
    {
        await InitializeMediaCaptureAsync(selectedSource.Group);
    }
    catch (Exception exception)
    {
        Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create the frame reader
    MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
    BitmapSize size = new BitmapSize() // Choose a lower resolution to make the image processing more performant
    {
        Height = IMAGE_ROWS,
        Width = IMAGE_COLS
    };
    _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);
    _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
    await _reader.StartAsync();
    _FPSTimer.Start();
}
//</SnippetProcessAudioFrame>
// Mutes the capture device's audio via its AudioDeviceController.
// The Snippet markers below are consumed by documentation tooling — keep them intact.
private void MuteAudioDeviceController(MediaFrameSource frameSource)
{
    //<SnippetAudioDeviceController>
    audioDeviceController = frameSource.Controller.AudioDeviceController;
    //</SnippetAudioDeviceController>
    //<SnippetAudioDeviceControllerMute>
    audioDeviceController.Muted = true;
    //</SnippetAudioDeviceControllerMute>
}
/// <summary>
/// Disposes of the MediaCapture object and clears the items from the Format and Source ComboBoxes.
/// </summary>
private void DisposeMediaCapture()
{
    // Detach combo box data before tearing the capture object down.
    SourceComboBox.ItemsSource = null;
    FormatComboBox.ItemsSource = null;
    _source = null;
    if (_mediaCapture != null)
    {
        _mediaCapture.Dispose();
        _mediaCapture = null;
    }
}
// Factory: finds the first color preview source across all source groups,
// initializes MediaCapture on its group, and returns a running VideoFrameProcessor.
// Returns null when no camera exists (e.g. on the emulator) or the reader fails to start.
public static async Task<VideoFrameProcessor> CreateAsync()
{
    IReadOnlyList<MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();

    // Pick first color source (group order, then source order, is preserved).
    var match = groups
        .SelectMany(g => g.SourceInfos, (g, info) => new { Group = g, Info = info })
        .FirstOrDefault(x => x.Info.MediaStreamType == MediaStreamType.VideoPreview
                          && x.Info.SourceKind == MediaFrameSourceKind.Color);

    // No valid camera was found. This will happen on the emulator.
    if (match == null)
    {
        return null;
    }

    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
    settings.MemoryPreference = MediaCaptureMemoryPreference.Cpu;     // Need SoftwareBitmaps for FaceAnalysis
    settings.StreamingCaptureMode = StreamingCaptureMode.Video;       // Only need to stream video
    settings.SourceGroup = match.Group;

    MediaCapture mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);

    MediaFrameSource selectedSource = mediaCapture.FrameSources[match.Info.Id];
    MediaFrameReader reader = await mediaCapture.CreateFrameReaderAsync(selectedSource);

    // Only create a VideoFrameProcessor if the reader successfully started.
    MediaFrameReaderStartStatus status = await reader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success)
    {
        return new VideoFrameProcessor(mediaCapture, reader, selectedSource);
    }
    return null;
}
// Configures the video device controller: quality-optimized video capture,
// with auto-exposure enabled when the hardware supports it.
private static void ReglerControleurVideo(MediaFrameSource sourceImages)
{
    VideoDeviceController controller = sourceImages.Controller.VideoDeviceController;
    controller.DesiredOptimization = MediaCaptureOptimization.Quality;
    controller.PrimaryUse = CaptureUse.Video;

    var exposureCapabilities = controller.Exposure.Capabilities;
    if (exposureCapabilities.Supported && exposureCapabilities.AutoModeSupported)
    {
        controller.Exposure.TrySetAuto(true);
    }
}
// Constructor: stores the capture pipeline objects and subscribes to frame arrival.
public VideoProcessor(MediaCapture mediacapture, MediaFrameReader mfr, MediaFrameSource mfs)
{
    this.mediacapture = mediacapture;
    MF_Reader = mfr;
    MF_Source = mfs;

    // this.mediacapture.Failed += this.MediaCapture_CameraStreamFailed;
    MF_Reader.FrameArrived += OnFrameArrived;

    Debug.WriteLine("\t --> VideoProcessor constructed !");
}
// Private constructor: stores the capture objects and, when a reader was
// supplied, subscribes to its FrameArrived event.
private FrameGrabber(MediaCapture mediaCapture = null, MediaFrameSource mediaFrameSource = null, MediaFrameReader mediaFrameReader = null)
{
    this.mediaFrameReader = mediaFrameReader;
    this.mediaFrameSource = mediaFrameSource;
    this.mediaCapture = mediaCapture;

    if (this.mediaFrameReader is null)
    {
        return;
    }
    this.mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
}
// Private constructor.
// mediaCapture:      captures audio, video, and images from the camera.
// mediaFrameSource:  the source of camera frames (the color camera here).
// mediaFrameReader:  provides access to frames and raises FrameArrived on new ones.
private FrameGrabber(MediaCapture mediaCapture = null, MediaFrameSource mediaFrameSource = null, MediaFrameReader mediaFrameReader = null)
{
    this.mediaFrameReader = mediaFrameReader;
    this.mediaFrameSource = mediaFrameSource;
    this.mediaCapture = mediaCapture;

    if (this.mediaFrameReader is null)
    {
        return;
    }
    this.mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
}
/// <summary>
/// Retrieves the target format specified in <see cref="CameraParameters"/> from the
/// <see cref="MediaFrameSource">frame source</see>. Falls back to the smallest
/// supported resolution when no format satisfies the parameters.
/// </summary>
/// <returns>The matching format, or the smallest-area supported format (may be null).</returns>
private MediaFrameFormat GetTargetFormat(MediaFrameSource frameSource, CameraParameters parameters)
{
    // First supported format that satisfies the requested parameters wins.
    foreach (MediaFrameFormat candidate in frameSource.SupportedFormats)
    {
        if (CompareFormat(candidate, parameters))
        {
            return candidate;
        }
    }

    _logger.LogWarning("Unable to choose the selected format, use fallback format.");
    return frameSource.SupportedFormats
        .OrderBy(f => f.VideoFormat.Width * f.VideoFormat.Height)
        .FirstOrDefault();
}
// Re-initializes capture on the newly selected frame source group and routes
// its preview source into the existing MediaPlayer.
// async void is acceptable only because this is a top-level event handler.
private async void FrameSourceGroupCombo_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    var selectedGroup = FrameSourceGroupCombo.SelectedItem as MediaFrameSourceGroup;
    if (selectedGroup == null)
    {
        return;
    }
    _cameraHelper.FrameSourceGroup = selectedGroup;
    // Result is awaited (re-init must complete) but its value is not inspected here.
    CameraHelperResult result = await _cameraHelper.InitializeAndStartCaptureAsync();
    MediaFrameSource frameSource = _cameraHelper.PreviewFrameSource;
    _mediaPlayer.Source = MediaSource.CreateFromMediaFrameSource(frameSource);
    MediaPlayerElementControl.SetMediaPlayer(_mediaPlayer);
}
/// <summary>
/// Diagnostic method to print the details of a video frame source.
/// </summary>
private static void PrintFrameSourceInfo(MediaFrameSource frameSource)
{
    var width = frameSource.CurrentFormat.VideoFormat.Width;
    var height = frameSource.CurrentFormat.VideoFormat.Height;
    var fpsNumerator = frameSource.CurrentFormat.FrameRate.Numerator;
    var fpsDenominator = frameSource.CurrentFormat.FrameRate.Denominator;

    // Bug fix: both operands are integral, so "fpsNumerator / fpsDenominator"
    // truncated before the assignment to double (e.g. 30000/1001 printed 29fps
    // instead of 29.97fps). Cast one operand to double for a real fractional rate.
    double fps = fpsNumerator / (double)fpsDenominator;

    string pixFmt = frameSource.CurrentFormat.Subtype;
    string deviceName = frameSource.Info.DeviceInformation.Name;

    Console.WriteLine($"Video capture device {deviceName} successfully initialised: {width}x{height} {fps:0.##}fps pixel format {pixFmt}.");
}
/// <summary>
/// Initializes MediaCapture's frame source with a compatible format, if possible.
/// Throws Exception if no compatible stream(s) available
/// </summary>
/// <returns></returns>
private async Task InitializeMediaFrameSourceAsync()
{
    if (m_mediaCapture == null)
    {
        return;
    }

    // Get preview or record stream as source
    Func<KeyValuePair<string, MediaFrameSource>, MediaStreamType, bool> filterFrameSources = (source, type) =>
    {
        return(source.Value.Info.MediaStreamType == type && source.Value.Info.SourceKind == MediaFrameSourceKind.Color);
    };
    // Prefer the preview stream; fall back to the record stream.
    m_frameSource = m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoPreview)).Value
        ?? m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoRecord)).Value;

    // if no preview stream is available, bail
    if (m_frameSource == null)
    {
        throw new Exception("No preview or record stream available");
    }

    // If we can, let's attempt to change the format set on the source to our preferences
    // (only possible when we hold exclusive control of the device).
    if (m_mediaCaptureInitializationSettings.SharingMode == MediaCaptureSharingMode.ExclusiveControl)
    {
        // Filter MediaType given resolution and framerate preference, and filter out non-compatible subtypes
        // Prefer a BGRA8 format and defer to other supported subtypes if none is supported on the source.
        // NOTE: Numerator / Denominator is integer division, so "> 15" keeps formats of 16fps and above.
        // Ordering picks the pixel count closest to 1920x1080.
        var selectedFormat = m_frameSource.SupportedFormats.Where(format =>
            format.FrameRate.Numerator / format.FrameRate.Denominator > 15
            && string.Compare(format.Subtype, MediaEncodingSubtypes.Bgra8, true) == 0
            )?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (1920 * 1080))).FirstOrDefault();

        if (selectedFormat == null)
        {
            // Fallback subtypes when no BGRA8 format qualifies.
            selectedFormat = m_frameSource.SupportedFormats.Where(format =>
                format.FrameRate.Numerator / format.FrameRate.Denominator > 15
                && (string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0
                    || string.Compare(format.Subtype, MediaEncodingSubtypes.Yuy2, true) == 0
                    || string.Compare(format.Subtype, MediaEncodingSubtypes.Rgb32, true) == 0)
                )?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (1920 * 1080))).FirstOrDefault();
        }
        if (selectedFormat == null)
        {
            throw new Exception("No compatible formats available");
        }
        await m_frameSource.SetFormatAsync(selectedFormat);
    }

    // Publish the active dimensions (the source's current format, whether or not we changed it).
    FrameWidth = m_frameSource.CurrentFormat.VideoFormat.Width;
    FrameHeight = m_frameSource.CurrentFormat.VideoFormat.Height;
}
// Initializes audio capture and creates a buffered MediaFrameReader for mono
// 48 kHz float audio frames; bails out silently on any other format.
// The Snippet markers are consumed by documentation tooling — keep them intact.
// async void is acceptable only because this is invoked as an event-style entry point.
private async void InitAudioFrameReader()
{
    //<SnippetInitAudioFrameSource>
    mediaCapture = new MediaCapture();
    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Audio,
    };
    await mediaCapture.InitializeAsync(settings);
    var audioFrameSources = mediaCapture.FrameSources.Where(x => x.Value.Info.MediaStreamType == MediaStreamType.Audio);
    if (audioFrameSources.Count() == 0)
    {
        Debug.WriteLine("No audio frame source was found.");
        return;
    }
    MediaFrameSource frameSource = audioFrameSources.FirstOrDefault().Value;
    MediaFrameFormat format = frameSource.CurrentFormat;
    if (format.Subtype != MediaEncodingSubtypes.Float)
    {
        return;
    }
    if (format.AudioEncodingProperties.ChannelCount != 1
        || format.AudioEncodingProperties.SampleRate != 48000)
    {
        return;
    }
    //</SnippetInitAudioFrameSource>
    //<SnippetCreateAudioFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
    // Optionally set acquisition mode. Buffered is the default mode for audio.
    mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
    mediaFrameReader.FrameArrived += MediaFrameReader_AudioFrameArrived;
    var status = await mediaFrameReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("The MediaFrameReader couldn't start.");
    }
    //</SnippetCreateAudioFrameReader>
}
// Finds the first color camera source, initializes MediaCapture on its group,
// and starts a BGRA8 frame reader at the source's maximum supported resolution.
// async void: callers cannot await completion — acceptable only if invoked as an
// event-style entry point.
public async void InitializeManager()
{
    _helper = new OpenCVHelper();

    // Find the sources: keep only groups exposing a color source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var sourceGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
    }).Where(g => g.SourceInfo != null).ToList();
    if (sourceGroups.Count == 0)
    {
        // No camera sources found
        return;
    }
    var selectedSource = sourceGroups.FirstOrDefault();

    // Initialize MediaCapture
    try
    {
        await InitializeMediaCaptureAsync(selectedSource.Group);
    }
    catch (Exception exception)
    {
        Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create the frame reader.
    MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
    // Select the HIGHEST-resolution supported format (OrderByDescending on pixel
    // count). The original comment claimed a lower resolution was chosen for
    // performance — that was incorrect; the reader runs at maximum resolution.
    var format = frameSource.SupportedFormats.OrderByDescending(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
    await frameSource.SetFormatAsync(format);
    BitmapSize size = new BitmapSize()
    {
        Height = format.VideoFormat.Height,
        Width = format.VideoFormat.Width
    };
    _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);
    _reader.FrameArrived += HandleFrameArrive;
    await _reader.StartAsync();
}
// Selects and applies a capture format on the color frame source; keeps the
// source's current format when no candidate matches.
// NOTE(review): "Width >= 1080" compares the WIDTH against 1080 — if the intent
// was "at least 1080p", this should probably test Height instead; confirm.
// NOTE(review): the Subtype comparison is case-sensitive (==); subtype strings
// are not guaranteed a particular casing, so matches may be missed — confirm.
private async Task SetCameraFormat()
{
    colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return(format.VideoFormat.Width >= 1080
               && format.Subtype == MediaEncodingSubtypes.Argb32);
    }).FirstOrDefault();
    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }
    await colorFrameSource.SetFormatAsync(preferredFormat);
}
// Applies the first supported format matching the exact width/height and the
// rounded integer frame rate; throws ArgumentException when none exists.
private Task SetFrameType(MediaFrameSource frameSource, int width, int height, int framerate)
{
    var matchingFormats = frameSource.SupportedFormats.Where(candidate =>
        candidate.VideoFormat.Width == width
        && candidate.VideoFormat.Height == height
        && (int)Math.Round((double)candidate.FrameRate.Numerator / candidate.FrameRate.Denominator) == framerate);

    if (!matchingFormats.Any())
    {
        throw new ArgumentException(String.Format("No frame type exists for {0}x{1}@{2}", width, height, framerate));
    }

    return frameSource.SetFormatAsync(matchingFormats.First()).AsTask();
}
// Finds the first color camera source, initializes MediaCapture on its group,
// and starts a BGRA8 frame reader at a reduced resolution for cheaper processing.
public async Task Initialize()
{
    // Find the sources: keep only groups that expose a color source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var candidates = allGroups
        .Select(g => new { Group = g, SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color) })
        .Where(c => c.SourceInfo != null)
        .ToList();
    if (candidates.Count == 0)
    {
        // No camera sources found
        return;
    }
    var chosen = candidates.FirstOrDefault();

    // Initialize MediaCapture on the chosen group; clean up on failure.
    try
    {
        await InitializeMediaCaptureAsync(chosen.Group);
    }
    catch (Exception exception)
    {
        Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
        await Cleanup();
        return;
    }

    // Create the frame reader at a lower resolution to keep processing cheap.
    MediaFrameSource frameSource = _mediaCapture.FrameSources[chosen.SourceInfo.Id];
    BitmapSize size = new BitmapSize()
    {
        Height = IMAGE_ROWS,
        Width = IMAGE_COLS
    };
    _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);
    _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
    await _reader.StartAsync();
}
/// <summary>
/// Initializes MediaCapture in compatible format, if possible.
/// Throws Exception if no compatible stream(s) available.
/// </summary>
/// <returns></returns>
private async Task InitializeMediaFrameSourceAsync()
{
    if (m_mediaCapture == null)
    {
        return;
    }

    // Local predicate: color source of the requested stream type.
    bool IsColorSourceOfType(KeyValuePair<string, MediaFrameSource> entry, MediaStreamType streamType)
    {
        return entry.Value.Info.MediaStreamType == streamType
            && entry.Value.Info.SourceKind == MediaFrameSourceKind.Color;
    }

    // Prefer the preview stream; fall back to the record stream.
    m_frameSource = m_mediaCapture.FrameSources.FirstOrDefault(entry => IsColorSourceOfType(entry, MediaStreamType.VideoPreview)).Value
        ?? m_mediaCapture.FrameSources.FirstOrDefault(entry => IsColorSourceOfType(entry, MediaStreamType.VideoRecord)).Value;
    if (m_frameSource == null)
    {
        throw new Exception("No preview or record streams available");
    }

    // Consumable subtypes, compared case-insensitively as before.
    bool IsSupportedSubtype(string subtype)
    {
        return string.Compare(subtype, MediaEncodingSubtypes.Nv12, true) == 0
            || string.Compare(subtype, MediaEncodingSubtypes.Bgra8, true) == 0
            || string.Compare(subtype, MediaEncodingSubtypes.Yuy2, true) == 0
            || string.Compare(subtype, MediaEncodingSubtypes.Rgb32, true) == 0;
    }

    // Keep formats above 15fps (integer division, as before) with a compatible
    // subtype, then pick the one whose pixel count is closest to 1920x1080.
    MediaFrameFormat bestFormat = m_frameSource.SupportedFormats
        .Where(f => f.FrameRate.Numerator / f.FrameRate.Denominator > 15 && IsSupportedSubtype(f.Subtype))
        .OrderBy(f => Math.Abs((int)(f.VideoFormat.Width * f.VideoFormat.Height) - (1920 * 1080)))
        .FirstOrDefault();
    if (bestFormat == null)
    {
        throw new Exception("No compatible formats available");
    }

    await m_frameSource.SetFormatAsync(bestFormat);
    FrameWidth = m_frameSource.CurrentFormat.VideoFormat.Width;
    FrameHeight = m_frameSource.CurrentFormat.VideoFormat.Height;
}
// Scans all frame sources for the first format matching the desired
// width/height/framerate. Outputs the source/format pair and returns true on a
// match; outputs nulls and returns false otherwise.
private static bool GetBestSourceAndFormat(
    MediaCapture mediaCapture,
    out MediaFrameSource frameSource,
    out MediaFrameFormat frameFormat)
{
    foreach (var candidateSource in mediaCapture.FrameSources.Values)
    {
        MediaFrameFormat match = candidateSource.SupportedFormats.FirstOrDefault(f =>
            f.VideoFormat.Width == DesiredWidth
            && f.VideoFormat.Height == DesiredHeight
            && f.FrameRate.Numerator == DesiredFramerate);
        if (match != null)
        {
            frameSource = candidateSource;
            frameFormat = match;
            return true;
        }
    }

    frameSource = null;
    frameFormat = null;
    return false;
}
/// <summary>
/// Updates the current frame source to the one corresponding to the user's selection.
/// </summary>
private void UpdateFrameSource()
{
    var info = SourceComboBox.SelectedItem as FrameSourceInfoModel;

    // No capture device or no usable selection: clear the current source.
    if (_mediaCapture == null || info == null || info.SourceGroup == null)
    {
        _source = null;
        return;
    }

    // The selected source must belong to the currently selected group;
    // otherwise reset the source selection and bail out.
    var groupModel = GroupComboBox.SelectedItem as FrameSourceGroupModel;
    if (groupModel == null || groupModel.Id != info.SourceGroup.Id)
    {
        SourceComboBox.SelectedItem = null;
        return;
    }

    // Only look the source up again when the selection actually changed.
    if (_source == null || _source.Info.Id != info.SourceInfo.Id)
    {
        _mediaCapture.FrameSources.TryGetValue(info.SourceInfo.Id, out _source);
    }
}