private void Setup(string ip)
{
    if (this.requestHandlers == null)
    {
        this.requestHandlers = new Dictionary<string, Func<byte[], bool>>()
        {
            { "/kinect/request/image", HandleRequestImage },
            { "/kinect/request/centers", HandleRequestImageCenters }
        };
    }

    if (this.client == null)
    {
        this.client = new MqttClient(ip);
        this.client.ProtocolVersion = MqttProtocolVersion.Version_3_1;
        this.client.MqttMsgPublishReceived += this.onMqttReceive;
        // Connect before subscribing; M2Mqtt rejects Subscribe on a client that is not yet connected.
        this.client.Connect(Guid.NewGuid().ToString());
        this.client.Subscribe(this.requestHandlers.Keys.ToArray(),
            Enumerable.Repeat(MqttMsgBase.QOS_LEVEL_AT_LEAST_ONCE, this.requestHandlers.Count).ToArray());
    }

    if (this.frames == null)
    {
        this.frames = new Dictionary<MediaFrameSourceKind, MediaFrameReference>()
        {
            { MediaFrameSourceKind.Color, null },
            { MediaFrameSourceKind.Depth, null }
        };
    }

    if (this.mediaCapture == null)
    {
        // Select a device that exposes both color and depth streams.
        var cameras = Task.Run(async () => await MediaFrameSourceGroup.FindAllAsync());
        var eligible = cameras.Result.Select(c => new
        {
            Group = c,
            SourceInfos = new MediaFrameSourceInfo[]
            {
                c.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
                c.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth)
            }
        }).Where(c => c.SourceInfos[0] != null && c.SourceInfos[1] != null).ToList();
        if (eligible.Count == 0)
        {
            return;
        }
        var selected = eligible[0];

        // Open the device.
        this.mediaCapture = new MediaCapture();
        Task.Run(async () =>
        {
            await this.mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
            {
                SourceGroup = selected.Group,
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            });
        }).Wait();

        // Set stream callbacks.
        for (int i = 0; i < selected.SourceInfos.Length; ++i)
        {
            MediaFrameSourceInfo info = selected.SourceInfos[i];
            MediaFrameSource frameSource = null;
            if (this.mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
            {
                var frameReader = Task.Run(async () => await this.mediaCapture.CreateFrameReaderAsync(frameSource));
                frameReader.Result.FrameArrived += FrameReader_FrameArrived;
                var status = Task.Run(async () => await frameReader.Result.StartAsync());
                if (status.Result != MediaFrameReaderStartStatus.Success)
                {
                    return;
                }
            }
        }
    }

#if PRINT_STATUS_MESSAGE
    this.appClock.Start();
#endif
}
public async Task<string> Capture()
{
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Infrared),
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        System.Diagnostics.Debug.WriteLine("No source group with color, depth or infrared found.");
        return "No source group with color, depth or infrared found.";
    }

    var selectedGroupIndex = 0; // Select the first eligible group
    MediaFrameSourceGroup selectedGroup = eligibleGroups[selectedGroupIndex].Group;
    // The indexes must match the order of the SourceInfos array above: color, depth, infrared.
    MediaFrameSourceInfo colorSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
    MediaFrameSourceInfo depthSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[1];
    MediaFrameSourceInfo infraredSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[2];

    var mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return "MediaCapture initialization failed: " + ex.Message;
    }

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    // Use FirstOrDefault so the null check below can actually trigger; indexing an empty list would throw instead.
    var preferredFormat = colorFrameSource.SupportedFormats.FirstOrDefault();
    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return "Our desired format is not supported";
    }
    System.Diagnostics.Debug.WriteLine((int)preferredFormat.FrameRate.Numerator / preferredFormat.FrameRate.Denominator);

    await colorFrameSource.SetFormatAsync(preferredFormat);

    MediaFrameReader mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
    return "Success";
}
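// Note: Capture() above registers a ColorFrameReader_FrameArrived handler that is not shown.
// A minimal sketch of such a handler, assuming frames are consumed as SoftwareBitmaps
// (the handler body and the ProcessBitmap callee are illustrative, not from the original):
private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // TryAcquireLatestFrame can return null if no new frame is available.
    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        SoftwareBitmap bitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (bitmap != null)
        {
            ProcessBitmap(bitmap); // hypothetical consumer
        }
    }
}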
/// <summary>
/// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
/// </summary>
/// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
private async Task<bool> StartWebcamStreaming()
{
    bool successful = true;
    try
    {
        this.mediaCapture = new MediaCapture();

        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
        MediaFrameSourceGroup selectedGroup = null;
        MediaFrameSourceInfo colorSourceInfo = null;

        foreach (var sourceGroup in frameSourceGroups)
        {
            foreach (var sourceInfo in sourceGroup.SourceInfos)
            {
                if ((sourceInfo.MediaStreamType == MediaStreamType.VideoPreview ||
                     sourceInfo.MediaStreamType == MediaStreamType.VideoRecord) &&
                    sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                {
                    colorSourceInfo = sourceInfo;
                    break;
                }
            }
            if (colorSourceInfo != null)
            {
                selectedGroup = sourceGroup;
                break;
            }
        }

        if (colorSourceInfo != null)
        {
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup = selectedGroup,
                SharingMode = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };
            await mediaCapture.InitializeAsync(settings);
            this.mediaCapture.Failed += this.MediaCapture_CameraStreamFailed;

            var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
            var preferredFormat = colorFrameSource.SupportedFormats
                .OrderByDescending(x => x.VideoFormat.Width)
                .FirstOrDefault(x => x.VideoFormat.Width <= 1920 &&
                    x.Subtype.Equals(MediaEncodingSubtypes.Nv12, StringComparison.OrdinalIgnoreCase));
            // Only switch formats if an NV12 format at or below 1920 wide was actually found;
            // SetFormatAsync(null) would throw.
            if (preferredFormat != null)
            {
                await colorFrameSource.SetFormatAsync(preferredFormat);
            }

            this.mediaFrameReader = await this.mediaCapture.CreateFrameReaderAsync(colorFrameSource);
            this.mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
            await this.mediaFrameReader.StartAsync();
        }

        // Cache the media properties as we'll need them later.
        var deviceController = this.mediaCapture.VideoDeviceController;
        this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

        // Immediately start streaming to our CaptureElement UI.
        // NOTE: CaptureElement's Source must be set before streaming is started.
        this.CamPreview.Source = this.mediaCapture;
        this.CamPreview.FlowDirection = FlowDirection.RightToLeft;
        this.VisualizationCanvas.FlowDirection = FlowDirection.RightToLeft;

        await this.mediaCapture.StartPreviewAsync();
    }
    catch (System.UnauthorizedAccessException)
    {
        // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
        await LogError("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.");
        successful = false;
    }
    catch (Exception ex)
    {
        await LogError(ex.ToString());
        successful = false;
    }
    return successful;
}
//Event setup for interaction
//Code using the UWP API. It works when compiled and exported, but it won't compile in Unity without the WINDOWS_UWP define.
#region
#if WINDOWS_UWP
async void LaunchScanSequence()
{
    print("Start initialize, preparing cap");

    //MediaGroup Selection
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                colorSourceInfo = sourceInfo;
                break;
            }
        }
        if (colorSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    //MediaCapture Init
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    print("Mid initialize, preparing cap");

    //Find the supported format with the largest pixel count and select it
    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    IEnumerable<MediaFrameFormat> supportedFormats = colorFrameSource.SupportedFormats;
    MediaFrameFormat x = null;
    int a = 0;
    foreach (MediaFrameFormat r in supportedFormats)
    {
        if (r.VideoFormat.Height * r.VideoFormat.Width > a)
        {
            x = r;
            a = (int)(r.VideoFormat.Height * r.VideoFormat.Width);
            resH = (int)r.VideoFormat.Height;
            resW = (int)r.VideoFormat.Width;
        }
    }
    if (x == null)
    {
        print("error on format"); // Our desired format is not supported
        return;
    }

    //finalize data structure for the scanner
    print("FrameFormat done, resolution is " + a + " pixels, " + resH + "x" + resW);
    /* halfresW = (int)Mathf.Floor(resW / 2);
     * halfresH = (int)Mathf.Floor(resH / 2);
     * for(int i = 0; i < 4; i++)
     * {
     *     subImages[i] = new byte[((halfresW*4)*(halfresH*4))+2];
     *     for(int y = 0; y < subImages[i].Length; y++)
     *     {
     *         subImages[i][y] = 0;
     *     }
     * }*/
    // ARGB32 uses 4 bytes per pixel, so width * height * 4 bytes holds one frame
    // (the original allocated a * 32, which over-allocates by treating bits as bytes).
    buffer = new Windows.Storage.Streams.Buffer((uint)a * 4);
    await colorFrameSource.SetFormatAsync(x);
    print("End initialize, preparing cap");

    //Start capture
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
}
/// <summary>
/// Switches to the next camera source and starts reading frames.
/// </summary>
private async Task BeginDepthFrameStreaming()
{
    await CleanupMediaCaptureAsync();

    // Identify the color, depth, and infrared sources of each group,
    // and keep only the groups that have a depth source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For the Move panel we only care about the Depth source feed
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        Logger.GetInstance().LogLine("No source group with a depth source found.");
        return;
    }

    // It does not matter which one we use so we'll just use the first.
    var selected = eligibleGroups.First();
    Logger.GetInstance().LogLine($"Found {eligibleGroups.Count} groups and selecting the first: {selected.Group.DisplayName}");

    try
    {
        // Initialize MediaCapture with selected group.
        // This can raise an exception if the source no longer exists,
        // or if the source could not be initialized.
        await InitializeMediaCaptureAsync(selected.Group);
    }
    catch (Exception exception)
    {
        Logger.GetInstance().LogLine($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    for (int i = 0; i < selected.SourceInfos.Length; i++)
    {
        MediaFrameSourceInfo info = selected.SourceInfos[i];
        if (info != null)
        {
            // Access the initialized frame source by looking up the ID of the source found above.
            // Verify that the Id is present, because it may have left the group while we were
            // busy deciding which group to use.
            MediaFrameSource frameSource = null;
            if (_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
            {
                frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource);
                frameReader.FrameArrived += FrameReader_FrameArrived;

                MediaFrameReaderStartStatus status = await frameReader.StartAsync();
                if (status != MediaFrameReaderStartStatus.Success)
                {
                    Logger.GetInstance().LogLine("Unable to start the MediaFrameReader frameReader.");
                }
            }
        }
    }
}
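// Note: the depth frames land in FrameReader_FrameArrived, which is not shown above.
// A minimal sketch of a depth handler, assuming Gray16 depth frames
// (the body and the ProcessDepth callee are illustrative, not from the original):
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        VideoMediaFrame videoFrame = frame?.VideoMediaFrame;
        if (videoFrame?.SoftwareBitmap == null) { return; }

        // Raw depth values are scaled units; DepthScaleInMeters converts them to meters.
        double scale = videoFrame.DepthMediaFrame?.DepthFormat?.DepthScaleInMeters ?? 0.001;
        ProcessDepth(videoFrame.SoftwareBitmap, scale); // hypothetical consumer
    }
}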
public async Task StartAsync(string Name, bool UseGpu = false)
{
    var frameSourceGroups = await AsAsync(MediaFrameSourceGroup.FindAllAsync());
    var selectedGroup = frameSourceGroups.Where(x => x.DisplayName.Contains(Name)).FirstOrDefault();
    if (null == selectedGroup)
    {
        throw new ApplicationException($"Unable to find frame source named {Name}");
    }

    var colorSourceInfo = selectedGroup.SourceInfos
        .Where(x => x.MediaStreamType == MediaStreamType.VideoRecord && x.SourceKind == MediaFrameSourceKind.Color)
        .FirstOrDefault();
    if (null == colorSourceInfo)
    {
        throw new ApplicationException($"Unable to find color video recording source on {Name}");
    }

    mediaCapture = new MediaCapture();
    if (null == mediaCapture)
    {
        throw new ApplicationException($"Unable to create new mediacapture");
    }

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = UseGpu ? MediaCaptureMemoryPreference.Auto : MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await AsAsync(mediaCapture.InitializeAsync(settings));
    }
    catch (Exception ex)
    {
        throw new ApplicationException("MediaCapture initialization failed: " + ex.Message, ex);
    }

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats
        .Where(format => format.VideoFormat.Width >= 1080 && format.Subtype == "NV12")
        .FirstOrDefault();
    if (null == preferredFormat)
    {
        throw new ApplicationException("Our desired format is not supported");
    }
    await AsAsync(colorFrameSource.SetFormatAsync(preferredFormat));

    mediaFrameReader = await AsAsync(mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32));
    if (null == mediaFrameReader)
    {
        throw new ApplicationException($"Unable to create new mediaframereader");
    }

    evtFrame = new EventWaitHandle(false, EventResetMode.ManualReset);
    mediaFrameReader.FrameArrived += (s, a) => evtFrame.Set();
    await AsAsync(mediaFrameReader.StartAsync());
    Log.WriteLineVerbose("FrameReader Started");
}
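// Note: the FrameArrived handler above only signals evtFrame; a separate consumer is expected
// to wait on that handle and pull frames itself. A minimal sketch of such a consumer, assuming
// the mediaFrameReader and evtFrame fields set above (the loop condition is a placeholder):
while (keepRunning) // hypothetical loop condition
{
    evtFrame.WaitOne();
    evtFrame.Reset();
    using (MediaFrameReference frame = mediaFrameReader.TryAcquireLatestFrame())
    {
        SoftwareBitmap bitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (bitmap != null)
        {
            // hand the ARGB32 bitmap to whatever consumes it
        }
    }
}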
public static async Task<FrameGrabber> CreateAsync(IFrameGrabberDataSource datasource)
{
    MediaCapture mediaCapture = null;
    MediaFrameReader mediaFrameReader = null;
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;

    // Pick first color source
    var groups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (MediaFrameSourceGroup sourceGroup in groups)
    {
        foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (selectedSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    // No valid camera was found. This will happen on the emulator.
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        Debug.WriteLine("Failed to find Group and SourceInfo");
        return new FrameGrabber();
    }

    // Create settings
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = selectedGroup,
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    };

    // Create and initialize the capture device
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception e)
    {
        Debug.WriteLine($"Failed to initialize MediaCapture: {e.ToString()}");
        return new FrameGrabber();
    }

    MediaFrameSource selectedSource = mediaCapture.FrameSources[selectedSourceInfo.Id];

    // Create new frame reader
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(selectedSource);
    MediaFrameReaderStartStatus status = await mediaFrameReader.StartAsync();

    if (status == MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("MediaFrameReaderStartStatus == Success");
        return new FrameGrabber(datasource, mediaCapture, selectedSource, mediaFrameReader);
    }
    else
    {
        Debug.WriteLine($"MediaFrameReaderStartStatus != Success; {status}");
        return new FrameGrabber();
    }
}
private async void MediaSourceFromFrameSource_Click(object sender, RoutedEventArgs e)
{
    //<SnippetMediaSourceSelectGroup>
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Front &&
                info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Back &&
                info.SourceKind == MediaFrameSourceKind.Color)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        System.Diagnostics.Debug.WriteLine("No source group with front and back-facing camera found.");
        return;
    }

    var selectedGroupIndex = 0; // Select the first eligible group
    MediaFrameSourceGroup selectedGroup = eligibleGroups[selectedGroupIndex].Group;
    // Index into the filtered SourceInfos array built above; the group's raw SourceInfos list
    // is not guaranteed to put the front color source first.
    MediaFrameSourceInfo frontSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
    MediaFrameSourceInfo backSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[1];
    //</SnippetMediaSourceSelectGroup>

    //<SnippetMediaSourceInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    //</SnippetMediaSourceInitMediaCapture>

    //<SnippetMediaSourceMediaPlayer>
    var frameMediaSource1 = MediaSource.CreateFromMediaFrameSource(mediaCapture.FrameSources[frontSourceInfo.Id]);
    mediaPlayerElement1.SetMediaPlayer(new Windows.Media.Playback.MediaPlayer());
    mediaPlayerElement1.MediaPlayer.Source = frameMediaSource1;
    mediaPlayerElement1.AutoPlay = true;

    var frameMediaSource2 = MediaSource.CreateFromMediaFrameSource(mediaCapture.FrameSources[backSourceInfo.Id]);
    mediaPlayerElement2.SetMediaPlayer(new Windows.Media.Playback.MediaPlayer());
    mediaPlayerElement2.MediaPlayer.Source = frameMediaSource2;
    mediaPlayerElement2.AutoPlay = true;
    //</SnippetMediaSourceMediaPlayer>
}
private async void ActionButton_Click(object sender, RoutedEventArgs e)
{
    //<SnippetImageElementSource>
    imageElement.Source = new SoftwareBitmapSource();
    //</SnippetImageElementSource>

    //<SnippetFindAllAsync>
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    //</SnippetFindAllAsync>

    // Color, infrared, and depth
    //<SnippetSelectColor>
    var selectedGroupObjects = frameSourceGroups.Select(group => new
    {
        sourceGroup = group,
        colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
        {
            // On XBox/Kinect, omit the MediaStreamType and EnclosureLocation tests
            return sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                sourceInfo.SourceKind == MediaFrameSourceKind.Color &&
                sourceInfo.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Front;
        })
    }).Where(t => t.colorSourceInfo != null)
      .FirstOrDefault();

    MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup;
    MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo;
    if (selectedGroup == null)
    {
        return;
    }
    //</SnippetSelectColor>

    //<SnippetInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    //</SnippetInitMediaCapture>

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width == 1920;
    }).FirstOrDefault();

    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }
    await colorFrameSource.SetFormatAsync(preferredFormat);

    //<SnippetCreateFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
    //</SnippetCreateFrameReader>
}
private async void ActionButton2_Click(object sender, RoutedEventArgs e)
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    // Color, infrared, and depth
    var selectedGroupObjects = frameSourceGroups.Select(group => new
    {
        sourceGroup = group,
        colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
        {
            return sourceInfo.SourceKind == MediaFrameSourceKind.Color;
        })
    }).Where(t => t.colorSourceInfo != null)
      .FirstOrDefault();

    MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup;
    MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo;
    if (selectedGroup == null)
    {
        return;
    }

    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width == 1920;
    }).FirstOrDefault();

    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }
    await colorFrameSource.SetFormatAsync(preferredFormat);

    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived_FrameRenderer;
    _frameRenderer = new FrameRenderer(imageElement);
    await mediaFrameReader.StartAsync();
}
//</SnippetMultiFrameDeclarations>

private async void InitMultiFrame()
{
    //<SnippetSelectColorAndDepth>
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        System.Diagnostics.Debug.WriteLine("No source group with color and depth found.");
        return;
    }

    var selectedGroupIndex = 0; // Select the first eligible group
    MediaFrameSourceGroup selectedGroup = eligibleGroups[selectedGroupIndex].Group;
    MediaFrameSourceInfo colorSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
    MediaFrameSourceInfo depthSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[1];
    //</SnippetSelectColorAndDepth>

    //<SnippetMultiFrameInitMediaCapture>
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    await mediaCapture.InitializeAsync(settings);
    //</SnippetMultiFrameInitMediaCapture>

    //<SnippetGetColorAndDepthSource>
    MediaFrameSource colorSource =
        mediaCapture.FrameSources.Values.FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Color);
    MediaFrameSource depthSource =
        mediaCapture.FrameSources.Values.FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

    if (colorSource == null || depthSource == null)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture doesn't have the Color and Depth streams");
        return;
    }

    _colorSourceId = colorSource.Info.Id;
    _depthSourceId = depthSource.Info.Id;
    //</SnippetGetColorAndDepthSource>

    //<SnippetInitMultiFrameReader>
    _multiFrameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(new[] { colorSource, depthSource });
    _multiFrameReader.FrameArrived += MultiFrameReader_FrameArrived;
    _frameRenderer = new FrameRenderer(imageElement);

    MultiSourceMediaFrameReaderStartStatus startStatus = await _multiFrameReader.StartAsync();
    if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
    {
        throw new InvalidOperationException("Unable to start reader: " + startStatus);
    }

    this.CorrelationFailed += MainPage_CorrelationFailed;
    Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
    //</SnippetInitMultiFrameReader>
}
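// Note: the multi-source handler is not shown above. A minimal sketch of
// MultiFrameReader_FrameArrived, assuming frames are looked up by the
// _colorSourceId/_depthSourceId captured during init (the body illustrates the API shape,
// it is not the original handler):
private void MultiFrameReader_FrameArrived(MultiSourceMediaFrameReader sender, MultiSourceMediaFrameArrivedEventArgs args)
{
    // A muxed frame holds one time-correlated frame per registered source.
    using (MultiSourceMediaFrameReference muxedFrame = sender.TryAcquireLatestFrame())
    using (MediaFrameReference colorFrame = muxedFrame?.TryGetFrameReferenceBySourceId(_colorSourceId))
    using (MediaFrameReference depthFrame = muxedFrame?.TryGetFrameReferenceBySourceId(_depthSourceId))
    {
        if (colorFrame == null || depthFrame == null) { return; }
        // hand both frames to the renderer / correlation logic
    }
}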
private async void MultiRecord_Click(object sender, RoutedEventArgs e)
{
    //<SnippetMultiRecordFindSensorGroups>
    var sensorGroups = await MediaFrameSourceGroup.FindAllAsync();

    var foundGroup = sensorGroups.Select(g => new
    {
        group = g,
        color1 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color &&
            info.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Front).FirstOrDefault(),
        color2 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color &&
            info.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Back).FirstOrDefault()
    }).Where(g => g.color1 != null && g.color2 != null).FirstOrDefault();

    if (foundGroup == null)
    {
        Debug.WriteLine("No groups found.");
        return;
    }
    //</SnippetMultiRecordFindSensorGroups>

    //<SnippetMultiRecordInitMediaCapture>
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = foundGroup.group
    };
    mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);
    //</SnippetMultiRecordInitMediaCapture>

    //<SnippetMultiRecordMediaEncodingProfile>
    var profile = new MediaEncodingProfile();
    profile.Container = new ContainerEncodingProperties();
    profile.Container.Subtype = MediaEncodingSubtypes.Mpeg4;

    List<VideoStreamDescriptor> streams = new List<VideoStreamDescriptor>();

    var encodeProps = VideoEncodingProperties.CreateH264();
    encodeProps.Subtype = MediaEncodingSubtypes.H264;
    var stream1Desc = new VideoStreamDescriptor(encodeProps);
    stream1Desc.Label = foundGroup.color1.Id;
    streams.Add(stream1Desc);

    var encodeProps2 = VideoEncodingProperties.CreateH264();
    encodeProps2.Subtype = MediaEncodingSubtypes.H264;
    var stream2Desc = new VideoStreamDescriptor(encodeProps2);
    stream2Desc.Label = foundGroup.color2.Id;
    streams.Add(stream2Desc);

    profile.SetVideoTracks(streams);
    profile.Audio = null;
    //</SnippetMultiRecordMediaEncodingProfile>

    Debug.WriteLine("started");

    //<SnippetMultiRecordToFile>
    var recordFile = await Windows.Storage.KnownFolders.CameraRoll.CreateFileAsync("record.mp4",
        Windows.Storage.CreationCollisionOption.GenerateUniqueName);
    await mediaCapture.StartRecordToStorageFileAsync(profile, recordFile);

    await Task.Delay(8000);

    await mediaCapture.StopRecordAsync();
    //</SnippetMultiRecordToFile>

    Debug.WriteLine("done");
}
public static async Task InitCamera(int usedThreads, double videoQuality, string videoSubtype, string videoResolution)
{
    if (!videoResolution.Contains('x'))
    {
        throw new ArgumentException("Resolution is not in valid format.");
    }

    await Task.Run(async () =>
    {
        try
        {
            _threadsCount = usedThreads;
            _stoppedThreads = usedThreads;

            imageQuality = new BitmapPropertySet();
            var imageQualityValue = new BitmapTypedValue(videoQuality, Windows.Foundation.PropertyType.Single);
            imageQuality.Add("ImageQuality", imageQualityValue);

            mediaCapture = new MediaCapture();
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var settings = new MediaCaptureInitializationSettings()
            {
                SharingMode = MediaCaptureSharingMode.ExclusiveControl,
                //With CPU the results always contain SoftwareBitmaps; with GPU
                //they prefer D3DSurface
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                //Capture only video, no audio
                StreamingCaptureMode = StreamingCaptureMode.Video
            };
            await mediaCapture.InitializeAsync(settings);

            var mediaFrameSource = mediaCapture.FrameSources.First().Value;
            var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;
            videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
            videoDeviceController.PrimaryUse = Windows.Media.Devices.CaptureUse.Video;

            //Set exposure (auto light adjustment)
            if (mediaCapture.VideoDeviceController.Exposure.Capabilities.Supported
                && mediaCapture.VideoDeviceController.Exposure.Capabilities.AutoModeSupported)
            {
                mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
            }

            var videoResolutionWidth = uint.Parse(videoResolution.Split('x').FirstOrDefault());
            var videoResolutionHeight = uint.Parse(videoResolution.Split('x').LastOrDefault());
            var videoSubType = videoSubtype;

            //Set resolution, frame rate and video subtype
            var videoFormat = mediaFrameSource.SupportedFormats.Where(sf => sf.VideoFormat.Width == videoResolutionWidth
                    && sf.VideoFormat.Height == videoResolutionHeight
                    && sf.Subtype == videoSubType)
                .OrderByDescending(m => m.FrameRate.Numerator / m.FrameRate.Denominator)
                .First();
            await mediaFrameSource.SetFormatAsync(videoFormat);

            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
            await mediaFrameReader.StartAsync();
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"Error during camera initialization: {ex.Message}");
        }
    });
}
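// Note: the imageQuality property set built above is the standard way to pass an
// "ImageQuality" hint to the JPEG encoder. A sketch of how a captured frame might later be
// compressed with it, assuming frames arrive as SoftwareBitmaps; the method itself is
// illustrative, not part of the original. Requires Windows.Graphics.Imaging,
// Windows.Storage.Streams and System.Runtime.InteropServices.WindowsRuntime (for AsBuffer).
private static async Task<byte[]> EncodeJpegAsync(SoftwareBitmap bitmap)
{
    using (var stream = new InMemoryRandomAccessStream())
    {
        // The BitmapPropertySet with "ImageQuality" tunes the JPEG compression level.
        var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream, imageQuality);
        encoder.SetSoftwareBitmap(bitmap);
        await encoder.FlushAsync();

        // Rewind and copy the encoded bytes out of the stream.
        stream.Seek(0);
        var bytes = new byte[stream.Size];
        await stream.ReadAsync(bytes.AsBuffer(), (uint)stream.Size, InputStreamOptions.None);
        return bytes;
    }
}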
private async Task SelectExclusiveSourceGroupAsync()
{
#if USE_INFRARED
    var devices = await DeviceInformation.FindAllAsync(MediaFrameSourceGroup.GetDeviceSelector());
    foreach (var deviceInformation in devices)
    {
        var sourceGroup = await MediaFrameSourceGroup.FromIdAsync(deviceInformation.Id);
        var current = new DefaultDictionary<MediaFrameSourceKind, Dictionary<MediaStreamType, MediaFrameSourceInfo>>();
        ulong currentIrCapability = 0u;

        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            var originSourceGroupId = sourceInfo.Id.Split('@').ElementAtOrDefault(1) ?? sourceInfo.Id;
            EnclosureLocations[originSourceGroupId] = EnclosureLocations[originSourceGroupId] ?? deviceInformation.EnclosureLocation;
            if (sourceInfo.MediaStreamType != MediaStreamType.VideoPreview &&
                sourceInfo.MediaStreamType != MediaStreamType.VideoRecord)
            {
                continue;
            }
            switch (sourceInfo.SourceKind)
            {
                case MediaFrameSourceKind.Color:
                    current[sourceInfo.SourceKind][sourceInfo.MediaStreamType] = sourceInfo;
                    break;
                case MediaFrameSourceKind.Infrared:
                    if (sourceInfo.Properties.TryGetValue(MF._DEVICESTREAM_ATTRIBUTE_FACEAUTH_CAPABILITY, out var capability))
                    {
                        if (capability is ulong ulCapability &&
                            (ulCapability & (KS.CAMERA_EXTENDEDPROP_FACEAUTH_MODE_ALTERNATIVE_FRAME_ILLUMINATION |
                                             KS.CAMERA_EXTENDEDPROP_FACEAUTH_MODE_BACKGROUND_SUBTRACTION)) != 0)
                        {
                            currentIrCapability = ulCapability;
                            current[sourceInfo.SourceKind][sourceInfo.MediaStreamType] = sourceInfo;
                        }
                    }
                    break;
            }
        }

        if (current[MediaFrameSourceKind.Infrared].Any())
        {
            var score = GetSourceGroupScore(current);
            if (score <= ExclusiveSourceGroupScore)
            {
                continue;
            }
            var preferredMediaStreamType = current[MediaFrameSourceKind.Infrared].Keys
                .Intersect(current[MediaFrameSourceKind.Color].Keys)
                .DefaultIfEmpty(MediaStreamType.VideoPreview)
                .OrderByDescending(mediaStreamType => mediaStreamType == MediaStreamType.VideoPreview)
                .First();
            ExclusiveIrSourceInfo = current[MediaFrameSourceKind.Infrared]
                .OrderByDescending(kvp => kvp.Key == preferredMediaStreamType).First().Value;
            ExclusiveRgbSourceInfo = current[MediaFrameSourceKind.Color]
                .OrderByDescending(kvp => kvp.Key == ExclusiveIrSourceInfo.MediaStreamType)
                .Select(kvp => kvp.Value).FirstOrDefault();
            ExclusiveSourceGroup = sourceGroup;
            ExclusiveIrCapability = currentIrCapability;
            ExclusiveSourceGroupScore = score;
            if ((ExclusiveRgbSourceInfo != null) && deviceInformation.EnclosureLocation != null)
            {
                break;
            }
        }
    }

    if (ExclusiveIrSourceInfo != null)
    {
        EnclosureLocations.TryGetValue(ExclusiveIrSourceInfo.Id.Split('@').ElementAtOrDefault(1) ?? ExclusiveIrSourceInfo.Id, out InfraredEnclosureLocation);
    }
    if (ExclusiveRgbSourceInfo != null)
    {
        EnclosureLocations.TryGetValue(ExclusiveRgbSourceInfo.Id.Split('@').ElementAtOrDefault(1) ?? ExclusiveRgbSourceInfo.Id, out ColorEnclosureLocation);
    }
#else
    var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (sourceGroups.Any() && sourceGroups.First() is MediaFrameSourceGroup sourceGroup)
    {
        ExclusiveRgbSourceInfo = sourceGroup.SourceInfos.OrderByDescending(si => si.MediaStreamType == MediaStreamType.VideoPreview).First();
        ExclusiveSourceGroup = sourceGroup;
    }
#endif

    if (ExclusiveRgbSourceInfo == null)
    {
        UseFallbackSourceGroup = true;
    }
}
/// <summary>
/// Switches to the next camera source and starts reading frames.
/// </summary>
private async Task PickNextMediaSourceWorkerAsync()
{
    await CleanupMediaCaptureAsync();

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        _logger.Log("No source groups found.");
        return;
    }

    // Pick next group in the array after each time the Next button is clicked.
    _groupSelectionIndex = (_groupSelectionIndex + 1) % allGroups.Count;
    // Use the rotating index rather than a hard-coded one, which would throw on single-camera devices.
    var selectedGroup = allGroups[_groupSelectionIndex];

    // _logger.Log($"Found {allGroups.Count} groups and selecting index [{_groupSelectionIndex}]: {selectedGroup.DisplayName}");

    try
    {
        // Initialize MediaCapture with selected group.
        // This can raise an exception if the source no longer exists,
        // or if the source could not be initialized.
        await InitializeMediaCaptureAsync(selectedGroup);
    }
    catch (Exception exception)
    {
        _logger.Log($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    var startedKinds = new HashSet<MediaFrameSourceKind>();
    foreach (MediaFrameSource source in _mediaCapture.FrameSources.Values)
    {
        MediaFrameSourceKind kind = source.Info.SourceKind;

        // Ignore this source if we already have a source of this kind.
        if (startedKinds.Contains(kind))
        {
            continue;
        }

        // Look for a format which the FrameRenderer can render.
        string requestedSubtype = null;
        foreach (MediaFrameFormat format in source.SupportedFormats)
        {
            requestedSubtype = FrameRenderer.GetSubtypeForFrameReader(kind, format);
            if (requestedSubtype != null)
            {
                // Tell the source to use the format we can render.
                await source.SetFormatAsync(format);
                break;
            }
        }
        if (requestedSubtype == null)
        {
            // No acceptable format was found. Ignore this source.
            continue;
        }

        MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
        frameReader.FrameArrived += FrameReader_FrameArrived;
        _sourceReaders.Add(frameReader);

        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status == MediaFrameReaderStartStatus.Success)
        {
            // _logger.Log($"Started {kind} reader.");
            startedKinds.Add(kind);
        }
        else
        {
            _logger.Log($"Unable to start {kind} reader. Error: {status}");
        }
    }

    if (startedKinds.Count == 0)
    {
        _logger.Log($"No eligible sources in {selectedGroup.DisplayName}.");
    }
}
public async Task Initialize(VideoSetting videoSetting)
{
    await CoreApplication.MainView.CoreWindow.Dispatcher.RunAndAwaitAsync(CoreDispatcherPriority.Normal, async () =>
    {
        _threadsCount = videoSetting.UsedThreads;
        _stoppedThreads = videoSetting.UsedThreads;

        _lastFrameAdded.Start();

        _imageQuality = new BitmapPropertySet();
        var imageQualityValue = new BitmapTypedValue(videoSetting.VideoQuality, Windows.Foundation.PropertyType.Single);
        _imageQuality.Add("ImageQuality", imageQualityValue);

        _mediaCapture = new MediaCapture();

        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
        var sourceGroups = frameSourceGroups.Select(g => new
        {
            Group = g,
            SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
        }).Where(g => g.SourceInfo != null).ToList();

        var settings = new MediaCaptureInitializationSettings()
        {
            SourceGroup = sourceGroups?.FirstOrDefault()?.Group,
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            //With CPU the results always contain SoftwareBitmaps; with GPU
            //they prefer D3DSurface
            MemoryPreference = MediaCaptureMemoryPreference.Cpu,
            //Capture only video, no audio
            StreamingCaptureMode = StreamingCaptureMode.Video
        };
        await _mediaCapture.InitializeAsync(settings);

        var mediaFrameSource = this._mediaCapture.FrameSources[sourceGroups?.FirstOrDefault()?.SourceInfo.Id];

        /*
         * // Commented to reduce CPU usage.
         * var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;
         *
         * videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
         * videoDeviceController.PrimaryUse = Windows.Media.Devices.CaptureUse.Video;
         *
         * //Set exposure (auto light adjustment)
         * if (_mediaCapture.VideoDeviceController.Exposure.Capabilities.Supported
         *     && _mediaCapture.VideoDeviceController.Exposure.Capabilities.AutoModeSupported)
         * {
         *     _mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
         * }
         *
         * var videoResolutionWidthHeight = VideoResolutionWidthHeight.Get(videoSetting.VideoResolution);
         * var videoSubType = VideoSubtypeHelper.Get(videoSetting.VideoSubtype);
         *
         * //Set resolution, frame rate and video subtype
         * var videoFormat = mediaFrameSource.SupportedFormats.Where(sf => sf.VideoFormat.Width == videoResolutionWidthHeight.Width
         *         && sf.VideoFormat.Height == videoResolutionWidthHeight.Height
         *         && sf.Subtype == videoSubType)
         *     .OrderByDescending(m => m.FrameRate.Numerator / m.FrameRate.Denominator)
         *     .First();
         *
         * await mediaFrameSource.SetFormatAsync(videoFormat);
         */

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
        await _mediaFrameReader.StartAsync();
    });
}
public static async Task<IEnumerable<string>> GetSourceNamesAsync()
{
    var frameSourceGroups = await AsAsync(MediaFrameSourceGroup.FindAllAsync());
    return frameSourceGroups.Select(x => x.DisplayName);
}
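// Note: this helper pairs naturally with the StartAsync(Name) method shown earlier:
// enumerate the display names, pick one, then start capture on it. A hypothetical call
// site, assuming both methods live on the same capture class (the CameraCapture class
// name is a placeholder):
var names = await CameraCapture.GetSourceNamesAsync();
foreach (var name in names)
{
    Log.WriteLineVerbose(name);
}
var capture = new CameraCapture();
await capture.StartAsync(names.First(), UseGpu: false);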
/// <summary>
///
/// </summary>
/// <returns></returns>
private async Task<bool> InitializeMediaCaptureAsync()
{
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because of incorrect status");
        return false;
    }
    if (mediaCapture != null)
    {
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }
    if (allGroups.Count <= 0)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
        return false;
    }

    // Initialize mediacapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    Debug.Log(TAG + " " + id + ": MediaCapture is successfully initialized in shared mode.");

    // Log all frame source information.
    string logString = "";
    foreach (var frameSource in mediaCapture.FrameSources)
    {
        var info = frameSource.Value.Info;
        logString += info.Id + ", " + info.MediaStreamType + ", " + info.SourceKind + "\n";
        logString += "Total number of SupportedFormats is " + frameSource.Value.SupportedFormats.Count + "\n";
        foreach (var format in frameSource.Value.SupportedFormats)
        {
            logString += format.VideoFormat.Width + " x " + format.VideoFormat.Height +
                ", Major type: " + format.MajorType + ", Subtype: " + format.Subtype +
                ", Framerate: " + format.FrameRate.Numerator + "/" + format.FrameRate.Denominator + "\n";
        }
    }
    Debug.Log(logString);

    MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
    MediaFrameFormat targetResFormat = targetFrameSource.SupportedFormats[0];
    try
    {
        // choose the smallest resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        // choose the specific resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
        await targetFrameSource.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(TAG + " " + id + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
            ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + " " + id + ": FrameReader is not initialized");
        Debug.Log(TAG + " " + id + ": Exception: " + e);
        return false;
    }

    captureStatus = CaptureStatus.Initialized;
    return true;
}
/// <summary>
/// The Task to asynchronously initialize MediaCapture in UWP. The camera of HoloLens will
/// be configured to preview video of 896x504 at 30 fps, pixel format is NV12. MediaFrameReader
/// will be initialized and register the callback function OnFrameArrived to each video
/// frame. Note that this task does not start running the video preview, but configures the
/// running behavior. This task should be executed when ARUWPController status is
/// ARUWP_STATUS_CLEAN, and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no error
/// occurred. [internal use]
/// </summary>
/// <returns>Whether video pipeline is successfully initialized</returns>
public async Task<bool> InitializeMediaCaptureAsyncTask()
{
    if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
        return false;
    }
    if (mediaCapture != null)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }
    if (allGroups.Count <= 0)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no MediaFrameSourceGroup");
        return false;
    }

    // Initialize mediacapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    Debug.Log(TAG + ": MediaCapture is successfully initialized in shared mode.");

    try
    {
        var mediaFrameSourceVideoPreview = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == MediaStreamType.VideoPreview);
        var minResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        await mediaFrameSourceVideoPreview.SetFormatAsync(minResFormat);
        Debug.Log(TAG + ": minResFormat.Subtype is " + minResFormat.Subtype);
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideoPreview, minResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        controller.frameWidth = Convert.ToInt32(minResFormat.VideoFormat.Width);
        controller.frameHeight = Convert.ToInt32(minResFormat.VideoFormat.Height);
        videoBufferSize = controller.frameWidth * controller.frameHeight * 4;
        Debug.Log(TAG + ": FrameReader is successfully initialized");
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return false;
    }

    controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
    signalInitDone = true;
    Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
    return true;
}
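// Note: the OnFrameArrived callback registered above is not shown. A minimal sketch,
// assuming NV12 frames are converted to BGRA8 to fill the width * height * 4 buffer sized
// by videoBufferSize (the videoBuffer field is a hypothetical IBuffer of that size):
private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        SoftwareBitmap bitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (bitmap == null) { return; }

        // NV12 -> BGRA8 conversion matches the 4-bytes-per-pixel buffer computed above.
        using (SoftwareBitmap bgra = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore))
        {
            bgra.CopyToBuffer(videoBuffer); // hypothetical IBuffer of videoBufferSize bytes
        }
    }
}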
public async Task GetDataSources()
{
    // If pose is supported and selected add pose source
    //var ver = Windows.System.Profile.AnalyticsInfo.VersionInfo.DeviceFamily;
    //if (ver == "Windows.Holographic")
    //{
    //    bool isSelected;
    //    Config.SourceSelectionDictionary.TryGetValue(Config.Pose, out isSelected);
    //    if (isSelected)
    //    {
    //        DataSources.Add(new PoseSource(ref RosConnector, ref SharedTimer)
    //        {
    //            SourceName = Config.Pose,
    //            PublishPeriod = 1 / Config.HololensPoseFPS
    //        });
    //    }
    //}

    // Check for any available cameras
    var possibleSourceKinds = new MediaFrameSourceKind[]
    {
        MediaFrameSourceKind.Depth,
        MediaFrameSourceKind.Infrared,
        MediaFrameSourceKind.Color
    };
    var groups = await MediaFrameSourceGroup.FindAllAsync();

    // Find the group that exposes all of the sensors for streaming
    foreach (var g in groups)
    {
        if (g.DisplayName == "Sensor Streaming")
        {
            Debug.WriteLine("Found Sensor Streaming Source Group");

            var mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(
                new MediaCaptureInitializationSettings()
                {
                    SourceGroup = g,
                    MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                    StreamingCaptureMode = StreamingCaptureMode.Video
                }
            );

            var sources = mediaCapture.FrameSources.Where(fs => possibleSourceKinds.Contains(fs.Value.Info.SourceKind)).Select(fs => fs.Value);
            foreach (var source in sources)
            {
                string originalSourceName = source.Info.Id.Substring(source.Info.Id.IndexOf("Source#"), 8);
                string assignedSourceName;
                Config.DataSourceDictionary.TryGetValue(originalSourceName, out assignedSourceName);

                bool isSelected;
                Config.SourceSelectionDictionary.TryGetValue(assignedSourceName, out isSelected);
                if (isSelected)
                {
                    double assignedFrameRate;
                    Config.FrameRateDictionary.TryGetValue(assignedSourceName, out assignedFrameRate);
                    double assignedPublishPeriod = 1.0 / (double)assignedFrameRate;
                    int originalFPS = (int)source.Info.VideoProfileMediaDescription[0].FrameRate;

                    CameraHandler handler = new CameraHandler(source.Info, mediaCapture, assignedPublishPeriod);
                    await handler.SetupReaderAsync();

                    DataSources.Add(new CameraSource(ref RosConnector, handler, assignedSourceName, assignedPublishPeriod)
                    {
                        Resolution = $"{source.Info.VideoProfileMediaDescription[0].Width} x {source.Info.VideoProfileMediaDescription[0].Height}",
                        OriginalFPS = originalFPS,
                        SourceName = assignedSourceName
                    });
                }
            }
            break;
        }
    }
}
private async void InitOpenCVFrameReader()
{
    //<SnippetOpenCVFrameSourceGroups>
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    var selectedGroupObjects = frameSourceGroups.Select(group => new
    {
        sourceGroup = group,
        colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
        {
            // On XBox/Kinect, omit the MediaStreamType and EnclosureLocation tests
            return sourceInfo.SourceKind == MediaFrameSourceKind.Color;
        })
    }).Where(t => t.colorSourceInfo != null)
      .FirstOrDefault();

    MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup;
    MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo;
    if (selectedGroup == null)
    {
        return;
    }
    //</SnippetOpenCVFrameSourceGroups>

    //<SnippetOpenCVInitMediaCapture>
    _mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await _mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    var colorFrameSource = _mediaCapture.FrameSources[colorSourceInfo.Id];
    //</SnippetOpenCVInitMediaCapture>

    //<SnippetOpenCVFrameReader>
    // Choose a lower resolution to make the image processing more performant
    BitmapSize size = new BitmapSize()
    {
        Height = 480,
        Width = 640
    };
    _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32, size);
    _mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived_OpenCV;

    _imageElement.Source = new SoftwareBitmapSource();
    _frameRenderer = new FrameRenderer(_imageElement);

    await _mediaFrameReader.StartAsync();
    //</SnippetOpenCVFrameReader>
}
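// Note: ColorFrameReader_FrameArrived_OpenCV is registered above but not shown. A minimal
// sketch, assuming an OpenCVHelper-style interop component that processes one SoftwareBitmap
// into another; the _openCVHelper field, its Blur method, and the renderer's RenderFrame
// method are placeholders for whatever native processing is actually plugged in:
private void ColorFrameReader_FrameArrived_OpenCV(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        SoftwareBitmap input = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (input == null) { return; }

        // Process into a same-sized output bitmap, then hand it to the renderer.
        var output = new SoftwareBitmap(BitmapPixelFormat.Bgra8, input.PixelWidth, input.PixelHeight, BitmapAlphaMode.Premultiplied);
        _openCVHelper.Blur(input, output); // hypothetical OpenCV interop component
        _frameRenderer.RenderFrame(output); // hypothetical renderer method
    }
}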
private async void init()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    LogError("checkpoint 1.1");

    var targetGroups = frameSourceGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();
    LogError("checkpoint 1.2");

    if (targetGroups.Count == 0)
    {
        LogError("No source groups found.");
        return;
    }

    MediaFrameSourceGroup mediaSourceGroup = targetGroups[0].Group;
    LogError("checkpoint 1.3");

    mediaCapture = new MediaCapture();
    LogError("checkpoint 1.4");

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = mediaSourceGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    LogError("checkpoint 1.5");

    await mediaCapture.InitializeAsync(settings);
    LogError("checkpoint 1.6");

    MediaFrameSource colorSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Color);
    MediaFrameSource depthSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    LogError("checkpoint 1.7");

    if (colorSource == null || depthSource == null)
    {
        LogError("Cannot find color or depth stream.");
        return;
    }

    MediaFrameFormat colorFormat = colorSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.Rgb24;
    }).FirstOrDefault();
    MediaFrameFormat depthFormat = depthSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.D16;
    }).FirstOrDefault();

    // Guard against cameras that expose neither format; SetFormatAsync(null) would throw.
    if (colorFormat == null || depthFormat == null)
    {
        LogError("Cannot find a suitable color or depth format.");
        return;
    }
    await colorSource.SetFormatAsync(colorFormat);
    await depthSource.SetFormatAsync(depthFormat);

    _colorSourceId = colorSource.Info.Id;
    _depthSourceId = depthSource.Info.Id;

    _frameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(new[] { colorSource, depthSource });
    _frameReader.FrameArrived += FrameReader_FrameArrived;

    MultiSourceMediaFrameReaderStartStatus startStatus = await _frameReader.StartAsync();
    if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
    {
        throw new InvalidOperationException("Unable to start reader: " + startStatus);
    }

    this.CorrelationFailed += MainPage_CorrelationFailed;
    Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
}
public async Task initialize(Image imageElement)
{
    if (_frameRenderers == null)
    {
        _frameRenderers = new Dictionary<MediaFrameSourceKind, FrameRenderer>()
        {
            { MediaFrameSourceKind.Color, new FrameRenderer(imageElement) },
        };
    }
    await CleanupMediaCaptureAsync();
    _imageElement = imageElement;

    // Identify the color, depth, and infrared sources of each group,
    // and keep only the groups that have at least one recognized source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        // For each source kind, find the source which offers that kind of media frame,
        // or null if there is no such source.
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        return;
    }

    bool foundOne = false;
    int count = eligibleGroups.Count;
    MediaFrameSourceInfo[] sourceInfos = null;
    string groupName = "";
    // Try each eligible group at most once (count-- > 0 rather than >= 0, which would over-iterate).
    while (!foundOne && count-- > 0)
    {
        // Pick next group in the array after each time the Next button is clicked.
        _groupSelectionIndex = (_groupSelectionIndex + 1) % eligibleGroups.Count;
        var selected = eligibleGroups[_groupSelectionIndex];

        try
        {
            // Initialize MediaCapture with selected group.
            // This can raise an exception if the source no longer exists,
            // or if the source could not be initialized.
            await InitializeMediaCaptureAsync(selected.Group);
            groupName = selected.Group.DisplayName;
            sourceInfos = selected.SourceInfos;
            foundOne = true;
        }
        catch (Exception exception)
        {
            Debug.WriteLine(exception.Message + "\n" + exception.StackTrace);
            await CleanupMediaCaptureAsync();
        }
    }

    if (!foundOne)
    {
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    for (int i = 0; i < sourceInfos.Length; i++)
    {
        MediaFrameSourceInfo info = sourceInfos[i];
        if (info != null)
        {
            // Access the initialized frame source by looking up the ID of the source found above.
            // Verify that the Id is present, because it may have left the group while we were
            // busy deciding which group to use.
            MediaFrameSource frameSource = null;
            if (_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
            {
                MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource);
                frameReader.FrameArrived += FrameReader_FrameArrived;

                MediaFrameReaderStartStatus status = await frameReader.StartAsync();
                if (status != MediaFrameReaderStartStatus.Success)
                {
                    Debug.WriteLine($"Unable to start {info.SourceKind} reader. Error: {status}");
                }
            }
            else
            {
                Debug.WriteLine($"Unable to start {info.SourceKind} reader. Frame source not found");
            }
        }
        else
        {
            string frameKind = (i == 0 ? "Color" : i == 1 ? "Depth" : "Infrared");
            Debug.WriteLine($"No {frameKind} source in group '{groupName}'.");
        }
    }
}
/// <summary>
/// Initializes the camera
/// </summary>
/// <returns></returns>
public async Task Initialize()
{
    lock (stateLock)
    {
        State = CameraState.Initializing;
    }

#if CAN_USE_UWP_TYPES
    try
    {
        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        StreamSelector = new StreamSelector();
        foreach (var sourceGroup in frameSourceGroups)
        {
            string name = sourceGroup.DisplayName;
            string id = sourceGroup.Id;

            foreach (var sourceInfo in sourceGroup.SourceInfos)
            {
                switch (CaptureMode)
                {
                    case CaptureMode.Continuous:
                    case CaptureMode.SingleLowLatency:
                    {
                        if ((sourceInfo.MediaStreamType == MediaStreamType.VideoRecord ||
                             sourceInfo.MediaStreamType == MediaStreamType.VideoPreview) &&
                            sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                        {
                            foreach (var setting in sourceInfo.VideoProfileMediaDescription)
                            {
                                StreamDescriptionInternal desc = new StreamDescriptionInternal()
                                {
                                    SourceName = sourceInfo.DeviceInformation.Name,
                                    SourceId = sourceInfo.Id,
                                    Resolution = new CameraResolution()
                                    {
                                        Width = setting.Width,
                                        Height = setting.Height,
                                        Framerate = setting.FrameRate
                                    },
                                    FrameSourceInfo = sourceInfo,
                                    FrameSourceGroup = sourceGroup,
                                    CameraType = GetCameraType(sourceInfo.SourceKind)
                                };
                                StreamSelector.AddStream(desc);
                            }
                        }
                        break;
                    }
                    case CaptureMode.Single:
                    {
                        if (sourceInfo.MediaStreamType == MediaStreamType.Photo &&
                            sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                        {
                            foreach (var setting in sourceInfo.VideoProfileMediaDescription)
                            {
                                StreamDescriptionInternal desc = new StreamDescriptionInternal()
                                {
                                    SourceName = sourceInfo.DeviceInformation.Name,
                                    SourceId = sourceInfo.Id,
                                    Resolution = new CameraResolution()
                                    {
                                        Width = setting.Width,
                                        Height = setting.Height,
                                        Framerate = setting.FrameRate
                                    },
                                    FrameSourceInfo = sourceInfo,
                                    FrameSourceGroup = sourceGroup,
                                    CameraType = GetCameraType(sourceInfo.SourceKind)
                                };
                                StreamSelector.AddStream(desc);
                            }
                        }
                        break;
                    }
                }
            }
        }

        lock (stateLock)
        {
            State = CameraState.Initialized;
            OnCameraInitialized?.Invoke(this, true);
        }
    }
    catch
    {
        OnCameraInitialized?.Invoke(this, false);
    }
#else
    await Task.CompletedTask;
#endif
}
async Task Start() { // Socket listener audioSocketListener = new StreamSocketListener(); audioSocketListener.ConnectionReceived += OnConnectionAudio; await audioSocketListener.BindServiceNameAsync(audioServiceName); videoSocketListener = new StreamSocketListener(); videoSocketListener.ConnectionReceived += OnConnectionVideo; await videoSocketListener.BindServiceNameAsync(videoServiceName); // Find a media source group which gives us webcam and microphone input streams var sourceGroups = await MediaFrameSourceGroup.FindAllAsync(); MediaFrameSourceGroup selectedSourceGroup = null; MediaCaptureVideoProfile selectedVideoProfile = null; MediaCaptureVideoProfileMediaDescription selectedDescription = null; foreach (MediaFrameSourceGroup sourceGroup in sourceGroups) { var videoProfiles = MediaCapture.FindKnownVideoProfiles(sourceGroup.Id, KnownVideoProfile.VideoConferencing); foreach (MediaCaptureVideoProfile videoProfile in videoProfiles) { foreach (var desc in videoProfile.SupportedRecordMediaDescription) { if (desc.Width == videoWidth && desc.Height == videoHeight && desc.FrameRate == frameRate) { selectedSourceGroup = sourceGroup; selectedVideoProfile = videoProfile; selectedDescription = desc; } } } } if (selectedSourceGroup == null) { Debug.Log("No source group was found."); return; } mediaCapture = new MediaCapture(); var settings = new MediaCaptureInitializationSettings() { AudioProcessing = AudioProcessing.Raw, MemoryPreference = MediaCaptureMemoryPreference.Cpu, RecordMediaDescription = selectedDescription, SharingMode = MediaCaptureSharingMode.ExclusiveControl, SourceGroup = selectedSourceGroup, StreamingCaptureMode = StreamingCaptureMode.AudioAndVideo, VideoProfile = selectedVideoProfile, }; try { await mediaCapture.InitializeAsync(settings); } catch (Exception ex) { Debug.Log("MediaCapture initialization failed: " + ex.Message); return; } var audioFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.MediaStreamType == MediaStreamType.Audio); if (audioFrameSources.Count() == 0) { Debug.Log("No audio source was found."); return; } MediaFrameSource audioFrameSource = audioFrameSources.FirstOrDefault().Value; var videoFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.SourceKind == MediaFrameSourceKind.Color); if (videoFrameSources.Count() == 0) { Debug.Log("No video source was found."); return; } // MediaFrameSource videoFrameSource = videoFrameSources.FirstOrDefault().Value; MediaFrameSource videoFrameSource = null; MediaFrameFormat selectedFormat = null; foreach (var kv in videoFrameSources) { MediaFrameSource source = kv.Value; foreach (MediaFrameFormat format in source.SupportedFormats) { if (format.VideoFormat.Width == videoWidth && format.VideoFormat.Height == videoHeight && format.FrameRate.Numerator == frameRate && format.FrameRate.Denominator == 1) { videoFrameSource = source; selectedFormat = format; break; } } if (videoFrameSource != null) { break; } } if (selectedFormat != null) { await videoFrameSource.SetFormatAsync(selectedFormat); } else { Debug.Log("Cannot find a proper MediaFrameFormat."); return; } // Start streaming audioFrameReader = await mediaCapture.CreateFrameReaderAsync(audioFrameSource); audioFrameReader.FrameArrived += AudioFrameArrived; videoFrameReader = await mediaCapture.CreateFrameReaderAsync(videoFrameSource); videoFrameReader.FrameArrived += VideoFrameArrived; var audioStartStatus = audioFrameReader.StartAsync(); var videoStartStatus = videoFrameReader.StartAsync(); if (await audioStartStatus != 
MediaFrameReaderStartStatus.Success) { Debug.Log("The audioFrameReader couldn't start."); } if (await videoStartStatus != MediaFrameReaderStartStatus.Success) { Debug.Log("The videoFrameReader couldn't start."); } }
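The AudioFrameArrived and VideoFrameArrived handlers registered above are not shown. A minimal sketch of the frame-acquisition pattern, with the socket-streaming logic left as comments (the frame types come from Windows.Media and Windows.Graphics.Imaging):

private void AudioFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // TryAcquireLatestFrame returns null when no new frame is available.
    using (MediaFrameReference reference = sender.TryAcquireLatestFrame())
    {
        if (reference?.AudioMediaFrame != null)
        {
            using (AudioFrame audioFrame = reference.AudioMediaFrame.GetAudioFrame())
            {
                // Serialize the audio frame to the audio socket here.
            }
        }
    }
}

private void VideoFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (MediaFrameReference reference = sender.TryAcquireLatestFrame())
    {
        SoftwareBitmap bitmap = reference?.VideoMediaFrame?.SoftwareBitmap;
        if (bitmap != null)
        {
            // Encode the bitmap and write it to the video socket here.
        }
    }
}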
private async Task <bool> InitializeMediaCaptureAsync() { if (captureStatus != CaptureStatus.Clean) { Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because of incorrect status"); return(false); } if (mediaCapture != null) { return(false); } var allGroups = await MediaFrameSourceGroup.FindAllAsync(); int selectedGroupIndex = -1; for (int i = 0; i < allGroups.Count; i++) { var group = allGroups[i]; Debug.Log(group.DisplayName + ", " + group.Id); // for HoloLens 1 if (group.DisplayName == "MN34150") { selectedGroupIndex = i; HL = 1; Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1"); break; } // for HoloLens 2 else if (group.DisplayName == "QC Back Camera") { selectedGroupIndex = i; HL = 2; Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2"); break; } } if (selectedGroupIndex == -1) { Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because there is no suitable source group"); return(false); } // Initialize mediacapture with the source group. mediaCapture = new MediaCapture(); MediaStreamType mediaStreamType = MediaStreamType.VideoPreview; if (HL == 1) { var settings = new MediaCaptureInitializationSettings { SourceGroup = allGroups[selectedGroupIndex], // This media capture can share streaming with other apps. SharingMode = MediaCaptureSharingMode.SharedReadOnly, // Only stream video and don't initialize audio capture devices. StreamingCaptureMode = StreamingCaptureMode.Video, // Set to CPU to ensure frames always contain CPU SoftwareBitmap images // instead of preferring GPU D3DSurface images. MemoryPreference = MediaCaptureMemoryPreference.Cpu }; await mediaCapture.InitializeAsync(settings); Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1."); mediaStreamType = MediaStreamType.VideoPreview; } else if (HL == 2) { string deviceId = allGroups[selectedGroupIndex].Id; // Look up all video profiles IReadOnlyList <MediaCaptureVideoProfile> profileList = MediaCapture.FindAllVideoProfiles(deviceId); //MediaCaptureVideoProfile selectedProfile; //IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing); // Initialize mediacapture with the source group. var settings = new MediaCaptureInitializationSettings { SourceGroup = allGroups[selectedGroupIndex], //VideoDeviceId = deviceId, //VideoProfile = profileList[0], // This media capture can share streaming with other apps. SharingMode = MediaCaptureSharingMode.ExclusiveControl, // Only stream video and don't initialize audio capture devices. StreamingCaptureMode = StreamingCaptureMode.Video, // Set to CPU to ensure frames always contain CPU SoftwareBitmap images // instead of preferring GPU D3DSurface images. 
MemoryPreference = MediaCaptureMemoryPreference.Cpu }; await mediaCapture.InitializeAsync(settings); Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2."); mediaStreamType = MediaStreamType.VideoRecord; } try { var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType); MediaFrameFormat targetResFormat = null; float framerateDiffMin = 60f; foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height)) { // Check current media frame source resolution versus target resolution if (f.VideoFormat.Width == _targetVideoWidth && f.VideoFormat.Height == _targetVideoHeight) { if (targetResFormat == null) { targetResFormat = f; framerateDiffMin = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate); } else if (Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate) < framerateDiffMin) { targetResFormat = f; framerateDiffMin = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate); } } } if (targetResFormat == null) { targetResFormat = mediaFrameSourceVideo.SupportedFormats[0]; Debug.Log(TAG + ": Unable to find the requested format; falling back to the first supported format"); } // choose the smallest resolution //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault(); // choose the specific resolution //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault(); await mediaFrameSourceVideo.SetFormatAsync(targetResFormat); Debug.Log(TAG + ": mediaFrameSourceVideo.SetFormatAsync()"); frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype); Debug.Log(TAG + ": mediaCapture.CreateFrameReaderAsync()"); frameReader.FrameArrived += OnFrameArrived; videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width); videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height); Debug.Log(TAG + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight + ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator); } catch (Exception e) { Debug.Log(TAG + ": FrameReader is not initialized"); Debug.Log(TAG + ": Exception: " + e); return(false); } captureStatus = CaptureStatus.Initialized; return(true); }
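This initializer only configures the pipeline; streaming has to be started separately. A minimal sketch of the matching start routine, assuming the CaptureStatus enum also defines a Running state:

private async Task<bool> StartFrameReaderAsync()
{
    if (captureStatus != CaptureStatus.Initialized)
    {
        Debug.Log(TAG + ": StartFrameReaderAsync() fails because of incorrect status");
        return false;
    }
    MediaFrameReaderStartStatus status = await frameReader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success)
    {
        captureStatus = CaptureStatus.Running; // assumed enum member
        return true;
    }
    Debug.Log(TAG + ": StartFrameReaderAsync() fails, status = " + status);
    return false;
}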
/// <summary> /// Switches to the next camera source and starts reading frames. /// </summary> private async Task PickNextMediaSourceWorkerAsync() { await CleanupMediaCaptureAsync(); // Identify the color, depth, and infrared sources of each group, // and keep only the groups that have at least one recognized source. var allGroups = await MediaFrameSourceGroup.FindAllAsync(); var eligibleGroups = allGroups.Select(g => new { Group = g, // For each source kind, find the source which offers that kind of media frame, // or null if there is no such source. SourceInfos = new MediaFrameSourceInfo[] { g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color), g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth), g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Infrared), } }).Where(g => g.SourceInfos.Any(info => info != null)).ToList(); if (eligibleGroups.Count == 0) { _logger.Log("No source group with color, depth or infrared found."); return; } // Pick the next group in the array each time the Next button is clicked. _groupSelectionIndex = (_groupSelectionIndex + 1) % eligibleGroups.Count; var selected = eligibleGroups[_groupSelectionIndex]; _logger.Log($"Found {eligibleGroups.Count} groups and selecting index [{_groupSelectionIndex}]: {selected.Group.DisplayName}"); try { // Initialize MediaCapture with selected group. // This can raise an exception if the source no longer exists, // or if the source could not be initialized. await InitializeMediaCaptureAsync(selected.Group); } catch (Exception exception) { _logger.Log($"MediaCapture initialization error: {exception.Message}"); await CleanupMediaCaptureAsync(); return; } // Set up frame readers, register event handlers and start streaming. for (int i = 0; i < selected.SourceInfos.Length; i++) { MediaFrameSourceInfo info = selected.SourceInfos[i]; if (info != null) { // Access the initialized frame source by looking up the ID of the source found above. // Verify that the Id is present, because it may have left the group while we were // busy deciding which group to use. MediaFrameSource frameSource = null; if (_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource)) { MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource); frameReader.FrameArrived += FrameReader_FrameArrived; MediaFrameReaderStartStatus status = await frameReader.StartAsync(); if (status != MediaFrameReaderStartStatus.Success) { _logger.Log($"Unable to start {info.SourceKind} reader. Error: {status}"); } } else { _logger.Log($"Unable to start {info.SourceKind} reader. Frame source not found"); } } else { string frameKind = (i == 0 ? "Color" : i == 1 ? "Depth" : "Infrared"); _logger.Log($"No {frameKind} source in group '{selected.Group.DisplayName}'."); } } }
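The FrameReader_FrameArrived handler wired up above is not shown. A minimal sketch, assuming the renderers are kept in a _frameRenderers dictionary keyed by source kind and that the FrameRenderer helper exposes a ProcessFrame(MediaFrameReference) method:

private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // TryAcquireLatestFrame returns null when no new frame is available.
    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        FrameRenderer renderer;
        if (frame != null && _frameRenderers.TryGetValue(frame.SourceKind, out renderer))
        {
            renderer.ProcessFrame(frame);
        }
    }
}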
/// <summary> /// The task to asynchronously initialize MediaCapture in UWP. The HoloLens camera will /// be configured according to the selected VideoParameter preset (resolution and frame rate); /// the pixel format is NV12. A MediaFrameReader will be initialized and the callback /// OnFrameArrived registered for each video frame. Note that this task does not start the /// video preview; it only configures the running behavior. This task should be executed when /// ARUWPController status is ARUWP_STATUS_CLEAN, and will change it to /// ARUWP_STATUS_VIDEO_INITIALIZED if no error occurred. [internal use] /// </summary> /// <returns>Whether the video pipeline is successfully initialized</returns> public async Task <bool> InitializeMediaCaptureAsyncTask() { if (controller.status != ARUWP.ARUWP_STATUS_CLEAN) { Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status"); return(false); } if (mediaCapture != null) { Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null"); return(false); } int targetVideoWidth, targetVideoHeight; float targetVideoFrameRate; switch (videoParameter) { case VideoParameter.HL1Param1280x720x15: HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 15.0f; break; case VideoParameter.HL1Param1280x720x30: HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 30.0f; break; case VideoParameter.HL1Param1344x756x15: HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 15.0f; break; case VideoParameter.HL1Param1344x756x30: HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 30.0f; break; case VideoParameter.HL1Param896x504x15: HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 15.0f; break; case VideoParameter.HL1Param896x504x30: HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 30.0f; break; case VideoParameter.HL2Param1504x846x60: HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 60.0f; break; case VideoParameter.HL2Param1504x846x30: HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 30.0f; break; default: return(false); } var allGroups = await MediaFrameSourceGroup.FindAllAsync(); int selectedGroupIndex = -1; for (int i = 0; i < allGroups.Count; i++) { var group = allGroups[i]; Debug.Log(group.DisplayName + ", " + group.Id); // for HoloLens 1 if (HL == 1) { if (group.DisplayName == "MN34150") { selectedGroupIndex = i; Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1"); break; } } // for HoloLens 2 else if (HL == 2) { if (group.DisplayName == "QC Back Camera") { selectedGroupIndex = i; Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2"); break; } } } if (selectedGroupIndex == -1) { Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group"); return(false); } // Initialize mediacapture with the source group. mediaCapture = new MediaCapture(); MediaStreamType mediaStreamType = MediaStreamType.VideoPreview; if (HL == 1) { var settings = new MediaCaptureInitializationSettings { SourceGroup = allGroups[selectedGroupIndex], // This media capture can share streaming with other apps. SharingMode = MediaCaptureSharingMode.SharedReadOnly, // Only stream video and don't initialize audio capture devices. StreamingCaptureMode = StreamingCaptureMode.Video, // Set to CPU to ensure frames always contain CPU SoftwareBitmap images // instead of preferring GPU D3DSurface images. 
MemoryPreference = MediaCaptureMemoryPreference.Cpu }; await mediaCapture.InitializeAsync(settings); Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1."); mediaStreamType = MediaStreamType.VideoPreview; } else if (HL == 2) { string deviceId = allGroups[selectedGroupIndex].Id; // Look up all video profiles //IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindAllVideoProfiles(deviceId); //MediaCaptureVideoProfile selectedProfile; IReadOnlyList <MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing); // Initialize mediacapture with the source group. var settings = new MediaCaptureInitializationSettings { //SourceGroup = allGroups[selectedGroupIndex], VideoDeviceId = deviceId, VideoProfile = profileList[0], // This media capture can share streaming with other apps. SharingMode = MediaCaptureSharingMode.ExclusiveControl, // Only stream video and don't initialize audio capture devices. StreamingCaptureMode = StreamingCaptureMode.Video, // Set to CPU to ensure frames always contain CPU SoftwareBitmap images // instead of preferring GPU D3DSurface images. MemoryPreference = MediaCaptureMemoryPreference.Cpu }; await mediaCapture.InitializeAsync(settings); Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2."); mediaStreamType = MediaStreamType.VideoRecord; } try { var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType); MediaFrameFormat targetResFormat = null; float framerateDiffMin = 60f; foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height)) { if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight) { if (targetResFormat == null) { targetResFormat = f; framerateDiffMin = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate); } else if (Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate) < framerateDiffMin) { targetResFormat = f; framerateDiffMin = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate); } } } if (targetResFormat == null) { Debug.Log(TAG + ": Unable to find the requested format; falling back to the smallest supported resolution"); targetResFormat = mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault(); } await mediaFrameSourceVideo.SetFormatAsync(targetResFormat); frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype); frameReader.FrameArrived += OnFrameArrived; controller.frameWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width); controller.frameHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height); // If controller.frameWidth is not a multiple of 64, pad it up to the next multiple; // on HoloLens 2 this is required (the frame buffer stride is 64-byte aligned) if (controller.frameWidth % 64 != 0) { int paddedFrameWidth = ((controller.frameWidth >> 6) + 1) << 6; Debug.Log(TAG + ": the width is padded to " + paddedFrameWidth); controller.frameWidth = paddedFrameWidth; } // Since v0.3, grayscale output is forced frameData = new byte[controller.frameWidth * controller.frameHeight]; Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight + ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + 
targetResFormat.FrameRate.Denominator); } catch (Exception e) { Debug.Log(TAG + ": FrameReader is not initialized"); Debug.Log(TAG + ": Exception: " + e); return(false); } controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED; signalInitDone = true; Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful"); return(true); }
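The grayscale copy implied by the frameData allocation above works because NV12 stores the Y (luma) plane first: with frameWidth padded up to the 64-byte-aligned stride, the first frameWidth * frameHeight bytes of the buffer are exactly the grayscale image. A sketch using the standard IMemoryBufferByteAccess COM interface (requires unsafe code and System.Runtime.InteropServices; CopyLumaPlane is a hypothetical helper name):

[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    void GetBuffer(out byte* buffer, out uint capacity);
}

private unsafe void CopyLumaPlane(SoftwareBitmap bitmap, byte[] frameData)
{
    using (BitmapBuffer buffer = bitmap.LockBuffer(BitmapBufferAccessMode.Read))
    using (var reference = buffer.CreateReference())
    {
        byte* data;
        uint capacity;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out data, out capacity);
        // Copy only the luma plane; the interleaved UV data that follows it is ignored.
        Marshal.Copy((IntPtr)data, frameData, 0, frameData.Length);
    }
}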
/// <summary> /// Creates the initialization settings for the MediaCapture object that will support /// all the requested capture settings specified in the configuration object. This method /// will iterate through all the device's video capture profiles to find one that supports /// the requested capture frame dimensions and frame rate. If both Video and Preview streams /// are selected (e.g. for simultaneous mixed reality capture), then the selected profile must /// support the capture modes for both streams. /// </summary> /// <returns> /// A MediaCaptureInitializationSettings object for the first profile that satisfies all the /// requested capture settings in the configuration object, or null if no such profile was found. /// </returns> private async Task <MediaCaptureInitializationSettings> CreateMediaCaptureSettingsAsync() { MediaFrameSourceGroup selectedSourceGroup = null; MediaCaptureVideoProfile profile = null; MediaCaptureVideoProfileMediaDescription videoDesc = null; MediaCaptureVideoProfileMediaDescription previewDesc = null; var mediaFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); // Search all source groups foreach (var mediaFrameSourceGroup in mediaFrameSourceGroups) { // Search for a profile that supports the requested capture modes var knownProfiles = MediaCapture.FindAllVideoProfiles(mediaFrameSourceGroup.Id); foreach (var knownProfile in knownProfiles) { // If a video stream capture mode was specified if (this.configuration.VideoStreamSettings != null) { // Clear any partial matches and continue searching profile = null; videoDesc = null; selectedSourceGroup = null; // Search the supported video (recording) modes for the requested resolution and frame rate foreach (var knownDesc in knownProfile.SupportedRecordMediaDescription) { if (knownDesc.Width == this.configuration.VideoStreamSettings.ImageWidth && knownDesc.Height == this.configuration.VideoStreamSettings.ImageHeight && knownDesc.FrameRate == this.configuration.VideoStreamSettings.FrameRate) { // Found a match for video. Need to also match the requested preview mode (if any) // within the same profile and source group, otherwise we have to keep searching. 
profile = knownProfile; videoDesc = knownDesc; selectedSourceGroup = mediaFrameSourceGroup; break; } } if (profile == null) { // This profile does not support the requested video stream capture parameters - try the next profile continue; } } // If a preview stream capture mode was specified if (this.configuration.PreviewStreamSettings != null) { // Clear any partial matches and continue searching profile = null; previewDesc = null; selectedSourceGroup = null; // Search the supported preview modes for the requested resolution and frame rate foreach (var knownDesc in knownProfile.SupportedPreviewMediaDescription) { if (knownDesc.Width == this.configuration.PreviewStreamSettings.ImageWidth && knownDesc.Height == this.configuration.PreviewStreamSettings.ImageHeight && knownDesc.FrameRate == this.configuration.PreviewStreamSettings.FrameRate) { // Found a match profile = knownProfile; previewDesc = knownDesc; selectedSourceGroup = mediaFrameSourceGroup; break; } } if (profile == null) { // This profile does not support the requested preview mode - try the next profile continue; } } if (profile != null) { // Found a valid profile that supports the requested capture settings return(new MediaCaptureInitializationSettings { VideoProfile = profile, RecordMediaDescription = videoDesc, PreviewMediaDescription = previewDesc, VideoDeviceId = selectedSourceGroup.Id, StreamingCaptureMode = StreamingCaptureMode.Video, MemoryPreference = MediaCaptureMemoryPreference.Cpu, SharingMode = MediaCaptureSharingMode.ExclusiveControl, SourceGroup = selectedSourceGroup, }); } } } // No matching settings were found return(null); }
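A possible call site for this helper (assumed, not shown in the original): the method returns null when no profile matches, so the result must be checked before initializing MediaCapture:

var settings = await CreateMediaCaptureSettingsAsync();
if (settings == null)
{
    throw new InvalidOperationException("No video profile supports the requested capture settings.");
}
var mediaCapture = new MediaCapture();
await mediaCapture.InitializeAsync(settings);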
public static async Task <VideoProcessor> CreateAsync() { Debug.WriteLine("VideoProcessor.CreateAsync() called!"); MediaFrameSourceGroup selectedGroup = null; MediaFrameSourceInfo selectedSourceInfo = null; // Gets all camera groups var groups = await MediaFrameSourceGroup.FindAllAsync(); Debug.WriteLine("MediaFrameSourceGroup.FindAllAsync() called!"); // Iterates over all cameras to find the first color camera available foreach (MediaFrameSourceGroup sourceGroup in groups) { foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos) { // Pick the first color camera source if (sourceInfo.SourceKind == MediaFrameSourceKind.Color) { selectedSourceInfo = sourceInfo; break; } } if (selectedSourceInfo != null) { selectedGroup = sourceGroup; break; } } // If no valid camera is found, return null if (selectedGroup == null || selectedSourceInfo == null) { return(null); } // Prepare settings MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings(); settings.MemoryPreference = MediaCaptureMemoryPreference.Cpu; settings.SharingMode = MediaCaptureSharingMode.SharedReadOnly; settings.StreamingCaptureMode = StreamingCaptureMode.Video; settings.SourceGroup = selectedGroup; // Initialize media capture MediaCapture mediacapture = new MediaCapture(); await mediacapture.InitializeAsync(settings); // Gets the media frame source; bail out if it has left the group in the meantime MediaFrameSource MF_Source; if (!mediacapture.FrameSources.TryGetValue(selectedSourceInfo.Id, out MF_Source)) { return(null); } // Create a media frame reader from the media frame source MediaFrameReader MF_Reader = await mediacapture.CreateFrameReaderAsync(MF_Source); MediaFrameReaderStartStatus status = await MF_Reader.StartAsync(); if (status == MediaFrameReaderStartStatus.Success) { return(new VideoProcessor(mediacapture, MF_Reader, MF_Source)); } else { Debug.WriteLine("Frame Reader failed!"); return(null); } }
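Example usage (assumed): CreateAsync returns null both when no color camera exists and when the reader fails to start, so callers must check the result:

VideoProcessor processor = await VideoProcessor.CreateAsync();
if (processor == null)
{
    Debug.WriteLine("No usable color camera was found.");
}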