/// <summary>
/// Creates the frame reader using the target format and registers the <see cref="OnFrameArrived"/> event.
/// The width is padded to be divisible by 64.
/// </summary>
/// <returns>Whether the frame reader was successfully created.</returns>
private async Task<bool> CreateFrameReader()
{
    const MediaStreamType mediaStreamType = MediaStreamType.VideoRecord;
    CameraParameters parameters = new CameraParameters(_cameraProfile);

    try
    {
        MediaFrameSource source = _mediaCapture.FrameSources.Values.Single(
            frameSource => frameSource.Info.MediaStreamType == mediaStreamType);
        MediaFrameFormat format = GetTargetFormat(source, parameters);
        await source.SetFormatAsync(format);

        _frameReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);
        _frameReader.FrameArrived += OnFrameArrived;

        FrameWidth = Convert.ToInt32(format.VideoFormat.Width);
        FrameHeight = Convert.ToInt32(format.VideoFormat.Height);
        FrameWidth = PadTo64(FrameWidth);

        _logger.Log($"FrameReader initialized using {FrameWidth} x {FrameHeight}, frame rate: {format.FrameRate.Numerator} / {format.FrameRate.Denominator}, color format: {_format}");
    }
    catch (Exception exception)
    {
        _logger.LogError("Frame reader could not be initialized");
        _logger.LogException(exception);
        return false;
    }

    return true;
}
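// PadTo64 is referenced above but not shown here. A minimal sketch of what such a
// helper might look like, assuming the intent is to round the width up to the next
// multiple of 64 (a buffer alignment some HoloLens 2 pipelines require):
private static int PadTo64(int width)
{
    // Rounds up to the next multiple of 64; already-aligned widths are unchanged.
    return (width + 63) & ~63;
}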
/// <summary>
/// Determines the subtype to request from the MediaFrameReader that will result in
/// a frame that can be rendered by ConvertToDisplayableImage.
/// </summary>
/// <returns>Subtype string to request, or null if the subtype is not renderable.</returns>
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
    // Note that media encoding subtypes may differ in case.
    // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.MediaProperties.MediaEncodingSubtypes
    string subtype = format.Subtype;
    switch (kind)
    {
        // For color sources, we accept anything and request that it be converted to Bgra8.
        case MediaFrameSourceKind.Color:
            return MediaEncodingSubtypes.Bgra8;

        // The only depth format we can render is D16.
        case MediaFrameSourceKind.Depth:
            return String.Equals(subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase) ? subtype : null;

        // The only infrared formats we can render are L8 and L16.
        case MediaFrameSourceKind.Infrared:
            return (String.Equals(subtype, MediaEncodingSubtypes.L8, StringComparison.OrdinalIgnoreCase) ||
                    String.Equals(subtype, MediaEncodingSubtypes.L16, StringComparison.OrdinalIgnoreCase)) ? subtype : null;

        // No other source kinds are supported by this class.
        default:
            return null;
    }
}
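// A minimal usage sketch inside an async method; `mediaCapture` and `OnFrameArrived`
// are assumed to exist in the caller. Sources whose subtype this helper cannot map
// to a renderable format are skipped.
foreach (MediaFrameSource source in mediaCapture.FrameSources.Values)
{
    string subtype = GetSubtypeForFrameReader(source.Info.SourceKind, source.CurrentFormat);
    if (subtype == null)
    {
        continue; // not renderable by ConvertToDisplayableImage
    }
    MediaFrameReader reader = await mediaCapture.CreateFrameReaderAsync(source, subtype);
    reader.FrameArrived += OnFrameArrived;
    await reader.StartAsync();
}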
private double GetFormatBandwidth(MediaFrameFormat format)
{
    double bandwidth = 1.0;
    bandwidth *= GetFrameRate(format.FrameRate);
    bandwidth *= format.VideoFormat.Width * format.VideoFormat.Height;
    return bandwidth;
}
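// GetFrameRate is not shown here. A minimal sketch, assuming it simply converts the
// MediaRatio into frames per second:
private static double GetFrameRate(MediaRatio frameRate)
{
    // Cast before dividing so that 30000/1001 yields ~29.97 instead of 29.
    return (double)frameRate.Numerator / frameRate.Denominator;
}

// With these helpers, the highest-bandwidth format of a source could be chosen as:
// MediaFrameFormat best = source.SupportedFormats.OrderByDescending(GetFormatBandwidth).First();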
private async Task InitMediaSourceAsync()
{
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        Debug.LogError("Cannot find a MediaFrameSourceGroup. Check the application manifest.");
        return;
    }

    MediaFrameSourceGroup sourceGroup = allGroups.FirstOrDefault(
        g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));
    if (sourceGroup == null)
    {
        Debug.LogError("No depth camera could be found.");
        return;
    }

    try
    {
        await InitializeMediaCaptureAsync(sourceGroup);
    }
    catch (Exception exception)
    {
        Debug.LogError("InitializeMediaCaptureAsync failed: " + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    MediaFrameSource source = _mediaCapture.FrameSources.Values
        .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    if (source == null)
    {
        Debug.LogError("No depth frame source was found.");
        return; // bail out; the code below would dereference a null source
    }

    MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(
        f => String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));
    if (format == null)
    {
        return;
    }
    await source.SetFormatAsync(format);

    _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);

    MediaFrameReaderStartStatus status = await _depthReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.LogError("_depthReader.StartAsync failed.");
    }
}
/// <summary>
/// Compares a format against the target format specified in <see cref="CameraParameters"/>.
/// </summary>
/// <returns>Whether the format is considered equal.</returns>
// adapted from https://github.com/microsoft/MixedReality-SpectatorView/blob/master/src/SpectatorView.Unity/Assets/PhotoCapture/Scripts/HoloLensCamera.cs
private bool CompareFormat(MediaFrameFormat format, CameraParameters parameters)
{
    const double epsilon = 0.00001;
    bool width = format.VideoFormat.Width == parameters.CameraResolutionWidth;
    bool height = format.VideoFormat.Height == parameters.CameraResolutionHeight;
    bool frameRate = Math.Abs((double)format.FrameRate.Numerator / format.FrameRate.Denominator - parameters.FrameRate) < epsilon;
    return width && height && frameRate;
}
public FrameFormatModel(MediaFrameFormat format)
{
    this.Format = format;
    this.DisplayName = string.Format("{0} | {1} | {2} x {3} | {4:#.##}fps",
        format.MajorType,
        format.Subtype,
        format.VideoFormat?.Width,
        format.VideoFormat?.Height,
        Math.Round((double)format.FrameRate.Numerator / format.FrameRate.Denominator, 2));
}
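// A hypothetical usage sketch (`source` is assumed to be a MediaFrameSource): wrap
// each supported format for display in a format picker.
List<FrameFormatModel> models = source.SupportedFormats
    .Select(format => new FrameFormatModel(format))
    .ToList();
// A 1280x720 NV12 format at 30000/1001 fps would yield a DisplayName along the
// lines of "Video | NV12 | 1280 x 720 | 29.97fps" (illustrative values).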
/// <summary>
/// Retrieves the target format specified in <see cref="CameraParameters"/> from the <see cref="MediaFrameSource">frame source</see>.
/// </summary>
/// <returns>The preferred format, or the smallest supported format as a fallback.</returns>
private MediaFrameFormat GetTargetFormat(MediaFrameSource frameSource, CameraParameters parameters)
{
    MediaFrameFormat preferredFormat = frameSource.SupportedFormats.FirstOrDefault(format => CompareFormat(format, parameters));
    if (preferredFormat != null)
    {
        return preferredFormat;
    }

    _logger.LogWarning("Unable to find the requested format; using the fallback format.");
    preferredFormat = frameSource.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
    return preferredFormat;
}
private async void InitAudioFrameReader()
{
    //<SnippetInitAudioFrameSource>
    mediaCapture = new MediaCapture();
    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Audio,
    };
    await mediaCapture.InitializeAsync(settings);

    var audioFrameSources = mediaCapture.FrameSources.Where(x => x.Value.Info.MediaStreamType == MediaStreamType.Audio);
    if (audioFrameSources.Count() == 0)
    {
        Debug.WriteLine("No audio frame source was found.");
        return;
    }

    MediaFrameSource frameSource = audioFrameSources.FirstOrDefault().Value;

    MediaFrameFormat format = frameSource.CurrentFormat;
    if (format.Subtype != MediaEncodingSubtypes.Float)
    {
        return;
    }

    if (format.AudioEncodingProperties.ChannelCount != 1 ||
        format.AudioEncodingProperties.SampleRate != 48000)
    {
        return;
    }
    //</SnippetInitAudioFrameSource>

    //<SnippetCreateAudioFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);

    // Optionally set acquisition mode. Buffered is the default mode for audio.
    mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;

    mediaFrameReader.FrameArrived += MediaFrameReader_AudioFrameArrived;

    var status = await mediaFrameReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("The MediaFrameReader couldn't start.");
    }
    //</SnippetCreateAudioFrameReader>
}
/// <summary>
/// Compares the Format contained by this view model to the given format for equivalency.
/// </summary>
/// <param name="otherFormat">The MediaFrameFormat to compare to the MediaFrameFormat in this view model.</param>
/// <returns>Whether the two formats are equivalent.</returns>
public bool HasSameFormat(MediaFrameFormat otherFormat)
{
    if (otherFormat == null)
    {
        return Format == null;
    }

    return this.Format.MajorType == otherFormat.MajorType &&
           this.Format.Subtype == otherFormat.Subtype &&
           this.Format.FrameRate.Numerator == otherFormat.FrameRate.Numerator &&
           this.Format.FrameRate.Denominator == otherFormat.FrameRate.Denominator &&
           this.Format.VideoFormat?.Width == otherFormat.VideoFormat?.Width &&
           this.Format.VideoFormat?.Height == otherFormat.VideoFormat?.Height;
}
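// A hypothetical usage sketch: preselect the view model whose format matches the
// source's current format (`formatModels` and `source` are assumed to exist).
FrameFormatModel current = formatModels.FirstOrDefault(model => model.HasSameFormat(source.CurrentFormat));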
private async Task StartCapturer()
{
    MediaFrameSource mediaFrameSource;

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count <= 0)
    {
        //textmesh.text = "Orca";
        Debug.Log("Orca");
        return; // without a source group, allGroups[0] below would throw
    }

    var mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    //render.material.color = new Color(0, 0, 0.5f);

    mediaFrameSource = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == MediaStreamType.VideoRecord);

    try
    {
        MediaFrameFormat targetResFormat = null;
        foreach (var f in mediaFrameSource.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            //textmesh.text = string.Format("{0}x{1} {2}/{3}", f.VideoFormat.Width, f.VideoFormat.Height, f.FrameRate.Numerator, f.FrameRate.Denominator);
            if (f.VideoFormat.Width == 896 && f.VideoFormat.Height == 504 && f.FrameRate.Numerator == 24)
            {
                targetResFormat = f;
            }
        }
        if (targetResFormat != null)
        {
            await mediaFrameSource.SetFormatAsync(targetResFormat);
        }
    }
    catch
    {
        //textmesh.text = "Orca2";
    }

    try
    {
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Bgra8);
        frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
        frameReader.FrameArrived += OnFrameArrived;
    }
    catch
    {
        //textmesh.text = "Orca3";
    }
}
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
    switch (kind)
    {
        case MediaFrameSourceKind.Color:
            return MediaEncodingSubtypes.Bgra8;
        case MediaFrameSourceKind.Depth:
            return String.Equals(format.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase) ? format.Subtype : null;
        default:
            return null;
    }
}
private static bool GetBestSourceAndFormat(
    MediaCapture mediaCapture,
    out MediaFrameSource frameSource,
    out MediaFrameFormat frameFormat)
{
    foreach (var source in mediaCapture.FrameSources.Values)
    {
        foreach (var format in source.SupportedFormats)
        {
            if (format.VideoFormat.Width == DesiredWidth &&
                format.VideoFormat.Height == DesiredHeight &&
                format.FrameRate.Numerator == DesiredFramerate)
            {
                frameSource = source;
                frameFormat = format;
                return true;
            }
        }
    }

    frameSource = null;
    frameFormat = null;
    return false;
}
private async Task<bool> InitialiserSourceImages(uint largeur, uint hauteur, string sousTypeVideo)
{
    if (_mediaCapture.FrameSources.Count > 0)
    {
        MediaFrameSource sourceImages = _mediaCapture.FrameSources.First().Value;
        ReglerControleurVideo(sourceImages);

        MediaFrameFormat formatVideo = sourceImages.SupportedFormats.FirstOrDefault(
            f => f.VideoFormat.Width == largeur && f.VideoFormat.Height == hauteur && f.Subtype == sousTypeVideo);
        if (formatVideo != null)
        {
            await sourceImages.SetFormatAsync(formatVideo);
            _lecteurMedia = await _mediaCapture.CreateFrameReaderAsync(sourceImages);
            _lecteurMedia.FrameArrived += _lecteurMedia_FrameArrived;
            await _lecteurMedia.StartAsync();
            return true;
        }
        else
        {
            Debug.WriteLine("The requested format " + largeur.ToString() + "x" + hauteur.ToString() + " in " + sousTypeVideo + " is not supported by this camera!");
            Debug.WriteLine("Here are the supported formats:");
            foreach (MediaFrameFormat formatImages in sourceImages.SupportedFormats)
            {
                Debug.WriteLine(formatImages.MajorType + " " + formatImages.VideoFormat.Width.ToString() + "x" + formatImages.VideoFormat.Height.ToString() + " " + formatImages.Subtype);
            }
            return false;
        }
    }
    else
    {
        Debug.WriteLine("No capture device detected!");
        return false;
    }
}
/// <summary>
/// The Task to asynchronously initialize MediaCapture in UWP. The camera of HoloLens will
/// be configured to preview video of 896x504 at 30 fps, pixel format is NV12. MediaFrameReader
/// will be initialized and will register the callback function OnFrameArrived for each video
/// frame. Note that this task does not start running the video preview, but configures the
/// running behavior. This task should be executed when ARUWPController status is
/// ARUWP_STATUS_CLEAN, and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no error
/// occurred. [internal use]
/// </summary>
/// <returns>Whether the video pipeline is successfully initialized</returns>
public async Task<bool> InitializeMediaCaptureAsyncTask()
{
    if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
        return false;
    }

    if (mediaCapture != null)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
        return false;
    }

    int targetVideoWidth, targetVideoHeight;
    float targetVideoFrameRate;
    switch (videoParameter)
    {
        case VideoParameter.HL1Param1280x720x15:
            HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param1280x720x30:
            HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL1Param1344x756x15:
            HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param1344x756x30:
            HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL1Param896x504x15:
            HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param896x504x30:
            HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL2Param1504x846x60:
            HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 60.0f;
            break;
        case VideoParameter.HL2Param1504x846x30:
            HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 30.0f;
            break;
        default:
            return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    int selectedGroupIndex = -1;
    for (int i = 0; i < allGroups.Count; i++)
    {
        var group = allGroups[i];
        Debug.Log(group.DisplayName + ", " + group.Id);
        // for HoloLens 1
        if (HL == 1)
        {
            if (group.DisplayName == "MN34150")
            {
                selectedGroupIndex = i;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
                break;
            }
        }
        // for HoloLens 2
        else if (HL == 2)
        {
            if (group.DisplayName == "QC Back Camera")
            {
                selectedGroupIndex = i;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
                break;
            }
        }
    }

    if (selectedGroupIndex == -1)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
        return false;
    }

    // Initialize MediaCapture with the selected source group.
    mediaCapture = new MediaCapture();
    MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;
    if (HL == 1)
    {
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
        mediaStreamType = MediaStreamType.VideoPreview;
    }
    else if (HL == 2)
    {
        string deviceId = allGroups[selectedGroupIndex].Id;

        // Look up all video profiles
        //IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindAllVideoProfiles(deviceId);
        //MediaCaptureVideoProfile selectedProfile;
        IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

        // Initialize MediaCapture with the selected device and profile.
        var settings = new MediaCaptureInitializationSettings
        {
            //SourceGroup = allGroups[selectedGroupIndex],
            VideoDeviceId = deviceId,
            VideoProfile = profileList[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
        mediaStreamType = MediaStreamType.VideoRecord;
    }

    try
    {
        var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
        MediaFrameFormat targetResFormat = null;
        float framerateDiffMin = 60f;
        foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
            {
                // Cast before dividing: integer division would truncate fractional frame rates.
                float diff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                if (targetResFormat == null || diff < framerateDiffMin)
                {
                    targetResFormat = f;
                    framerateDiffMin = diff;
                }
            }
        }

        if (targetResFormat == null)
        {
            Debug.Log(TAG + ": Unable to choose the selected format, falling back to the smallest format");
            targetResFormat = mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        }

        await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        controller.frameWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        controller.frameHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);

        // If controller.frameWidth is not aligned to 64, pad it to 64.
        // On HoloLens 2 this is a must.
        if (controller.frameWidth % 64 != 0)
        {
            int paddedFrameWidth = ((controller.frameWidth >> 6) + 1) << 6;
            Debug.Log(TAG + ": the width is padded to " + paddedFrameWidth);
            controller.frameWidth = paddedFrameWidth;
        }

        // Since v0.3, the grayscale feature is forced.
        frameData = new byte[controller.frameWidth * controller.frameHeight];
        Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return false;
    }

    controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
    signalInitDone = true;
    Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
    return true;
}
public async void StartCapture()
{
    System.Diagnostics.Debug.WriteLine("Starting capture.");

    var videoDevice = await GetBestVideoDevice();
    if (videoDevice == null)
    {
        System.Diagnostics.Debug.WriteLine("Failed to find video device.");
        return;
    }

    MediaCaptureVideoProfile profile;
    MediaCaptureVideoProfileMediaDescription description;
    if (!GetBestProfileAndDescription(videoDevice, out profile, out description))
    {
        System.Diagnostics.Debug.WriteLine("Failed to find profile and description.");
        return;
    }

    var settings = new MediaCaptureInitializationSettings
    {
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        VideoDeviceId = videoDevice.Id,
        VideoProfile = profile,
        RecordMediaDescription = description,
    };

    var mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);

    MediaFrameSource source = null;
    MediaFrameFormat format = null;
    if (!GetBestSourceAndFormat(mediaCapture, out source, out format))
    {
        System.Diagnostics.Debug.WriteLine("Failed to find source and format.");
        return;
    }

    System.Diagnostics.Debug.WriteLine(string.Format(
        "Selected Video Format: Width: {0}, Height: {1}, Major Type: {2}, Subtype: {3}, Frame Rate: {4}/{5}",
        format.VideoFormat.Width, format.VideoFormat.Height,
        format.MajorType, format.Subtype,
        format.FrameRate == null ? "null" : format.FrameRate.Numerator.ToString(),
        format.FrameRate == null ? "null" : format.FrameRate.Denominator.ToString()));

    await source.SetFormatAsync(format);

    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(source, MediaEncodingSubtypes.Bgra8);
    if (await mediaFrameReader.StartAsync() != MediaFrameReaderStartStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine("Failed to start media frame reader.");
        return;
    }
    mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
    System.Diagnostics.Debug.WriteLine("Capture started.");
}
async Task Start()
{
    // Socket listeners
    audioSocketListener = new StreamSocketListener();
    audioSocketListener.ConnectionReceived += OnConnectionAudio;
    await audioSocketListener.BindServiceNameAsync(audioServiceName);

    videoSocketListener = new StreamSocketListener();
    videoSocketListener.ConnectionReceived += OnConnectionVideo;
    await videoSocketListener.BindServiceNameAsync(videoServiceName);

    // Find a media source group which gives us webcam and microphone input streams
    var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    MediaFrameSourceGroup selectedSourceGroup = null;
    MediaCaptureVideoProfile selectedVideoProfile = null;
    MediaCaptureVideoProfileMediaDescription selectedDescription = null;

    foreach (MediaFrameSourceGroup sourceGroup in sourceGroups)
    {
        var videoProfiles = MediaCapture.FindKnownVideoProfiles(sourceGroup.Id, KnownVideoProfile.VideoConferencing);
        foreach (MediaCaptureVideoProfile videoProfile in videoProfiles)
        {
            foreach (var desc in videoProfile.SupportedRecordMediaDescription)
            {
                if (desc.Width == videoWidth && desc.Height == videoHeight && desc.FrameRate == frameRate)
                {
                    selectedSourceGroup = sourceGroup;
                    selectedVideoProfile = videoProfile;
                    selectedDescription = desc;
                }
            }
        }
    }

    if (selectedSourceGroup == null)
    {
        Debug.Log("No source group was found.");
        return;
    }

    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        AudioProcessing = AudioProcessing.Raw,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        RecordMediaDescription = selectedDescription,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        SourceGroup = selectedSourceGroup,
        StreamingCaptureMode = StreamingCaptureMode.AudioAndVideo,
        VideoProfile = selectedVideoProfile,
    };

    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        Debug.Log("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    var audioFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.MediaStreamType == MediaStreamType.Audio);
    if (audioFrameSources.Count() == 0)
    {
        Debug.Log("No audio source was found.");
        return;
    }
    MediaFrameSource audioFrameSource = audioFrameSources.FirstOrDefault().Value;

    var videoFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.SourceKind == MediaFrameSourceKind.Color);
    if (videoFrameSources.Count() == 0)
    {
        Debug.Log("No video source was found.");
        return;
    }
    // MediaFrameSource videoFrameSource = videoFrameSources.FirstOrDefault().Value;
    MediaFrameSource videoFrameSource = null;
    MediaFrameFormat selectedFormat = null;

    foreach (var kv in videoFrameSources)
    {
        MediaFrameSource source = kv.Value;
        foreach (MediaFrameFormat format in source.SupportedFormats)
        {
            if (format.VideoFormat.Width == videoWidth && format.VideoFormat.Height == videoHeight &&
                format.FrameRate.Numerator == frameRate && format.FrameRate.Denominator == 1)
            {
                videoFrameSource = source;
                selectedFormat = format;
                break;
            }
        }
        if (videoFrameSource != null)
        {
            break;
        }
    }

    if (selectedFormat != null)
    {
        await videoFrameSource.SetFormatAsync(selectedFormat);
    }
    else
    {
        Debug.Log("Cannot find a proper MediaFrameFormat.");
        return;
    }

    // Start streaming
    audioFrameReader = await mediaCapture.CreateFrameReaderAsync(audioFrameSource);
    audioFrameReader.FrameArrived += AudioFrameArrived;
    videoFrameReader = await mediaCapture.CreateFrameReaderAsync(videoFrameSource);
    videoFrameReader.FrameArrived += VideoFrameArrived;

    var audioStartStatus = audioFrameReader.StartAsync();
    var videoStartStatus = videoFrameReader.StartAsync();
    if (await audioStartStatus != MediaFrameReaderStartStatus.Success)
    {
        Debug.Log("The audioFrameReader couldn't start.");
    }
    if (await videoStartStatus != MediaFrameReaderStartStatus.Success)
    {
        Debug.Log("The videoFrameReader couldn't start.");
    }
}
static async Task<Tuple<MediaFrameReader, EventWaitHandle>> GetFrameReaderAsync()
{
    MediaCapture capture = new MediaCapture();
    MediaCaptureInitializationSettings init = new MediaCaptureInitializationSettings();

    Log.WriteLine("Enumerating Frame Source Info");
    var (frame_group, frame_source_info) = await EnumFrameSourcesAsync();

    Log.WriteLine("Selecting Source");
    init.SourceGroup = frame_group;
    init.SharingMode = MediaCaptureSharingMode.ExclusiveControl;
    //init.SharingMode(MediaCaptureSharingMode::SharedReadOnly);
    //init.MemoryPreference(a.opt.fgpu_only ? MediaCaptureMemoryPreference::Auto : MediaCaptureMemoryPreference::Cpu);
    init.MemoryPreference = MediaCaptureMemoryPreference.Cpu;
    init.StreamingCaptureMode = StreamingCaptureMode.Video;

    Log.WriteLine("Enumerating Frame Sources");
    await AsyncHelper.SyncFromAsync(capture.InitializeAsync(init), "capture init");
    Log.WriteLine("capture initialized");

    var sources = capture.FrameSources;
    Log.WriteLine("have frame sources");

    MediaFrameSource source;
    var found = sources.TryGetValue(frame_source_info.Id, out source);
    if (!found)
    {
        throw new ApplicationException(string.Format("can't find source {0}", frame_source_info.Id));
    }
    Log.WriteLine("have frame source that matches chosen source info id");

    // MediaCaptureVideoProfile doesn't have a frame reader variant, only photo, preview, and record,
    // so we will enumerate and select instead of just declaring what we want and having the system
    // give us the closest match.
    var formats = source.SupportedFormats;
    Log.WriteLine("have formats");
    MediaFrameFormat format = null;
    Log.WriteLine("hunting for format");
    foreach (var f in formats)
    {
        Log.Write(string.Format("major {0} sub {1} ", f.MajorType, f.Subtype));
        if (f.MajorType == "Video")
        {
            Log.Write(string.Format("w {0} h {1} ", f.VideoFormat.Width, f.VideoFormat.Height));
            if (format == null)
            {
                format = f;
                Log.Write(" *** Updating Selection *** ");
            }
            else
            {
                var vf = format.VideoFormat;
                var new_vf = f.VideoFormat;
                if (new_vf.Width > vf.Width || new_vf.Height > vf.Height)
                {
                    // this will select the first of any duplicates, which hopefully is ok
                    format = f;
                    Log.Write(" *** Updating Selection *** ");
                }
            }
        }
        Log.WriteLine("");
    }
    if (format == null)
    {
        throw new ApplicationException("Can't find a Video Format");
    }
    Log.WriteLine(string.Format("selected videoformat -- major {0} sub {1} w {2} h {3}",
        format.MajorType, format.Subtype, format.VideoFormat.Width, format.VideoFormat.Height));

    await AsyncHelper.SyncFromAsync(source.SetFormatAsync(format), "set format");
    Log.WriteLine("set format complete");

    var reader = await AsyncHelper.SyncFromAsync(capture.CreateFrameReaderAsync(source), "reader");
    Log.WriteLine("frame reader retrieved\r\n");
    reader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

    var evtframe = new EventWaitHandle(false, EventResetMode.ManualReset);
    reader.FrameArrived += (MediaFrameReader sender, MediaFrameArrivedEventArgs args) => evtframe.Set();
    return new Tuple<MediaFrameReader, EventWaitHandle>(reader, evtframe);
}
/// <summary>
/// Initializes MediaCapture in shared mode and creates a frame reader on the frame source at index <c>id</c>.
/// </summary>
/// <returns>Whether initialization succeeded.</returns>
private async Task<bool> InitializeMediaCaptureAsync()
{
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because of incorrect status");
        return false;
    }

    if (mediaCapture != null)
    {
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }

    if (allGroups.Count <= 0)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
        return false;
    }

    // Initialize MediaCapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    Debug.Log(TAG + " " + id + ": MediaCapture is successfully initialized in shared mode.");

    // Log all frame source information.
    string logString = "";
    foreach (var frameSource in mediaCapture.FrameSources)
    {
        var info = frameSource.Value.Info;
        logString += info.Id + ", " + info.MediaStreamType + ", " + info.SourceKind + "\n";
        logString += "Total number of SupportedFormats is " + frameSource.Value.SupportedFormats.Count + "\n";
        foreach (var format in frameSource.Value.SupportedFormats)
        {
            logString += format.VideoFormat.Width + " x " + format.VideoFormat.Height +
                ", Major type: " + format.MajorType + ", Subtype: " + format.Subtype +
                ", Framerate: " + format.FrameRate.Numerator + "/" + format.FrameRate.Denominator + "\n";
        }
    }
    Debug.Log(logString);

    MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
    MediaFrameFormat targetResFormat = targetFrameSource.SupportedFormats[0];
    try
    {
        // choose the smallest resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        // choose the specific resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
        await targetFrameSource.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(TAG + " " + id + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
            ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + " " + id + ": FrameReader is not initialized");
        Debug.Log(TAG + " " + id + ": Exception: " + e);
        return false;
    }

    captureStatus = CaptureStatus.Initialized;
    return true;
}
//Event setup for interaction
//Code using the UWP API. Works when compiled and exported, but can't compile in Unity without the WINDOWS_UWP define.
#region
#if WINDOWS_UWP
async void LaunchScanSequence()
{
    print("Start initialize, preparing cap");

    //MediaGroup selection
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                colorSourceInfo = sourceInfo;
                break;
            }
        }
        if (colorSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    //MediaCapture init
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    print("Mid initialize, preparing cap");

    //Find the supported format with the largest pixel count and select it
    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    IEnumerable<MediaFrameFormat> supportedFormats = colorFrameSource.SupportedFormats;
    MediaFrameFormat bestFormat = null;
    int maxPixels = 0;
    foreach (MediaFrameFormat candidate in supportedFormats)
    {
        if (candidate.VideoFormat.Height * candidate.VideoFormat.Width > maxPixels)
        {
            bestFormat = candidate;
            maxPixels = (int)(candidate.VideoFormat.Height * candidate.VideoFormat.Width);
            resH = (int)candidate.VideoFormat.Height;
            resW = (int)candidate.VideoFormat.Width;
        }
    }
    if (bestFormat == null)
    {
        print("error on format"); // Our desired format is not supported
        return;
    }

    //Finalize the data structure for the scanner
    print("FrameFormat done, Resolution is " + maxPixels + "p, " + resH + "x" + resW);
    /* halfresW = (int)Mathf.Floor(resW / 2);
     * halfresH = (int)Mathf.Floor(resH / 2);
     * for(int i = 0; i < 4; i++)
     * {
     *     subImages[i] = new byte[((halfresW*4)*(halfresH*4))+2];
     *     for(int y = 0; y < subImages[i].Length; y++)
     *     {
     *         subImages[i][y] = 0;
     *     }
     * }*/
    buffer = new Windows.Storage.Streams.Buffer((uint)maxPixels * 32);
    await colorFrameSource.SetFormatAsync(bestFormat);

    print("End initialize, preparing cap");

    //Start capture
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
}
private async Task<bool> InitializeMediaCaptureAsync()
{
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(": InitializeMediaCaptureAsync() fails because of incorrect status");
        return false;
    }

    if (mediaCapture != null)
    {
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }

    if (allGroups.Count <= 0)
    {
        Debug.Log(": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
        return false;
    }

    // Initialize MediaCapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[group_id],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    Debug.Log(": MediaCapture is successfully initialized in shared mode.");

    MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
    MediaFrameFormat targetResFormat = targetFrameSource.SupportedFormats[0];
    try
    {
        await targetFrameSource.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
            ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
    }
    catch (Exception e)
    {
        Debug.Log(": FrameReader is not initialized");
        Debug.Log(": Exception: " + e);
        return false;
    }

    captureStatus = CaptureStatus.Initialized;
    return true;
}
private async void init()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    LogError("checkpoint 1.1");

    var targetGroups = frameSourceGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();
    LogError("checkpoint 1.2");

    if (targetGroups.Count == 0)
    {
        LogError("No source groups found.");
        return;
    }

    MediaFrameSourceGroup mediaSourceGroup = targetGroups[0].Group;
    LogError("checkpoint 1.3");

    mediaCapture = new MediaCapture();
    LogError("checkpoint 1.4");

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = mediaSourceGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    LogError("checkpoint 1.5");
    await mediaCapture.InitializeAsync(settings);
    LogError("checkpoint 1.6");

    MediaFrameSource colorSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Color);
    MediaFrameSource depthSource = mediaCapture.FrameSources.Values.FirstOrDefault(
        s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    LogError("checkpoint 1.7");

    if (colorSource == null || depthSource == null)
    {
        LogError("Cannot find color or depth stream.");
        return;
    }

    MediaFrameFormat colorFormat = colorSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.Rgb24;
    }).FirstOrDefault();

    MediaFrameFormat depthFormat = depthSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.D16;
    }).FirstOrDefault();

    if (colorFormat == null || depthFormat == null)
    {
        LogError("Cannot find a suitable color or depth format.");
        return; // SetFormatAsync(null) below would throw
    }

    await colorSource.SetFormatAsync(colorFormat);
    await depthSource.SetFormatAsync(depthFormat);

    _colorSourceId = colorSource.Info.Id;
    _depthSourceId = depthSource.Info.Id;

    _frameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(new[] { colorSource, depthSource });
    _frameReader.FrameArrived += FrameReader_FrameArrived;

    MultiSourceMediaFrameReaderStartStatus startStatus = await _frameReader.StartAsync();
    if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
    {
        throw new InvalidOperationException("Unable to start reader: " + startStatus);
    }

    this.CorrelationFailed += MainPage_CorrelationFailed;
    Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
}
private async Task<bool> InitializeMediaCaptureAsync()
{
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because of incorrect status");
        return false;
    }

    if (mediaCapture != null)
    {
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    int selectedGroupIndex = -1;
    for (int i = 0; i < allGroups.Count; i++)
    {
        var group = allGroups[i];
        Debug.Log(group.DisplayName + ", " + group.Id);
        // for HoloLens 1
        if (group.DisplayName == "MN34150")
        {
            selectedGroupIndex = i;
            HL = 1;
            Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
            break;
        }
        // for HoloLens 2
        else if (group.DisplayName == "QC Back Camera")
        {
            selectedGroupIndex = i;
            HL = 2;
            Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
            break;
        }
    }

    if (selectedGroupIndex == -1)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because there is no suitable source group");
        return false;
    }

    // Initialize MediaCapture with the selected source group.
    mediaCapture = new MediaCapture();
    MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;
    if (HL == 1)
    {
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
        mediaStreamType = MediaStreamType.VideoPreview;
    }
    else if (HL == 2)
    {
        string deviceId = allGroups[selectedGroupIndex].Id;

        // Look up all video profiles
        IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindAllVideoProfiles(deviceId);
        //MediaCaptureVideoProfile selectedProfile;
        //IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

        // Initialize MediaCapture with the source group.
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            //VideoDeviceId = deviceId,
            //VideoProfile = profileList[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
        mediaStreamType = MediaStreamType.VideoRecord;
    }

    try
    {
        var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
        MediaFrameFormat targetResFormat = null;
        float framerateDiffMin = 60f;
        foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            // Check the current media frame source resolution against the target resolution
            if (f.VideoFormat.Width == _targetVideoWidth && f.VideoFormat.Height == _targetVideoHeight)
            {
                // Cast before dividing: integer division would truncate fractional frame rates.
                float diff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate);
                if (targetResFormat == null || diff < framerateDiffMin)
                {
                    targetResFormat = f;
                    framerateDiffMin = diff;
                }
            }
        }

        if (targetResFormat == null)
        {
            targetResFormat = mediaFrameSourceVideo.SupportedFormats[0];
            Debug.Log(TAG + ": Unable to choose the selected format, fall back");
        }
        // choose the smallest resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        // choose the specific resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();

        await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);
        Debug.Log(TAG + ": mediaFrameSourceVideo.SetFormatAsync()");
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);
        Debug.Log(TAG + ": mediaCapture.CreateFrameReaderAsync()");
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(TAG + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return false;
    }

    captureStatus = CaptureStatus.Initialized;
    return true;
}
public bool Compare(MediaFrameFormat mff)
{
    // mff.VideoFormat.MediaFrameFormat points back to mff itself, so read FrameRate directly.
    // Note: comparing the numerator alone assumes the denominator is 1.
    return mff.VideoFormat.Width == Width &&
           mff.VideoFormat.Height == Height &&
           mff.FrameRate.Numerator == FrameRate;
}
/// <summary>
/// Start the video stream. This just prepares the stream for capture, and doesn't start collecting frames.
/// </summary>
/// <param name="streamDesc">The description of the stream to start.</param>
public async void Start(StreamDescription streamDesc)
{
#if CAN_USE_UWP_TYPES
    lock (stateLock)
    {
        if (State != CameraState.Initialized)
        {
            throw new InvalidOperationException("Start cannot be called until the camera is in the Initialized state");
        }
        State = CameraState.Starting;
    }

    Resolution = streamDesc.Resolution;
    CameraType = streamDesc.CameraType;

    StreamDescriptionInternal desc = streamDesc as StreamDescriptionInternal;

    MediaCaptureInitializationSettings initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = desc.FrameSourceGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    // initialize the media device
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(initSettings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"MediaCapture initialization failed: {ex.Message}");
        mediaCapture.Dispose();
        mediaCapture = null;
    }

    if (mediaCapture != null)
    {
        // get access to the video device controller for property settings
        videoDeviceController = mediaCapture.VideoDeviceController;

        // choose media source
        MediaFrameSource frameSource = mediaCapture.FrameSources[desc.FrameSourceInfo.Id];
        MediaFrameFormat preferredFormat = null;
        foreach (MediaFrameFormat format in frameSource.SupportedFormats)
        {
            if (format.VideoFormat.Width == desc.Resolution.Width &&
                format.VideoFormat.Height == desc.Resolution.Height &&
                Math.Abs((double)format.FrameRate.Numerator / format.FrameRate.Denominator - desc.Resolution.Framerate) < epsilon)
            {
                preferredFormat = format;
                break;
            }
        }

        if (preferredFormat == null)
        {
            System.Diagnostics.Debug.WriteLine($"failed to find the desired frame format");
        }
        else if (preferredFormat != frameSource.CurrentFormat)
        {
            await frameSource.SetFormatAsync(preferredFormat);
        }

        // set up the frame reader to capture frame data
        frameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
        frameReader.FrameArrived += OnMediaFrameArrived;
        await frameReader.StartAsync();

        lock (stateLock)
        {
            State = CameraState.Ready;
            OnCameraStarted?.Invoke(this, true);
        }
    }
    else
    {
        lock (stateLock)
        {
            // drop back to initialized when the camera doesn't initialize
            State = CameraState.Initialized;
            OnCameraStarted?.Invoke(this, false);
        }
    }
#else
    await Task.CompletedTask;
#endif
}
/// <summary>
/// Initialize camera pipeline resources and register a callback for when new VideoFrames become available.
/// </summary>
/// <returns></returns>
private async Task InitializeAsync()
{
    // Initialize MediaCapture with default settings in video-only streaming mode.
    // We first try to acquire exclusive sharing mode, and if we fail, we then attempt again in shared mode
    // so that multiple instances can access the camera concurrently.
    m_mediaCapture = new MediaCapture();
    var mediaCaptureInitializationSettings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Video,
        SharingMode = m_sharingMode
    };

    // Register a callback in case MediaCapture fails. This can happen for example if another app is already using the camera and we can't get ExclusiveControl.
    m_mediaCapture.Failed += MediaCapture_Failed;

    await m_mediaCapture.InitializeAsync(mediaCaptureInitializationSettings);

    // Get a list of available frame sources and iterate through them to find a video preview or
    // a video record source with color images (and not IR, depth or other types).
    var selectedFrameSource = m_mediaCapture.FrameSources.FirstOrDefault(source =>
        source.Value.Info.MediaStreamType == MediaStreamType.VideoPreview &&
        source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;
    if (selectedFrameSource == null)
    {
        selectedFrameSource = m_mediaCapture.FrameSources.FirstOrDefault(source =>
            source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
            source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;
    }
    if (selectedFrameSource == null)
    {
        throw new Exception("No valid video frame sources were found with source type color.");
    }

    Console.WriteLine($"{selectedFrameSource.Info.DeviceInformation?.Name} | MediaStreamType: {selectedFrameSource.Info.MediaStreamType} MediaFrameSourceKind: {selectedFrameSource.Info.SourceKind}");

    // If initializing in ExclusiveControl mode, attempt to use a 15fps+ BGRA8 format natively from the camera.
    // If not, just use whatever format is already set.
    MediaFrameFormat selectedFormat = selectedFrameSource.CurrentFormat;
    if (m_sharingMode == MediaCaptureSharingMode.ExclusiveControl)
    {
        var mediaFrameFormats = selectedFrameSource.SupportedFormats.OrderByDescending(
            format => format.VideoFormat.Width * format.VideoFormat.Height);
        selectedFormat = mediaFrameFormats.Where(format =>
            format.FrameRate.Numerator / format.FrameRate.Denominator >= 15 && // fps
            string.Compare(format.Subtype, MediaEncodingSubtypes.Bgra8, true) == 0).FirstOrDefault();

        // If not possible, then try to use another supported format at 15fps+
        if (selectedFormat == null)
        {
            selectedFormat = mediaFrameFormats.Where(format =>
                format.FrameRate.Numerator / format.FrameRate.Denominator >= 15 && // fps
                (string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0 ||
                 string.Compare(format.Subtype, MediaEncodingSubtypes.Yuy2, true) == 0 ||
                 string.Compare(format.Subtype, MediaEncodingSubtypes.Rgb32, true) == 0)).FirstOrDefault();
        }
        if (selectedFormat == null)
        {
            throw new Exception("No suitable media format found on the selected source");
        }

        Console.WriteLine($"Attempting to set camera source to {selectedFormat.Subtype} : " +
            $"{selectedFormat.VideoFormat.Width}x{selectedFormat.VideoFormat.Height}" +
            $"@{selectedFormat.FrameRate.Numerator / selectedFormat.FrameRate.Denominator}fps");

        await selectedFrameSource.SetFormatAsync(selectedFormat);
        selectedFormat = selectedFrameSource.CurrentFormat;
    }

    Console.WriteLine($"Frame source format: {selectedFormat.Subtype} : " +
        $"{selectedFormat.VideoFormat.Width}x{selectedFormat.VideoFormat.Height}" +
        $"@{selectedFormat.FrameRate.Numerator / selectedFormat.FrameRate.Denominator}fps");

    m_frameReader = await m_mediaCapture.CreateFrameReaderAsync(selectedFrameSource);
    m_frameReader.FrameArrived += FrameArrivedHandler;
    m_frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
    await m_frameReader.StartAsync();
}
/// <summary>
/// The Task to asynchronously initialize MediaCapture in UWP. The camera of HoloLens will
/// be configured to preview video of 896x504 at 30 fps, pixel format is NV12. MediaFrameReader
/// will be initialized and will register the callback function OnFrameArrived for each video
/// frame. Note that this task does not start running the video preview, but configures the
/// running behavior. This task should be executed when ARUWPController status is
/// ARUWP_STATUS_CLEAN, and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no error
/// occurred. [internal use]
/// </summary>
/// <returns>Whether the video pipeline is successfully initialized</returns>
public async Task<bool> InitializeMediaCaptureAsyncTask()
{
    if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
        return false;
    }

    if (mediaCapture != null)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
        return false;
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }

    if (allGroups.Count <= 0)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no MediaFrameSourceGroup");
        return false;
    }

    // Initialize MediaCapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await mediaCapture.InitializeAsync(settings);
    Debug.Log(TAG + ": MediaCapture is successfully initialized in shared mode.");

    try
    {
        int targetVideoWidth, targetVideoHeight;
        float targetVideoFrameRate;
        switch (videoParameter)
        {
            case VideoParameter.Param1280x720x15:
                targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 15.0f;
                break;
            case VideoParameter.Param1280x720x30:
                targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 30.0f;
                break;
            case VideoParameter.Param1344x756x15:
                targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 15.0f;
                break;
            case VideoParameter.Param1344x756x30:
                targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 30.0f;
                break;
            case VideoParameter.Param896x504x15:
                targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 15.0f;
                break;
            case VideoParameter.Param896x504x30:
            default:
                targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 30.0f;
                break;
        }

        var mediaFrameSourceVideoPreview = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == MediaStreamType.VideoPreview);
        MediaFrameFormat targetResFormat = null;
        float framerateDiffMin = 60f;
        foreach (var f in mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
            {
                // Cast before dividing: integer division would truncate fractional frame rates.
                float diff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                if (targetResFormat == null || diff < framerateDiffMin)
                {
                    targetResFormat = f;
                    framerateDiffMin = diff;
                }
            }
        }

        if (targetResFormat == null)
        {
            Debug.Log(TAG + ": Unable to choose the selected format, falling back to the smallest format");
            targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        }

        await mediaFrameSourceVideoPreview.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideoPreview, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        controller.frameWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        controller.frameHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        videoBufferSize = controller.frameWidth * controller.frameHeight * 4;
        Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return false;
    }

    controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
    signalInitDone = true;
    Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
    return true;
}