/// <summary>
/// Asynchronously starts the video pipeline and frame reading. Must be invoked while the
/// ARUWPController status is ARUWP_STATUS_CTRL_INITIALIZED; on success the status is
/// advanced to ARUWP_STATUS_RUNNING. Wrapped by ARUWPController.Resume(). [internal use]
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
public async Task<bool> StartFrameReaderAsyncTask() {
    // Refuse to start unless the controller is in the expected state.
    if (controller.status != ARUWP.ARUWP_STATUS_CTRL_INITIALIZED) {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() fails because of incorrect status");
        return false;
    }

    if (!initializeVideoHere) {
        // Video capture is owned elsewhere; just advance the controller status.
        Debug.Log(TAG + ": StartFrameReaderAsyncTask(): initializeVideoHere = false, mock-setting the controller status to ARUWP_STATUS_RUNNING despite not starting frameReader");
        controller.status = ARUWP.ARUWP_STATUS_RUNNING;
        return true;
    }

    MediaFrameReaderStartStatus startStatus = await frameReader.StartAsync();
    if (startStatus != MediaFrameReaderStartStatus.Success) {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() is not successful, status = " + startStatus);
        return false;
    }

    Debug.Log(TAG + ": StartFrameReaderAsyncTask() is successful");
    controller.status = ARUWP.ARUWP_STATUS_RUNNING;
    return true;
}
/// <summary>
/// Finds a source group exposing a depth camera, initializes MediaCapture for it,
/// selects the D16 depth format and starts a frame reader on the depth source.
/// Logs an error and aborts at each step that fails.
/// </summary>
private async Task InitMediaSourceAsync() {
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0) {
        Debug.LogError("cannot found MediaFrameSourceGroup. アプリケーションマニュフェストを確認してください。");
        return;
    }

    // Keep only a group that exposes at least one depth source.
    MediaFrameSourceGroup sourceGroup =
        allGroups.FirstOrDefault(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));
    if (sourceGroup == null) {
        Debug.LogError("深度カメラが見つからないようです。");
        return;
    }

    try {
        await InitializeMediaCaptureAsync(sourceGroup);
    } catch (Exception exception) {
        Debug.LogError("InitializeMediaCaptureAsyncに失敗しました" + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    MediaFrameSource source = _mediaCapture.FrameSources.Values
        .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    if (source == null) {
        Debug.LogError("sourceが見つかりません。");
        // BUG FIX: previously execution fell through here and dereferenced the
        // null source below, throwing NullReferenceException.
        return;
    }

    // Prefer the 16-bit depth (D16) format for the reader.
    MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(
        f => String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));
    if (format == null) {
        return;
    }

    await source.SetFormatAsync(format);
    _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);
    MediaFrameReaderStartStatus status = await _depthReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success) {
        Debug.LogError("_depthReader.StartAsyncに失敗しました");
    }
}
/// <summary>
/// Builds a VideoFrameProcessor backed by the first color VideoPreview source found,
/// or null when no such camera exists (e.g. on the emulator).
/// </summary>
public static async Task<VideoFrameProcessor> CreateAsync()
{
    IReadOnlyList<MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();

    // Locate the first group that offers a color VideoPreview source.
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;
    foreach (var group in groups)
    {
        MediaFrameSourceInfo colorInfo = null;
        foreach (var info in group.SourceInfos)
        {
            if (info.MediaStreamType == MediaStreamType.VideoPreview &&
                info.SourceKind == MediaFrameSourceKind.Color)
            {
                colorInfo = info;
                break;
            }
        }
        if (colorInfo != null)
        {
            selectedGroup = group;
            selectedSourceInfo = colorInfo;
            break;
        }
    }

    // No valid camera was found. This will happen on the emulator.
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        return null;
    }

    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
    settings.MemoryPreference = MediaCaptureMemoryPreference.Cpu;       // Need SoftwareBitmaps for FaceAnalysis
    settings.StreamingCaptureMode = StreamingCaptureMode.Video;         // Only need to stream video
    settings.SourceGroup = selectedGroup;

    MediaCapture mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);

    MediaFrameSource selectedSource = mediaCapture.FrameSources[selectedSourceInfo.Id];
    MediaFrameReader reader = await mediaCapture.CreateFrameReaderAsync(selectedSource);
    MediaFrameReaderStartStatus status = await reader.StartAsync();

    // Only create a VideoFrameProcessor if the reader successfully started.
    if (status == MediaFrameReaderStartStatus.Success)
    {
        return new VideoFrameProcessor(mediaCapture, reader, selectedSource);
    }
    return null;
}
/// <summary>
/// Starts the video pipeline and frame reading.
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
private async Task<bool> StartFrameReaderAsyncTask()
{
    MediaFrameReaderStartStatus result = await _frameReader.StartAsync();

    // Log failure with the reported status; success with a simple message.
    if (result != MediaFrameReaderStartStatus.Success)
    {
        _logger.LogError($"Could not start frame reader, status: {result}");
        return false;
    }

    _logger.Log("Started Frame reader");
    return true;
}
/// <summary>
/// Starts reading frames from the current reader.
/// </summary>
private async Task StartReaderAsync()
{
    await CreateReaderAsync();

    // Nothing to do when the reader is missing or already streaming.
    if (_reader == null || _streaming)
    {
        return;
    }

    MediaFrameReaderStartStatus result = await _reader.StartAsync();
    _logger.Log($"Start reader with result: {result}");
    if (result == MediaFrameReaderStartStatus.Success)
    {
        _streaming = true;
    }
}
/// <summary>
/// Starts reading frames from the second reader and refreshes the button state on success.
/// </summary>
private async Task StartReaderAsync2()
{
    await CreateReaderAsync2();

    // Nothing to do when the reader is missing or already streaming.
    if (_reader2 == null || _streaming2)
    {
        return;
    }

    MediaFrameReaderStartStatus result = await _reader2.StartAsync();
    _logger2.Log($"Start reader2 with result: {result}");
    if (result == MediaFrameReaderStartStatus.Success)
    {
        _streaming2 = true;
        await UpdateButtonStateAsync2();
    }
}
/// <summary>
/// Asynchronously starts the video pipeline and frame reading. Must be invoked while the
/// ARUWPController status is ARUWP_STATUS_CTRL_INITIALIZED; on success the status is
/// advanced to ARUWP_STATUS_RUNNING. Wrapped by ARUWPController.Resume(). [internal use]
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
public async Task<bool> StartFrameReaderAsyncTask() {
    // Refuse to start unless the controller is in the expected state.
    if (controller.status != ARUWP.ARUWP_STATUS_CTRL_INITIALIZED) {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() fails because of incorrect status");
        return false;
    }

    MediaFrameReaderStartStatus startStatus = await frameReader.StartAsync();
    if (startStatus != MediaFrameReaderStartStatus.Success) {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() is not successful, status = " + startStatus);
        return false;
    }

    Debug.Log(TAG + ": StartFrameReaderAsyncTask() is successful");
    controller.status = ARUWP.ARUWP_STATUS_RUNNING;
    return true;
}
/// <summary>
/// Starts the frame reader. Requires captureStatus == CaptureStatus.Initialized and,
/// on success, advances it to CaptureStatus.Running.
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
private async Task<bool> StartFrameReaderAsync() {
    Debug.Log(TAG + " StartFrameReaderAsync() thread ID is " + Thread.CurrentThread.ManagedThreadId);

    if (captureStatus != CaptureStatus.Initialized) {
        Debug.Log(TAG + ": StartFrameReaderAsync() fails because of incorrect status");
        return false;
    }

    MediaFrameReaderStartStatus status = await frameReader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success) {
        Debug.Log(TAG + ": StartFrameReaderAsync() is successful");
        captureStatus = CaptureStatus.Running;
        return true;
    } else {
        // BUG FIX: this failure path previously logged "is successful".
        Debug.Log(TAG + ": StartFrameReaderAsync() is not successful, status = " + status);
        return false;
    }
}
/// <summary>
/// Creates a FrameGrabber bound to the first color camera source found. Returns an
/// empty FrameGrabber when no color source exists, initialization fails, or the
/// reader cannot be started. The MediaCapture is disposed on every failure path.
/// </summary>
public static async Task<FrameGrabber> CreateAsync()
{
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;

    // Pick first color source across all source groups.
    var groups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (MediaFrameSourceGroup sourceGroup in groups)
    {
        foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (selectedSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    // No valid camera was found. This will happen on the emulator.
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        Debug.WriteLine("Failed to find Group and SourceInfo");
        return new FrameGrabber();
    }

    // Create settings
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = selectedGroup,
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    };

    // Create and initialize capture device
    MediaCapture mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception e)
    {
        // BUG FIX: log message typos corrected; dispose the capture to avoid leaking
        // the device on initialization failure.
        Debug.WriteLine($"Failed to initialize MediaCapture {e}");
        mediaCapture.Dispose();
        return new FrameGrabber();
    }

    MediaFrameSource selectedSource = mediaCapture.FrameSources[selectedSourceInfo.Id];

    // Create new frame reader and start it.
    MediaFrameReader mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(selectedSource);
    MediaFrameReaderStartStatus status = await mediaFrameReader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("MediaFrameReaderStartStatus == Success");
        return new FrameGrabber(mediaCapture, selectedSource, mediaFrameReader);
    }
    else
    {
        Debug.WriteLine($"MediaFrameReaderStartStatus != Success; {status}");
        // BUG FIX: release the reader and capture device when the reader fails to start.
        mediaFrameReader.Dispose();
        mediaCapture.Dispose();
        return new FrameGrabber();
    }
}
/// <summary>
/// Switches to the next camera source and starts reading frames.
/// </summary>
private async Task PickNextMediaSourceWorkerAsync()
{
    await CleanupMediaCaptureAsync();

    // For every group, look up its color, depth and infrared sources (null when absent)
    // and keep only the groups exposing at least one of those kinds.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Infrared),
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        _logger.Log("No source group with color, depth or infrared found.");
        return;
    }

    // Advance to the next eligible group on every invocation (wraps around).
    _groupSelectionIndex = (_groupSelectionIndex + 1) % eligibleGroups.Count;
    var selected = eligibleGroups[_groupSelectionIndex];
    _logger.Log($"Found {eligibleGroups.Count} groups and selecting index [{_groupSelectionIndex}]: {selected.Group.DisplayName}");

    try
    {
        // May throw when the source no longer exists or cannot be initialized.
        await InitializeMediaCaptureAsync(selected.Group);
    }
    catch (Exception exception)
    {
        _logger.Log($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create a reader per discovered source, hook the frame handler and start streaming.
    string[] kindNames = { "Color", "Depth", "Infrared" };
    for (int i = 0; i < selected.SourceInfos.Length; i++)
    {
        MediaFrameSourceInfo info = selected.SourceInfos[i];
        if (info == null)
        {
            _logger.Log($"No {kindNames[i]} source in group '{selected.Group.DisplayName}'.");
            continue;
        }

        // The source may have left the group while we were choosing, so re-check its Id.
        MediaFrameSource frameSource = null;
        if (!_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
        {
            _logger.Log($"Unable to start {info.SourceKind} reader. Frame source not found");
            continue;
        }

        MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource);
        frameReader.FrameArrived += FrameReader_FrameArrived;
        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status != MediaFrameReaderStartStatus.Success)
        {
            _logger.Log($"Unable to start {info.SourceKind} reader. Error: {status}");
        }
    }
}
/// <summary>
/// Selects a depth-capable camera source and starts reading frames from it.
/// </summary>
private async Task BeginDepthFrameStreaming()
{
    await CleanupMediaCaptureAsync();

    // Keep only the groups that expose a depth source; the Move panel needs nothing else.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        Logger.GetInstance().LogLine("No source group with color info found.");
        return;
    }

    // Any depth-capable group will do, so take the first.
    var selected = eligibleGroups.First();
    Logger.GetInstance().LogLine($"Found {eligibleGroups.Count} groups and selecting the first: {selected.Group.DisplayName}");

    try
    {
        // May throw when the source no longer exists or cannot be initialized.
        await InitializeMediaCaptureAsync(selected.Group);
    }
    catch (Exception exception)
    {
        Logger.GetInstance().LogLine($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create the reader, hook the frame handler and start streaming.
    foreach (MediaFrameSourceInfo info in selected.SourceInfos)
    {
        if (info == null)
        {
            continue;
        }

        // The source may have left the group while we were choosing, so re-check its Id.
        MediaFrameSource frameSource = null;
        if (_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
        {
            frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource);
            frameReader.FrameArrived += FrameReader_FrameArrived;
            MediaFrameReaderStartStatus status = await frameReader.StartAsync();
            if (status != MediaFrameReaderStartStatus.Success)
            {
                Logger.GetInstance().LogLine("Unable to start the MediaFrameReader frameReader.");
            }
        }
    }
}
/// <summary>
/// Initializes media capture for the first color camera group that can be opened and
/// starts a frame reader whose frames are rendered into <paramref name="imageElement"/>.
/// </summary>
public async Task initialize(Image imageElement)
{
    // Lazily create the renderer map (color only) on first use.
    if (_frameRenderers == null)
    {
        _frameRenderers = new Dictionary<MediaFrameSourceKind, FrameRenderer>()
        {
            { MediaFrameSourceKind.Color, new FrameRenderer(imageElement) },
        };
    }

    await CleanupMediaCaptureAsync();
    _imageElement = imageElement;

    // Keep only the groups that expose a color source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var eligibleGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color)
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

    if (eligibleGroups.Count == 0)
    {
        return;
    }

    // Try each eligible group once, advancing the rotating index, until one initializes.
    bool foundOne = false;
    int count = eligibleGroups.Count;
    MediaFrameSourceInfo[] sourceInfos = null;
    string groupName = "";
    // BUG FIX: the loop bound was "count-- >= 0", which attempted Count + 1 groups
    // (re-trying one group twice).
    while (!foundOne && count-- > 0)
    {
        _groupSelectionIndex = (_groupSelectionIndex + 1) % eligibleGroups.Count;
        var selected = eligibleGroups[_groupSelectionIndex];
        try
        {
            // This can raise an exception if the source no longer exists,
            // or if the source could not be initialized.
            await InitializeMediaCaptureAsync(selected.Group);
            groupName = selected.Group.DisplayName;
            sourceInfos = selected.SourceInfos;
            foundOne = true;
        }
        catch (Exception exception)
        {
            Debug.WriteLine(exception.Message + "\n" + exception.StackTrace);
            await CleanupMediaCaptureAsync();
        }
    }

    if (!foundOne)
    {
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    for (int i = 0; i < sourceInfos.Length; i++)
    {
        MediaFrameSourceInfo info = sourceInfos[i];
        if (info != null)
        {
            // Verify the Id is still present; the source may have left the group
            // while we were deciding which group to use.
            MediaFrameSource frameSource = null;
            if (_mediaCapture.FrameSources.TryGetValue(info.Id, out frameSource))
            {
                MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource);
                frameReader.FrameArrived += FrameReader_FrameArrived;
                MediaFrameReaderStartStatus status = await frameReader.StartAsync();
                if (status != MediaFrameReaderStartStatus.Success)
                {
                    Debug.WriteLine($"Unable to start {info.SourceKind} reader. Error: {status}");
                }
            }
            else
            {
                Debug.WriteLine($"Unable to start {info.SourceKind} reader. Frame source not found");
            }
        }
        else
        {
            string frameKind = (i == 0 ? "Color" : i == 1 ? "Depth" : "Infrared");
            Debug.WriteLine($"No {frameKind} source in group '{groupName}'.");
        }
    }
}
/// <summary>
/// Switches to the next camera source group and starts a frame reader for each
/// renderable source kind in that group.
/// </summary>
private async Task PickNextMediaSourceWorkerAsync()
{
    await CleanupMediaCaptureAsync();

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        _logger.Log("No source groups found.");
        return;
    }

    // Pick next group in the array after each time the Next button is clicked.
    _groupSelectionIndex = (_groupSelectionIndex + 1) % allGroups.Count;
    // BUG FIX: was hard-coded to allGroups[1], which ignored the rotation index and
    // threw ArgumentOutOfRangeException when fewer than two groups exist.
    var selectedGroup = allGroups[_groupSelectionIndex];
    // _logger.Log($"Found {allGroups.Count} groups and selecting index [{_groupSelectionIndex}]: {selectedGroup.DisplayName}");

    try
    {
        // Initialize MediaCapture with selected group.
        // This can raise an exception if the source no longer exists,
        // or if the source could not be initialized.
        await InitializeMediaCaptureAsync(selectedGroup);
    }
    catch (Exception exception)
    {
        _logger.Log($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    var startedKinds = new HashSet<MediaFrameSourceKind>();
    foreach (MediaFrameSource source in _mediaCapture.FrameSources.Values)
    {
        MediaFrameSourceKind kind = source.Info.SourceKind;

        // Ignore this source if we already have a source of this kind.
        if (startedKinds.Contains(kind))
        {
            continue;
        }

        // Look for a format which the FrameRenderer can render.
        string requestedSubtype = null;
        foreach (MediaFrameFormat format in source.SupportedFormats)
        {
            requestedSubtype = FrameRenderer.GetSubtypeForFrameReader(kind, format);
            if (requestedSubtype != null)
            {
                // Tell the source to use the format we can render.
                await source.SetFormatAsync(format);
                break;
            }
        }
        if (requestedSubtype == null)
        {
            // No acceptable format was found. Ignore this source.
            continue;
        }

        MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
        frameReader.FrameArrived += FrameReader_FrameArrived;
        _sourceReaders.Add(frameReader);

        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status == MediaFrameReaderStartStatus.Success)
        {
            // _logger.Log($"Started {kind} reader.");
            startedKinds.Add(kind);
        }
        else
        {
            _logger.Log($"Unable to start {kind} reader. Error: {status}");
        }
    }

    if (startedKinds.Count == 0)
    {
        _logger.Log($"No eligible sources in {selectedGroup.DisplayName}.");
    }
}
/// <summary>
/// Creates a VideoProcessor backed by the first color camera source found, or null
/// when no suitable camera or frame source is available.
/// </summary>
public static async Task<VideoProcessor> CreateAsync()
{
    Debug.WriteLine("VideoProcessor.CreateAsync() called !");

    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;

    // Gets all camera groups
    var groups = await MediaFrameSourceGroup.FindAllAsync();
    Debug.WriteLine("MediaFrameSourceGroup.FindAllAsync() called !");

    // Iterates over all cameras to find the first color camera available
    foreach (MediaFrameSourceGroup sourceGroup in groups)
    {
        foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos)
        {
            // Pick first color camera source
            if (sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (selectedSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    // if no valid camera is found return null
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        return null;
    }

    // Prepare settings
    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
    settings.MemoryPreference = MediaCaptureMemoryPreference.Cpu;
    settings.SharingMode = MediaCaptureSharingMode.SharedReadOnly;
    settings.StreamingCaptureMode = StreamingCaptureMode.Video;
    settings.SourceGroup = selectedGroup;

    // Initialize media capture
    MediaCapture mediacapture = new MediaCapture();
    await mediacapture.InitializeAsync(settings);

    // Gets the media frame source
    // BUG FIX: the TryGetValue result was previously ignored, so a null MF_Source
    // could be passed to CreateFrameReaderAsync.
    MediaFrameSource MF_Source;
    if (!mediacapture.FrameSources.TryGetValue(selectedSourceInfo.Id, out MF_Source))
    {
        Debug.WriteLine("Frame Source not found !");
        return null;
    }

    // Create a media frame reader from the media frame source
    MediaFrameReader MF_Reader = await mediacapture.CreateFrameReaderAsync(MF_Source);
    MediaFrameReaderStartStatus status = await MF_Reader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success)
    {
        return new VideoProcessor(mediacapture, MF_Reader, MF_Source);
    }
    else
    {
        Debug.WriteLine("Frame Reader Failed !");
        return null;
    }
}
/// <summary>
/// Creates a FrameGrabber for the HoloLens color camera. Returns an empty FrameGrabber
/// when no color source exists or capture cannot be initialized or started.
/// </summary>
public static async Task<FrameGrabber> CreateAsync()
{
    MediaCapture mediaCapture = null;
    MediaFrameReader mediaFrameReader = null;
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;

    var groups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (MediaFrameSourceGroup sourceGroup in groups)
    {
        // there should be only one color source for the HoloLens
        foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (selectedSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            // BUG FIX: without this break later groups overwrote selectedGroup, so it
            // could mismatch the group that actually owns selectedSourceInfo and the
            // FrameSources lookup below would fail.
            break;
        }
    }

    // BUG FIX: guard against no color camera (e.g. the emulator); previously a null
    // selectedSourceInfo caused a NullReferenceException below.
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        Debug.WriteLine("No color camera source found");
        return new FrameGrabber();
    }

    // define the type of MediaCapture we want (Initialize MediaCapture to capture video from a color camera on the CPU)
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    };

    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception e)
    {
        Debug.WriteLine($"Can't initialize MediaCapture {e.ToString()}");
        return new FrameGrabber();
    }

    // if initialization is successful, obtain MediaFrameSource and create MediaFrameReader
    MediaFrameSource selectedSource = mediaCapture.FrameSources[selectedSourceInfo.Id];
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(selectedSource);

    // ensure MediaFrameReader is successfully created to instantiate Grabber instance
    MediaFrameReaderStartStatus status = await mediaFrameReader.StartAsync();
    if (status == MediaFrameReaderStartStatus.Success)
    {
        return new FrameGrabber(mediaCapture, selectedSource, mediaFrameReader);
    }
    else
    {
        return new FrameGrabber();
    }
}