/// <summary>
/// Resumes frame delivery by restarting the frame reader, when one exists.
/// </summary>
/// <returns>The reader's start operation as a Task, or a completed task when no reader is active.</returns>
public Task ResumeVideo()
{
    // No reader means there is nothing to resume.
    return _mediaFrameReader != null
        ? _mediaFrameReader.StartAsync().AsTask()
        : Task.CompletedTask;
}
/// <summary>
/// Starts frame streaming: hooks the frame-arrived handler, starts the reader (blocking
/// until the async start completes, since this override is synchronous), creates a
/// CaptureElement on the UI thread, and spawns a watchdog that restarts the reader
/// if no frame arrives for over a second.
/// </summary>
public override void StartStream()
{
    paused = false;
    if (Streaming)
    {
        return; // already streaming; nothing to do
    }
    Streaming = true;
    Reader.FrameArrived += NewFrameArrived;

    // Block until the reader has actually started (synchronous contract of StartStream).
    var startTask = Reader.StartAsync();
    while (startTask.Status == AsyncStatus.Started)
    {
        Thread.Sleep(50);
    }

    // The CaptureElement must be created on the UI thread; wait for it to exist.
    CaptureElement captureElement = null;
    bool waiting = true;
    Xamarin.Essentials.MainThread.BeginInvokeOnMainThread(() =>
    {
        captureElement = new CaptureElement { Source = NativeDevice };
        waiting = false;
    });
    NativeDevice.Failed += OnFail;
    while (waiting)
    {
        Thread.Sleep(50);
    }

    // Watchdog: if no frame has arrived for more than a second, force-restart the reader.
    Task.Run(() =>
    {
        while (Streaming)
        {
            if (StreamFaultTimer.ElapsedMilliseconds > 1000)
            {
                Debug.WriteLine("Been a second without frames. forcing a call");
                // FIX: renamed locals — the original named a local "Task", shadowing the type.
                var stopTask = Reader.StopAsync();
                while (stopTask.Status == AsyncStatus.Started)
                {
                    Thread.Sleep(50);
                }
                var restartTask = Reader.StartAsync();
                while (restartTask.Status == AsyncStatus.Started)
                {
                    Thread.Sleep(50);
                }
                StreamFaultTimer.Reset();
            }
            // FIX: the original watchdog loop spun without yielding, pegging a CPU core.
            Thread.Sleep(50);
        }
    });
}
/// <summary>
/// Initializes audio capture, starts recognition, and begins buffered audio frame
/// reading into an in-memory stream seeded with a WAV header.
/// </summary>
/// <exception cref="Exception">Thrown when the frame reader fails to start.</exception>
public override async Task Start()
{
    MediaRecorder = await MediaCaptureHandler.Init(MediaCategory.Media, AudioProcessing.Raw);

    // Start Recognition.
    await this.StartRecognition();

    // Pick the first audio frame source exposed by the capture device.
    var audioSource = MediaRecorder.FrameSources
        .First(x => x.Value.Info.MediaStreamType == MediaStreamType.Audio)
        .Value;

    frameReader = await MediaRecorder.CreateFrameReaderAsync(audioSource);
    frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
    frameReader.FrameArrived += OnFrameArrived;

    // Seed the output stream with a WAV header so PCM frames can be appended to it.
    messageStream = new InMemoryRandomAccessStream();
    await messageStream.WriteAsync(Utils.Audio.CreateWavHeader().AsBuffer());

    if (await frameReader.StartAsync() != MediaFrameReaderStartStatus.Success)
    {
        throw new Exception("The MediaFrameReader couldn't start.");
    }
}
/// <summary>
/// The task to asynchronously starts the video pipeline and frame reading. The task should
/// be executed when the ARUWPController status is ARUWP_STATUS_CTRL_INITIALIZED, and will
/// change the status to ARUWP_STATUS_RUNNING if the task is successful. The task is wrapped
/// up in ARUWPController.Resume() function. [internal use]
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
public async Task<bool> StartFrameReaderAsyncTask()
{
    // Guard: only valid from the initialized state.
    if (controller.status != ARUWP.ARUWP_STATUS_CTRL_INITIALIZED)
    {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() fails because of incorrect status");
        return false;
    }

    if (!initializeVideoHere)
    {
        // Video is initialized externally; only advance the controller state.
        Debug.Log(TAG + ": StartFrameReaderAsyncTask(): initializeVideoHere = false, mock-setting the controller status to ARUWP_STATUS_RUNNING despite not starting frameReader");
        controller.status = ARUWP.ARUWP_STATUS_RUNNING;
        return true;
    }

    var startStatus = await frameReader.StartAsync();
    if (startStatus != MediaFrameReaderStartStatus.Success)
    {
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() is not successful, status = " + startStatus);
        return false;
    }

    Debug.Log(TAG + ": StartFrameReaderAsyncTask() is successful");
    controller.status = ARUWP.ARUWP_STATUS_RUNNING;
    return true;
}
/// <summary>
/// Initializes capture on the UI dispatcher: configures JPEG quality, exclusive
/// CPU-backed video-only capture, the requested resolution/subtype (preferring the
/// highest frame rate), and starts the frame reader.
/// </summary>
/// <param name="videoSetting">Requested quality, resolution, subtype and thread counts.</param>
public async Task Initialize(VideoSetting videoSetting)
{
    await CoreApplication.MainView.CoreWindow.Dispatcher.RunAndAwaitAsync(CoreDispatcherPriority.Normal, async () =>
    {
        _threadsCount = videoSetting.UsedThreads;
        _stoppedThreads = videoSetting.UsedThreads;

        _lastFrameAdded.Start();

        _imageQuality = new BitmapPropertySet();
        var imageQualityValue = new BitmapTypedValue(videoSetting.VideoQuality, Windows.Foundation.PropertyType.Single);
        _imageQuality.Add("ImageQuality", imageQualityValue);

        _mediaCapture = new MediaCapture();

        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        var settings = new MediaCaptureInitializationSettings()
        {
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            //With CPU the results contain always SoftwareBitmaps, otherwise with GPU
            //they preferring D3DSurface
            MemoryPreference = MediaCaptureMemoryPreference.Cpu,
            //Capture only video, no audio
            StreamingCaptureMode = StreamingCaptureMode.Video
        };

        await _mediaCapture.InitializeAsync(settings);

        var mediaFrameSource = _mediaCapture.FrameSources.First().Value;
        var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;
        videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
        videoDeviceController.PrimaryUse = Windows.Media.Devices.CaptureUse.Video;

        //Set exposure (auto light adjustment)
        if (_mediaCapture.VideoDeviceController.Exposure.Capabilities.Supported
            && _mediaCapture.VideoDeviceController.Exposure.Capabilities.AutoModeSupported)
        {
            _mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
        }

        var videoResolutionWidthHeight = VideoResolutionWidthHeight.Get(videoSetting.VideoResolution);
        var videoSubType = VideoSubtypeHelper.Get(videoSetting.VideoSubtype);

        //Set resolution, frame rate and video subtyp
        var videoFormat = mediaFrameSource.SupportedFormats
            .Where(sf => sf.VideoFormat.Width == videoResolutionWidthHeight.Width
                      && sf.VideoFormat.Height == videoResolutionWidthHeight.Height
                      && sf.Subtype == videoSubType)
            // FIX: use floating-point division. The original integer division truncated
            // fractional rates (e.g. 30000/1001 -> 29), which could mis-order formats and
            // pick a lower-FPS format than intended.
            .OrderByDescending(m => (double)m.FrameRate.Numerator / m.FrameRate.Denominator)
            .First();

        await mediaFrameSource.SetFormatAsync(videoFormat);

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
        await _mediaFrameReader.StartAsync();
    });
}
/// <summary>
/// Creates an ARGB32 frame reader over the color source, subscribes the frame
/// handler, and starts reading.
/// </summary>
private async Task CreateFrameReader()
{
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);

    // Subscribe before starting so frames are not missed.
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

    await mediaFrameReader.StartAsync();
}
/// <summary>
/// Initializes shared read-only, CPU-backed video capture on the first source group
/// and starts a frame reader, preferring a color source when one exists.
/// </summary>
public async Task InitMedia()
{
    var groups = await MediaFrameSourceGroup.FindAllAsync();

    capture = new MediaCapture();
    var initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = groups[0],
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };
    await capture.InitializeAsync(initSettings);

    // Prefer a color source; fall back to whatever the group offers first.
    MediaFrameSource frameSource;
    try
    {
        frameSource = capture.FrameSources.Values.First(x => x.Info.SourceKind == MediaFrameSourceKind.Color);
    }
    catch
    {
        frameSource = capture.FrameSources.Values.First();
    }

    reader = await capture.CreateFrameReaderAsync(frameSource);
    reader.FrameArrived += HandleFrames;
    await reader.StartAsync();
}
/// <summary>
/// Loads the ONNX model, creates the Azure connection, and starts the frame reader
/// concurrently, then runs the camera processing loop.
/// </summary>
/// <returns>Process exit code (0 on success).</returns>
static async Task<int> MainAsync(string[] args)
{
    Log.WriteLine("Starting async...");

    Model model = null;
    AzureConnection connection = null;
    MediaFrameReader reader = null;
    EventWaitHandle evtFrame = null;

    // The three startup steps are independent, so run them in parallel.
    var modelTask = Task.Run(async () =>
        model = await Model.CreateModelAsync(
            Directory.GetCurrentDirectory() + "\\resources\\office_fruit_coreml.onnx"));
    var connectionTask = Task.Run(async () =>
        connection = await AzureConnection.CreateAzureConnectionAsync());
    var readerTask = Task.Run(async () =>
    {
        (reader, evtFrame) = await GetFrameReaderAsync();
        await AsyncHelper.SyncFromAsync(reader.StartAsync(), "reader start");
    });
    await Task.WhenAll(modelTask, connectionTask, readerTask);

    Log.WriteLine("Model loaded, Azure Connection created, and FrameReader Started\n\n\n\n");

    await CameraProcessingAsync(model, reader, evtFrame, connection);
    return 0;
}
/// <summary>
/// Initializes and starts Media Capture and frame reader.
/// </summary>
/// <returns></returns>
private static async Task StartMediaCaptureAsync()
{
    // Video-only streaming in shared mode so that multiple instances can access the
    // camera concurrently.
    m_mediaCapture = new MediaCapture();
    var initSettings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Video,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly
    };
    await m_mediaCapture.InitializeAsync(initSettings);

    // Prefer the color preview stream; fall back to the color record stream.
    var selectedFrameSource = m_mediaCapture.FrameSources.FirstOrDefault(
            source => source.Value.Info.MediaStreamType == MediaStreamType.VideoPreview
                   && source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value
        ?? m_mediaCapture.FrameSources.FirstOrDefault(
            source => source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord
                   && source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;

    if (selectedFrameSource == null)
    {
        throw new Exception("No valid video frame sources were found with source type color.");
    }

    Console.WriteLine($"{selectedFrameSource.Info.DeviceInformation?.Name} | MediaStreamType: {selectedFrameSource.Info.MediaStreamType} MediaFrameSourceKind: {selectedFrameSource.Info.SourceKind}");

    m_frameReader = await m_mediaCapture.CreateFrameReaderAsync(selectedFrameSource);
    m_frameReader.FrameArrived += FrameArrivedHandler;
    m_frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
    await m_frameReader.StartAsync();
}
/// <summary>
/// Starts listening for camera frames on the first available frame source; if a
/// listener is already running it is stopped and restarted.
/// </summary>
private async Task StartFrameListenerAsync()
{
    if (frameListenerRunning)
    {
        Logger.Info("Frame listener already running. Restarting it...");
        await StopFrameListenerAsync();
    }
    try
    {
        if (cameraCapture.FrameSources.Count > 0)
        {
            // Use the first reported frame source; no filtering by source kind here.
            MediaFrameSource frameSource = cameraCapture.FrameSources.First().Value;
            int count = cameraCapture.FrameSources.Count;
            if (!(frameSource is null))
            {
                frameReader = await cameraCapture.CreateFrameReaderAsync(frameSource);
                frameReader.FrameArrived += FrameReader_FrameArrived;
                await frameReader.StartAsync();
                frameListenerRunning = true;
            }
            else
            {
                Logger.Info("MediaFrameSource is null.");
                await OnErrorAsync(PreviewError.MEDIA_FRAME_IS_NULL);
            }
        }
        else
        {
            Logger.Info("MediaFrameReader creation failed with: No camera available.");
            await OnErrorAsync(PreviewError.MEDIA_FRAME_NO_CAMERA);
        }
    }
    // NOTE(review): the catch/finally for this try block is not visible in this chunk —
    // the method continues past this point; confirm exception handling in the full file.
/// <summary>
/// Finds a color VideoPreview frame source, records its current capture dimensions,
/// and starts a frame reader over it.
/// </summary>
/// <exception cref="System.InvalidOperationException">No color VideoPreview source exists.</exception>
private async Task InitializeCameraFrameReader()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo colorSourceInfo = null;

    // Pick the first group containing a color preview source.
    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
                && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                colorSourceInfo = sourceInfo;
                break;
            }
        }
        if (colorSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    // FIX: previously a missing color source caused a NullReferenceException on
    // colorSourceInfo.Id below; fail with a descriptive exception instead.
    if (colorSourceInfo == null)
    {
        throw new System.InvalidOperationException("No color VideoPreview frame source was found.");
    }

    var colorFrameSource = CameraCapture.FrameSources[colorSourceInfo.Id];
    CaptureWidth = (int)colorFrameSource.CurrentFormat.VideoFormat.Width;
    CaptureHeight = (int)colorFrameSource.CurrentFormat.VideoFormat.Height;

    CameraFrameReader = await CameraCapture.CreateFrameReaderAsync(colorFrameSource);
    await CameraFrameReader.StartAsync();
}
/// <summary>
/// Finds a color VideoPreview frame source, applies its ARGB32 format when supported,
/// and starts a frame reader over it.
/// </summary>
private async Task InitializeCameraFrameReader()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo colorSourceInfo = null;

    // Pick the first group containing a color preview source.
    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
                && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                colorSourceInfo = sourceInfo;
                break;
            }
        }
        if (colorSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    var colorFrameSource = CameraCapture.FrameSources[colorSourceInfo.Id];

    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return format.Subtype == MediaEncodingSubtypes.Argb32;
    }).FirstOrDefault();

    // FIX: the preferred format was computed but never applied to the source (dead code
    // in the original). Apply it when ARGB32 is supported; otherwise keep the default.
    if (preferredFormat != null)
    {
        await colorFrameSource.SetFormatAsync(preferredFormat);
    }

    CameraFrameReader = await CameraCapture.CreateFrameReaderAsync(colorFrameSource);
    await CameraFrameReader.StartAsync();
}
//********************************************************
//  LEFTSIDE, LEFTFRONT, RIGHTSIDE, RIGHTFRONT CAPTURE
//********************************************************
/// <summary>
/// Initializes the left-side camera in shared read-only mode on the depth source group
/// and starts an ARGB32 frame reader over its first supported format.
/// </summary>
/// <returns>Always null; null is also returned when capture initialization fails.</returns>
public async Task<string> StartFourCamerasCapture()
{
    leftSideBytes = new byte[480 * 160 * 4];

    leftSideCapture = new MediaCapture();
    try
    {
        await leftSideCapture.InitializeAsync(
            new MediaCaptureInitializationSettings()
            {
                SourceGroup = depthGroup,
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
        );
    }
    catch (Exception)
    {
        return null; // best-effort: caller treats null as "not started"
    }

    // FIX: the original read FrameSources and created the reader from depthNearCapture
    // (copy-paste from StartDepthNearCapture), leaving the freshly initialized
    // leftSideCapture unused. Use leftSideCapture and its own source info here.
    var colorFrameSource = leftSideCapture.FrameSources[leftSideInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.FirstOrDefault();
    await colorFrameSource.SetFormatAsync(preferredFormat);

    leftSideFrameReader = await leftSideCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    leftSideFrameReader.FrameArrived += leftSideFrameReader_FrameArrived;
    await leftSideFrameReader.StartAsync();
    return null;
}
//********************************************************
//  DEPTH NEAR CAPTURE
//********************************************************
/// <summary>
/// Initializes the near-depth camera in shared read-only mode on the depth source group
/// and starts a D16 frame reader over its first supported format.
/// </summary>
/// <returns>Always null; null is also returned when capture initialization fails.</returns>
public async Task<string> StartDepthNearCapture()
{
    depthNearBytes = new byte[450 * 448 * 2];

    depthNearCapture = new MediaCapture();
    var initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = depthGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await depthNearCapture.InitializeAsync(initSettings);
    }
    catch (Exception)
    {
        return null; // best-effort: caller treats null as "not started"
    }

    var depthFrameSource = depthNearCapture.FrameSources[depthNearInfo.Id];
    await depthFrameSource.SetFormatAsync(depthFrameSource.SupportedFormats.FirstOrDefault());

    depthNearFrameReader = await depthNearCapture.CreateFrameReaderAsync(depthFrameSource, MediaEncodingSubtypes.D16);
    depthNearFrameReader.FrameArrived += DepthNearFrameReader_FrameArrived;
    await depthNearFrameReader.StartAsync();
    return null;
}
/// <summary>
/// Asynchronously starts video mode.
///
/// Activates the web camera with the various settings specified in CameraParameters.
/// Only one VideoCapture instance can start the video mode at any given time.
/// After starting the video mode, you listen for new video frame samples via the VideoCapture.FrameSampleAcquired event,
/// or by calling VideoCapture.RequestNextFrameSample() when will return the next available sample.
/// While in video mode, more power will be consumed so make sure that you call VideoCapture.StopVideoModeAsync when you can afford the start/stop video mode overhead.
/// </summary>
/// <param name="setupParams">Parameters that change how video mode is used.</param>
/// <param name="onVideoModeStartedCallback">This callback will be invoked once video mode has been activated.</param>
public async void StartVideoModeAsync(CameraParameters setupParams, OnVideoModeStartedCallback onVideoModeStartedCallback)
{
    var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id]; //Returns a MediaFrameSource
    if (mediaFrameSource == null)
    {
        // Report failure through the callback rather than throwing (async void).
        onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
        return;
    }

    // Map the caller's requested pixel format to a WinRT media encoding subtype.
    var pixelFormat = ConvertCapturePixelFormatToMediaEncodingSubtype(setupParams.pixelFormat);
    _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);
    _frameReader.FrameArrived += HandleFrameArrived;
    // NOTE(review): the start status of the reader is not checked here; a failed start
    // still reports Success below — confirm whether that is intentional.
    await _frameReader.StartAsync();

    VideoEncodingProperties properties = GetVideoEncodingPropertiesForCameraParams(setupParams);

    // Historical context: https://github.com/VulcanTechnologies/HoloLensCameraStream/issues/6
    if (setupParams.rotateImage180Degrees)
    {
        properties.Properties.Add(ROTATION_KEY, 180);
    }

    // gr: taken from here https://forums.hololens.com/discussion/2009/mixedrealitycapture
    IVideoEffectDefinition ved = new VideoMRCSettings(setupParams.enableHolograms, setupParams.enableVideoStabilization, setupParams.videoStabilizationBufferSize, setupParams.hologramOpacity);
    await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);

    // Apply resolution/FPS (and the optional rotation hint) after the effect is added.
    await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(STREAM_TYPE, properties);

    onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(0, ResultType.Success, true));
}
/// <summary>
/// Opens the depth stream: starts the local depth frame reader when one exists,
/// otherwise asks the remote network client to open a depth reader. Idempotent —
/// a second call while started reports Success without side effects.
/// </summary>
/// <returns>The start status of whichever reader path was taken.</returns>
public IAsyncOperation <MediaFrameReaderStartStatus> OpenAsync()
{
    return (Task.Run(async() =>
    {
        if (!_isStarted)
        {
            if (_depthReader != null)
            {
                // Local depth camera path.
                var status = await _depthReader.StartAsync();
                if (status == MediaFrameReaderStartStatus.Success)
                {
                    // NOTE(review): FrameArrived is subscribed only after StartAsync
                    // succeeds, so frames delivered during startup may be missed —
                    // confirm this ordering is intended.
                    _depthReader.FrameArrived += DepthReader_FrameArrived;
                    _isStarted = true;
                }
                return status;
            }
            else
            {
                // Remote path: ask the network peer to open its depth reader.
                var response = await _networkClient.SendCommandAsync(new OpenReader(ReaderType.Depth, ReaderConfig.Default));
                if (response.Status == OperationStatus.ResponseSuccess)
                {
                    _networkClient.DepthFrameArrived += NetworkClient_DepthFrameArrived;
                    _isStarted = true;
                }
                // Translate the network status into a MediaFrameReaderStartStatus.
                return response.Status.ToMediaReaderStartStatus();
            }
        }
        // Already started: nothing to do.
        return MediaFrameReaderStartStatus.Success;
    }).AsAsyncOperation());
}
// Create and initialize the MediaCapture object.
/// <summary>
/// Captures images from the front Surface camera: initializes exclusive CPU-backed
/// capture, applies a format at least 1080 pixels wide, starts the frame reader, and
/// arms the 500 ms auto-stop timer.
/// </summary>
public async Task CaptureImageAsync()
{
    autoStopCamera = new System.Timers.Timer(500);
    autoStopCamera.Elapsed += AutoStopCamera;
    frameQueue = new List<SoftwareBitmap>();

    var cameraName = "Surface Camera Front";
    var frameSourceGroup = await MediaFrameSourceGroup.FindAllAsync();
    Debug.WriteLine($"frameSourceGroup = {frameSourceGroup}");
    var cameraGroup = frameSourceGroup.FirstOrDefault(fg => fg.DisplayName == cameraName);
    Debug.WriteLine($"cameraGroup = {cameraGroup}");

    var mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = cameraGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video,
    };
    await mediaCapture.InitializeAsync(settings);

    var exposureSuccess = mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
    var brightnessSuccess = mediaCapture.VideoDeviceController.Brightness.TrySetAuto(true);
    var currExposureSuccess = mediaCapture.VideoDeviceController.Exposure.TryGetValue(out double expValue);
    var currBrightnessSuccess = mediaCapture.VideoDeviceController.Brightness.TryGetValue(out double brightValue);
    Debug.WriteLine($"exposureSuccess: {exposureSuccess}");
    Debug.WriteLine($"brightnessSuccess: {brightnessSuccess}");
    Debug.WriteLine($"expValue: {expValue}");
    Debug.WriteLine($"brightValue: {brightValue}");

    var sourceInfoId = cameraGroup?.SourceInfos?.FirstOrDefault()?.Id;
    // FIX: guard against a missing camera group/source. Previously a null sourceInfoId
    // reached ContainsKey (which throws), and a null mediaFrameSource was dereferenced
    // below via SupportedFormats.
    var mediaFrameSource = (sourceInfoId != null && (mediaCapture?.FrameSources?.ContainsKey(sourceInfoId) ?? false))
        ? mediaCapture.FrameSources[sourceInfoId]
        : null;
    if (mediaFrameSource == null)
    {
        return;
    }

    var preferredFormat = mediaFrameSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width >= 1080; //&& format.Subtype == "NV12";
    }).FirstOrDefault();
    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }
    await mediaFrameSource.SetFormatAsync(preferredFormat);

    frameReader?.Dispose();
    frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
    frameReader.FrameArrived += FrameReaderOnFrameArrived;
    await frameReader.StartAsync();
    autoStopCamera.Start();
}
/// <summary>
/// Finds a depth-capable source group, initializes media capture over it, applies the
/// D16 depth format, and starts the depth frame reader. Errors are logged and abort
/// initialization.
/// </summary>
private async Task InitMediaSourceAsync()
{
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        Debug.LogError("cannot found MediaFrameSourceGroup. アプリケーションマニュフェストを確認してください。");
        return;
    }

    // Pick the first group that offers a depth source.
    MediaFrameSourceGroup sourceGroup =
        allGroups.FirstOrDefault(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));
    if (sourceGroup == null)
    {
        Debug.LogError("深度カメラが見つからないようです。");
        return;
    }

    try
    {
        await InitializeMediaCaptureAsync(sourceGroup);
    }
    catch (Exception exception)
    {
        Debug.LogError("InitializeMediaCaptureAsyncに失敗しました" + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    MediaFrameSource source = _mediaCapture.FrameSources.Values
        .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    if (source == null)
    {
        Debug.LogError("sourceが見つかりません。");
        // FIX: the original only logged and fell through, dereferencing the null
        // source on the next line (NullReferenceException).
        return;
    }

    // Depth frames are exposed in the 16-bit D16 subtype.
    MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(
        f => String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));
    if (format == null)
    {
        return;
    }
    await source.SetFormatAsync(format);

    _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);
    MediaFrameReaderStartStatus status = await _depthReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.LogError("_depthReader.StartAsyncに失敗しました");
    }
}
/// <summary>
/// Page load handler: keeps the display active, initializes exclusive CPU-backed NV12
/// capture at the target width, starts background frame processing, then starts the
/// camera preview.
/// </summary>
private async void MainPage_OnLoaded(object sender, RoutedEventArgs e)
{
    displayRequest.RequestActive();
    try
    {
        var (colorSourceInfo, selectedGroup) = await MediaFrameSourceInfo();
        if (selectedGroup == null)
        {
            return;
        }

        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
        {
            SourceGroup = selectedGroup,
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            MemoryPreference = MediaCaptureMemoryPreference.Cpu,
            StreamingCaptureMode = StreamingCaptureMode.Video
        });

        var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
        var preferredFormat = colorFrameSource.SupportedFormats.FirstOrDefault(format =>
            format.VideoFormat.Width == targetWidth
            && string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0);
        if (preferredFormat == null)
        {
            // Our desired format is not supported
            return;
        }
        await colorFrameSource.SetFormatAsync(preferredFormat);

        mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Nv12);
        await mediaFrameReader.StartAsync();

        // FIX: explicitly discard the fire-and-forget processing task. The original
        // ignored the returned Task (compiler warning CS4014, unobserved exceptions).
        _ = Task.Run(() => ProcessPreview(mediaFrameReader));
    }
    catch (UnauthorizedAccessException)
    {
        // This will be thrown if the user denied access to the camera in privacy settings
        return;
    }

    try
    {
        CameraPreview.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
    }
    catch (System.IO.FileLoadException)
    {
        // Intentionally ignored to preserve the original best-effort behavior.
    }
}
/// <summary>
/// Method to start capturing camera frames at desired resolution.
/// </summary>
/// <param name="width">Output frame width in pixels.</param>
/// <param name="height">Output frame height in pixels.</param>
/// <returns></returns>
public async Task InitializeMediaFrameReaderAsync(uint width = 224, uint height = 224)
{
    // Skip initialization when an existing capture session is still streaming.
    bool needsInit = _mediaCapture == null
        || _mediaCapture.CameraStreamState == CameraStreamState.Shutdown
        || _mediaCapture.CameraStreamState == CameraStreamState.NotStreaming;
    if (!needsInit)
    {
        return;
    }

    _mediaCapture?.Dispose();

    // Find right camera settings and prefer back camera.
    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
    var allCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    Debug.Log($"InitializeMediaFrameReaderAsync: allCameras: {allCameras}");

    var selectedCamera = allCameras.FirstOrDefault(c => c.EnclosureLocation?.Panel == Panel.Back) ?? allCameras.FirstOrDefault();
    Debug.Log($"InitializeMediaFrameReaderAsync: selectedCamera: {selectedCamera}");
    if (selectedCamera != null)
    {
        settings.VideoDeviceId = selectedCamera.Id;
        Debug.Log($"InitializeMediaFrameReaderAsync: settings.VideoDeviceId: {settings.VideoDeviceId}");
    }

    // Init capturer and frame reader.
    _mediaCapture = new MediaCapture();
    Debug.Log("InitializeMediaFrameReaderAsync: Successfully created media capture object.");
    await _mediaCapture.InitializeAsync(settings);
    Debug.Log("InitializeMediaFrameReaderAsync: Successfully initialized media capture object.");

    var frameSource = _mediaCapture.FrameSources.Where(source => source.Value.Info.SourceKind == MediaFrameSourceKind.Color).First();
    Debug.Log($"InitializeMediaFrameReaderAsync: frameSource: {frameSource}.");

    // The overloads of CreateFrameReaderAsync with the format arguments will actually
    // make a copy in FrameArrived.
    var subtype = MediaEncodingSubtypes.Bgra8;
    BitmapSize outputSize = new BitmapSize { Width = width, Height = height };
    _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource.Value, subtype, outputSize);
    Debug.Log("InitializeMediaFrameReaderAsync: Successfully created media frame reader.");
    _mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

    await _mediaFrameReader.StartAsync();
    Debug.Log("InitializeMediaFrameReaderAsync: Successfully started media frame reader.");
    IsCapturing = true;
}
/// <summary>
/// Page entry: wires the OpenCV container, populates the operation combo box, picks
/// the first color frame source, and starts a Bgra8 frame reader at reduced resolution.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    App.dispatcher = this.Dispatcher;
    Cv2.InitContainer((object)App.container);
    rootPage = MainPage.Current;

    // Set up the combo box and the default operation.
    OperationComboBox.ItemsSource = Enum.GetValues(typeof(OperationType));
    OperationComboBox.SelectedIndex = 0;
    currentOperation = OperationType.Blur;

    // Find the first source group that exposes a color source.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var selectedSource = allGroups
        .Select(g => new { Group = g, SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color) })
        .FirstOrDefault(g => g.SourceInfo != null);
    if (selectedSource == null)
    {
        return; // no camera sources found
    }

    // Initialize MediaCapture.
    try
    {
        await InitializeMediaCaptureAsync(selectedSource.Group);
    }
    catch (Exception exception)
    {
        Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create the frame reader; a lower resolution keeps the image processing performant.
    MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
    BitmapSize size = new BitmapSize() { Height = IMAGE_ROWS, Width = IMAGE_COLS };
    _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);
    _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
    await _reader.StartAsync();

    _FPSTimer.Start();
}
/// <summary>
/// Creates a VideoFrameProcessor over the first color VideoPreview source, or null
/// when no suitable camera exists (e.g. on the emulator) or the reader fails to start.
/// </summary>
public static async Task<VideoFrameProcessor> CreateAsync()
{
    IReadOnlyList<MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();

    // Pick first color source.
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo selectedSourceInfo = null;
    foreach (var sourceGroup in groups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
                && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (selectedSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    // No valid camera was found. This will happen on the emulator.
    if (selectedGroup == null || selectedSourceInfo == null)
    {
        return null;
    }

    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings
    {
        MemoryPreference = MediaCaptureMemoryPreference.Cpu, // Need SoftwareBitmaps for FaceAnalysis
        StreamingCaptureMode = StreamingCaptureMode.Video,   // Only need to stream video
        SourceGroup = selectedGroup
    };

    MediaCapture mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);

    MediaFrameSource selectedSource = mediaCapture.FrameSources[selectedSourceInfo.Id];
    MediaFrameReader reader = await mediaCapture.CreateFrameReaderAsync(selectedSource);
    MediaFrameReaderStartStatus status = await reader.StartAsync();

    // Only create a VideoFrameProcessor if the reader successfully started.
    return status == MediaFrameReaderStartStatus.Success
        ? new VideoFrameProcessor(mediaCapture, reader, selectedSource)
        : null;
}
//#############################################################################################
/// <summary>
/// Start video capture.
/// By default, if the callback is enabled, _imageElement is updated.
/// </summary>
public async Task Start()
{
    if (_running)
    {
        return; // already capturing
    }

    // create and start the reader
    _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(_frameSource, MediaEncodingSubtypes.Argb32);
    await _mediaFrameReader.StartAsync();
    _running = true;
}
/// <summary>
/// Initializes exclusive video capture on the first source group with a color
/// VideoRecord source and starts an ARGB32 frame reader (UWP builds only).
/// </summary>
public async Task StartPreviewAsync()
{
#if WINDOWS_UWP
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    // Locate the first group that exposes a color record stream.
    MediaFrameSourceGroup selectedGroup = null;
    MediaFrameSourceInfo colorSourceInfo = null;
    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            if (sourceInfo.MediaStreamType == MediaStreamType.VideoRecord
                && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
            {
                colorSourceInfo = sourceInfo;
                break;
            }
        }
        if (colorSourceInfo != null)
        {
            selectedGroup = sourceGroup;
            break;
        }
    }

    var initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Auto,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    try
    {
        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(initSettings);

        var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
        mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
        mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
        await mediaFrameReader.StartAsync();
        isPreviewing = true;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
#endif
}
/// <summary>
/// Starts the video pipeline and frame reading.
/// </summary>
/// <returns>Whether the frame reader is successfully started</returns>
private async Task<bool> StartFrameReaderAsyncTask()
{
    var startStatus = await _frameReader.StartAsync();
    if (startStatus != MediaFrameReaderStartStatus.Success)
    {
        _logger.LogError($"Could not start frame reader, status: {startStatus}");
        return false;
    }

    _logger.Log("Started Frame reader");
    return true;
}
/// <summary>
/// Video Capture: Initialize Camera Capture.
/// Implementation is from the UWP official tutorial.
/// https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader
/// </summary>
public async void InitializeCamera()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    // On Xbox/Kinect, omit the MediaStreamType and EnclosureLocation tests.
    var selected = frameSourceGroups
        .Select(group => new
        {
            sourceGroup = group,
            colorSourceInfo = group.SourceInfos.FirstOrDefault(sourceInfo => sourceInfo.SourceKind == MediaFrameSourceKind.Color)
        })
        .FirstOrDefault(t => t.colorSourceInfo != null);

    if (selected?.sourceGroup == null)
    {
        return; // no usable color camera
    }

    mediaCapture = new MediaCapture();
    var initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selected.sourceGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(initSettings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    var colorFrameSource = mediaCapture.FrameSources[selected.colorSourceInfo.Id];
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
}
/// <summary>
/// Starts camera preview capture. Configures a Bgra8 frame reader, narrows the stream
/// to 896x504 @ 30fps (the HoloLens preview format), and enables Mixed Reality Capture.
/// </summary>
/// <param name="IsCapturedHologram">Whether holograms are included in the captured frames.</param>
/// <returns>Whether capture was started successfully.</returns>
public async Task<bool> StartVideoModeAsync(bool IsCapturedHologram)
{
    // A MediaFrameSource cannot be obtained from MediaFrameSourceGroup directly;
    // it must be retrieved through MediaCapture.
    var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id];
    if (mediaFrameSource == null)
    {
        return false;
    }

    // Bgra8 can be converted into a Unity texture.
    var pixelFormat = MediaEncodingSubtypes.Bgra8;

    // Create the MediaFrameReader and hook the per-frame handler.
    _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);
    _frameReader.FrameArrived += HandleFrameArrived;

    // Begin frame acquisition.
    var result = await _frameReader.StartAsync();
    // FIX: the start status was previously assigned but never checked; report failure
    // to the caller instead of claiming success.
    if (result != MediaFrameReaderStartStatus.Success)
    {
        return false;
    }

    // Enumerate the device's supported video formats, narrowed to the
    // 896x504 30fps format supported by HoloLens.
    var allPropertySets = _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
        .Select(x => x as VideoEncodingProperties)
        .Where(x =>
        {
            if (x == null) { return false; }
            if (x.FrameRate.Denominator == 0) { return false; }
            double frameRate = (double)x.FrameRate.Numerator / (double)x.FrameRate.Denominator;
            return x.Width == 896 && x.Height == 504 && (int)Math.Round(frameRate) == 30;
        });

    // Apply the chosen resolution and FPS to the capture stream.
    VideoEncodingProperties properties = allPropertySets.FirstOrDefault();
    await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, properties);

    // Configure Mixed Reality Capture.
    IVideoEffectDefinition ved = new MixedRealityCaptureSetting(IsCapturedHologram, false, 0, IsCapturedHologram ? 0.9f : 0.0f);
    await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);
    return true;
}
/// <summary>
/// Initializes audio capture and starts a buffered MediaFrameReader over the
/// first audio frame source. Only proceeds when the source delivers mono
/// float samples at 48 kHz; otherwise returns without starting a reader.
/// </summary>
private async void InitAudioFrameReader()
{
    //<SnippetInitAudioFrameSource>
    mediaCapture = new MediaCapture();
    MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Audio,
    };
    await mediaCapture.InitializeAsync(settings);

    var audioFrameSources = mediaCapture.FrameSources.Where(x => x.Value.Info.MediaStreamType == MediaStreamType.Audio);

    // Fix: Any() instead of Count() == 0 — Count() enumerates the whole
    // sequence just to test emptiness.
    if (!audioFrameSources.Any())
    {
        Debug.WriteLine("No audio frame source was found.");
        return;
    }

    // Non-empty was just verified, so First() is safe (FirstOrDefault was redundant).
    MediaFrameSource frameSource = audioFrameSources.First().Value;

    MediaFrameFormat format = frameSource.CurrentFormat;
    if (format.Subtype != MediaEncodingSubtypes.Float)
    {
        // Downstream processing expects IEEE-float samples.
        return;
    }

    if (format.AudioEncodingProperties.ChannelCount != 1
        || format.AudioEncodingProperties.SampleRate != 48000)
    {
        // Downstream processing expects mono 48 kHz audio.
        return;
    }
    //</SnippetInitAudioFrameSource>

    //<SnippetCreateAudioFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);

    // Optionally set acquisition mode. Buffered is the default mode for audio.
    mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;

    mediaFrameReader.FrameArrived += MediaFrameReader_AudioFrameArrived;

    var status = await mediaFrameReader.StartAsync();

    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("The MediaFrameReader couldn't start.");
    }
    //</SnippetCreateAudioFrameReader>
}
/// <summary>
/// Initializes the OpenCV helper and camera pipeline: finds the first color
/// frame source, initializes MediaCapture, selects the highest-resolution
/// supported format, and starts a BGRA8 frame reader feeding HandleFrameArrive.
/// </summary>
public async void InitializeManager()
{
    _helper = new OpenCVHelper();

    // Find the sources
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    var sourceGroups = allGroups.Select(g => new
    {
        Group = g,
        SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
    }).Where(g => g.SourceInfo != null).ToList();

    if (sourceGroups.Count == 0)
    {
        // No camera sources found
        return;
    }

    // Non-empty was just verified, so First() is safe.
    var selectedSource = sourceGroups.First();

    // Initialize MediaCapture
    try
    {
        await InitializeMediaCaptureAsync(selectedSource.Group);
    }
    catch (Exception exception)
    {
        Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
        await CleanupMediaCaptureAsync();
        return;
    }

    // Create the frame reader
    MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];

    // Capture at the highest supported resolution.
    // (Fix: the original comment claimed a lower resolution was chosen for
    // performance, but the code has always selected the LARGEST format.)
    var format = frameSource.SupportedFormats
        .OrderByDescending(x => x.VideoFormat.Width * x.VideoFormat.Height)
        .FirstOrDefault();
    await frameSource.SetFormatAsync(format);

    BitmapSize size = new BitmapSize()
    {
        Height = format.VideoFormat.Height,
        Width = format.VideoFormat.Width
    };

    _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);
    _reader.FrameArrived += HandleFrameArrive;

    // Fix: the original discarded the start status; surface a failed start
    // instead of silently producing no frames.
    var status = await _reader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        Debug.WriteLine("The MediaFrameReader couldn't start: " + status);
    }
}
/// <summary>
/// Initializes the camera: selects the first color VideoPreview source, picks
/// the widest/fastest supported format, fixes white balance (2600K) and
/// exposure (5.0) for stable imaging, and starts the frame reader feeding
/// OnFrameArrived. Returns silently if no suitable source or format exists.
/// </summary>
public async void Initialize()
{
    var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

    // First group that has a color VideoPreview source.
    var desiredGroupInfo = sourceGroups.Select(sourceGroup => new
    {
        Group = sourceGroup,
        Info = sourceGroup.SourceInfos.FirstOrDefault(info =>
            info.MediaStreamType == MediaStreamType.VideoPreview
            && info.SourceKind == MediaFrameSourceKind.Color)
    }).FirstOrDefault(groupInfo => groupInfo.Info != null);

    if (desiredGroupInfo == null)
    {
        return;
    }

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = desiredGroupInfo.Group,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Auto,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    // Fix: guard initialization like the sibling InitializeCamera() does —
    // InitializeAsync throws (e.g. access denied, device in use) and an
    // unhandled exception in an async void method crashes the process.
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }

    var frameSource = mediaCapture.FrameSources[desiredGroupInfo.Info.Id];

    // Prefer the widest format; among equal widths, the highest frame rate.
    var preferredFormat = frameSource.SupportedFormats
        .OrderByDescending(format => format.VideoFormat.Width)
        .ThenByDescending(format => (float)format.FrameRate.Numerator / format.FrameRate.Denominator)
        .FirstOrDefault();
    if (preferredFormat == null)
    {
        return;
    }

    await frameSource.SetFormatAsync(preferredFormat);

    // Lock white balance and exposure so frames are photometrically stable
    // (auto adjustments would vary pixel values between frames).
    var cameraController = frameSource.Controller.VideoDeviceController;
    cameraController.WhiteBalance.TrySetAuto(false);
    cameraController.WhiteBalance.TrySetValue(2600);
    cameraController.Exposure.TrySetAuto(false);
    cameraController.Exposure.TrySetValue(5.0);
    cameraController.BacklightCompensation.TrySetAuto(false);
    cameraController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;

    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
    mediaFrameReader.FrameArrived += OnFrameArrived;

    // Fix: the original discarded the start status; log a failed start
    // instead of silently producing no frames.
    var status = await mediaFrameReader.StartAsync();
    if (status != MediaFrameReaderStartStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine("The MediaFrameReader couldn't start: " + status);
    }
}