/// <summary>
/// Initializes the MediaCapture: picks a back-panel camera (or any camera if
/// none is on the back), initializes the device, computes the external-camera
/// and mirroring state, wires the preview UI, and starts the preview once.
/// </summary>
/// <remarks>
/// Fixes two defects in the previous version:
/// 1. The preview was started twice — once inline via
///    mediaCapture.StartPreviewAsync() and again via the trailing
///    StartPreviewAsync() helper call.
/// 2. captureElement.FlowDirection was assigned from mirroringPreview BEFORE
///    the camera panel had been inspected, so the first preview of a
///    front-panel camera was never mirrored.
/// The UI wiring now happens after mirroring is known, and only the
/// StartPreviewAsync() helper starts the stream (as in the sibling
/// implementations — TODO confirm the helper does not re-wire the UI).
/// </remarks>
async Task InitializeCameraAsync()
{
    if (mediaCapture != null)
    {
        return;
    }

    var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var cameraDevice = devices.FirstOrDefault(c => c.EnclosureLocation != null && c.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back);

    // Get any camera if there isn't one on the back panel
    cameraDevice = cameraDevice ?? devices.FirstOrDefault();
    if (cameraDevice == null)
    {
        Debug.WriteLine("No camera found");
        return;
    }

    mediaCapture = new MediaCapture();
    var mediaCaptureInitSettings = this.CreateInitializationSettings(cameraDevice.Id);

    try
    {
        await mediaCapture.InitializeAsync(mediaCaptureInitSettings);
        isInitialized = true;
    }
    catch (UnauthorizedAccessException)
    {
        Debug.WriteLine("Camera access denied");
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception initializing MediaCapture - {0}: {1}", cameraDevice.Id, ex.ToString());
    }

    if (isInitialized)
    {
        if (cameraDevice.EnclosureLocation == null || cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown)
        {
            // No location information — assume an external camera (no mirroring).
            externalCamera = true;
        }
        else
        {
            // Camera is built into the device.
            externalCamera = false;
            // Mirror preview if camera is on front panel
            mirroringPreview = (cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
        }

        // Prevent the device from sleeping while the preview is running.
        displayRequest.RequestActive();

        // Wire the preview into the UI with the now-correct mirroring state.
        captureElement.Source = mediaCapture;
        captureElement.FlowDirection = mirroringPreview ? FlowDirection.RightToLeft : FlowDirection.LeftToRight;

        // Start the preview exactly once.
        await StartPreviewAsync();
    }
}
/// <summary>
/// Sets up the MediaCapture pipeline: chooses a camera (back panel preferred),
/// initializes the capture device, records whether the camera is external and
/// whether the preview must be mirrored, then starts the preview.
/// </summary>
/// <returns>A task that completes when initialization (and preview start) is done.</returns>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    // Already set up — nothing to do.
    if (_mediaCapture != null)
    {
        return;
    }

    // Prefer the back-panel camera; the helper is expected to fall back to any
    // available camera device.
    var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
    if (cameraDevice == null)
    {
        Debug.WriteLine("No camera device found!");
        return;
    }

    _mediaCapture = new MediaCapture();

    // Surface asynchronous capture failures through the Failed handler.
    _mediaCapture.Failed += MediaCapture_Failed;

    try
    {
        await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id });
        _isInitialized = true;
    }
    catch (UnauthorizedAccessException)
    {
        Debug.WriteLine("The app was denied access to the camera");
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
    }

    if (!_isInitialized)
    {
        return;
    }

    var location = cameraDevice.EnclosureLocation;
    if (location == null || location.Panel == Windows.Devices.Enumeration.Panel.Unknown)
    {
        // No placement information — treat the camera as external to the device.
        _externalCamera = true;
    }
    else
    {
        // The camera is built into the device.
        _externalCamera = false;

        // Only front-panel cameras get a mirrored preview.
        _mirroringPreview = location.Panel == Windows.Devices.Enumeration.Panel.Front;
    }

    await StartPreviewAsync();
}
/// <summary>
/// Starts an ARGB32 frame reader on the frame-source group whose display name
/// contains <paramref name="Name"/>, using its color video-record stream at
/// the first supported format with Width >= 1080.
/// </summary>
/// <param name="Name">Substring matched against the source group's DisplayName.</param>
/// <param name="UseGpu">
/// When true, lets MediaCapture choose memory (GPU D3DSurface frames possible);
/// otherwise forces CPU memory so frames are always SoftwareBitmaps.
/// </param>
/// <exception cref="ApplicationException">
/// Thrown when the group, color source, format, or reader cannot be obtained,
/// or when MediaCapture initialization fails.
/// </exception>
public async Task StartAsync(string Name, bool UseGpu = false)
{
    var frameSourceGroups = await AsAsync(MediaFrameSourceGroup.FindAllAsync());

    // Idiom: predicate overload of FirstOrDefault instead of Where(...).FirstOrDefault().
    var selectedGroup = frameSourceGroups.FirstOrDefault(x => x.DisplayName.Contains(Name));
    if (null == selectedGroup)
    {
        throw new ApplicationException($"Unable to find frame source named {Name}");
    }

    var colorSourceInfo = selectedGroup.SourceInfos
        .FirstOrDefault(x => x.MediaStreamType == MediaStreamType.VideoRecord && x.SourceKind == MediaFrameSourceKind.Color);
    if (null == colorSourceInfo)
    {
        throw new ApplicationException($"Unable to find color video recording source on {Name}");
    }

    // BUG FIX: removed the unreachable "null == mediaCapture" check — `new`
    // never returns null in C#, so that branch was dead code.
    mediaCapture = new MediaCapture();

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = UseGpu ? MediaCaptureMemoryPreference.Auto : MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    try
    {
        await AsAsync(mediaCapture.InitializeAsync(settings));
    }
    catch (Exception ex)
    {
        throw new ApplicationException("MediaCapture initialization failed: " + ex.Message, ex);
    }

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];

    // NOTE(review): this filters on WIDTH >= 1080; if "1080p" (height) was the
    // intent, this condition also admits formats such as 1280x720 — confirm.
    var preferredFormat = colorFrameSource.SupportedFormats.FirstOrDefault(format => format.VideoFormat.Width >= 1080);
    if (null == preferredFormat)
    {
        throw new ApplicationException("Our desired format is not supported");
    }

    await AsAsync(colorFrameSource.SetFormatAsync(preferredFormat));

    mediaFrameReader = await AsAsync(mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32));
    if (null == mediaFrameReader)
    {
        throw new ApplicationException($"Unable to create new mediaframereader");
    }

    // Signal a manual-reset event on every arriving frame so consumers can
    // wait for frame availability.
    evtFrame = new EventWaitHandle(false, EventResetMode.ManualReset);
    mediaFrameReader.FrameArrived += (s, a) => evtFrame.Set();

    await AsAsync(mediaFrameReader.StartAsync());
    Log.WriteLineVerbose("FrameReader Started");
}
/// <summary>
/// Initializes the MediaCapture under _mediaCaptureLifeLock, determines the
/// camera placement / mirroring state, and starts the preview after a
/// successful first-time initialization.
/// </summary>
/// <remarks>
/// BUG FIX: the previous version returned early on "No camera found" WITHOUT
/// releasing _mediaCaptureLifeLock (Release lived in a finally that only
/// covered InitializeAsync), deadlocking every subsequent call. The semaphore
/// is now released in a finally block that covers every exit path, and
/// StartPreviewAsync still runs outside the lock, as before.
/// </remarks>
async Task InitializeCameraAsync()
{
    await _mediaCaptureLifeLock.WaitAsync();

    bool initializedNow = false;          // true only when THIS call performed the init
    DeviceInformation cameraDevice = null;

    try
    {
        if (_mediaCapture == null)
        {
            // Attempt to get the back camera, but use any camera if not
            cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
            if (cameraDevice == null)
            {
                Debug.WriteLine("No camera found");
                return; // the finally below releases the lock
            }

            _mediaCapture = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };

            try
            {
                await _mediaCapture.InitializeAsync(settings);
                _isInitialized = true;
                initializedNow = true;
            }
            catch (UnauthorizedAccessException)
            {
                Debug.WriteLine("Camera access denied");
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception initializing MediaCapture - {0}: {1}", cameraDevice.Id, ex.ToString());
            }
        }
    }
    finally
    {
        _mediaCaptureLifeLock.Release();
    }

    // Outside the lock (matching the original flow): mirroring + preview,
    // only when this call actually initialized the capture.
    if (initializedNow && _isInitialized)
    {
        if (cameraDevice.EnclosureLocation == null || cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown)
        {
            _externalCamera = true;
        }
        else
        {
            // Camera is on device
            _externalCamera = false;
            // Mirror preview if camera is on front panel
            _mirroringPreview = (cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
        }

        await StartPreviewAsync();
    }
}
/// <summary>
/// Initialises the camera and resolves the resolution for both the
/// full resolution and preview images.
/// </summary>
/// <remarks>
/// NOTE(review): this is async void, so exceptions escaping it are
/// unobservable by callers. Also note the error path below still assigns
/// MyCaptureElement.Source and falls through to CameraStates.Initialized —
/// presumably intentional so Retry can reuse the element; confirm.
/// </remarks>
private async void InitializeCameraAsync()
{
    // Guard: only run from the NotInitialized state.
    if (CameraState != CameraStates.NotInitialized)
    {
        Debug.WriteLine(DebugTag + "InitializeCameraAsync(): Invalid state");
        return;
    }

    Debug.WriteLine(DebugTag + "InitializeCameraAsync() ->");
    CameraState = CameraStates.Initializing;
    ProgressIndicator.IsActive = true;
    MessageDialog messageDialog = null;

#if WINDOWS_PHONE_APP
    DeviceInformationCollection devices;
    devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

    // No camera at all: report via dialog and revert to NotInitialized.
    if (devices.Count == 0)
    {
        ProgressIndicator.IsActive = false;
        CameraState = CameraStates.NotInitialized;
        messageDialog = new MessageDialog(
            _resourceLoader.GetString("FailedToFindCameraDevice/Text"),
            _resourceLoader.GetString("CameraInitializationFailed/Text"));
        await messageDialog.ShowAsync();
        return;
    }

    // Use the back camera
    DeviceInformation info = devices[0];
    _cam = true;

    // Scan for a device whose name contains "back"; _cam ends up true only
    // when no back camera was found (i.e. the fallback devices[0] is used).
    foreach (var devInfo in devices)
    {
        if (devInfo.Name.ToLowerInvariant().Contains("back"))
        {
            info = devInfo;
            _cam = false; // Set this to true if you use front camera
            break;
        }
    }

    // Mirror the preview element when _cam is set (front/unknown camera).
    MyCaptureElement.FlowDirection = _cam ? FlowDirection.RightToLeft : FlowDirection.LeftToRight;
#endif

    _mediaCapture = new MediaCapture();

    try
    {
#if WINDOWS_PHONE_APP
        // Phone: explicit video-only init bound to the chosen device.
        await _mediaCapture.InitializeAsync(
            new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Video,
                PhotoCaptureSource = PhotoCaptureSource.VideoPreview,
                AudioDeviceId = string.Empty,
                VideoDeviceId = info.Id
            });
#else
        // Non-phone: default initialization (system picks the device).
        await _mediaCapture.InitializeAsync();
#endif
    }
    catch (Exception ex)
    {
        // Initialization failure is reported via the dialog shown below.
        messageDialog = new MessageDialog(ex.ToString(),
            _resourceLoader.GetString("CameraInitializationFailed/Text"));
    }

    MyCaptureElement.Source = _mediaCapture;

    if (messageDialog != null)
    {
        /* Add commands and set their callbacks; both buttons use the
         * same callback function instead of inline event handlers */
        if (messageDialog.Commands != null)
        {
            messageDialog.Commands.Add(new UICommand(_resourceLoader.GetString("Retry/Text"), CommandInvokedHandler));
            messageDialog.Commands.Add(new UICommand(_resourceLoader.GetString("Cancel/Text"), CommandInvokedHandler));
        }

        // Set the command that will be invoked by default
        messageDialog.DefaultCommandIndex = 0;

        // Set the command to be invoked when escape is pressed
        messageDialog.CancelCommandIndex = 1;

        await messageDialog.ShowAsync();
    }
    else
    {
        // Get the resolution (out-params filled by the helper)
        uint width = 0;
        uint height = 0;
        IMediaEncodingProperties propertiesToSet = null;
        AppUtils.GetBestResolution(_mediaCapture, ref width, ref height, ref propertiesToSet);

        if (width > 0 && height > 0)
        {
            _dataContext.SetFullResolution((int)width, (int)height);
            int previewWidth = (int)FilterEffects.DataContext.DefaultPreviewResolutionWidth;
            int previewHeight = 0;
            AppUtils.CalculatePreviewResolution((int)width, (int)height, ref previewWidth, ref previewHeight);
            _dataContext.SetPreviewResolution(previewWidth, previewHeight);
        }

        if (propertiesToSet != null)
        {
            // Apply the best photo-stream properties found above.
            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(
                MediaStreamType.Photo, propertiesToSet);
            Debug.WriteLine(DebugTag + "Capture resolution set to " + width + "x" + height + "!");
        }
        else
        {
            // Fall back to the default preview resolution for both sizes.
            Debug.WriteLine(DebugTag + "Failed to set capture resolution! Using fallback resolution.");
            var fallbackResolution = new Size(
                FilterEffects.DataContext.DefaultPreviewResolutionWidth,
                FilterEffects.DataContext.DefaultPreviewResolutionHeight);
            _dataContext.PreviewResolution = fallbackResolution;
            _dataContext.FullResolution = fallbackResolution;
        }

        _mediaCapture.SetPreviewMirroring(false);
        await _mediaCapture.StartPreviewAsync();
    }

#if WINDOWS_PHONE_APP
    // In case front camera is used, we need to handle the rotations
    DisplayInformation displayInfo = DisplayInformation.GetForCurrentView();
    displayInfo.OrientationChanged += DisplayInfo_OrientationChanged;
    DisplayInfo_OrientationChanged(displayInfo, null);
#endif

    CameraState = CameraStates.Initialized;
    ProgressIndicator.IsActive = false;
    CaptureButton.IsEnabled = true;
    Debug.WriteLine(DebugTag + "InitializeCameraAsync() <-");
}
/// <summary>
/// (Re)initializes the camera: disposes any existing MediaCapture, picks a
/// camera device, forces a 1280x720 RGB24 preview, starts the preview and a
/// periodic frame-grab timer.
/// </summary>
private async Task InitCameraAsync()
{
    try
    {
        // Dispose the existing MediaCapture object, if any, before re-initializing.
        if (mediaCapture != null)
        {
            mediaCapture.Dispose();
            mediaCapture = null;
        }

        // Capture settings: video only; device id filled in below.
        var captureInitSettings = new MediaCaptureInitializationSettings();
        captureInitSettings.VideoDeviceId = "";
        captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Video;

        // Enumerate camera devices.
        var cameraDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
        if (cameraDevices.Count() == 0)
        {
            Debug.WriteLine("No Camera");
            return;
        }
        else if (cameraDevices.Count() == 1)
        {
            Debug.WriteLine("count1\n");
            captureInitSettings.VideoDeviceId = cameraDevices[0].Id;
        }
        else
        {
            // NOTE(review): with multiple cameras this always picks index 1 —
            // presumably to skip a built-in camera at index 0; confirm.
            Debug.WriteLine("countelse\n");
            captureInitSettings.VideoDeviceId = cameraDevices[1].Id;
        }

        // Prepare the capture.
        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(captureInitSettings);
        VideoEncodingProperties vp = new VideoEncodingProperties();
        Debug.WriteLine("before camera size\n");
        // On Raspberry Pi the video is garbled at high resolutions, so a small
        // resolution is used here (may be unnecessary on other hardware).
        vp.Height = 720;
        vp.Width = 1280;
        vp.Subtype = "RGB24";
        await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vp);
        capture.Source = mediaCapture;

        // Start the preview.
        await mediaCapture.StartPreviewAsync();
        Debug.WriteLine("Camera Initialized");

        // Fire the frame-grab timer at the configured rate.
        // NOTE(review): 1 ms == 1000 fps requested; the handler presumably
        // throttles itself — confirm this interval is intended.
        TimeSpan timerInterval = TimeSpan.FromMilliseconds(1);
        timer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(CurrentVideoFrame), timerInterval);
    }
    catch (Exception ex)
    {
        // Best-effort: log and swallow so a camera failure doesn't crash the app.
        Debug.Write(ex.Message);
    }
}
/// <summary>
/// Initializes the camera selected in settings, configures a &lt;=1080p preview
/// resolution, attaches a face-detection effect, and starts four preview
/// streams (one exclusive-control, three shared read-only) feeding the
/// captions, speech, tags and faces controls.
/// </summary>
/// <remarks>
/// Cleanup over the previous version: removed two unused locals
/// (<c>deviceList</c> and a duplicated GetAvailableMediaStreamProperties call),
/// fixed the comment that contradicted FaceDetectionMode.HighQuality, fixed
/// the "denided" typo in the log message, renamed <c>reslution</c>, and
/// removed a stray empty statement in the final catch.
/// </remarks>
private async Task StartPreviewAsync()
{
    try
    {
        // Pick the camera whose name matches the configured key; an empty id
        // lets MediaCapture choose the default device.
        var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
        var device = devices.FirstOrDefault(x => x.Name.Contains(settings.CameraKey));
        string deviceId = device == null ? "" : device.Id;

        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
        {
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            VideoDeviceId = deviceId
        });

        // Order the available preview formats by pixel throughput (pixels * fps), best first.
        var availableResolutions = mediaCapture.VideoDeviceController
            .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
            .Cast<VideoEncodingProperties>()
            .OrderByDescending(v => v.Width * v.Height * (v.FrameRate.Numerator / v.FrameRate.Denominator));

        // 1080p or lower
        var resolution = availableResolutions.FirstOrDefault(v => v.Height <= 1080);

        // Set the resolution to use for the preview stream.
        await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, resolution);

        // Three additional shared read-only captures of the same device, one
        // per consuming control.
        mediaCapture2 = new MediaCapture();
        await mediaCapture2.InitializeAsync(new MediaCaptureInitializationSettings() { SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId });
        mediaCapture3 = new MediaCapture();
        await mediaCapture3.InitializeAsync(new MediaCaptureInitializationSettings() { SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId });
        mediaCapture4 = new MediaCapture();
        await mediaCapture4.InitializeAsync(new MediaCaptureInitializationSettings() { SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId });

        // Keep the display awake while previewing, and lock to landscape.
        displayRequest.RequestActive();
        DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;

        // Create the definition, which will contain some initialization settings
        var definition = new FaceDetectionEffectDefinition();

        // To ensure preview smoothness, do not delay incoming samples
        definition.SynchronousDetectionEnabled = false;

        // FIX: the old comment claimed "detection speed over accuracy", but the
        // code selects HighQuality, which favors accuracy over speed.
        definition.DetectionMode = FaceDetectionMode.HighQuality;

        imageAnalysisRunning = false;

        // Add the effect to the preview stream
        _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

        // Choose the shortest interval between detection events
        _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(300);

        // Start detecting faces
        _faceDetectionEffect.Enabled = true;

        // Register for face detection events
        _faceDetectionEffect.FaceDetected += _faceDetectionEffect_FaceDetectedAsync;

        timerFailsafe.Start();
    }
    catch (Exception)
    {
        // This will be thrown if the user denied access to the camera in privacy settings
        Console.Write("The app was denied access to the camera");
        faceLastDate = DateTime.Now.Subtract(new TimeSpan(1, 1, 1));
        return;
    }

    try
    {
        captionsControl.MainCapture.Source = mediaCapture;
        speechControl.MainCapture.Source = mediaCapture2;
        tagsControl.MainCapture.Source = mediaCapture3;
        facesControl.MainCapture.Source = mediaCapture4;
        await mediaCapture.StartPreviewAsync();
        await mediaCapture2.StartPreviewAsync();
        await mediaCapture3.StartPreviewAsync();
        await mediaCapture4.StartPreviewAsync();
    }
    catch (Exception)
    {
        // Best-effort: preview start can fail if another app holds exclusive
        // control of the device; intentionally swallowed as before.
        //mediaCapture.CaptureDeviceExclusiveControlStatusChanged += MediaCapture_CaptureDeviceExclusiveControlStatusChanged;
    }
}
/// <summary>
/// Initializes MediaCapture in shared read-only mode on the first available
/// frame-source group, then creates a frame reader on the frame source at
/// index <c>id</c> using that source's first supported format.
/// </summary>
/// <returns>
/// True on success; false when the status is not Clean, a capture already
/// exists, no source group is found, or the frame reader cannot be created.
/// </returns>
/// <remarks>
/// NOTE(review): InitializeAsync is NOT wrapped in the try below — if it
/// throws, mediaCapture stays non-null and captureStatus stays Clean, so
/// subsequent calls short-circuit to false. Also, <c>id</c> indexes
/// FrameSources.Values whose ordering is presumably stable — confirm.
/// </remarks>
private async Task <bool> InitializeMediaCaptureAsync()
{
    // Guard: only run from a clean state.
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because of incorrect status");
        return (false);
    }

    // Guard: never double-initialize.
    if (mediaCapture != null)
    {
        return (false);
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    foreach (var group in allGroups)
    {
        Debug.Log(group.DisplayName + ", " + group.Id);
    }

    if (allGroups.Count <= 0)
    {
        Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
        return (false);
    }

    // Initialize mediacapture with the source group.
    mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings
    {
        SourceGroup = allGroups[0],
        // This media capture can share streaming with other apps.
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        // Only stream video and don't initialize audio capture devices.
        StreamingCaptureMode = StreamingCaptureMode.Video,
        // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
        // instead of preferring GPU D3DSurface images.
        MemoryPreference = MediaCaptureMemoryPreference.Cpu
    };

    await mediaCapture.InitializeAsync(settings);
    Debug.Log(TAG + " " + id + ": MediaCapture is successfully initialized in shared mode.");

    // Log every frame source and all of its supported formats (diagnostics).
    string logString = "";
    foreach (var frameSource in mediaCapture.FrameSources)
    {
        var info = frameSource.Value.Info;
        logString += info.Id + ", " + info.MediaStreamType + ", " + info.SourceKind + "\n";
        logString += "Total number of SupportedFormats is " + frameSource.Value.SupportedFormats.Count + "\n";
        foreach (var format in frameSource.Value.SupportedFormats)
        {
            logString += format.VideoFormat.Width + " x " + format.VideoFormat.Height +
                ", Major type: " + format.MajorType + ", Subtype: " + format.Subtype +
                ", Framerate: " + format.FrameRate.Numerator + "/" + format.FrameRate.Denominator + "\n";
        }
    }
    Debug.Log(logString);

    // Select the frame source by index and take its first supported format.
    MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
    MediaFrameFormat targetResFormat = targetFrameSource.SupportedFormats[0];
    try
    {
        // choose the smallest resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        // choose the specific resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();

        await targetFrameSource.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(TAG + " " + id + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
            ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
            ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + " " + id + ": FrameReader is not initialized");
        Debug.Log(TAG + " " + id + ": Exception: " + e);
        return (false);
    }

    captureStatus = CaptureStatus.Initialized;
    return (true);
}
/// <summary>
/// Creates the MediaCapture instance and initializes it with default settings
/// (system-chosen device, audio and video).
/// </summary>
/// <returns>A task that completes when initialization finishes.</returns>
/// <remarks>
/// FIX: changed from <c>async void</c> to <c>async Task</c> — this is not an
/// event handler, and async void made initialization exceptions unobservable.
/// Existing fire-and-forget callers still compile; new callers can await.
/// </remarks>
private async Task PrepareCamera()
{
    Capture = new MediaCapture();
    await Capture.InitializeAsync();
}
/// <summary>
/// Initializes the MediaCapture, registers events, selects a preview stream
/// resolution, gets camera device information for mirroring, and starts the
/// preview.
/// </summary>
/// <returns>A task that completes when initialization (and preview start) is done.</returns>
/// <remarks>
/// BUG FIX: the stream-property query and SetMediaStreamPropertiesAsync call
/// previously ran even when InitializeAsync failed (e.g. camera access
/// denied), which threw on the uninitialized VideoDeviceController. That work
/// is now guarded by _isInitialized. The commented-out video-profile
/// exploration code has been removed.
/// </remarks>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    if (_mediaCapture == null)
    {
        // Attempt to get the back camera if one is available, but use any camera device if not
        var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
        if (cameraDevice == null)
        {
            Debug.WriteLine("No camera device found!");
            return;
        }

        // Create MediaCapture and its settings
        _mediaCapture = new MediaCapture();

        // Register for a notification when something goes wrong
        _mediaCapture.Failed += MediaCapture_Failed;

        var mediaInitSettings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };

        // Initialize MediaCapture
        try
        {
            await _mediaCapture.InitializeAsync(mediaInitSettings);
            _isInitialized = true;
        }
        catch (UnauthorizedAccessException)
        {
            Debug.WriteLine("The app was denied access to the camera");
        }

        // If initialization succeeded, configure the stream and start the preview
        if (_isInitialized)
        {
            // Query all preview stream properties, ordered by resolution then frame rate
            IEnumerable<StreamPropertiesHelper> allStreamProperties =
                _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
                    .Select(x => new StreamPropertiesHelper(x))
                    .OrderBy(x => x.Height * x.Width)
                    .ThenBy(x => x.FrameRate);

            // TODO: picking index 1 only works for HoloLens
            var encodingProperties = allStreamProperties.ElementAt(1).EncodingProperties;
            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, encodingProperties);

            // Figure out where the camera is located
            if (cameraDevice.EnclosureLocation != null)
            {
                // Only mirror the preview if the camera is on the front panel
                _mirroringPreview = (cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
            }

            await StartPreviewAsync();
        }
    }
}
/// <summary>
/// Initializes the camera: loads the auto-shot setting, picks a device
/// (back panel preferred when several exist), initializes MediaCapture,
/// reports focus capability to the UI, draws the overlay for the preview
/// resolution, and starts the preview. All failures are mapped to localized
/// messages in errorMessage.Text.
/// </summary>
public async Task InitializeCameraAsync()
{
    try
    {
        // Lazily create the setup service and read the auto-shot mode.
        if (_setup == null)
        {
            _setup = new SetupService();
        }

        isAutoShot = await _setup.GetAutomode();

        // Only initialize once.
        if (_mediaCapture == null)
        {
            _mediaCapture = new MediaCapture();
            _mediaCapture.Failed += MediaCapture_Failed;
            _cameraDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            if (_cameraDevices == null || !_cameraDevices.Any())
            {
                // Handled by the NotSupportedException catch below.
                throw new NotSupportedException();
            }

            // Device selection: with several cameras prefer the back panel,
            // otherwise match the configured Panel property.
            DeviceInformation device;
            if (_cameraDevices.Count > 1)
            {
                device = _cameraDevices.FirstOrDefault(camera => camera.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Back);
            }
            else
            {
                device = _cameraDevices.FirstOrDefault(camera => camera.EnclosureLocation?.Panel == Panel);
            }

            // Fall back to the first enumerated camera when no panel matched.
            var cameraId = device?.Id ?? _cameraDevices.First().Id;

            await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = cameraId });

            // Report focus capability to the user via the status text.
            if (_mediaCapture.VideoDeviceController.FocusControl.Supported)
            {
                isAutoFocusCapable = true;
                errorMessage.Text = "VIZZoneInFront".GetLocalized();
            }
            else
            {
                isAutoFocusCapable = false;
                errorMessage.Text = "NoFocusCamera".GetLocalized();
            }

            // Use the preview stream's encoding properties to size the overlay.
            IMediaEncodingProperties IProps = this._mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
            vep = (VideoEncodingProperties)IProps;
            DrawLineOnCanvas(vep.Width, vep.Height);

            // NOTE(review): both branches set _mirroringPreview = false; the
            // commented-out rotation calls suggest the back-camera branch once
            // did more — confirm whether front cameras should mirror.
            if (Panel == Windows.Devices.Enumeration.Panel.Back)
            {
                //_mediaCapture.SetRecordRotation(VideoRotation.Clockwise90Degrees);
                //_mediaCapture.SetPreviewRotation(VideoRotation.Clockwise90Degrees);
                _mirroringPreview = false;
            }
            else
            {
                _mirroringPreview = false;
            }

            IsInitialized = true;
            CanSwitch = _cameraDevices?.Count > 1;
            RegisterOrientationEventHandlers();
            await StartPreviewAsync();
        }
    }
    catch (UnauthorizedAccessException)
    {
        errorMessage.Text = "Camera_Exception_UnauthorizedAccess".GetLocalized();
    }
    catch (NotSupportedException)
    {
        errorMessage.Text = "Camera_Exception_NotSupported".GetLocalized();
    }
    catch (TaskCanceledException)
    {
        errorMessage.Text = "Camera_Exception_InitializationCanceled".GetLocalized();
    }
    catch (Exception)
    {
        errorMessage.Text = "Camera_Exception_InitializationError".GetLocalized();
    }
}
/// <summary>
/// Click handler: creates the MediaCapture manager and initializes it with
/// default settings (system-chosen device).
/// </summary>
/// <param name="sender">The button that raised the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
/// <remarks>
/// async void is acceptable here because this is a top-level UI event handler;
/// note that any exception from InitializeAsync is unobservable by callers.
/// </remarks>
async private void InitMediaCapture_Click(object sender, RoutedEventArgs e)
{
    mediaCaptureManager = new MediaCapture();
    await mediaCaptureManager.InitializeAsync();
}
/// <summary>
/// (Re)initializes the camera: disposes any existing MediaCapture, selects a
/// camera device, forces a 320x240 YUY2 preview, starts the preview, and
/// launches a ~15 FPS frame-grab timer.
/// </summary>
/// <remarks>
/// FIX: removed the unused local that captured GetPreviewResolusions(...) —
/// its result was never read. Comments translated to English.
/// </remarks>
private async Task InitCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");
    try
    {
        // Dispose the existing MediaCapture object, if any, before re-initializing.
        if (mediaCapture != null)
        {
            mediaCapture.Dispose();
            mediaCapture = null;
        }

        // Capture settings: video only; device id filled in below.
        var captureInitSettings = new MediaCaptureInitializationSettings
        {
            VideoDeviceId = "",
            StreamingCaptureMode = StreamingCaptureMode.Video
        };

        // Enumerate camera devices.
        var cameraDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
        if (cameraDevices.Count() == 0)
        {
            Debug.WriteLine("No Camera");
            return;
        }
        else if (cameraDevices.Count() == 1)
        {
            captureInitSettings.VideoDeviceId = cameraDevices[0].Id;
        }
        else
        {
            // With multiple cameras, index 1 is used (presumably to skip the
            // built-in camera at index 0).
            captureInitSettings.VideoDeviceId = cameraDevices[1].Id;
        }

        // Prepare the capture.
        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(captureInitSettings);

        // Force a small YUY2 preview format.
        VideoEncodingProperties vp = new VideoEncodingProperties();
        vp.Height = 240;
        vp.Width = 320;
        vp.Subtype = "YUY2";
        await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vp);
        capture.Source = mediaCapture;

        // Start the preview.
        await mediaCapture.StartPreviewAsync();
        Debug.WriteLine("Camera Initialized");

        // Fire the frame-grab timer roughly every 66 ms (~15 FPS).
        TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
        timer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(CurrentVideoFrame), timerInterval);
    }
    catch (Exception ex)
    {
        // Best-effort: log and swallow so a camera failure doesn't crash the app.
        Debug.Write(ex.Message);
    }
}
/// <summary>
/// Initializes the capture pipeline on the UI dispatcher: configures JPEG
/// image quality and thread counters from <paramref name="videoSetting"/>,
/// initializes MediaCapture in exclusive mode on the first source group that
/// has a color source, and starts a frame reader on that source.
/// </summary>
/// <param name="videoSetting">Quality, thread-count and (currently unused) format settings.</param>
public async Task Initialize(VideoSetting videoSetting)
{
    // Everything runs on the core dispatcher because MediaCapture/encoding
    // setup here touches UI-affine state.
    await CoreApplication.MainView.CoreWindow.Dispatcher.RunAndAwaitAsync(CoreDispatcherPriority.Normal, async() =>
    {
        _threadsCount = videoSetting.UsedThreads;
        _stoppedThreads = videoSetting.UsedThreads;

        _lastFrameAdded.Start();

        // Encoder property set: JPEG quality taken from settings.
        _imageQuality = new BitmapPropertySet();
        var imageQualityValue = new BitmapTypedValue(videoSetting.VideoQuality, Windows.Foundation.PropertyType.Single);
        _imageQuality.Add("ImageQuality", imageQualityValue);

        _mediaCapture = new MediaCapture();

        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        // Keep only groups that expose a color source.
        var sourceGroups = frameSourceGroups.Select(g => new
        {
            Group = g,
            SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
        }).Where(g => g.SourceInfo != null).ToList();

        // NOTE(review): if no color source exists, SourceGroup is null here and
        // the FrameSources lookup below will fail — confirm that callers
        // guarantee at least one color-capable camera.
        var settings = new MediaCaptureInitializationSettings()
        {
            SourceGroup = sourceGroups?.FirstOrDefault()?.Group,
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            //With CPU the results contain always SoftwareBitmaps, otherwise with GPU
            //they preferring D3DSurface
            MemoryPreference = MediaCaptureMemoryPreference.Cpu,
            //Capture only video, no audio
            StreamingCaptureMode = StreamingCaptureMode.Video
        };

        await _mediaCapture.InitializeAsync(settings);

        var mediaFrameSource = this._mediaCapture.FrameSources[sourceGroups?.FirstOrDefault()?.SourceInfo.Id];

        /*
         * // Commented to reduce CPU usage.
         * var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;
         *
         * videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
         * videoDeviceController.PrimaryUse = Windows.Media.Devices.CaptureUse.Video;
         *
         * //Set exposure (auto light adjustment)
         * if (_mediaCapture.VideoDeviceController.Exposure.Capabilities.Supported
         * && _mediaCapture.VideoDeviceController.Exposure.Capabilities.AutoModeSupported)
         * {
         * _mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
         * }
         *
         * var videoResolutionWidthHeight = VideoResolutionWidthHeight.Get(videoSetting.VideoResolution);
         * var videoSubType = VideoSubtypeHelper.Get(videoSetting.VideoSubtype);
         *
         * //Set resolution, frame rate and video subtyp
         * var videoFormat = mediaFrameSource.SupportedFormats.Where(sf => sf.VideoFormat.Width == videoResolutionWidthHeight.Width
         * && sf.VideoFormat.Height == videoResolutionWidthHeight.Height
         * && sf.Subtype == videoSubType)
         * .OrderByDescending(m => m.FrameRate.Numerator / m.FrameRate.Denominator)
         * .First();
         *
         * await mediaFrameSource.SetFormatAsync(videoFormat);
         */

        // Start reading frames from the chosen color source.
        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
        await _mediaFrameReader.StartAsync();
    });
}
/// <summary>
/// 'Initialize Audio and Video' button action function.
/// Disposes any existing MediaCapture object (stopping its preview and
/// cleaning up the face-detection effect first), then re-initializes capture
/// for audio and video with default settings and starts the preview.
/// Button state is adjusted via SetInitButtonVisibility.
/// </summary>
/// <param name="sender">The button that raised the event.</param>
/// <param name="e">Event arguments.</param>
private async void initVideo_Click(object sender, RoutedEventArgs e)
{
    // Disable all buttons until initialization completes.
    SetInitButtonVisibility(Action.DISABLE);

    try
    {
        // Tear down any previous capture session before creating a new one.
        if (mediaCapture != null)
        {
            if (isPreviewing)
            {
                await mediaCapture.StopPreviewAsync();
                captureImage.Children.Clear();
                isPreviewing = false;
            }

            await CleanUpFaceDetectionEffectAsync();
            mediaCapture.Dispose();
            mediaCapture = null;
        }

        status.Text = "Initializing camera to capture audio and video...";

        // Default initialization: system-chosen device, audio + video.
        mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync();

        // Hook failure and record-limit notifications.
        mediaCapture.Failed += new MediaCaptureFailedEventHandler(mediaCapture_Failed);
        mediaCapture.RecordLimitationExceeded += new Windows.Media.Capture.RecordLimitationExceededEventHandler(MediaCapture_RecordLimitationExceeded);

        // Wire the preview element and start previewing.
        previewElement.Source = mediaCapture;
        await mediaCapture.StartPreviewAsync();
        isPreviewing = true;
        status.Text = "Camera preview succeeded";
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Unable to Initialize Camera for audio/video mode" + ex.Message);
        status.Text = "Unable to initialize camera for audio/video mode: " + ex.Message;
    }
}
/// <summary>
/// Enumerates the device's media frame source groups, locates the "Sensor Streaming"
/// group, and adds a CameraSource (with its reader set up) to DataSources for every
/// depth/infrared/color stream the user selected in Config.
/// </summary>
public async Task GetDataSources()
{
    // If pose is supported and selected add pose source
    //var ver = Windows.System.Profile.AnalyticsInfo.VersionInfo.DeviceFamily;
    //if (ver == "Windows.Holographic")
    //{
    //    bool isSelected;
    //    Config.SourceSelectionDictionary.TryGetValue(Config.Pose, out isSelected);
    //    if (isSelected)
    //    {
    //        DataSources.Add(new PoseSource(ref RosConnector, ref SharedTimer)
    //        {
    //            SourceName = Config.Pose,
    //            PublishPeriod = 1 / Config.HololensPoseFPS
    //        });
    //    }
    //}

    // Check for any available cameras
    var possibleSourceKinds = new MediaFrameSourceKind[]
    {
        MediaFrameSourceKind.Depth,
        MediaFrameSourceKind.Infrared,
        MediaFrameSourceKind.Color
    };

    var groups = await MediaFrameSourceGroup.FindAllAsync();

    // Find the group that exposes all of the sensors for streaming
    foreach (var g in groups)
    {
        if (g.DisplayName != "Sensor Streaming")
        {
            continue;
        }
        Debug.WriteLine("Found Sensor Streaming Source Group");

        var mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(
            new MediaCaptureInitializationSettings()
            {
                SourceGroup = g,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
        );

        var sources = mediaCapture.FrameSources
            .Where(fs => possibleSourceKinds.Contains(fs.Value.Info.SourceKind))
            .Select(fs => fs.Value);

        foreach (var source in sources)
        {
            // FIX: IndexOf returns -1 when the "Source#" marker is absent, which
            // previously made Substring throw; skip such sources instead.
            int markerIndex = source.Info.Id.IndexOf("Source#");
            if (markerIndex < 0)
            {
                continue;
            }
            string originalSourceName = source.Info.Id.Substring(markerIndex, 8);

            // FIX: the TryGetValue results were previously ignored, so a source
            // missing from the config dictionaries flowed on with null/default
            // values. Skip sources that are unknown or not selected.
            if (!Config.DataSourceDictionary.TryGetValue(originalSourceName, out string assignedSourceName))
            {
                continue;
            }
            if (!Config.SourceSelectionDictionary.TryGetValue(assignedSourceName, out bool isSelected) || !isSelected)
            {
                continue;
            }
            // FIX: reject a missing or non-positive frame rate, which would have
            // produced an infinite/negative publish period via division.
            if (!Config.FrameRateDictionary.TryGetValue(assignedSourceName, out double assignedFrameRate) || assignedFrameRate <= 0)
            {
                continue;
            }

            double assignedPublishPeriod = 1.0 / assignedFrameRate;
            int originalFPS = (int)source.Info.VideoProfileMediaDescription[0].FrameRate;

            CameraHandler handler = new CameraHandler(source.Info, mediaCapture, assignedPublishPeriod);
            await handler.SetupReaderAsync();
            DataSources.Add(new CameraSource(ref RosConnector, handler, assignedSourceName, assignedPublishPeriod)
            {
                Resolution = $"{ source.Info.VideoProfileMediaDescription[0].Width } x { source.Info.VideoProfileMediaDescription[0].Height }",
                OriginalFPS = originalFPS,
                SourceName = assignedSourceName
            });
        }
        break;
    }
}
/// <summary>
/// Detects the back camera, initializes the shared _mediaCapture with video-only
/// settings tuned for preview-based capture, configures continuous autofocus, and
/// starts the preview in the VideoCapture element.
/// NOTE(review): _mediaCapture is initialized here but never constructed in this
/// method — it is presumably created by the declaring class; confirm before reuse.
/// </summary>
private async void InitVideoCapture()
{
    // Camera detection: prefer the camera on the back panel
    var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
    if (cameraDevice == null)
    {
        // User-facing dialog text is Chinese for "No camera found!" (runtime string, unchanged)
        await new MessageDialog("没有找到摄像头!").ShowAsync();
        Debug.WriteLine("No camera device found!");
        return;
    }
    // Video-only capture; photos are taken from the preview stream
    var settings = new MediaCaptureInitializationSettings
    {
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MediaCategory = MediaCategory.Other,
        AudioProcessing = Windows.Media.AudioProcessing.Default,
        PhotoCaptureSource = PhotoCaptureSource.VideoPreview,
        VideoDeviceId = cameraDevice.Id
    };
    await _mediaCapture.InitializeAsync(settings);
    var focusControl = _mediaCapture.VideoDeviceController.FocusControl;
    if (focusControl.Supported)
    {
        var focusSettings = new FocusSettings()
        {
            Mode = focusControl.SupportedFocusModes.FirstOrDefault(f => f == FocusMode.Continuous),
            DisableDriverFallback = true,
            AutoFocusRange = focusControl.SupportedFocusRanges.FirstOrDefault(f => f == AutoFocusRange.FullRange),
            Distance = focusControl.SupportedFocusDistances.FirstOrDefault(f => f == ManualFocusDistance.Nearest)
        };
        // Configure focus; FocusMode.Continuous is preferred — otherwise captured
        // frames come out too blurry for recognition. (Translated from Chinese.)
        focusControl.Configure(focusSettings);
    }
    if (!SettingHelper.IsPc())
    {
        // Phone sensors are mounted rotated 90 degrees relative to the display
        _mediaCapture.SetPreviewRotation(VideoRotation.Clockwise90Degrees);
    }
    VideoCapture.Source = _mediaCapture;
    VideoCapture.FlowDirection = FlowDirection.LeftToRight;
    await _mediaCapture.StartPreviewAsync();

    // Orientation-sensor-driven rotation kept for reference; currently disabled.
    //SimpleOrientationSensor sensor = SimpleOrientationSensor.GetDefault();
    //sensor.OrientationChanged += (s, arg) =>
    //{
    //    switch (arg.Orientation)
    //    {
    //        case SimpleOrientation.Rotated90DegreesCounterclockwise:
    //            _mediaCapture.SetPreviewRotation(VideoRotation.None);
    //            break;
    //        case SimpleOrientation.Rotated180DegreesCounterclockwise:
    //        case SimpleOrientation.Rotated270DegreesCounterclockwise:
    //            _mediaCapture.SetPreviewRotation(VideoRotation.Clockwise180Degrees);
    //            break;
    //        default:
    //            _mediaCapture.SetPreviewRotation(VideoRotation.Clockwise90Degrees);
    //            break;
    //    }
    //};

    try
    {
        if (_mediaCapture.VideoDeviceController.FlashControl.Supported)
        {
            // Turn the flash off (translated from Chinese)
            _mediaCapture.VideoDeviceController.FlashControl.Enabled = false;
        }
    }
    catch { } // best-effort: some devices throw when FlashControl is touched
    if (focusControl.Supported)
    {
        // Kick off an initial focus pass (translated from Chinese)
        await focusControl.FocusAsync();
    }
    //var angle = CameraRotationHelper.ConvertSimpleOrientationToClockwiseDegrees(_rotationHelper.GetUIOrientation());
    // var transform = new RotateTransform { Angle = 90 };
    // VideoCapture.RenderTransform = transform;
}
/// <summary>
/// Initializes correlated color + depth capture: finds a frame source group exposing
/// color/depth, initializes MediaCapture for CPU frames in exclusive mode, selects
/// Rgb24/D16 formats at width >= 640, and starts a multi-source frame reader whose
/// frames arrive at FrameReader_FrameArrived. Also starts a background watchdog that
/// reports correlation failures through the CorrelationFailed event.
/// </summary>
private async void init()
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    LogError("checkpoint 1.1");
    // Keep only groups exposing at least one color or depth source
    var targetGroups = frameSourceGroups.Select(g => new
    {
        Group = g,
        SourceInfos = new MediaFrameSourceInfo[]
        {
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
            g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
        }
    }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();
    LogError("checkpoint 1.2");
    if (targetGroups.Count == 0)
    {
        LogError("No source groups found.");
        return;
    }

    MediaFrameSourceGroup mediaSourceGroup = targetGroups[0].Group;
    LogError("checkpoint 1.3");
    mediaCapture = new MediaCapture();
    LogError("checkpoint 1.4");
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = mediaSourceGroup,
        // Exclusive control is required to change the stream format below
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    LogError("checkpoint 1.5");
    await mediaCapture.InitializeAsync(settings);
    LogError("checkpoint 1.6");

    MediaFrameSource colorSource =
        mediaCapture.FrameSources.Values.FirstOrDefault(
            s => s.Info.SourceKind == MediaFrameSourceKind.Color);
    MediaFrameSource depthSource =
        mediaCapture.FrameSources.Values.FirstOrDefault(
            s => s.Info.SourceKind == MediaFrameSourceKind.Depth);
    LogError("checkpoint 1.7");
    if (colorSource == null || depthSource == null)
    {
        LogError("Cannot find color or depth stream.");
        return;
    }

    MediaFrameFormat colorFormat = colorSource.SupportedFormats.Where(format =>
    {
        return(format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.Rgb24);
    }).FirstOrDefault();
    MediaFrameFormat depthFormat = depthSource.SupportedFormats.Where(format =>
    {
        return(format.VideoFormat.Width >= 640 && format.Subtype == MediaEncodingSubtypes.D16);
    }).FirstOrDefault();
    // FIX: FirstOrDefault returns null when no format matches; the original passed
    // that null straight into SetFormatAsync, which fails with an opaque exception.
    if (colorFormat == null || depthFormat == null)
    {
        LogError("Cannot find a matching color (Rgb24) or depth (D16) format.");
        return;
    }

    await colorSource.SetFormatAsync(colorFormat);
    await depthSource.SetFormatAsync(depthFormat);
    _colorSourceId = colorSource.Info.Id;
    _depthSourceId = depthSource.Info.Id;

    _frameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(
        new[] { colorSource, depthSource });
    _frameReader.FrameArrived += FrameReader_FrameArrived;
    MultiSourceMediaFrameReaderStartStatus startStatus = await _frameReader.StartAsync();
    if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
    {
        throw new InvalidOperationException("Unable to start reader: " + startStatus);
    }

    this.CorrelationFailed += MainPage_CorrelationFailed;
    // FIX: explicitly discard the fire-and-forget watchdog task (silences CS4014 and
    // documents intent); failures surface via the CorrelationFailed event, not here.
    _ = Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
}
/// <summary>
/// Page-loaded handler: populates the webcam list for the UI, configures the STUN
/// server, requests A/V permission via a throwaway MediaCapture, and then initializes
/// the WebRTC native plugin on the UI thread.
/// NOTE(review): the "throw prevTask.Exception" / "throw accessTask.Exception" inside
/// ContinueWith continuations only fault the continuation task, which nothing
/// observes — those errors are effectively swallowed; confirm that's acceptable.
/// </summary>
private void OnLoaded(object sender, RoutedEventArgs e)
{
    LogMessage("Initializing the WebRTC native plugin...");

    // Populate the list of video capture devices (webcams).
    // On UWP this uses internally the API:
    //   Devices.Enumeration.DeviceInformation.FindAllAsync(VideoCapture)
    // Note that there's no API to pass a given device to WebRTC,
    // so there's no way to monitor and update that list if a device
    // gets plugged or unplugged. Even using DeviceInformation.CreateWatcher()
    // would yield some devices that might become unavailable by the time
    // WebRTC internally opens the video capture device.
    // This is more for demo purpose here because using the UWP API is nicer.
    {
        // Use a local list accessible from a background thread
        List <VideoCaptureDevice> vcds = new List <VideoCaptureDevice>(4);
        PeerConnection.GetVideoCaptureDevicesAsync().ContinueWith((prevTask) =>
        {
            if (prevTask.Exception != null)
            {
                throw prevTask.Exception;
            }

            var devices = prevTask.Result;
            vcds.Capacity = devices.Count;
            foreach (var device in devices)
            {
                vcds.Add(new VideoCaptureDevice()
                {
                    Id = device.id,
                    DisplayName = device.name,
                    Symbol = Symbol.Video
                });
            }

            // Assign on main UI thread because of XAML binding; otherwise it fails.
            RunOnMainThread(() =>
            {
                VideoCaptureDevices.Clear();
                foreach (var vcd in vcds)
                {
                    VideoCaptureDevices.Add(vcd);
                    LogMessage($"VCD id={vcd.Id} name={vcd.DisplayName}");
                }
            });
        });
    }

    //localVideo.TransportControls = localVideoControls;

    PluginInitialized = false;

    // Assign STUN server(s) before calling InitializeAsync()
    _peerConnection.Servers.Clear(); // We use only one server in this demo
    _peerConnection.Servers.Add("stun:" + stunServer.Text);

    // Ensure that the UWP app was authorized to capture audio (cap:microphone)
    // and video (cap:webcam), otherwise the native plugin will fail.
    try
    {
        // This MediaCapture exists only to trigger the OS permission prompt;
        // it is never used for actual capture.
        MediaCapture mediaAccessRequester = new MediaCapture();
        var mediaSettings = new MediaCaptureInitializationSettings
        {
            AudioDeviceId = "",
            VideoDeviceId = "",
            StreamingCaptureMode = StreamingCaptureMode.AudioAndVideo,
            PhotoCaptureSource = PhotoCaptureSource.VideoPreview
        };
        // Captured while still on the UI thread so the plugin init continuation
        // can run back on the UI thread below.
        var uiThreadScheduler = TaskScheduler.FromCurrentSynchronizationContext();
        mediaAccessRequester.InitializeAsync(mediaSettings).AsTask()
            .ContinueWith((accessTask) =>
        {
            if (accessTask.Exception != null)
            {
                LogMessage($"Access to A/V denied, check app permissions: {accessTask.Exception.Message}");
                throw accessTask.Exception;
            }

            _peerConnection.InitializeAsync().ContinueWith((initTask) =>
            {
                if (initTask.Exception != null)
                {
                    LogMessage($"WebRTC native plugin init failed: {initTask.Exception.Message}");
                    throw initTask.Exception;
                }
                OnPluginPostInit();
            }, uiThreadScheduler); // run task on caller (UI) thread
        });
    }
    //< TODO - This below shouldn't do anything since exceptions are caught and stored inside Task.Exception...
    catch (UnauthorizedAccessException uae)
    {
        LogMessage("Access to A/V denied: " + uae.Message);
    }
    catch (Exception ex)
    {
        if (ex.InnerException is UnauthorizedAccessException uae)
        {
            LogMessage("Access to A/V denied: " + uae.Message);
        }
        else
        {
            LogMessage("Failed to initialize A/V with unknown exception: " + ex.Message);
        }
    }
}
/// <summary>
/// Starts the camera preview ("开始预览") on the given CaptureElement: initializes
/// MainCamera for the currently selected device (NowCamera), collects supported
/// preview/photo/record sizes, starts the preview, reads the zoom range, and builds
/// a human-readable capability report into CameraShowInfo.
/// </summary>
/// <param name="element">UI element that will display the preview stream.</param>
/// <returns>false if no camera is selected or photo sizes are unavailable; true otherwise.</returns>
private static async Task <bool> StartPreviewt(CaptureElement element)
{
    CameraShowInfo = string.Empty;
    if (NowCamera == null)
    {
        return(false);
    }
    // Tear down any running preview before re-initializing
    StopPreview();
    MainCamera = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings();
    settings.VideoDeviceId = NowCamera.Id;
    settings.PhotoCaptureSource = PhotoCaptureSource.Auto;
    settings.StreamingCaptureMode = StreamingCaptureMode.Video;
    settings.AudioDeviceId = "";
    await MainCamera.InitializeAsync(settings);

    // Collect supported sizes per stream type; failures are deliberately ignored
    // because not every device exposes every stream type.
    if (PreviewSizeList == null)
    {
        PreviewSizeList = new List <CameraSizeInfo>();
    }
    try
    {
        GetPhotoSize(MediaStreamType.VideoPreview, PreviewSizeList);
    }
    catch (Exception ex) { }
    if (PhotoSizeList == null)
    {
        PhotoSizeList = new List <CameraSizeInfo>();
    }
    try
    {
        GetPhotoSize(MediaStreamType.Photo, PhotoSizeList);
    }
    catch (Exception ex) { }
    if (OldCameraHelper.PhotoSizeList == null)
    {
        return(false);
    }
    if (VideoRecordSizeList == null)
    {
        VideoRecordSizeList = new List <CameraSizeInfo>();
    }
    try
    {
        GetPhotoSize(MediaStreamType.VideoRecord, VideoRecordSizeList);
    }
    catch (Exception ex) { }

    element.Source = OldCameraHelper.MainCamera;
    await MainCamera.StartPreviewAsync();

    // Read the ZOOM range (min/max/step) if the device supports zoom ("获取ZOOM范围")
    if (MainCamera != null && MainCamera.VideoDeviceController != null && MainCamera.VideoDeviceController.Zoom != null)
    {
        ZoomMin = MainCamera.VideoDeviceController.Zoom.Capabilities.Min;
        ZoomMax = MainCamera.VideoDeviceController.Zoom.Capabilities.Max;
        ZoomStep = MainCamera.VideoDeviceController.Zoom.Capabilities.Step;
    }
    else
    {
        ZoomMin = 0;
        ZoomMax = 0;
        ZoomStep = 0;
    }

    /*
     * Reference note (condensed/translated from the original Chinese listing):
     * VideoDeviceController exposes, among others: Exposure/ExposureControl,
     * Focus/FocusControl, IsoSpeedControl, LowLagPhoto, LowLagPhotoSequence,
     * RegionsOfInterestControl, Zoom, BacklightCompensation, Brightness, Contrast,
     * ExposureCompensationControl, FlashControl, Hue, Pan, PrimaryUse, Roll,
     * SceneModeControl, Tilt, TorchControl, WhiteBalance, WhiteBalanceControl.
     */

    // Assemble the device capability report ("组织设备信息"). Each probe below is
    // wrapped in try/catch because querying an unsupported control can throw on
    // some hardware; the report then records "<Name>:False" for that control.
    try
    {
        var vdc = MainCamera.VideoDeviceController;
        CameraShowInfo += "Id:" + NowCamera.Id + "\r\n";
        CameraShowInfo += "PreviewSizeList:" + string.Join(",", PreviewSizeList.Select(li => li.Width + "*" + li.Height)) + "\r\n";
        CameraShowInfo += "PhotoSizeList:" + string.Join(",", PhotoSizeList.Select(li => li.Width + "*" + li.Height)) + "\r\n";
        CameraShowInfo += "VideoRecordSizeList:" + string.Join(",", VideoRecordSizeList.Select(li => li.Width + "*" + li.Height)) + "\r\n";
        string strVideoDeviceCharacteristic = "";
        // Map the stream-relationship enum to a display string
        switch (MainCamera.MediaCaptureSettings.VideoDeviceCharacteristic)
        {
            case VideoDeviceCharacteristic.AllStreamsIndependent:
                strVideoDeviceCharacteristic = "AllStreamsIndependent";
                break;
            case VideoDeviceCharacteristic.PreviewRecordStreamsIdentical:
                strVideoDeviceCharacteristic = "PreviewRecordStreamsIdentical";
                break;
            case VideoDeviceCharacteristic.PreviewPhotoStreamsIdentical:
                strVideoDeviceCharacteristic = "PreviewPhotoStreamsIdentical";
                break;
            case VideoDeviceCharacteristic.RecordPhotoStreamsIdentical:
                strVideoDeviceCharacteristic = "RecordPhotoStreamsIdentical";
                break;
            case VideoDeviceCharacteristic.AllStreamsIdentical:
                strVideoDeviceCharacteristic = "AllStreamsIdentical";
                break;
            default:
                break;
        }
        CameraShowInfo += "VideoDeviceCharacteristic:" + strVideoDeviceCharacteristic + "\r\n";
        // (Original Chinese VideoDeviceCharacteristic enum documentation condensed:
        // the cases above describe which of the preview/record/photo streams share
        // a single physical stream on this device.)
        try { CameraShowInfo += "Exposure:" + vdc.Exposure.Capabilities.Supported + ",AutoModeSupported:" + vdc.Exposure.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Exposure:False\r\n"; }
        try { CameraShowInfo += "ExposureControl:" + vdc.ExposureControl.Supported + ",Auto" + vdc.ExposureControl.Auto + "\r\n"; }
        catch { CameraShowInfo += "ExposureControl:False\r\n"; }
        try { CameraShowInfo += "Focus:" + vdc.Focus.Capabilities.Supported + ",AutoModeSupported" + vdc.Focus.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Focus:False\r\n"; }
        try { CameraShowInfo += "FocusControl:" + vdc.FocusControl.Supported + "\r\n"; }
        catch { CameraShowInfo += "FocusControl:False\r\n"; }
        try { CameraShowInfo += "IsoSpeedControl:" + vdc.IsoSpeedControl.Supported + "\r\n"; }
        catch { CameraShowInfo += "IsoSpeedControl:False\r\n"; }
        try { CameraShowInfo += "LowLagPhoto-HardwareAcceleratedThumbnailSupported:" + vdc.LowLagPhoto.HardwareAcceleratedThumbnailSupported + "\r\n"; }
        catch { CameraShowInfo += "LowLagPhoto:False\r\n"; }
        try { CameraShowInfo += "LowLagPhotoSequence:" + vdc.LowLagPhotoSequence.Supported + ",HardwareAcceleratedThumbnailSupported:" + vdc.LowLagPhotoSequence.HardwareAcceleratedThumbnailSupported + "\r\n"; }
        catch { CameraShowInfo += "LowLagPhotoSequence:False\r\n"; }
        try { CameraShowInfo += "RegionsOfInterestControl-MaxRegions:" + vdc.RegionsOfInterestControl.MaxRegions + ",AutoExposureSupported:" + vdc.RegionsOfInterestControl.AutoExposureSupported + ",AutoFocusSupported:" + vdc.RegionsOfInterestControl.AutoFocusSupported + ",AutoWhiteBalanceSupported:" + vdc.RegionsOfInterestControl.AutoWhiteBalanceSupported + "\r\n"; }
        catch { CameraShowInfo += "RegionsOfInterestControl:False\r\n"; }
        try { CameraShowInfo += "Zoom:" + vdc.Zoom.Capabilities.Supported + ",AutoModeSupported:" + vdc.Zoom.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Zoom:False\r\n"; }
        try { CameraShowInfo += "BacklightCompensation:" + vdc.BacklightCompensation.Capabilities.Supported + ",AutoModeSupported:" + vdc.BacklightCompensation.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "BacklightCompensation:False\r\n"; }
        try { CameraShowInfo += "Brightness:" + vdc.Brightness.Capabilities.Supported + ",AutoModeSupported:" + vdc.Brightness.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Brightness:False\r\n"; }
        try { CameraShowInfo += "Contrast:" + vdc.Contrast.Capabilities.Supported + ",AutoModeSupported:" + vdc.Contrast.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Contrast:False\r\n"; }
        try { CameraShowInfo += "ExposureCompensationControl:" + vdc.ExposureCompensationControl.Supported + "\r\n"; }
        catch { CameraShowInfo += "ExposureCompensationControl:False\r\n"; }
        try { CameraShowInfo += "FlashControl:" + vdc.FlashControl.Supported + ",Auto:" + vdc.FlashControl.Auto + ",Enabled:" + vdc.FlashControl.Enabled + ",PowerPercent:" + vdc.FlashControl.PowerPercent + ",PowerSupported:" + vdc.FlashControl.PowerSupported + ",RedEyeReduction:" + vdc.FlashControl.RedEyeReduction + ",RedEyeReductionSupported:" + vdc.FlashControl.RedEyeReductionSupported + "\r\n"; }
        catch { CameraShowInfo += "FlashControl:False\r\n"; }
        try { CameraShowInfo += "Hue:" + vdc.Hue.Capabilities.Supported + ",AutoModeSupported:" + vdc.Hue.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Hue:False\r\n"; }
        try { CameraShowInfo += "Pan:" + vdc.Pan.Capabilities.Supported + ",AutoModeSupported:" + vdc.Pan.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Pan:False\r\n"; }
        try { CameraShowInfo += "Roll:" + vdc.Roll.Capabilities.Supported + ",AutoModeSupported:" + vdc.Roll.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Roll:False\r\n"; }
        try { CameraShowInfo += "Tilt:" + vdc.Tilt.Capabilities.Supported + ",AutoModeSupported:" + vdc.Tilt.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "Tilt:False\r\n"; }
        try { CameraShowInfo += "TorchControl:" + vdc.TorchControl.Supported + ",Enabled:" + vdc.TorchControl.Enabled + ",PowerPercent:" + vdc.TorchControl.PowerPercent + ",PowerSupported:" + vdc.TorchControl.PowerSupported + "\r\n"; }
        catch { CameraShowInfo += "TorchControl:False\r\n"; }
        try { CameraShowInfo += "WhiteBalance:" + vdc.WhiteBalance.Capabilities.Supported + ",AutoModeSupported:" + vdc.WhiteBalance.Capabilities.AutoModeSupported + "\r\n"; }
        catch { CameraShowInfo += "WhiteBalance:False\r\n"; }
        try { CameraShowInfo += "WhiteBalanceControl:" + vdc.WhiteBalanceControl.Supported + "\r\n"; }
        catch { CameraShowInfo += "WhiteBalanceControl:False\r\n"; }
    }
    catch (Exception ex)
    {
        // If building the report itself fails, the report becomes the exception text
        CameraShowInfo = ex.ToString();
    }
    return(true);
}
/// <summary>
/// Initializes the MediaCapture, registers events, gets camera device information for mirroring and rotating, starts preview and unlocks the UI
/// </summary>
/// <returns>A task that completes when initialization (or its failure) is done.</returns>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    if (mediaCapture == null)
    {
        // Attempt to get the back camera if one is available, but use any camera device if not
        var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
        if (cameraDevice == null)
        {
            Debug.WriteLine("No camera device found!");
            return;
        }

        // Create MediaCapture and its settings
        mediaCapture = new MediaCapture();

        // Register for a notification when video recording has reached the maximum time and when something goes wrong
        mediaCapture.RecordLimitationExceeded += MediaCapture_RecordLimitationExceeded;
        mediaCapture.Failed += MediaCapture_Failed;

        var settings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };

        // Initialize MediaCapture
        try
        {
            await mediaCapture.InitializeAsync(settings);
            isInitialized = true;
        }
        catch (UnauthorizedAccessException)
        {
            Debug.WriteLine("The app was denied access to the camera");
        }
        catch (Exception ex)
        {
            // FIX: InitializeAsync can also fail with e.g. a COMException when the
            // camera is in use by another app; previously any non-access exception
            // escaped this method and crashed the caller. Log it and leave
            // isInitialized false, matching the other InitializeCameraAsync
            // implementations in this codebase.
            Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
        }

        // If initialization succeeded, configure mirroring/rotation and start the preview
        if (isInitialized)
        {
            // Figure out where the camera is located
            if (cameraDevice.EnclosureLocation == null || cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown)
            {
                // No information on the location of the camera, assume it's an external camera, not integrated on the device
                externalCamera = true;
            }
            else
            {
                // Camera is fixed on the device
                externalCamera = false;

                // Only mirror the preview if the camera is on the front panel
                mirroringPreview = (cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
            }

            // Initialize rotationHelper so the preview/photos follow device orientation
            rotationHelper = new CameraRotationHelper(cameraDevice.EnclosureLocation);
            rotationHelper.OrientationChanged += RotationHelper_OrientationChanged;

            await StartPreviewAsync();
            UpdateCaptureControls();
        }
    }
}
/// <summary>
/// (Re)starts the webcam stream: if the capture manager is missing or not streaming,
/// it is rebuilt for the preferred camera (explicit argument, then the last-used
/// camera, then the camera named in settings), configured for the requested
/// resolution, and preview is started. Optionally starts a periodic face-tracking /
/// frame-processing timer at ~15 fps.
/// </summary>
/// <param name="isForRealTimeProcessing">True to pick an encoding suited to real-time analysis.</param>
/// <param name="desiredCamera">Specific camera to use; overrides settings and last-used camera.</param>
public async Task StartStreamAsync(bool isForRealTimeProcessing = false, DeviceInformation desiredCamera = null)
{
    try
    {
        // Rebuild the capture manager if it doesn't exist or isn't streaming
        if (captureManager == null ||
            captureManager.CameraStreamState == CameraStreamState.Shutdown ||
            captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            loadingOverlay.Visibility = Visibility.Visible;

            if (captureManager != null)
            {
                captureManager.Dispose();
            }

            captureManager = new MediaCapture();

            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();

            // Camera selection priority: explicit argument > last-used > settings name
            var allCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            var selectedCamera = allCameras.FirstOrDefault(c => c.Name == SettingsHelper.Instance.CameraName);
            if (desiredCamera != null)
            {
                selectedCamera = desiredCamera;
            }
            else if (lastUsedCamera != null)
            {
                selectedCamera = lastUsedCamera;
            }

            if (selectedCamera != null)
            {
                settings.VideoDeviceId = selectedCamera.Id;
                lastUsedCamera = selectedCamera;
            }

            // Only offer the switch-camera button when there is something to switch to
            cameraSwitchButton.Visibility = allCameras.Count > 1 ? Visibility.Visible : Visibility.Collapsed;

            await captureManager.InitializeAsync(settings);
            await SetVideoEncodingToHighestResolution(isForRealTimeProcessing);
            isStreamingOnRealtimeResolution = isForRealTimeProcessing;

            //rotate the camera
            captureManager.SetPreviewRotation(SettingsHelper.Instance.CameraRotation);

            this.webCamCaptureElement.Source = captureManager;
        }

        if (captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (PerformFaceTracking || CameraFrameProcessor != null)
            {
                if (this.faceTracker == null)
                {
                    this.faceTracker = await FaceTracker.CreateAsync();
                }

                // Replace any running processing timer; release the semaphore so the
                // next timer tick can acquire it.
                if (this.frameProcessingTimer != null)
                {
                    this.frameProcessingTimer.Cancel();
                    frameProcessingSemaphore.Release();
                }
                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); //15fps
                this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
            }

            // Cache the preview stream properties for downstream frame processing
            this.videoProperties = this.captureManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            await captureManager.StartPreviewAsync();

            this.webCamCaptureElement.Visibility = Visibility.Visible;
            loadingOverlay.Visibility = Visibility.Collapsed;
        }
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "Error starting the camera.");
    }
}
/// <summary>
/// Initializes MediaCapture for the HoloLens 1 ("MN34150") or HoloLens 2
/// ("QC Back Camera") camera source group, selects the supported format closest to
/// the target resolution/frame rate, and creates a frame reader wired to
/// OnFrameArrived. Sets captureStatus to Initialized on success.
/// </summary>
/// <returns>true on success; false if the status is wrong, capture already exists,
/// no suitable source group is found, or the frame reader cannot be created.</returns>
private async Task <bool> InitializeMediaCaptureAsync()
{
    if (captureStatus != CaptureStatus.Clean)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because of incorrect status");
        return(false);
    }
    if (mediaCapture != null)
    {
        return(false);
    }

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    int selectedGroupIndex = -1;
    for (int i = 0; i < allGroups.Count; i++)
    {
        var group = allGroups[i];
        Debug.Log(group.DisplayName + ", " + group.Id);
        // for HoloLens 1
        if (group.DisplayName == "MN34150")
        {
            selectedGroupIndex = i;
            HL = 1;
            Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
            break;
        }
        // for HoloLens 2
        else if (group.DisplayName == "QC Back Camera")
        {
            selectedGroupIndex = i;
            HL = 2;
            Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
            break;
        }
    }

    if (selectedGroupIndex == -1)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
        return(false);
    }

    // Initialize mediacapture with the source group.
    mediaCapture = new MediaCapture();
    MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;

    if (HL == 1)
    {
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
        mediaStreamType = MediaStreamType.VideoPreview;
    }
    else if (HL == 2)
    {
        string deviceId = allGroups[selectedGroupIndex].Id;

        // Look up for all video profiles
        IReadOnlyList <MediaCaptureVideoProfile> profileList = MediaCapture.FindAllVideoProfiles(deviceId);
        //MediaCaptureVideoProfile selectedProfile;
        //IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

        // Initialize mediacapture with the source group.
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            //VideoDeviceId = deviceId,
            //VideoProfile = profileList[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
        mediaStreamType = MediaStreamType.VideoRecord;
    }

    try
    {
        var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
        MediaFrameFormat targetResFormat = null;
        float framerateDiffMin = 60f;
        foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            // Check current media frame source resolution versus target resolution
            if (f.VideoFormat.Width == _targetVideoWidth && f.VideoFormat.Height == _targetVideoHeight)
            {
                // FIX: FrameRate.Numerator/Denominator are unsigned integers, so the
                // original "Numerator / Denominator" truncated fractional rates
                // (e.g. 30000/1001 -> 29) before the comparison; cast to float to
                // compare against the true frame rate.
                float framerateDiff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate);
                if (targetResFormat == null || framerateDiff < framerateDiffMin)
                {
                    targetResFormat = f;
                    framerateDiffMin = framerateDiff;
                }
            }
        }
        if (targetResFormat == null)
        {
            targetResFormat = mediaFrameSourceVideo.SupportedFormats[0];
            Debug.Log(TAG + ": Unable to choose the selected format, fall back");
        }

        // choose the smallest resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        // choose the specific resolution
        //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();

        await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);
        Debug.Log(TAG + ": mediaFrameSourceVideo.SetFormatAsync()");
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);
        Debug.Log(TAG + ": mediaCapture.CreateFrameReaderAsync()");
        frameReader.FrameArrived += OnFrameArrived;
        videoWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
        Debug.Log(TAG + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                  ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return(false);
    }

    captureStatus = CaptureStatus.Initialized;
    return(true);
}
/// <summary>
/// Start the video stream. This just prepares the stream for capture, and doesn't start collecting frames.
/// </summary>
/// <param name="streamDesc">The description of the stream to start.</param>
/// <exception cref="InvalidOperationException">Thrown if the camera is not in the Initialized state.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="streamDesc"/> is not a StreamDescriptionInternal.</exception>
public async void Start(StreamDescription streamDesc)
{
#if CAN_USE_UWP_TYPES
    lock (stateLock)
    {
        if (State != CameraState.Initialized)
        {
            throw new InvalidOperationException("Start cannot be called until the camera is in the Initialized state");
        }

        State = CameraState.Starting;
    }

    Resolution = streamDesc.Resolution;
    CameraType = streamDesc.CameraType;

    StreamDescriptionInternal desc = streamDesc as StreamDescriptionInternal;
    if (desc == null)
    {
        // Fixed: fail fast with a meaningful exception instead of a
        // NullReferenceException when dereferencing desc below.
        throw new ArgumentException("streamDesc must be a StreamDescriptionInternal", nameof(streamDesc));
    }

    MediaCaptureInitializationSettings initSettings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = desc.FrameSourceGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };

    // initialize the media device
    mediaCapture = new MediaCapture();
    try
    {
        await mediaCapture.InitializeAsync(initSettings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"MediaCapture initialization failed: {ex.Message}");
        mediaCapture.Dispose();
        mediaCapture = null;
    }

    if (mediaCapture != null)
    {
        // get access to the video device controller for property settings
        videoDeviceController = mediaCapture.VideoDeviceController;

        // choose media source matching the requested resolution and frame rate
        MediaFrameSource frameSource = mediaCapture.FrameSources[desc.FrameSourceInfo.Id];
        MediaFrameFormat preferredFormat = null;
        foreach (MediaFrameFormat format in frameSource.SupportedFormats)
        {
            if (format.VideoFormat.Width == desc.Resolution.Width &&
                format.VideoFormat.Height == desc.Resolution.Height &&
                Math.Abs((double)format.FrameRate.Numerator / (double)format.FrameRate.Denominator - desc.Resolution.Framerate) < epsilon)
            {
                preferredFormat = format;
                break;
            }
        }

        if (preferredFormat == null)
        {
            // Fixed: only report a failure when no matching format exists. The original
            // also logged "failed" when the preferred format was already the current
            // format, in which case no action is needed.
            System.Diagnostics.Debug.WriteLine($"failed to set desired frame format");
        }
        else if (preferredFormat != frameSource.CurrentFormat)
        {
            await frameSource.SetFormatAsync(preferredFormat);
        }

        // set up frame reader to capture frame data
        frameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
        frameReader.FrameArrived += OnMediaFrameArrived;
        await frameReader.StartAsync();

        lock (stateLock)
        {
            State = CameraState.Ready;
            OnCameraStarted?.Invoke(this, true);
        }
    }
    else
    {
        lock (stateLock)
        {
            // drop back to initialized when the camera doesn't initialize
            State = CameraState.Initialized;
            OnCameraStarted?.Invoke(this, false);
        }
    }
#else
    await Task.CompletedTask;
#endif
}
/// <summary>
/// The Task to asynchronously initialize MediaCapture in UWP. The camera of HoloLens will
/// be configured according to the selected videoParameter. MediaFrameReader will be
/// initialized and register the callback function OnFrameArrived to each video frame.
/// Note that this task does not start running the video preview, but configures the
/// running behavior. This task should be executed when ARUWPController status is
/// ARUWP_STATUS_CLEAN, and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no error
/// occurred. [internal use]
/// </summary>
/// <returns>Whether video pipeline is successfully initialized</returns>
public async Task<bool> InitializeMediaCaptureAsyncTask()
{
    if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
        return false;
    }
    if (mediaCapture != null)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
        return false;
    }

    // Translate the requested video parameter into device generation (HL) and target format.
    int targetVideoWidth, targetVideoHeight;
    float targetVideoFrameRate;
    switch (videoParameter)
    {
        case VideoParameter.HL1Param1280x720x15:
            HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param1280x720x30:
            HL = 1; targetVideoWidth = 1280; targetVideoHeight = 720; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL1Param1344x756x15:
            HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param1344x756x30:
            HL = 1; targetVideoWidth = 1344; targetVideoHeight = 756; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL1Param896x504x15:
            HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 15.0f;
            break;
        case VideoParameter.HL1Param896x504x30:
            HL = 1; targetVideoWidth = 896; targetVideoHeight = 504; targetVideoFrameRate = 30.0f;
            break;
        case VideoParameter.HL2Param1504x846x60:
            HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 60.0f;
            break;
        case VideoParameter.HL2Param1504x846x30:
            HL = 2; targetVideoWidth = 1504; targetVideoHeight = 846; targetVideoFrameRate = 30.0f;
            break;
        default:
            // Fixed: removed the unreachable "break" that followed this return.
            return false;
    }

    // Select the frame source group belonging to the HoloLens photo/video camera.
    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    int selectedGroupIndex = -1;
    for (int i = 0; i < allGroups.Count; i++)
    {
        var group = allGroups[i];
        Debug.Log(group.DisplayName + ", " + group.Id);
        // for HoloLens 1, the camera is exposed as the MN34150 sensor
        if (HL == 1)
        {
            if (group.DisplayName == "MN34150")
            {
                selectedGroupIndex = i;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
                break;
            }
        }
        // for HoloLens 2, the camera is exposed as "QC Back Camera"
        else if (HL == 2)
        {
            if (group.DisplayName == "QC Back Camera")
            {
                selectedGroupIndex = i;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
                break;
            }
        }
    }
    if (selectedGroupIndex == -1)
    {
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
        return false;
    }

    // Initialize mediacapture with the source group.
    mediaCapture = new MediaCapture();
    MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;
    if (HL == 1)
    {
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[selectedGroupIndex],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
        mediaStreamType = MediaStreamType.VideoPreview;
    }
    else if (HL == 2)
    {
        string deviceId = allGroups[selectedGroupIndex].Id;
        // HoloLens 2 requires initialization through a video profile; the
        // VideoConferencing profile is used here.
        IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

        // Initialize mediacapture with the source group.
        var settings = new MediaCaptureInitializationSettings
        {
            VideoDeviceId = deviceId,
            VideoProfile = profileList[0],
            // Exclusive control is needed to change the source format on HoloLens 2.
            SharingMode = MediaCaptureSharingMode.ExclusiveControl,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };
        await mediaCapture.InitializeAsync(settings);
        Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
        mediaStreamType = MediaStreamType.VideoRecord;
    }

    try
    {
        var mediaFrameSourceVideo = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
        MediaFrameFormat targetResFormat = null;
        float framerateDiffMin = 60f;
        foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
        {
            // Check current media frame source resolution versus target resolution
            if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
            {
                // Fixed: cast to float before dividing. Numerator/Denominator are
                // integers, so the original integer division truncated fractional
                // frame rates (e.g. 30000/1001 gave 29 instead of 29.97) and could
                // select the wrong format.
                float diff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                if (targetResFormat == null || diff < framerateDiffMin)
                {
                    targetResFormat = f;
                    framerateDiffMin = diff;
                }
            }
        }
        if (targetResFormat == null)
        {
            Debug.Log(TAG + ": Unable to choose the selected format, fall back");
            // Fixed: removed a redundant SupportedFormats[0] assignment that was
            // immediately overwritten. Fall back to the smallest supported resolution.
            targetResFormat = mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
        }

        await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);
        frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);
        frameReader.FrameArrived += OnFrameArrived;
        controller.frameWidth = Convert.ToInt32(targetResFormat.VideoFormat.Width);
        controller.frameHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);

        // if controller.frameWidth is not aligned with 64, then pad to 64
        // on HoloLens 2, it is a must
        if (controller.frameWidth % 64 != 0)
        {
            int paddedFrameWidth = ((controller.frameWidth >> 6) + 1) << 6;
            Debug.Log(TAG + ": the width is padded to " + paddedFrameWidth);
            controller.frameWidth = paddedFrameWidth;
        }

        // Since v0.3, feature grayscale is forced (one byte per pixel).
        frameData = new byte[controller.frameWidth * controller.frameHeight];
        Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight +
                  ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
    }
    catch (Exception e)
    {
        Debug.Log(TAG + ": FrameReader is not initialized");
        Debug.Log(TAG + ": Exception: " + e);
        return false;
    }

    controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
    signalInitDone = true;
    Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
    return true;
}
/// <summary>
/// Initializes the MediaCapture, registers events, gets camera device information for mirroring and rotating, and starts preview.
/// </summary>
private async Task InitializeCameraAsync()
{
    // Nothing to do if the capture pipeline already exists.
    if (mediaCapture != null)
    {
        return;
    }

    // Prefer a back-panel camera; the helper falls back to any available device.
    var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
    if (cameraDevice == null)
    {
        rootPage.NotifyUser("No camera device!", NotifyType.ErrorMessage);
        return;
    }

    // Create the capture object and hook up failure notifications.
    mediaCapture = new MediaCapture();
    mediaCapture.Failed += MediaCapture_Failed;

    // Initialize MediaCapture against the chosen device.
    try
    {
        await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id });
        isInitialized = true;
    }
    catch (UnauthorizedAccessException)
    {
        rootPage.NotifyUser("Denied access to the camera.", NotifyType.ErrorMessage);
    }
    catch (Exception ex)
    {
        rootPage.NotifyUser("Exception when init MediaCapture. " + ex.Message, NotifyType.ErrorMessage);
    }

    if (!isInitialized)
    {
        return;
    }

    // Work out the camera's physical placement to decide on mirroring.
    var location = cameraDevice.EnclosureLocation;
    if (location == null || location.Panel == Windows.Devices.Enumeration.Panel.Unknown)
    {
        // No placement information: treat it as an external (non-integrated) camera.
        externalCamera = true;
    }
    else
    {
        externalCamera = false;
        // Mirror only when the camera faces the user.
        mirroringPreview = (location.Panel == Windows.Devices.Enumeration.Panel.Front);
    }

    await StartPreviewAsync();
}
/// <summary>
/// Initializes the capture device with the given settings, selects the preview stream
/// properties closest to the desired resolution and frame rate, attaches the capture
/// effect and starts the preview.
/// </summary>
/// <param name="settings">The initialization settings for the MediaCapture device.</param>
private async void StartAsync(MediaCaptureInitializationSettings settings)
{
    await Capture.InitializeAsync(settings);

    var veps = Capture.VideoDeviceController
        .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
        .Where(x => x is VideoEncodingProperties)
        .Cast<VideoEncodingProperties>()
        .ToList();

    if (veps.Count > 0)
    {
        // Narrow down by the closest resolution first, then by the closest frame rate.
        if (DesiredWidth > 0 || DesiredHeight > 0)
        {
            veps = FilterByMinDistance(veps, GetSizeDistance);
        }
        if (DesiredFrameRate > 0)
        {
            veps = FilterByMinDistance(veps, GetFrameRateDistance);
        }

        Log.DebugFormat("Found {0} matching video profile(s).", veps.Count.ToString());
        if (veps.Count > 0)
        {
            var vep = veps[0];
            Log.DebugFormat("Using ({0} x {1}, {2} fps, {3}) video profile.", vep.Width.ToString(), vep.Height.ToString(), (vep.FrameRate.Numerator / vep.FrameRate.Denominator).ToString(), vep.Subtype);
            // Set properties.
            await Capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vep);
        }
    }

    Preview.Source = Capture;

    var properties = new PropertySet();
    properties.MapChanged += PropertiesChanges;
    await Capture.AddEffectAsync(MediaStreamType.VideoPreview, "VideoCaptureTransform.CaptureEffect", properties);
    await Capture.StartPreviewAsync();
}

/// <summary>
/// Returns the candidates whose distance (as computed by <paramref name="distanceOf"/>)
/// is minimal. Extracted from two duplicated selection loops in StartAsync.
/// </summary>
/// <param name="candidates">Candidate encoding properties to filter.</param>
/// <param name="distanceOf">Computes a non-negative distance for a candidate.</param>
private static List<VideoEncodingProperties> FilterByMinDistance(List<VideoEncodingProperties> candidates, Func<VideoEncodingProperties, int> distanceOf)
{
    var closest = new List<VideoEncodingProperties>();
    var distance = -1;
    foreach (var vep in candidates)
    {
        var d = distanceOf(vep);
        if (distance < 0 || d < distance)
        {
            // New best distance: restart the result list.
            closest = new List<VideoEncodingProperties> { vep };
            distance = d;
        }
        else if (d == distance)
        {
            closest.Add(vep);
        }
    }
    return closest;
}
/// <summary>
/// Initializes the MediaCapture, registers events, gets camera device information for mirroring and rotating, starts preview and unlocks the UI
/// </summary>
/// <returns></returns>
private async Task InitializeCameraAsync()
{
    Debug.WriteLine("InitializeCameraAsync");

    if (_mediaCapture == null)
    {
        // Attempt to get the back camera if one is available, but use any camera device if not
        var cameraDevice = await FindCameraDeviceByPanelAsync(Windows.Devices.Enumeration.Panel.Back);
        if (cameraDevice == null)
        {
            Debug.WriteLine("No camera device found!");
            return;
        }

        // Create MediaCapture and its settings
        _mediaCapture = new MediaCapture();

        // Register for a notification when video recording has reached the maximum time and when something goes wrong
        _mediaCapture.RecordLimitationExceeded += MediaCapture_RecordLimitationExceeded;
        _mediaCapture.Failed += MediaCapture_Failed;

        var settings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id, StreamingCaptureMode = StreamingCaptureMode.Video };

        // Initialize MediaCapture
        try
        {
            await _mediaCapture.InitializeAsync(settings);
            _isInitialized = true;
        }
        catch (UnauthorizedAccessException)
        {
            Debug.WriteLine("The app was denied access to the camera");
        }
        catch (Exception ex)
        {
            // Fixed: initialization failures other than access-denied previously
            // escaped this method and crashed the caller; log them here, consistent
            // with the other InitializeCameraAsync variants in this file.
            Debug.WriteLine("Exception when initializing MediaCapture with {0}: {1}", cameraDevice.Id, ex.ToString());
        }

        // If initialization succeeded, pick the stream format and start the preview
        if (_isInitialized)
        {
            // Figure out where the camera is located
            if (cameraDevice.EnclosureLocation == null || cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown)
            {
                // No information on the location of the camera, assume it's an external camera, not integrated on the device
                _externalCamera = true;
            }
            else
            {
                // Camera is fixed on the device
                _externalCamera = false;

                // Only mirror the preview if the camera is on the front panel
                _mirroringPreview = (cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
            }

            // We want the largest stream where the frame rate is above the minimum threshold
            IEnumerable<StreamPropertiesHelper> props = _mediaCapture.VideoDeviceController
                .GetAvailableMediaStreamProperties(MediaStreamType.VideoRecord)
                .Select(x => new StreamPropertiesHelper(x))
                .OrderByDescending(x => x.Height * x.Width)
                .ThenByDescending(x => x.FrameRate)
                .Where(x => x.FrameRate > _minFrameRate)
                .ToArray();

            foreach (var cap in props)
            {
                Debug.WriteLine(cap.GetFriendlyName());
            }

            // NOTE(review): First() throws InvalidOperationException if no stream
            // exceeds _minFrameRate — confirm that is the intended failure mode.
            var desired = props.First();

            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, desired.EncodingProperties);
            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, desired.EncodingProperties);

            await StartPreviewAsync();

            // NOTE(review): the returned library handle was never used; the call is
            // kept in case it has required side effects for Pictures-library access —
            // confirm and remove if not.
            await StorageLibrary.GetLibraryAsync(KnownLibraryId.Pictures);

            UpdateCaptureControls();
            UpdateManualControlCapabilities();
        }
    }
}
/// <summary>
/// Attempts to initialise the local video capture device.
/// </summary>
/// <param name="width">The frame width to attempt to initialise the video capture device with. Set as 0 to use default.</param>
/// <param name="height">The frame height to attempt to initialise the video capture device with. Set as 0 to use default.</param>
/// <param name="fps">The frame rate, in frames per second, to attempt to initialise the video capture device with.
/// Set as 0 to use default.</param>
/// <returns>True when the device, frame source and encoder were successfully initialised.</returns>
/// <exception cref="ApplicationException">Thrown when no compatible video source/format is available.</exception>
private async Task<bool> InitialiseDevice(uint width, uint height, uint fps)
{
    // NOTE(review): the frame-source filters below use the _width/_height/_fpsNumerator
    // fields rather than the width/height/fps parameters — presumably the caller copies
    // the parameters into the fields before calling; confirm against the caller.
    if (width == 0 && height == 0 && fps == 0)
    {
        // If no specific width, height or frame rate was requested then use the device's current settings.
        // In shared mode it's not possible to adjust the source format so if the frame is the wrong pixel
        // format it will need to be transformed on a frame by frame basis.
        var mediaCaptureSettings = new MediaCaptureInitializationSettings()
        {
            StreamingCaptureMode = StreamingCaptureMode.Video,
            SharingMode = MediaCaptureSharingMode.SharedReadOnly
        };
        await _mediaCapture.InitializeAsync(mediaCaptureSettings);

        var mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
            source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
            source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;

        // Fixed: a missing colour source previously fell through to
        // CreateFrameReaderAsync(null) and surfaced as a NullReferenceException;
        // fail with the same exception type the exclusive-mode branch uses.
        if (mediaFrameSource == null)
        {
            throw new ApplicationException("The video capture device does not provide a colour video source.");
        }

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);

        _mediaFrameSource = mediaFrameSource;
    }
    else
    {
        // If specific capture settings have been requested then the device needs to be initialised in
        // exclusive mode as the current settings and format will most likely be changed.
        var mediaCaptureSettings = new MediaCaptureInitializationSettings()
        {
            StreamingCaptureMode = StreamingCaptureMode.Video,
            SharingMode = MediaCaptureSharingMode.ExclusiveControl
        };
        await _mediaCapture.InitializeAsync(mediaCaptureSettings).AsTask().ConfigureAwait(false);

        // Prefer a source that supports the desired pixel format natively.
        var mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
            source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
            source.Value.Info.SourceKind == MediaFrameSourceKind.Color &&
            source.Value.SupportedFormats.Any(x =>
                x.Subtype == VIDEO_DESIRED_PIXEL_FORMAT &&
                (_width == 0 || x.VideoFormat.Width == _width) &&
                (_height == 0 || x.VideoFormat.Height == _height) &&
                (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator))).Value;

        if (mediaFrameSource == null)
        {
            // Fallback to accepting any pixel format and use a software transform on each frame.
            mediaFrameSource = _mediaCapture.FrameSources.FirstOrDefault(source =>
                source.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
                source.Value.Info.SourceKind == MediaFrameSourceKind.Color &&
                source.Value.SupportedFormats.Any(x =>
                    (_width == 0 || x.VideoFormat.Width == _width) &&
                    (_height == 0 || x.VideoFormat.Height == _height) &&
                    (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator))).Value;
        }

        if (mediaFrameSource == null)
        {
            throw new ApplicationException("The video capture device does not support a compatible video format for the requested parameters.");
        }

        _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource).AsTask().ConfigureAwait(false);

        // If there's a format that matches the desired pixel format set that.
        var idealFormat = mediaFrameSource.SupportedFormats.FirstOrDefault(x =>
            x.Subtype == VIDEO_DESIRED_PIXEL_FORMAT &&
            (_width == 0 || x.VideoFormat.Width == _width) &&
            (_height == 0 || x.VideoFormat.Height == _height) &&
            (_fpsNumerator == 0 || x.FrameRate.Numerator == _fpsNumerator));

        if (idealFormat != null)
        {
            await mediaFrameSource.SetFormatAsync(idealFormat).AsTask().ConfigureAwait(false);
        }

        _mediaFrameSource = mediaFrameSource;
    }

    // Frame source and format have now been successfully set; record the effective values.
    _width = _mediaFrameSource.CurrentFormat.VideoFormat.Width;
    _height = _mediaFrameSource.CurrentFormat.VideoFormat.Height;
    _fpsNumerator = _mediaFrameSource.CurrentFormat.FrameRate.Numerator;
    _fpsDenominator = _mediaFrameSource.CurrentFormat.FrameRate.Denominator;

    _vp8Encoder = new Vp8Codec();
    _vp8Encoder.InitialiseEncoder(_width, _height);

    _mediaFrameReader.FrameArrived += FrameArrivedHandler;
    _mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

    return true;
}
/// <summary>
/// Selects a front-panel colour VideoPreview frame source, initializes MediaCapture in
/// exclusive mode with CPU frames, picks a 1920-wide format, and starts a frame reader
/// that delivers ARGB32 frames to ColorFrameReader_FrameArrived.
/// </summary>
private async void ActionButton_Click(object sender, RoutedEventArgs e)
{
    //<SnippetImageElementSource>
    imageElement.Source = new SoftwareBitmapSource();
    //</SnippetImageElementSource>

    //<SnippetFindAllAsync>
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
    //</SnippetFindAllAsync>

    // Color, infrared, and depth
    //<SnippetSelectColor>
    var selectedGroupObjects = frameSourceGroups.Select(group =>
        new
        {
            sourceGroup = group,
            colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
            {
                // On XBox/Kinect, omit the MediaStreamType and EnclosureLocation tests
                // Fixed: EnclosureLocation can itself be null, so it also needs the
                // null-conditional operator; the original dereferenced it directly and
                // could throw a NullReferenceException on devices without enclosure
                // information. A null Panel simply compares unequal to Front.
                return sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
                       && sourceInfo.SourceKind == MediaFrameSourceKind.Color
                       && sourceInfo.DeviceInformation?.EnclosureLocation?.Panel == Windows.Devices.Enumeration.Panel.Front;
            })
        }).Where(t => t.colorSourceInfo != null)
        .FirstOrDefault();

    MediaFrameSourceGroup selectedGroup = selectedGroupObjects?.sourceGroup;
    MediaFrameSourceInfo colorSourceInfo = selectedGroupObjects?.colorSourceInfo;

    if (selectedGroup == null)
    {
        return;
    }
    //</SnippetSelectColor>

    //<SnippetInitMediaCapture>
    mediaCapture = new MediaCapture();

    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = selectedGroup,
        SharingMode = MediaCaptureSharingMode.ExclusiveControl,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    try
    {
        await mediaCapture.InitializeAsync(settings);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
        return;
    }
    //</SnippetInitMediaCapture>

    var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
    var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
    {
        return format.VideoFormat.Width == 1920;
    }).FirstOrDefault();

    if (preferredFormat == null)
    {
        // Our desired format is not supported
        return;
    }

    await colorFrameSource.SetFormatAsync(preferredFormat);

    //<SnippetCreateFrameReader>
    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);
    mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
    await mediaFrameReader.StartAsync();
    //</SnippetCreateFrameReader>
}