/// <summary>
/// Computes the rectangle, in UI coordinates, that the preview video stream
/// actually occupies inside the CaptureElement, accounting for letterboxing
/// or pillarboxing when the aspect ratios differ.
/// </summary>
/// <param name="previewResolution">Encoding properties of the preview stream (pixel width/height).</param>
/// <param name="previewControl">The CaptureElement hosting the preview.</param>
/// <returns>The on-screen rectangle covered by the stream.</returns>
public Rect LocatePreviewStreamCoordinates(VideoEncodingProperties previewResolution, CaptureElement previewControl)
{
    var uiRectangle = new Rect();

    // Work in double from the start: Width/Height are uint, and the original
    // uint/uint division truncated the stream aspect ratio (e.g. 640/480
    // evaluated to 1), which could select the wrong branch below.
    double mediaStreamWidth = previewResolution.Width;
    double mediaStreamHeight = previewResolution.Height;

    uiRectangle.Width = previewControl.ActualWidth;
    uiRectangle.Height = previewControl.ActualHeight;

    var uiRatio = previewControl.ActualWidth / previewControl.ActualHeight;
    var mediaStreamRatio = mediaStreamWidth / mediaStreamHeight;

    if (uiRatio > mediaStreamRatio)
    {
        // UI is wider than the stream: pillarbox (center horizontally).
        var scaleFactor = previewControl.ActualHeight / mediaStreamHeight;
        var scaledWidth = mediaStreamWidth * scaleFactor;
        uiRectangle.X = (previewControl.ActualWidth - scaledWidth) / 2.0;
        uiRectangle.Width = scaledWidth;
    }
    else
    {
        // UI is taller than the stream: letterbox (center vertically).
        var scaleFactor = previewControl.ActualWidth / mediaStreamWidth;
        var scaledHeight = mediaStreamHeight * scaleFactor;
        uiRectangle.Y = (previewControl.ActualHeight - scaledHeight) / 2.0;
        uiRectangle.Height = scaledHeight;
    }

    return (uiRectangle);
}
/// <summary>
/// Initializes the scenario: resets the UI to its "not previewing" state,
/// initializes the capture device, and selects a preferred preview stream
/// setting (at least 480 px wide, YUY2 subtype).
/// </summary>
/// <param name="cancel">Cancellation token. NOTE(review): not consulted by the visible code — confirm whether cancellation should be honored.</param>
/// <returns>A task that completes when initialization has finished.</returns>
private async Task InitializeAsync(CancellationToken cancel = default(CancellationToken))
{
    // Criteria used below to pick a preview stream setting.
    var streamFilteringCriteria = new
    {
        //AspectRatio = 1.333333333333333,
        HorizontalResolution = (uint)480,
        SubType = "YUY2"
    };

    currentState = State.Initializing;
    device = new CaptureDevice();

    // Reset the UI to the pre-preview state.
    CameraPreview.Visibility = Visibility.Collapsed;
    PreviewPoster.Visibility = Visibility.Visible;
    Preview.Content = "Start Preview";
    LoopbackClient.IsEnabled = false;

    mode = defaultMode;
    LatencyModeToggle.IsOn = (mode == LatencyMode.LowLatency);
    LatencyModeToggle.IsEnabled = false;

    await device.InitializeAsync();

    // Pick the first preview setting that is wide enough and has the right subtype.
    var setting = await device.SelectPreferredCameraStreamSettingAsync(MediaStreamType.VideoPreview, ((x) =>
    {
        var previewStreamEncodingProperty = x as Windows.Media.MediaProperties.VideoEncodingProperties;
        return (previewStreamEncodingProperty.Width >= streamFilteringCriteria.HorizontalResolution &&
                previewStreamEncodingProperty.Subtype == streamFilteringCriteria.SubType);
    }));

    previewEncodingProperties = setting as VideoEncodingProperties;
    PreviewSetupCompleted();
}
//<SnippetSetUpVideoStabilizationRecommendationAsync>
/// <summary>
/// Applies the video stabilization effect's recommended stream configuration,
/// backing up the current input/output settings so they can be restored when
/// the effect is removed. This variant applies the recommended input
/// properties to BOTH the record and the preview streams.
/// </summary>
private async Task SetUpVideoStabilizationRecommendationAsync()
{
    // Get the recommendation from the effect based on our current input and output configuration
    var recommendation = _videoStabilizationEffect.GetRecommendedStreamConfiguration(_mediaCapture.VideoDeviceController, _encodingProfile.Video);

    // Handle the recommendation for the input into the effect, which can contain a larger resolution than currently configured, so cropping is minimized
    if (recommendation.InputProperties != null)
    {
        // Back up the current input properties from before VS was activated
        _inputPropertiesBackup = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord) as VideoEncodingProperties;

        // Set the recommendation from the effect (a resolution higher than the current one to allow for cropping) on the input
        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, recommendation.InputProperties);
        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, recommendation.InputProperties);
    }

    // Handle the recommendations for the output from the effect
    if (recommendation.OutputProperties != null)
    {
        // Back up the current output properties from before VS was activated
        _outputPropertiesBackup = _encodingProfile.Video;

        // Apply the recommended encoding profile for the output
        _encodingProfile.Video = recommendation.OutputProperties;
    }
}
/// <summary>
/// Asynchronously starts video mode.
///
/// Activates the web camera with the various settings specified in CameraParameters.
/// Only one VideoCapture instance can start the video mode at any given time.
/// After starting the video mode, you listen for new video frame samples via the VideoCapture.FrameSampleAcquired event,
/// or by calling VideoCapture.RequestNextFrameSample() which will return the next available sample.
/// While in video mode, more power will be consumed, so make sure that you call VideoCapture.StopVideoModeAsync when you can afford the start/stop video mode overhead.
/// </summary>
/// <param name="setupParams">Parameters that change how video mode is used.</param>
/// <param name="onVideoModeStartedCallback">This callback will be invoked once video mode has been activated.</param>
// NOTE(review): 'async void' means exceptions thrown here are unobservable by callers;
// failures are only reported through the callback. Kept as-is because the public
// signature must not change.
public async void StartVideoModeAsync(CameraParameters setupParams, OnVideoModeStartedCallback onVideoModeStartedCallback)
{
    var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id]; //Returns a MediaFrameSource
    if (mediaFrameSource == null)
    {
        // No frame source matched the cached source info: report UnknownError (code 1).
        onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
        return;
    }

    var pixelFormat = ConvertCapturePixelFormatToMediaEncodingSubtype(setupParams.pixelFormat);
    _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);
    _frameReader.FrameArrived += HandleFrameArrived;
    await _frameReader.StartAsync();

    VideoEncodingProperties properties = GetVideoEncodingPropertiesForCameraParams(setupParams);

    // Historical context: https://github.com/VulcanTechnologies/HoloLensCameraStream/issues/6
    if (setupParams.rotateImage180Degrees)
    {
        properties.Properties.Add(ROTATION_KEY, 180);
    }

    // gr: taken from here https://forums.hololens.com/discussion/2009/mixedrealitycapture
    IVideoEffectDefinition ved = new VideoMRCSettings(setupParams.enableHolograms,
        setupParams.enableVideoStabilization,
        setupParams.videoStabilizationBufferSize,
        setupParams.hologramOpacity);
    await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);

    await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(STREAM_TYPE, properties);

    onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(0, ResultType.Success, true));
}
/// <summary>
/// Completes camera setup after MediaCapture initialization: wires the preview
/// control, keeps the display active, starts the preview, creates the barcode
/// reader, and (on the UI thread) marks the camera loaded and starts scanning
/// if a scan was already requested.
/// </summary>
async Task CameraInitializedAsync()
{
    if (this.mediaCapture != null)
    {
        this.PreviewControl.Source = mediaCapture;

        // Keep the display from turning off while previewing.
        displayRequest.RequestActive();

        previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
        await mediaCapture.StartPreviewAsync();

        // Removed a dead 'new byte[w * h * 3]' scratch buffer (and the w/h locals
        // that only fed it) — it was allocated but never used.
        reader = new BarcodeReader();

        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            if (this.mediaCapture != null)
            {
                _cameraState = CameraLoadedState.Loaded;
                if (_scanStarted)
                {
                    StartScanner();
                }
            }
        });
    }
}
/// <summary>
/// Disables and removes the video stabilization effect, and unregisters the event handler for the EnabledChanged event of the effect
/// </summary>
/// <returns>A task that completes once the effect is removed and any backed-up settings are restored.</returns>
private async Task CleanUpVideoStabilizationEffectAsync()
{
    // No work to be done if there is no effect
    if (_videoStabilizationEffect == null)
    {
        return;
    }

    // Disable the effect and stop listening to it before tearing it down.
    _videoStabilizationEffect.Enabled = false;
    _videoStabilizationEffect.EnabledChanged -= VideoStabilizationEffect_EnabledChanged;

    // Remove the effect from the record stream
    await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoRecord);
    Debug.WriteLine("VS effect removed from pipeline");

    // If backed up settings (stream properties and encoding profile) exist, restore them and clear the backups
    if (_inputPropertiesBackup != null)
    {
        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, _inputPropertiesBackup);
        _inputPropertiesBackup = null;
    }

    if (_outputPropertiesBackup != null)
    {
        _encodingProfile.Video = _outputPropertiesBackup;
        _outputPropertiesBackup = null;
    }

    // Clear the member variable that held the effect instance
    _videoStabilizationEffect = null;
}
/// <summary>
/// Builds the list of selectable saving resolutions: the camera's native
/// resolution first (pre-selected), followed by each preset width from
/// _resolutions scaled to the same aspect ratio.
/// </summary>
/// <param name="enc">Encoding properties supplying the native width/height.</param>
public SavingResolutionsCollection(VideoEncodingProperties enc) : base()
{
    uint width = enc.Width;
    uint height = enc.Height;

    // Native resolution goes first and is marked as the current selection.
    Add(new Resolutions(new SavingResolutions { Width = width, Height = height }, true));

    for (int i = 0; i < _resolutions.Length; i++)
    {
        if (_resolutions[i] == width)
        {
            continue; // native width already added above (removed the empty else branch)
        }

        // Scale the height to preserve the native aspect ratio.
        uint calcHeight = _resolutions[i] * height / width;

        // Video pipelines generally require even dimensions; round odd heights up.
        if (calcHeight % 2 != 0)
        {
            calcHeight = calcHeight + 1;
        }

        Add(new Resolutions(new SavingResolutions { Width = _resolutions[i], Height = calcHeight }, false));
    }
}
/// <summary>
/// Hosts the documentation snippet showing how to create uncompressed BGRA8
/// video encoding properties (720x480). The local is not used further here.
/// </summary>
void VideoSubType()
{
    // <SnippetVideoPropertiesSubType>
    VideoEncodingProperties videoProps = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 720, 480);
    // </SnippetVideoPropertiesSubType>
}
/// <summary>
/// Starts (or restarts) the camera preview stream. (Re)initializes the
/// MediaCapture pipeline when it is missing or shut down, optionally starts a
/// periodic timer for face tracking / frame processing, then begins the preview.
/// Exceptions are routed to the generic API-call error handler.
/// </summary>
/// <param name="isForRealTimeProcessing">Forwarded to SetVideoEncodingToHighestResolution to influence the chosen resolution.</param>
public async Task StartStreamAsync(bool isForRealTimeProcessing = false)
{
    try
    {
        // (Re)create the capture manager when it is missing or no longer streaming.
        if (captureManager == null ||
            captureManager.CameraStreamState == CameraStreamState.Shutdown ||
            captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (captureManager != null)
            {
                captureManager.Dispose();
            }

            captureManager = new MediaCapture();

            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();

            // Prefer the camera selected in settings, when it is still present.
            var allCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            var selectedCamera = allCameras.FirstOrDefault(c => c.Name == SettingsHelper.Instance.CameraName);
            if (selectedCamera != null)
            {
                settings.VideoDeviceId = selectedCamera.Id;
            }

            await captureManager.InitializeAsync(settings);
            await SetVideoEncodingToHighestResolution(isForRealTimeProcessing);

            this.webCamCaptureElement.Source = captureManager;
        }

        if (captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (PerformFaceTracking || CameraFrameProcessor != null)
            {
                if (this.faceTracker == null)
                {
                    this.faceTracker = await FaceTracker.CreateAsync();
                }

                // Cancel any previous processing timer and release its semaphore slot
                // before starting a fresh one.
                if (this.frameProcessingTimer != null)
                {
                    this.frameProcessingTimer.Cancel();
                    frameProcessingSemaphore.Release();
                }

                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); //15fps
                this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
            }

            this.videoProperties = this.captureManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            await captureManager.StartPreviewAsync();

            this.cameraControlSymbol.Symbol = Symbol.Camera;
            this.webCamCaptureElement.Visibility = Visibility.Visible;
        }
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "Error starting the camera.");
    }
}
/// <summary>
/// Lazily yields SoftwareBitmap snapshots from the camera preview until the
/// optional <paramref name="stop"/> callback returns true; the sequence is
/// otherwise infinite.
/// NOTE(review): all async camera calls are blocked on synchronously via
/// GetAwaiter().GetResult(), which is deadlock-prone on a UI thread — confirm
/// the intended calling context.
/// </summary>
/// <param name="previewControl">CaptureElement that will display the preview.</param>
/// <param name="width">Ignored on input; overwritten with the preview stream's actual width.</param>
/// <param name="height">Ignored on input; overwritten with the preview stream's actual height.</param>
/// <param name="stop">Optional callback checked before each snapshot; return true to stop and clean up.</param>
/// <returns>A (potentially infinite) sequence of preview snapshots.</returns>
public static IEnumerable <SoftwareBitmap> Snapshots(CaptureElement previewControl, int width, int height, Func <bool> stop = null)
{
    _mediaCapture = new MediaCapture();
    _mediaCapture.Failed += _mediaCapture_Failed;
    _mediaCapture.InitializeAsync().GetAwaiter().GetResult();

    // Attach the preview source on the UI thread, then start previewing.
    previewControl.Dispatcher.TryRunAsync(CoreDispatcherPriority.Normal, () => { previewControl.Source = _mediaCapture; }).GetAwaiter().GetResult();
    _mediaCapture.StartPreviewAsync().GetAwaiter().GetResult();

    _previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
    width = (int)_previewProperties.Width; //ignore passed-in width and height and get the ones from the preview
    height = (int)_previewProperties.Height;

    while (true)
    {
        if ((stop != null) && stop())
        {
            //cleanup: stop the preview and detach the UI source before ending the sequence
            _mediaCapture.StopPreviewAsync().GetAwaiter().GetResult();
            _mediaCapture = null;
            previewControl.Dispatcher.TryRunAsync(CoreDispatcherPriority.Normal, () => { previewControl.Source = null; }).GetAwaiter().GetResult();
            yield break;
        }

        yield return (CameraImage(previewControl, width, height));
    }
}
/// <summary>
/// Logs the camera intrinsics (focal length, image size, principal point,
/// distortion) together with the stream's size and frame rate, and appends the
/// same report to the on-screen result text on the Unity app thread.
/// </summary>
private void OnCameraIntrinsicsGot(CameraIntrinsics cameraIntrinsics, VideoEncodingProperties property)
{
    if (cameraIntrinsics == null)
    {
        Debug.LogError("Getting the CameraIntrinsics object failed.");
        return;
    }

    double fps = (double)property.FrameRate.Numerator / (double)property.FrameRate.Denominator;

    string report =
        "\n" + "=============================================" +
        $"\n==== Size: {property.Width}x{property.Height} FrameRate: {(int)Math.Round(fps)}====" +
        $"\nFocalLength: {cameraIntrinsics.FocalLength}" +
        $"\nImageHeight: {cameraIntrinsics.ImageHeight}" +
        $"\nImageWidth: {cameraIntrinsics.ImageWidth}" +
        $"\nPrincipalPoint: {cameraIntrinsics.PrincipalPoint}" +
        $"\nRadialDistortion: {cameraIntrinsics.RadialDistortion}" +
        $"\nTangentialDistortion: {cameraIntrinsics.TangentialDistortion}" +
        "\n" + "=============================================";

    Debug.Log(report);

    // UI text must be mutated on the app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(() => { ResultText.text += report; }, false);
}
/// <summary>
/// Callback invoked with the camera intrinsics for a stream configuration.
/// Only validates the intrinsics here; the reporting code is deliberately left
/// commented out and is meant to be re-enabled when building for HoloLens.
/// </summary>
private void OnCameraIntrinsicsGot(CameraIntrinsics cameraIntrinsics, VideoEncodingProperties property)
{
    if (cameraIntrinsics == null)
    {
        Debug.LogError("Getting the CameraIntrinsics object failed.");
        return;
    }

    //When building the application for Hololens, uncomment the following line in Visual Studio.
    /*
     * double calculatedFrameRate = (double)property.FrameRate.Numerator / (double)property.FrameRate.Denominator;
     *
     * String result = "\n" + "=============================================";
     * result += "\n" + "==== Size: " + property.Width + "x" + property.Height + " FrameRate: " + (int)Math.Round(calculatedFrameRate) + "====";
     * result += "\n" + "FocalLength: " + cameraIntrinsics.FocalLength;
     * result += "\n" + "ImageHeight: " + cameraIntrinsics.ImageHeight;
     * result += "\n" + "ImageWidth: " + cameraIntrinsics.ImageWidth;
     * result += "\n" + "PrincipalPoint: " + cameraIntrinsics.PrincipalPoint;
     * result += "\n" + "RadialDistortion: " + cameraIntrinsics.RadialDistortion;
     * result += "\n" + "TangentialDistortion: " + cameraIntrinsics.TangentialDistortion;
     * result += "\n" + "=============================================";
     *
     * Debug.Log(result);
     *
     * UnityEngine.WSA.Application.InvokeOnAppThread(() =>
     * {
     *     ResultText.text += result;
     * }, false);
     */
}
/// <summary>
/// One-time setup of the H.264 video stream source (960x720) feeding the
/// media element; subsequent calls are no-ops.
/// </summary>
private void InitializeVideo()
{
    // Guard: only wire up the stream source once.
    if (this.videoInitilized)
    {
        return;
    }
    this.videoInitilized = true;

    var encoding = VideoEncodingProperties.CreateH264();
    encoding.Height = 720;
    encoding.Width = 960;

    var streamSource = new MediaStreamSource(new VideoStreamDescriptor(encoding))
    {
        // never turn live on because it tries to skip frame which breaks the h264 decoding
        // IsLive = true,
        BufferTime = TimeSpan.FromSeconds(0.0),
    };
    streamSource.SampleRequested += this.MediaStreamSource_SampleRequested;

    this.VideoElement.SetMediaStreamSource(streamSource);

    // never turn real time playback on
    // _mediaElement.RealTimePlayback = true;
}
/// <summary>
/// "Auto snap" click handler: captures up to maxFrameCount preview frames into
/// a rolling buffer, showing each one in the corresponding Image element and
/// disposing whatever bitmap/image source it replaces.
/// </summary>
async void ButtonPlayKiosk_Click(object sender, RoutedEventArgs e)
{
    // -- Auto snap feature --
    if (this.mediaCapture == null || !this.isPreviewActive)
    {
        return;
    }

    // get media stream properties from the capture device
    VideoEncodingProperties previewProperties = this.mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

    for (int i = 0; i < maxFrameCount; ++i)
    {
        // create a single preview frame using the specified format
        VideoFrame videoFrameType = new VideoFrame(BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height);
        VideoFrame previewFrame = await this.mediaCapture.GetPreviewFrameAsync(videoFrameType);

        // Keep only the SoftwareBitmap; the frame wrapper can be disposed immediately.
        SoftwareBitmap previewBitmap = previewFrame.SoftwareBitmap;
        previewFrame.Dispose();
        previewFrame = null;

        if (previewBitmap != null)
        {
            // Advance the ring-buffer index.
            int currImageIndex = (this.lastImageIndex + 1) % this.maxFrameCount;

            // check if previously captured frame should be released
            SoftwareBitmap existingBitmap = this.bitmapFrames[currImageIndex];
            if (existingBitmap != null)
            {
                existingBitmap.Dispose();
            }

            // set the current captured bitmap frame
            this.bitmapFrames[currImageIndex] = previewBitmap;

            // create image source, needed to assign to xaml Image element
            SoftwareBitmapSource imageSource = new SoftwareBitmapSource();
            await imageSource.SetBitmapAsync(previewBitmap);

            // check if current xaml Image has previous image source associated
            Image currImage = (Image)this.stackPanelImages.Children[currImageIndex];
            if (currImage.Source != null)
            {
                SoftwareBitmapSource releaseImageSource = (SoftwareBitmapSource)currImage.Source;
                releaseImageSource.Dispose();
                currImage.Source = null;
            }

            // set current Image element bitmap source
            currImage.Source = imageSource;

            // update the last set image index
            this.lastImageIndex = currImageIndex;
        }

        // Pace the captures by the configured frame duration.
        await WaitMethod(this.frameDuration);
    }
}
/// <summary>
/// Page load handler: creates the Emotion API client, initializes the camera
/// for video preview capture, forces a 320x240 YUY2 preview format, and starts
/// the preview.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Emotion API client.
    client = new EmotionServiceClient("{your subscription key}");

    // Capture settings: video-only, photos taken from the preview stream.
    MediaCaptureInitializationSettings captureInitSettings = new MediaCaptureInitializationSettings();
    captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Video;
    captureInitSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview;

    // Use the first available video capture device.
    var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    captureInitSettings.VideoDeviceId = devices[0].Id;

    capture = new MediaCapture();
    await capture.InitializeAsync(captureInitSettings);

    // Capture size and pixel format for the preview stream.
    VideoEncodingProperties vp = new VideoEncodingProperties();
    vp.Width = 320;
    vp.Height = 240;
    vp.Subtype = "YUY2";
    await capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vp);

    preview.Source = capture;
    await capture.StartPreviewAsync();
}
/// <summary>
/// Builds a live, non-seekable MediaStreamSource for an uncompressed I420
/// (IYUV) video stream of the given dimensions and frame rate.
/// </summary>
/// <param name="width">Frame width in pixels; must be non-zero.</param>
/// <param name="height">Frame height in pixels; must be non-zero.</param>
/// <param name="framerate">Frames per second used for frame rate and bitrate.</param>
/// <returns>The configured stream source.</returns>
private MediaStreamSource CreateI420VideoStreamSource(
    uint width, uint height, int framerate)
{
    if (width == 0)
    {
        throw new ArgumentException("Invalid zero width for video.", "width");
    }
    if (height == 0)
    {
        throw new ArgumentException("Invalid zero height for video.", "height");
    }

    // Note: IYUV and I420 have same memory layout (though different FOURCC)
    // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
    var streamDescriptor = new VideoStreamDescriptor(
        VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height));

    var encoding = streamDescriptor.EncodingProperties;
    encoding.FrameRate.Numerator = (uint)framerate;
    encoding.FrameRate.Denominator = 1;
    // Bitrate in bits per second : framerate * frame pixel size * I420=12bpp
    encoding.Bitrate = ((uint)framerate * width * height * 12);

    var source = new MediaStreamSource(streamDescriptor);
    source.BufferTime = TimeSpan.Zero;
    source.SampleRequested += OnMediaStreamSourceRequested;
    source.IsLive = true;   // Enables optimizations for live sources
    source.CanSeek = false; // Cannot seek live WebRTC video stream
    return (source);
}
/// <summary>
/// Configures the pipeline to use the optimal resolutions for VS based on the settings currently in use
/// </summary>
/// <returns>A task that completes once the recommendations have been applied.</returns>
private async Task SetUpVideoStabilizationRecommendationAsync()
{
    Debug.WriteLine("Setting up VS recommendation...");

    // Get the recommendation from the effect based on our current input and output configuration
    var recommendation = _videoStabilizationEffect.GetRecommendedStreamConfiguration(_mediaCapture.VideoDeviceController, _encodingProfile.Video);

    // Handle the recommendation for the input into the effect, which can contain a larger resolution than currently configured, so cropping is minimized
    if (recommendation.InputProperties != null)
    {
        // Back up the current input properties from before VS was activated
        _inputPropertiesBackup = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord) as VideoEncodingProperties;

        // Set the recommendation from the effect (a resolution higher than the current one to allow for cropping) on the input
        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, recommendation.InputProperties);
        Debug.WriteLine("VS recommendation for the MediaStreamProperties (input) has been applied");
    }

    // Handle the recommendations for the output from the effect
    if (recommendation.OutputProperties != null)
    {
        // Back up the current output properties from before VS was activated
        _outputPropertiesBackup = _encodingProfile.Video;

        // Apply the recommended encoding profile for the output, which will result in a video with the same dimensions as configured
        // before VideoStabilization was added if an appropriate padded capture resolution was available. Otherwise, it will be slightly
        // smaller (due to cropping). This prevents upscaling back to the original size, which can result in a loss of quality
        _encodingProfile.Video = recommendation.OutputProperties;
        Debug.WriteLine("VS recommendation for the MediaEncodingProfile (output) has been applied");
    }
}
/// <summary>
/// Builds the MP4/H.264 encoding profile from the current Configuration:
/// dimensions, bitrate (kbit/s converted to bit/s), frame rate, and a square
/// pixel aspect ratio.
/// </summary>
/// <returns>The configured MediaEncodingProfile.</returns>
private MediaEncodingProfile GetEncodingProfile()
{
    const uint bitsPerKiloBit = 1000;

    var video = new VideoEncodingProperties
    {
        Subtype = MediaEncodingSubtypes.H264,
        Width = Configuration.Width,
        Height = Configuration.Height,
        Bitrate = Configuration.KiloBitsPerSecond * bitsPerKiloBit,
    };
    video.FrameRate.Numerator = Configuration.FramesPerSecond;
    video.FrameRate.Denominator = 1;
    video.PixelAspectRatio.Numerator = 1;
    video.PixelAspectRatio.Denominator = 1;

    var container = new ContainerEncodingProperties
    {
        Subtype = MediaEncodingSubtypes.Mpeg4
    };

    var profile = new MediaEncodingProfile();
    profile.Container = container;
    profile.Video = video;
    return (profile);
}
/// <summary>
/// Initializes the camera and the local face detector.
/// </summary>
private async Task Init()
{
    MC = new MediaCapture();

    // Use the first available video capture device.
    // NOTE(review): First() throws if no camera is present — confirm that is acceptable here.
    var cameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var camera = cameras.First();
    var settings = new MediaCaptureInitializationSettings() { VideoDeviceId = camera.Id };
    await MC.InitializeAsync(settings);
    ViewFinder.Source = MC;

    // Create face detection
    var def = new FaceDetectionEffectDefinition();
    def.SynchronousDetectionEnabled = false;
    def.DetectionMode = FaceDetectionMode.HighPerformance;
    FaceDetector = (FaceDetectionEffect)(await MC.AddVideoEffectAsync(def, MediaStreamType.VideoPreview));
    FaceDetector.FaceDetected += FaceDetectedEvent;
    // Fire detection callbacks at most every 100 ms.
    FaceDetector.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
    FaceDetector.Enabled = true;

    await MC.StartPreviewAsync();

    // Cache the preview stream's encoding properties for later use.
    var props = MC.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
    VideoProps = props as VideoEncodingProperties;
}
/// <summary>
/// Initializes the resolutions: queries the available Photo stream properties,
/// picks the widest 16:9 resolution, and applies it to the device.
/// NOTE(review): despite the "preview" naming of the fields, this configures
/// MediaStreamType.Photo — confirm that is intended.
/// </summary>
private async Task InitResolutions()
{
    if (_mediaCapture != null)
    {
        _availablePreviewResolutions = _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo);

#if DEBUG
        foreach (var mediaEncodingPropertiese in _availablePreviewResolutions)
        {
            var prop = mediaEncodingPropertiese as VideoEncodingProperties;
            Debug.WriteLine("{0} {1}", prop.Width, prop.Height);
        }
#endif

        // Some devices produces black stripes around picture if highest 4:3 ratio is being used
        // For now switching to 16/9 resolution as default.
        _selectedPreviewResolution = _availablePreviewResolutions
                                     .Where(r => ((VideoEncodingProperties)r).GetAspectRatio() == AspectRatio.Ratio16To9)
                                     .OrderByDescending(r => ((VideoEncodingProperties)r).Width)
                                     .FirstOrDefault() as VideoEncodingProperties;

        // Now set the resolution on the device
        if (_selectedPreviewResolution != null)
        {
            await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo, _selectedPreviewResolution);
        }
    }
}
/// <summary>
/// Create a new uncompressed I420/IYUV-encoded video stream source for the
/// specified video size and frame rate. (The previous summary's "30-fps NV12"
/// did not match the code: the rate is a parameter and the subtype is IYUV.)
/// </summary>
/// <param name="width">The width of the video in pixels.</param>
/// <param name="height">The height of the video in pixels.</param>
/// <param name="framerate">The frame rate in frames per second; also drives the bitrate estimate.</param>
/// <returns>The newly created video source.</returns>
private MediaStreamSource CreateVideoStreamSource(uint width, uint height, uint framerate)
{
    if (width == 0)
    {
        throw new ArgumentException("Invalid zero width for video stream source.", "width");
    }
    if (height == 0)
    {
        throw new ArgumentException("Invalid zero height for video stream source.", "height");
    }

    // Note: IYUV and I420 have same memory layout (though different FOURCC)
    // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
    var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height);
    var videoStreamDesc = new VideoStreamDescriptor(videoProperties);
    videoStreamDesc.EncodingProperties.FrameRate.Numerator = framerate;
    videoStreamDesc.EncodingProperties.FrameRate.Denominator = 1;
    videoStreamDesc.EncodingProperties.Bitrate = (framerate * width * height * 12); // 12 bpp (I420; same bits-per-pixel as NV12)

    var videoStreamSource = new MediaStreamSource(videoStreamDesc);
    videoStreamSource.BufferTime = TimeSpan.Zero; // TODO : playback breaks if buffering, need to investigate
    videoStreamSource.Starting += OnMediaStreamSourceStarting;
    videoStreamSource.Closed += OnMediaStreamSourceClosed;
    videoStreamSource.Paused += OnMediaStreamSourcePaused;
    videoStreamSource.SampleRequested += OnMediaStreamSourceRequested;
    videoStreamSource.IsLive = true;   // Enables optimizations for live sources
    videoStreamSource.CanSeek = false; // Cannot seek live WebRTC video stream

    return (videoStreamSource);
}
/// <summary>
/// Picks the highest-resolution 16:9 (non-4:3) photo mode whose width is below
/// 2600 px. Returns null when no such mode exists — callers already handle the
/// null default.
/// </summary>
/// <returns>The chosen encoding properties, or null if none qualify.</returns>
private VideoEncodingProperties maxResolution()
{
    VideoEncodingProperties resolutionMax = null;

    //get all photo properties
    var resolutions = captureManager.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo);

    //generate new list to work with
    var vidProps = new List <VideoEncodingProperties>();

    //add only those properties that are 16:9 to our own list
    for (var i = 0; i < resolutions.Count; i++)
    {
        var res = (VideoEncodingProperties)resolutions[i];
        if (MatchScreenFormat(new Size(res.Width, res.Height)) != CameraResolutionFormat.FourByThree)
        {
            vidProps.Add(res);
        }
    }

    //order the list, and select the highest resolution that fits our limit
    if (vidProps.Count != 0)
    {
        vidProps = vidProps.OrderByDescending(r => r.Width).ToList();
        // FirstOrDefault instead of First: the original threw InvalidOperationException
        // when every 16:9 mode was 2600 px wide or more; null matches the declared default.
        resolutionMax = vidProps.FirstOrDefault(r => r.Width < 2600);
    }

    return (resolutionMax);
}
/// <summary>
/// Wires an existing MediaCapture up for face tracking: lazily creates the
/// FaceTracker, clears the visualization canvas, caches the preview stream
/// properties, and starts a periodic frame-processing timer.
/// NOTE(review): FaceTracker.CreateAsync().AsTask().Result blocks the calling
/// thread during construction — deadlock-prone if invoked on a UI thread.
/// </summary>
public FaceTrackerProxy(Canvas canvas, MainPage page, CaptureElement capture, MediaCapture mediacapture)
{
    if (this.faceTracker == null)
    {
        this.faceTracker = FaceTracker.CreateAsync().AsTask().Result;
    }

    rootPage = page;
    VisualizationCanvas = canvas;
    this.VisualizationCanvas.Children.Clear();

    mediaCapture = mediacapture;

    var deviceController = mediaCapture.VideoDeviceController;
    this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
    currentState = ScenarioState.Streaming;

    // Ensure the Semaphore is in the signalled state.
    this.frameProcessingSemaphore.Release();

    // Process frames every 200 ms (5 fps). (A previous comment claimed 66 ms / 15 fps,
    // which did not match the value actually used below.)
    TimeSpan timerInterval = TimeSpan.FromMilliseconds(200);
    this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
}
/// <summary>
/// Collects every video encoding properties instance whose frame rate equals
/// the highest frame rate available for the given stream type.
/// </summary>
/// <param name="controller">The video device controller.</param>
/// <param name="mediaStreamType">The media stream type.</param>
/// <returns>All matching instances, or null when no frame rate above zero was found.</returns>
public static IList <VideoEncodingProperties> ResolveAllVideoEncodingPropertiesWithHighestFrameRate(
    VideoDeviceController controller, MediaStreamType mediaStreamType)
{
    uint targetFrameRate = ResolveHighestFrameRate(controller, mediaStreamType);

    if (targetFrameRate == 0)
    {
        // Preserve the original contract: null (not an empty list) when nothing usable exists.
        return null;
    }

    IList <VideoEncodingProperties> matches = new List <VideoEncodingProperties>();

    foreach (IMediaEncodingProperties candidate in controller.GetAvailableMediaStreamProperties(mediaStreamType))
    {
        var video = candidate as VideoEncodingProperties;
        if (video == null)
        {
            continue; // skip non-video entries (e.g. image encoding properties)
        }
        if (ResolveFrameRate(video) == targetFrameRate)
        {
            matches.Add(video);
        }
    }

    return matches;
}
/// <summary>
/// Selects the widest available preview resolution and applies it to the
/// device. Does nothing when no preview properties are reported.
/// </summary>
/// <param name="mediaCapture">The initialized capture pipeline to configure.</param>
private static async Task SetMaxResolution(MediaCapture mediaCapture)
{
    IReadOnlyList <IMediaEncodingProperties> candidates =
        mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview);

    if (candidates.Count < 1)
    {
        return; // nothing to configure
    }

    // Track the first entry with the greatest width (ties keep the earlier index,
    // matching the strict '>' comparison).
    int bestIndex = 0;
    uint bestWidth = 0;
    for (int i = 0; i < candidates.Count; i++)
    {
        var video = (VideoEncodingProperties)candidates[i];
        if (video.Width > bestWidth)
        {
            bestIndex = i;
            bestWidth = video.Width;
        }
    }

    await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, candidates[bestIndex]);
}
/// <summary>
/// Finds the first available video encoding properties instance that matches
/// the requested frame rate, width, and height.
/// </summary>
/// <param name="controller">The video device controller.</param>
/// <param name="mediaStreamType">The media stream type.</param>
/// <param name="frameRate">The desired framerate.</param>
/// <param name="width">The desired width in pixels.</param>
/// <param name="height">The desired height in pixels.</param>
/// <returns>The matching instance, or null when the controller is null or nothing matches.</returns>
public static VideoEncodingProperties FindVideoEncodingProperties(
    VideoDeviceController controller, MediaStreamType mediaStreamType,
    uint frameRate, uint width, uint height)
{
    if (controller == null)
    {
        return null;
    }

    foreach (IMediaEncodingProperties candidate in controller.GetAvailableMediaStreamProperties(mediaStreamType))
    {
        var video = candidate as VideoEncodingProperties;
        if (video == null)
        {
            continue; // skip non-video entries
        }

        if (ResolveFrameRate(video) == frameRate &&
            video.Width == width &&
            video.Height == height)
        {
            return video; // first match wins, same as the original break
        }
    }

    return null;
}
/// <summary>
/// Builds the media pipeline for screen capture: an uncompressed BGRA8 video
/// descriptor sized to the capture item, a MediaStreamSource over it, the
/// graphics device that renders into the source, and a hardware-accelerated
/// transcoder.
/// </summary>
private void CreateMediaObjects()
{
    // Create our encoding profile based on the size of the item
    // TODO: This only really makes sense for monitors, we need
    // to change this to make sense in all cases.
    int width = _captureItem.Size.Width;
    int height = _captureItem.Size.Height;

    // Describe our input: uncompressed BGRA8 buffers coming in at the monitor's refresh rate
    // TODO: We pick 60Hz here because it applies to most monitors. However this should be
    // more robust.
    var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)width, (uint)height);
    _videoDescriptor = new VideoStreamDescriptor(videoProperties);
    _videoDescriptor.EncodingProperties.FrameRate.Numerator = c_frameRateN;
    _videoDescriptor.EncodingProperties.FrameRate.Denominator = c_frameRateD;
    // NOTE(review): this multiplies by BOTH the frame-rate numerator and denominator
    // and by 4 (bytes per BGRA8 pixel, not bits). fps * bytes/frame * 8 would be
    // (N / D) * w * h * 4 * 8 — verify the intended units before relying on this value.
    _videoDescriptor.EncodingProperties.Bitrate = (uint)(c_frameRateN * c_frameRateD * width * height * 4);

    // Create our MediaStreamSource
    _mediaStreamSource = new MediaStreamSource(_videoDescriptor);
    _mediaStreamSource.BufferTime = TimeSpan.FromSeconds(0);
    _mediaStreamSource.Starting += OnMediaStreamSourceStarting;
    _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

    // Create our device manager
    _mediaGraphicsDevice = MediaGraphicsDevice.CreateFromMediaStreamSource(_mediaStreamSource);
    _mediaGraphicsDevice.RenderingDevice = _device;

    // Create our transcoder
    _transcoder = new MediaTranscoder();
    _transcoder.HardwareAccelerationEnabled = true;
}
/// <summary>
/// Scans the available stream properties and returns the video encoding
/// properties instance with the highest frame rate.
/// </summary>
/// <param name="controller">The video device controller.</param>
/// <param name="mediaStreamType">The media stream type.</param>
/// <returns>The best instance found, or null when the controller is null or no video properties exist.</returns>
public static VideoEncodingProperties ResolveVideoEncodingPropertiesWithHighestFrameRate(
    VideoDeviceController controller, MediaStreamType mediaStreamType)
{
    if (controller == null)
    {
        return null;
    }

    VideoEncodingProperties best = null;
    uint bestRate = 0;

    foreach (IMediaEncodingProperties candidate in controller.GetAvailableMediaStreamProperties(mediaStreamType))
    {
        var video = candidate as VideoEncodingProperties;
        if (video == null)
        {
            continue; // skip non-video entries
        }

        uint rate = ResolveFrameRate(video);
        if (rate > bestRate) // strict '>' keeps the first of any ties, as before
        {
            best = video;
            bestRate = rate;
        }
    }

    return best;
}
//</SnippetVideoStabilizationEnabledChanged>
/// <summary>
/// Removes the video stabilization effect from the pipeline, restores any
/// backed-up stream/encoding settings, and releases the effect instance.
/// </summary>
public async Task CleanUpVideoStabilizationEffectAsync()
{
    //<SnippetCleanUpVisualStabilizationEffect>
    // Clear all effects in the pipeline
    await _mediaCapture.RemoveEffectAsync(_videoStabilizationEffect);

    // If backed up settings (stream properties and encoding profile) exist, restore them and clear the backups
    if (_inputPropertiesBackup != null)
    {
        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoRecord, _inputPropertiesBackup);
        _inputPropertiesBackup = null;
    }

    if (_outputPropertiesBackup != null)
    {
        _encodingProfile.Video = _outputPropertiesBackup;
        _outputPropertiesBackup = null;
    }

    // Stop listening to the effect before dropping the reference to it.
    _videoStabilizationEffect.EnabledChanged -= VideoStabilizationEffect_EnabledChanged;
    _videoStabilizationEffect = null;
    //</SnippetCleanUpVisualStabilizationEffect>
}
/// <summary>
/// Creates the live, non-seekable MediaStreamSource that renders the incoming
/// uncompressed I420 (IYUV) video frames.
/// </summary>
/// <param name="width">Frame width in pixels; must be non-zero.</param>
/// <param name="height">Frame height in pixels; must be non-zero.</param>
/// <param name="framerate">Frames per second; also drives the bitrate estimate.</param>
/// <returns>The configured stream source.</returns>
private MediaStreamSource CreateI420VideoStreamSource(uint width, uint height, int framerate)
{
    if (width == 0)
    {
        throw new ArgumentException("Invalid zero width for video", "width");
    }
    if (height == 0)
    {
        throw new ArgumentException("Invalid zero height for video", "height");
    }

    // IYUV and I420 share the same memory layout (different FOURCC only):
    // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
    var descriptor = new VideoStreamDescriptor(
        VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height));
    descriptor.EncodingProperties.FrameRate.Numerator = (uint)framerate;
    descriptor.EncodingProperties.FrameRate.Denominator = 1;
    // Bitrate in bits per second : framerate * frame pixel size * I420=12bpp
    descriptor.EncodingProperties.Bitrate = ((uint)framerate * width * height * 12);

    var source = new MediaStreamSource(descriptor);
    source.BufferTime = TimeSpan.Zero;
    source.IsLive = true;   // Enables optimizations for live sources
    source.CanSeek = false; // Cannot seek live WebRTC video stream
    // Raised whenever the pipeline needs the next video frame.
    source.SampleRequested += OnMediaStreamSourceRequested;

    return (source);
}