// Runs the YOLOv2 model on each arriving camera frame and updates the fps text and overlays.
// async void is acceptable only because this is a top-level event handler.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // Skip frames that carry no pixel data.
    if (e?.VideoFrame?.SoftwareBitmap == null)
    {
        return;
    }

    _stopwatch = Stopwatch.StartNew();
    ImageFeatureValue image = ImageFeatureValue.CreateFromVideoFrame(e.VideoFrame);
    var input = new Yolov2Input { image = image };

    // BUG FIX: the original blocked with GetAwaiter().GetResult() inside an async UI
    // handler, which risks deadlocking the dispatcher thread; await the evaluation instead.
    var output = await _model.EvaluateAsync(input);
    _stopwatch.Stop();

    IReadOnlyList<float> vectorImage = output.grid.GetAsVectorView();
    // Removed the unused imageList/maxIndex locals the original computed and never read.
    _boxes = _parser.ParseOutputs(vectorImage.ToArray());

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        // BUG FIX: the original interpolated {_cameraModel} for the height, which printed
        // the object's ToString() instead of the height value.
        TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_cameraModel.Width} x Height {_cameraModel.Height}";
        DrawOverlays(e.VideoFrame);
    });
}
// Converts each camera frame to BGRA8, evaluates the model, and refreshes the overlay UI.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // Skip frames that carry no pixel data.
    if (e?.VideoFrame?.SoftwareBitmap == null)
    {
        return;
    }

    // BUG FIX: the converted bitmap and the wrapping frame were never disposed,
    // leaking native memory on every camera frame.
    using (SoftwareBitmap softwareBitmap = SoftwareBitmap.Convert(e.VideoFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
    using (VideoFrame inputFrame = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap))
    {
        _input.image = ImageFeatureValue.CreateFromVideoFrame(inputFrame);

        // Evaluate the model and time only the inference.
        _stopwatch = Stopwatch.StartNew();
        _output = await _model.EvaluateAsync(_input);
        _stopwatch.Stop();
    }

    IReadOnlyList<float> vectorImage = _output.grid.GetAsVectorView();
    _boxes = _parser.ParseOutputs(vectorImage.ToArray());

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_canvasActualWidth} x Height {_canvasActualHeight}";
        DrawOverlays(e.VideoFrame);
    });
}
// Runs the Tiny YOLO v2 model on each camera frame, then updates the fps text and overlays.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // Nothing to do without pixel data or a loaded model.
    if (e?.VideoFrame?.SoftwareBitmap == null || _model == null)
    {
        return;
    }

    var modelInput = new TinyYoloV212ModelInput { Image = e.VideoFrame };

    _stopwatch = Stopwatch.StartNew();
    var modelOutput = await _model.EvaluateAsync(modelInput);
    _stopwatch.Stop();

    _boxes = _parser.ParseOutputs(modelOutput.Grid.ToArray());

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_yoloCanvasActualWidth} x Height {_yoloCanvasActualHeight}";
        DrawOverlays(e.VideoFrame);
    });
}
// Delegates each camera frame to the load-and-evaluate helper for "model.onnx".
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // BUG FIX: guard against a null event args / frame as well as a null bitmap;
    // the original dereferenced e.VideoFrame unconditionally and could throw
    // NullReferenceException from an async void handler (which crashes the app).
    if (e?.VideoFrame?.SoftwareBitmap == null)
    {
        return;
    }

    await LoadAndEvaluateModelAsync(e.VideoFrame, "model.onnx");
}
// Caches the most recent camera frame (only when it actually carries pixel data)
// so other code can pick it up from _currentVideoFrame.
private void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    if (e?.VideoFrame?.SoftwareBitmap != null)
    {
        _currentVideoFrame = e.VideoFrame;
    }
}
// Copies each camera frame, POSTs it to a local inference endpoint as raw bytes,
// and shows the resulting throughput in the UI. Failures are traced, not rethrown.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    if (e.VideoFrame.SoftwareBitmap == null)
    {
        return;
    }

    try
    {
        string prettyJson = string.Empty;
        _stopwatch = Stopwatch.StartNew();

        var sb = e.VideoFrame.SoftwareBitmap;
        // Copy into an RGBA8 frame the endpoint expects.
        // NOTE(review): newVf is intentionally not disposed here because the dispatched
        // async lambda below may still be reading it after this method resumes.
        var newVf = new VideoFrame(BitmapPixelFormat.Rgba8, sb.PixelWidth, sb.PixelHeight);
        await e.VideoFrame.CopyToAsync(newVf);

        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
        {
            // BUG FIX: the HttpClient was never disposed. Creating one per frame is still
            // an anti-pattern (socket exhaustion under load) — hoist it to a shared
            // static field when refactoring beyond this handler.
            using (var client = new HttpClient())
            {
                var url = "http://127.0.0.1:8080/image";
                var byteData = ConvertFrameToByteArray(newVf.SoftwareBitmap);
                using (var content = new ByteArrayContent(byteData))
                {
                    content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
                    var response = await client.PostAsync(url, content);
                    var jsonResponse = await response.Content.ReadAsStringAsync();
                    prettyJson = JToken.Parse(jsonResponse).ToString(Formatting.Indented);
                    Debug.WriteLine(prettyJson);
                }
            }
        });
        _stopwatch.Stop();

        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            var message = $"{DateTime.Now.ToLongTimeString()} - {1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps";
            // BUG FIX: removed a stray duplicated semicolon after this statement.
            TextBlockResults.Text = message;
        });
    }
    catch (Exception exception)
    {
        Debug.WriteLine(exception);
    }
}
// snippet 2
// Feeds the current camera frame into the CNTK graph model; the output is not
// consumed in this snippet.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    if (e.VideoFrame.SoftwareBitmap == null)
    {
        return;
    }

    var modelInput = new CNTKGraphModelInput { Input338 = e.VideoFrame };
    var modelOutput = await model.EvaluateAsync(modelInput);
}
// Evaluates the model for each arriving frame, tracing any failure rather than letting
// it escape: an exception thrown from an async void handler would crash the process.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    if (e.VideoFrame.SoftwareBitmap == null)
    {
        return;
    }

    try
    {
        await LoadAndEvaluateModelAsync(e.VideoFrame);
    }
    catch (Exception exception)
    {
        Debug.WriteLine(exception);
    }
}
// Runs the detector on each frame, draws overlays, and — when a person is detected
// inside the configured time window and past the cooldown — plays an alarm or speaks.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    if (e?.VideoFrame?.SoftwareBitmap == null)
    {
        return;
    }

    // BUG FIX: dispose the converted bitmap and wrapping frame; the originals leaked
    // native memory on every camera frame.
    using (SoftwareBitmap softwareBitmap = SoftwareBitmap.Convert(e.VideoFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
    using (VideoFrame inputFrame = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap))
    {
        _input.image = ImageFeatureValue.CreateFromVideoFrame(inputFrame);

        // Evaluate the model
        _stopwatch = Stopwatch.StartNew();
        _output = await _model.EvaluateAsync(_input);
        _stopwatch.Stop();
    }

    IReadOnlyList<float> vectorImage = _output.grid.GetAsVectorView();
    _boxes = _parser.ParseOutputs(vectorImage.ToArray());

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_canvasActualWidth} x Height {_canvasActualHeight}";
        DrawOverlays(e.VideoFrame);

        // Alert only if a person is on screen, the cooldown has elapsed, and the current
        // time falls inside the user-configured active window.
        var sinceLastDetect = DateTime.Now - LastDetect;
        if (personCount > 0
            && sinceLastDetect.TotalSeconds > DetectionIntervalInSeconds
            && DateTime.Now.TimeOfDay >= TimeStartPicker.SelectedTime
            && DateTime.Now.TimeOfDay <= TimeStopPicker.SelectedTime)
        {
            // BUG FIX: IsChecked is bool?; the original (bool) cast throws
            // InvalidOperationException when the checkbox is in the indeterminate state.
            if (ChkGuardian.IsChecked == true)
            {
                PlaySound("alarm.wav");
            }
            else
            {
                await speech.Read($"I saw {personCount} person in the camera");
            }
            LastDetect = DateTime.Now;
        }
    });
}
// Classifies the emotion in each camera frame with the CNTK model and pushes the
// winning label to the UI. Per-frame failures are traced but never rethrown.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // BUG FIX: guard against frames without pixel data (consistent with the sibling
    // handlers); the original dereferenced e.VideoFrame blindly and relied on the
    // catch block to absorb the resulting failure.
    if (e?.VideoFrame?.SoftwareBitmap == null)
    {
        return;
    }

    try
    {
        var input = new CNTKGraphModelInput() { Input338 = e.VideoFrame };
        var output = await _model.EvaluateAsync(input);

        // Pick the emotion with the highest score.
        var max = output.Plus692_Output_0.Max();
        var index = output.Plus692_Output_0.IndexOf(max);
        var text = $"{_emotions[index]}";
        Update(text);
    }
    catch (Exception exception)
    {
        // Deliberately best-effort per frame, but BUG FIX: trace the failure instead of
        // swallowing it silently in an empty catch.
        Debug.WriteLine(exception);
    }
}
// Remembers the newest camera frame; consumers read it later from the lastFrame field.
private void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    lastFrame = e.VideoFrame;
}
// Placeholder frame handler — the implementation is written live during the demo.
private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    // demo code goes here
}
// Kicks off frame analysis without awaiting it so the camera pipeline is never blocked.
// NOTE(review): exceptions thrown by SanFrameAsync go unobserved on the discarded task —
// confirm the helper handles its own errors.
private void CameraPreviewControl_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
{
    _ = SanFrameAsync(e.VideoFrame);
}