/// <summary>
/// Handles a new camera frame: shows or hides the motion-alert border based on
/// the detector's motion level, then displays the frame in the picture box.
/// Runs on the video source's capture thread, so UI access goes through Invoke.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void video_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Decide once whether motion exceeds the threshold; the original code
    // duplicated the whole Invoke/try-catch for the true and false branches.
    bool motionDetected = Detector1.ProcessFrame(eventArgs.Frame) > motionLevel;
    try
    {
        Invoke(new Action(() => backgroundBorder.Visible = motionDetected));
    }
    catch (ObjectDisposedException)
    {
        // The form is closing: drop this frame instead of terminating the whole
        // process (the original called Environment.Exit(1) here).
        return;
    }
    catch (InvalidOperationException)
    {
        // Window handle not yet created or already destroyed — same reasoning.
        return;
    }

    // Clone the frame: Accord reuses/disposes eventArgs.Frame after the handler
    // returns. Dispose the previously displayed image to avoid leaking one
    // GDI bitmap per frame.
    var previous = pictureBox1.Image;
    pictureBox1.Image = (Image)eventArgs.Frame.Clone();
    previous?.Dispose();
}
/// <summary>
/// Handles a new camera frame for the WPF preview: lazily creates the
/// WriteableBitmap backing store on first frame, optionally saves a one-shot
/// PNG capture, and blits the frame's pixels into the WriteableBitmap.
/// Runs on the capture thread; all UI work is dispatched.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Srouce_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Clone: the source reuses/disposes eventArgs.Frame once this handler returns.
    var frame = eventArgs.Frame.Clone() as Bitmap;
    if (frame == null)
    {
        return; // clone failed — nothing to display
    }
    if (this.Dispatcher == null)
    {
        // Fix: the clone was leaked here in the original (never disposed).
        frame.Dispose();
        return;
    }
    Dispatcher.BeginInvoke(new Action(() =>
    {
        try
        {
            if (writeBitmap == null)
            {
                // First frame: size the backing store to the incoming frame.
                writeBitmap = new WriteableBitmap(frame.Width, frame.Height, 96, 96, PixelFormats.Pbgra32, null);
                image.Source = writeBitmap;
                imageSize = new System.Drawing.Rectangle(0, 0, frame.Width, frame.Height);
                imageRect = new Int32Rect(0, 0, frame.Width, frame.Height);
                imageDataSize = frame.Height * writeBitmap.BackBufferStride;
            }
            if (capture)
            {
                // One-shot snapshot requested from elsewhere; clear the flag after saving.
                frame.Save(DateTime.Now.ToString("yyyyMMddHHmmss") + ".png", ImageFormat.Png);
                capture = false;
            }
            // NOTE(review): WritePixels assumes the frame's stride/pixel layout is
            // compatible with the Pbgra32 backing store — confirm the source
            // actually delivers 32bpp frames.
            BitmapData bData = frame.LockBits(imageSize, ImageLockMode.ReadWrite, frame.PixelFormat);
            writeBitmap.WritePixels(imageRect, bData.Scan0, imageDataSize, writeBitmap.BackBufferStride);
            frame.UnlockBits(bData);
        }
        finally
        {
            // Fix: release the clone even if LockBits/WritePixels throws —
            // the original leaked the frame on any exception in this action.
            frame.Dispose();
        }
    }));
}
/// <summary>
/// Handles each frame from the video player: runs face detection on every
/// fourth counted frame, fits the 68-point facial landmark model to the
/// tracked face, and estimates head pose from the result.
/// </summary>
/// <param name="sender">The video player raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void videoPlayer_NewFrameReceived(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // dlib operates on its own 2D pixel-array representation
    var dlibImage = eventArgs.Frame.ToArray2D<RgbPixel>();

    // run the (expensive) face detector only on every 4th counted frame
    if (frameIndex % 4 == 0)
    {
        var detections = faceDetector.Detect(dlibImage);
        if (detections.Length > 0)
        {
            currentFace = detections.First();
        }
    }

    // nothing to do until a face has been seen at least once
    // (note: frameIndex is not advanced on this path, so detection keeps
    // running every frame until the first face is found)
    if (currentFace == default(DlibDotNet.Rectangle))
    {
        return;
    }

    // fit the landmark model to the tracked face region
    var landmarks = shapePredictor.Detect(dlibImage, currentFace);

    // only a complete 68-point fit is usable for pose estimation
    if (landmarks.Parts == 68)
    {
        DetectHeadPose(eventArgs.Frame, landmarks);
    }

    // count this processed frame
    frameIndex++;
}
/// <summary>
/// Displays each incoming webcam frame in the picture box, releasing both the
/// raw event frame and the bitmap previously on screen to keep memory flat.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void VideoSource_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Keep a handle to the bitmap currently shown so it can be disposed
    // after the swap.
    Image previousImage = Webcam_Picturebox.Image;
    Webcam_Picturebox.Image = (Bitmap)eventArgs.Frame.Clone();
    // Release the source frame promptly to avoid high memory usage.
    eventArgs.Frame.Dispose();
    previousImage?.Dispose();
}
/// <summary>
/// Converts each incoming stream frame to a frozen WriteableBitmap and
/// forwards it (with its dimensions) to the frame-statistics updater.
/// Runs on the capture thread; the bitmap is frozen so it can cross threads.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Stream_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        frameCount++;
        WriteableBitmap bmp = new WriteableBitmap(eventArgs.Frame.ToBitmapSource());
        bmp.Freeze(); // make it usable from the UI thread
        updateFrameStats(bmp, (int)bmp.Width, (int)bmp.Height);
    }
    catch (Exception ex)
    {
        // Best effort per frame: dropping a bad frame beats crashing the
        // capture thread, but record why it was dropped — the original
        // swallowed the exception silently.
        System.Diagnostics.Debug.WriteLine("Stream_NewFrame: frame dropped: " + ex.Message);
    }
}
/// <summary>
/// Buffers the newest video frame (keeping the one before it in
/// _previousFrame) and stops the video source after the very first frame
/// when the first-frame flag is set (used for preprocessing).
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Video_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    if (_currentFrame != null)
    {
        // Fix: the bitmap previously held in _previousFrame was dropped without
        // Dispose, leaking one GDI bitmap per frame. Release it before shifting.
        _previousFrame?.Dispose();
        _previousFrame = _currentFrame; // shift current into previous
    }
    // Copy the frame: eventArgs.Frame is disposed by the source after this
    // handler returns, so we must not keep a reference to it.
    _currentFrame = new Bitmap(eventArgs.Frame);
    if (_firstFrameFlag)
    {
        _video.SignalToStop(); // only the first frame is needed for preprocessing
    }
}
/// <summary>
/// Converts each camera frame to a frozen BitmapImage and shows it in the
/// viewer. Runs on the capture thread; the UI assignment is marshalled to
/// the viewer's dispatcher.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="e">Event data carrying the current frame.</param>
private void NewFrameCamera(object sender, Accord.Video.NewFrameEventArgs e)
{
    BitmapImage bi = new BitmapImage();
    // Clone the frame (the source disposes e.Frame after the handler returns)
    // and dispose both the clone and the temporary stream — the original
    // leaked one Bitmap and one MemoryStream per frame.
    using (var bitmap = (System.Drawing.Bitmap)e.Frame.Clone())
    using (var ms = new System.IO.MemoryStream())
    {
        bitmap.Save(ms, System.Drawing.Imaging.ImageFormat.Bmp);
        ms.Seek(0, SeekOrigin.Begin);
        bi.BeginInit();
        // Load the image fully at EndInit so the stream can be closed
        // immediately afterwards instead of being kept alive by the image.
        bi.CacheOption = BitmapCacheOption.OnLoad;
        bi.StreamSource = ms;
        bi.EndInit();
    }
    bi.Freeze(); // make the image usable from the UI thread
    camerawiewer.Dispatcher.Invoke(() => this.camerawiewer.Source = bi);
}
/// <summary>
/// Applies the configured image filter to every incoming frame and, when a
/// one-shot snapshot has been requested via _SaveFrame, saves the filtered
/// frame to _FrameFile.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void VideoSource_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Replace the event frame with its filtered version so downstream
    // consumers of eventArgs see the processed image.
    eventArgs.Frame = filter.Apply(eventArgs.Frame);
    if (_SaveFrame)
    {
        // Clear the flag first so a failing save is not retried every frame.
        _SaveFrame = false;
        try
        {
            eventArgs.Frame.Save(_FrameFile);
        }
        catch (Exception ex)
        {
            // Best effort: a failed snapshot must not kill the capture thread,
            // but log the reason instead of swallowing it silently.
            System.Diagnostics.Debug.WriteLine("Frame save failed: " + ex.Message);
        }
    }
}
/// <summary>
/// Full motion-analysis pipeline for each video frame: buffers the frame,
/// feeds it to the motion detector, saves annotated/unannotated frames to
/// the output folder, and records per-region motion scores keyed by frame
/// index and human ID.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Video_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    if (_currentFrame != null)
    {
        // Shift the current frame into the previous slot.
        // NOTE(review): the bitmap previously in _previousFrame is dropped
        // without Dispose — likely a per-frame GDI leak; confirm nothing
        // else holds it before adding a Dispose.
        _previousFrame = _currentFrame;
    }
    // Create a new copy so that when eventArgs is disposed it doesn't take
    // the frame reference with it.
    _currentFrame = new Bitmap(eventArgs.Frame);
    if (_firstFrameFlag)
    {
        // Get the first frame only to begin with, for any preprocessing necessary.
        _video.SignalToStop();
    }
    else
    {
        // Run the motion detector on the new frame and fetch its motion map.
        _detector.ProcessFrame(UnmanagedImage.FromManagedImage(_currentFrame));
        // NOTE(review): motionFrame is never disposed — verify whether
        // ToManagedImage() transfers ownership to this method.
        Bitmap motionFrame = _detector.MotionFrame.ToManagedImage();
        if (_frameIndex == 0)
        {
            // First processed frame: draw calibration grid lines only.
            DrawGridLines(_currentFrame, false);
            DrawGridLines(motionFrame, true);
        }
        else
        {
            // Save raw ("NA_") and motion-annotated ("AN_") frames to disk.
            _currentFrame.Save(_outputFolder + "NA_" + _frameIndex.ToString() + ".png");
            motionFrame.Save(_outputFolder + "AN_" + _frameIndex.ToString() + ".png");
        }
        // Ensure a per-frame entry exists in the motion table.
        if (!_motions.ContainsKey(_frameIndex))
        {
            _motions.Add(_frameIndex, new Dictionary<int, double>());
        }
        // Score motion for every region of interest against its baseline,
        // keyed by the region's HumanID.
        foreach (ROI r in _regionsOfInterest)
        {
            double motion = r.CompareMotionAgainstBase(_currentFrame, _frameIndex);
            if (!_motions[_frameIndex].ContainsKey(r.HumanID))
            {
                _motions[_frameIndex].Add(r.HumanID, 0);
            }
            _motions[_frameIndex][r.HumanID] = motion;
        }
    }
    // Advance the frame counter and track the highest frame index seen.
    _frameIndex++;
    _maxFrame = Math.Max(_maxFrame, _frameIndex);
}
/// <summary>
/// Called when videoPlayer receives a new frame: detects the 68 facial
/// landmark points, renders the eye-area and head-pose overlays into the
/// corresponding picture boxes, and extracts the head rotation as Euler angles.
/// </summary>
/// <param name="sender">The video player raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void videoPlayer_NewFrameReceived(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // get the current camera frame
    var frame = eventArgs.Frame;

    // get the landmark points
    landmarkPoints = DetectLandmarks(frame, frameIndex);

    // only a complete 68-point fit is usable
    if (landmarkPoints != null && landmarkPoints.Parts == 68)
    {
        // draw the landmark points in the bottom right box
        var poseImage = new Bitmap(frame.Width, frame.Height);
        Utility.DrawLandmarkPoints(landmarkPoints, poseImage);

        // draw the eye area in the bottom left box
        var eyeImage = new Bitmap(frame.Width, frame.Height);
        Utility.DrawEyeArea(landmarkPoints, eyeImage);

        // Fix: dispose the bitmap previously shown in the box — assigning a
        // new Image every frame without disposing the old one leaks GDI handles.
        eyeBox.Image?.Dispose();
        eyeBox.Image = eyeImage;

        // build a quick and dirty camera calibration matrix
        var cameraMatrix = Utility.GetCameraMatrix(eventArgs.Frame.Width, eventArgs.Frame.Height);

        // detect head angle
        Utility.DetectHeadAngle(
            landmarkPoints,
            cameraMatrix,
            out Mat rotation,
            out Mat translation,
            out MatOfDouble coefficients);

        // draw the pose line in the bottom right box
        Utility.DrawPoseLine(rotation, translation, cameraMatrix, coefficients, landmarkPoints, poseImage);

        // same leak fix as for eyeBox above
        headBox.Image?.Dispose();
        headBox.Image = poseImage;

        // get the euler angles
        headRotation = Utility.GetEulerMatrix(rotation);
    }

    // update frame counter
    frameIndex++;
}
/// <summary>
/// Pushes each captured frame to the Direct3D image control: the frame is
/// cloned on the capture thread, then locked, displayed, and released on the
/// control's dispatcher thread.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Device_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Clone here — the source reclaims eventArgs.Frame after this handler returns.
    var snapshot = eventArgs.Frame.Clone() as System.Drawing.Bitmap;
    imageD3D.Dispatcher.BeginInvoke(new Action(() =>
    {
        // Hand the raw pixel pointer to the D3D control, then release the bitmap.
        var pixelData = snapshot.LockBits(rcsrc, System.Drawing.Imaging.ImageLockMode.ReadOnly, snapshot.PixelFormat);
        imageD3D.Display(pixelData.Scan0);
        snapshot.UnlockBits(pixelData);
        snapshot.Dispose();
    }));
}
/// <summary>
/// Called from Accord.Video.DirectShow.
/// Thread is created by Accord.Video.DirectShow.
/// The thread also communicates with the Capture Card, so the performance of
/// this handler directly impacts the capture frame rate. On the first frame it
/// records the frame dimensions, pre-populates the Mat pool, and raises the
/// capture thread's priority; thereafter it copies each frame into a pooled
/// Mat and pushes it to the processing buffer.
/// </summary>
/// <param name="sender">The DirectShow video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void NewFrameArrived(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // width == 0 means this is the very first frame: perform one-time setup.
    if (width == 0) // Is initialized
    {
        width = eventArgs.Frame.Width;
        height = eventArgs.Frame.Height;
        // Create enough UnusedMat up front so the steady-state path never
        // has to allocate.
        for (int i = 0; i < PreCreateMatCount; ++i)
        {
            bitmapBuffer.PushUnusedMat(CreateSuitableMat());
        }
        // Raise the priority of the Accord.Video.DirectShow capture thread
        // (we are running on it right now) to minimise dropped frames.
        Thread.CurrentThread.Priority = ThreadPriority.Highest;
    }
    // Drop frames until capturing has been started.
    if (!IsStartCapturing)
    {
        return;
    }
    // Get Frame from Capture Card.
    Bitmap SrcBitmap = eventArgs.Frame;
    // Get a pooled Mat; fall back to allocating if the pool is exhausted.
    Mat ProcessingMat = bitmapBuffer.GetUnusedMat();
    if (ProcessingMat == null)
    {
        ProcessingMat = CreateSuitableMat();
    }
    // Copy the frame into the processing Mat.
    // NOTE(review): name implies a 24bppRgb -> 32Argb pixel conversion —
    // confirm the capture card really delivers 24bppRgb frames.
    Copy_24bppRgb_BitmapTo_32Argb_Mat(in SrcBitmap, in ProcessingMat);
    // Hand the filled Mat to the processing side of the buffer.
    bitmapBuffer.PushProcessingMat(ProcessingMat);
}
/// <summary>
/// Broadcasts each captured frame as one JPEG part of a
/// multipart/x-mixed-replace MJPEG response (boundary "myboundary") to every
/// connected client stream.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private static void Device_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    byte[] boundary;
    byte[] bitmapdata;
    // Fix: the cloned Bitmap and the MemoryStream were never disposed,
    // leaking one GDI bitmap and one buffer per frame.
    using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
    using (var buffert = new MemoryStream())
    {
        bitmap.Save(buffert, System.Drawing.Imaging.ImageFormat.Jpeg);
        boundary = new ASCIIEncoding().GetBytes("\r\n--myboundary\r\nContent-Type: image/jpeg\r\nContent-Length:" + buffert.Length + "\r\n\r\n");
        bitmapdata = buffert.ToArray();
    }
    foreach (var mjpegstream in mjpegstreams)
    {
        try
        {
            mjpegstream.Write(boundary, 0, boundary.Length);
            mjpegstream.Write(bitmapdata, 0, bitmapdata.Length);
            mjpegstream.Flush();
        }
        catch (Exception e)
        {
            // A dead client must not stop the broadcast to the others.
            // NOTE(review): the broken stream stays in mjpegstreams and will
            // throw again on every frame — consider pruning it after the loop.
            System.Diagnostics.Debug.WriteLine("MJPEG client write failed: " + e.Message);
        }
    }
}
/// <summary>
/// Draws a live status overlay (sensor/state indicator boxes and labels) onto
/// each camera frame for on-screen display, then composes an enlarged copy
/// (frame plus overlay strip) and writes it to the session's output video.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void Camera_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    //Grab the video frame itself
    var video_frame = eventArgs.Frame;
    //Create a graphics object to draw the overlay directly onto the displayed frame
    using (Graphics g = Graphics.FromImage(video_frame))
    {
        //Grab a separate copy of the video frame to save to the file
        //(taken BEFORE the overlay is drawn, so the saved frame starts clean)
        var video_frame_copy = video_frame.Clone() as System.Drawing.Bitmap;
        //Make a semi-transparent rectangle near the top of the image
        //NOTE(review): brushes b/b2 and the per-rectangle Pens below are never
        //disposed — GDI handle churn on every frame; confirm and add using/Dispose.
        SolidBrush b = new SolidBrush(System.Drawing.Color.FromArgb(128, System.Drawing.Color.CornflowerBlue));
        g.FillRectangle(b, new RectangleF(0, 0, _camera_frame_width, _overlay_height));
        //Draw each box
        SolidBrush b2 = new SolidBrush(System.Drawing.Color.FromArgb(128, System.Drawing.Color.Red));
        for (int i = 0; i < _box_lefts.Count; i++)
        {
            g.DrawRectangle(new System.Drawing.Pen(b2, 4), new System.Drawing.Rectangle(_box_lefts[i], _box_top, _box_width, _box_height));
        }
        //Draw some text under each box
        List<string> _box_strings = new List<string>() { "Left NP", "Left Prox", "Right Prox", "Right NP", "Sound Cue" };
        for (int i = 0; i < _box_lefts.Count; i++)
        {
            g.DrawString(_box_strings[i], System.Drawing.SystemFonts.DefaultFont, b2, _box_lefts[i], _box_top + 110);
        }
        //Now let's draw some graphics based on the current state of the session.
        var model = MMazeBehaviorSession.GetInstance();
        var vm = MMazeBehaviorViewModel.GetInstance();
        if (model != null && vm != null)
        {
            //Fill each indicator box that is currently active on screen.
            if (model.LeftNosepokeState)
            {
                g.FillRectangle(b2, _box_lefts[0], _box_top, _box_width, _box_height);
            }
            if (model.LeftProxState)
            {
                g.FillRectangle(b2, _box_lefts[1], _box_top, _box_width, _box_height);
            }
            if (model.RightProxState)
            {
                g.FillRectangle(b2, _box_lefts[2], _box_top, _box_width, _box_height);
            }
            if (model.RightNosepokeState)
            {
                g.FillRectangle(b2, _box_lefts[3], _box_top, _box_width, _box_height);
            }
            if (model.IsSoundPlaying)
            {
                g.FillRectangle(b2, _box_lefts[4], _box_top, _box_width, _box_height);
            }
            //Create blank canvas for saving image to video file
            //(tall enough to hold the overlay strip above the frame)
            Bitmap resized_canvas = new Bitmap(video_frame_copy.Width, video_frame_copy.Height + _overlay_height);
            //Create a new graphics object with the blank canvas
            using (Graphics g2 = Graphics.FromImage(resized_canvas))
            {
                //Paste the video frame on the blank canvas, below the overlay strip
                g2.DrawImage(video_frame_copy, 0, _overlay_height, video_frame_copy.Width, video_frame_copy.Height);
                //Draw the overlay on the top of the image (repeats the on-screen
                //overlay drawing for the saved copy)
                g2.FillRectangle(b, new RectangleF(0, 0, _camera_frame_width, _overlay_height));
                for (int i = 0; i < _box_lefts.Count; i++)
                {
                    g2.DrawRectangle(new System.Drawing.Pen(b2, 4), new System.Drawing.Rectangle(_box_lefts[i], _box_top, _box_width, _box_height));
                    g2.DrawString(_box_strings[i], System.Drawing.SystemFonts.DefaultFont, b2, _box_lefts[i], _box_top + 110);
                }
                if (model.LeftNosepokeState)
                {
                    g2.FillRectangle(b2, _box_lefts[0], _box_top, _box_width, _box_height);
                }
                if (model.LeftProxState)
                {
                    g2.FillRectangle(b2, _box_lefts[1], _box_top, _box_width, _box_height);
                }
                if (model.RightProxState)
                {
                    g2.FillRectangle(b2, _box_lefts[2], _box_top, _box_width, _box_height);
                }
                if (model.RightNosepokeState)
                {
                    g2.FillRectangle(b2, _box_lefts[3], _box_top, _box_width, _box_height);
                }
                if (model.IsSoundPlaying)
                {
                    g2.FillRectangle(b2, _box_lefts[4], _box_top, _box_width, _box_height);
                }
                //Now let's take the current video frame and write it out to a
                //saved video for the session
                vm.WriteVideoFrame(resized_canvas);
            }
            //Release some resources
            resized_canvas.Dispose();
        }
        //Release some resources
        video_frame_copy.Dispose();
    }
}
/// <summary>
/// Forwards a copy of each decoded video frame to the frame controller for
/// processing, then releases the source bitmap to keep memory usage low.
/// </summary>
/// <param name="sender">The video file source raising the event.</param>
/// <param name="eventArgs">Event data carrying the current frame.</param>
private void _videoFileSource_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // The controller receives its own copy so it can outlive this handler.
    var frameCopy = (Bitmap)eventArgs.Frame.Clone();
    _frameController.ProcessFrame(frameCopy);
    // Release the original frame promptly (avoid high memory usage).
    eventArgs.Frame.Dispose();
}
// Intentional no-op: frames from this capture source are currently ignored.
// NOTE(review): if nothing ever consumes these frames, the subscription may
// exist only to keep the source running — confirm before removing it.
private static void _captures_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventargs) { }