private void VideoSource_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Marshal onto the UI thread: BitmapImage and ImageWebcam must be touched there.
    Dispatcher.Invoke(() =>
    {
        // Clone first — the video source reuses/reclaims eventArgs.Frame after
        // this handler returns. Both the clone and the backing stream are
        // disposed here; the original leaked one Bitmap + one MemoryStream per frame.
        using (System.Drawing.Image myCurrentImage = (Bitmap)eventArgs.Frame.Clone())
        using (MemoryStream ms = new MemoryStream())
        {
            myCurrentImage.Save(ms, ImageFormat.Bmp);
            ms.Seek(0, SeekOrigin.Begin);

            BitmapImage bi = new BitmapImage();
            bi.BeginInit();
            // OnLoad forces the pixels to be read during EndInit, so the
            // MemoryStream can be disposed as soon as we leave this scope.
            bi.CacheOption = BitmapCacheOption.OnLoad;
            bi.StreamSource = ms;
            bi.EndInit();

            // Using the freeze function to avoid cross thread operations
            bi.Freeze();
            ImageWebcam.Source = bi;
        }

        if (isRecording)
        {
            captureFunction(eventArgs.Frame);
        }
    });
}
private void Screen_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // The first frame establishes the recording start time.
    if (_StartTime == DateTime.MinValue)
    {
        _StartTime = DateTime.Now;
    }

    _ScreenTotalFrames++;

    // Clone before writing — the capture source reuses eventArgs.Frame after
    // this handler returns. Dispose the clone once written: the original
    // leaked one GDI bitmap per frame. (Assumes the writer copies the pixel
    // data during WriteVideoFrame, as Accord's video writers do.)
    using (Bitmap frame = (Bitmap)eventArgs.Frame.Clone())
    {
        _Writer.WriteVideoFrame(frame, DateTime.Now - _StartTime);
    }

    presentTime = DateTime.Now;

    // Interleave audio roughly every 154 ms (two 77 ms audio buffers).
    if (_MicrophoneSignal != null && (presentTime - oldTime).TotalMilliseconds >= 77 * 2)
    {
        Trace.WriteLine("Time: " + _MicrophoneSignal.Duration.TotalMilliseconds);

        // _MicrophoneSignal was already null-checked above; the original
        // re-checked it redundantly here.
        if (_RecordingMicrophone)
        {
            _Writer.WriteAudioFrame(_MicrophoneSignal);
        }

        if (_RecordingAudio && _SpeakerSignal != null)
        {
            Trace.WriteLine("speaker");
            _Writer.WriteAudioFrame(_SpeakerSignal);
        }

        oldTime = presentTime;
    }
}
void controller_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (backproj)
        return;

    Bitmap image = eventArgs.Frame;
    if (image == null)
        return;

    // Tracks whether 'image' is a bitmap we allocated (marker.Apply returns a
    // new image) or still the source's frame buffer.
    bool ownsImage = false;

    if (parent.faceForm != null && !parent.faceForm.IsDisposed)
    {
        // Guard the cast: the tracker may not be a MatchingTracker, in which
        // case the original would have thrown NullReferenceException.
        MatchingTracker matching = parent.faceForm.faceController.Tracker as MatchingTracker;
        if (matching != null)
        {
            // NOTE(review): the original also computed a clipped Rectangle from
            // the tracking object's center here, but never used it — removed as
            // dead code.
            marker.Rectangles = new[] { matching.TrackingObject.Rectangle };
            image = marker.Apply(image);
            ownsImage = true;
        }
    }

    // The video source reclaims eventArgs.Frame after this handler returns,
    // so the PictureBox must receive its own copy.
    if (!ownsImage)
        image = (Bitmap)image.Clone();

    pictureBox.Image = image;
}
private void CaptureStream_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Timestamp of this frame; the very first frame fixes the recording origin.
    long now = DateTime.Now.Ticks;
    if (StartTick == null)
    {
        StartTick = now;
    }

    // Write the frame at its offset relative to the start of the capture.
    TimeSpan offset = TimeSpan.FromTicks(now - StartTick.Value);
    video.WriteVideoFrame(eventArgs.Frame, offset);
}
private void Camera_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Intentionally empty: frame display is not implemented for this source.
    // The commented-out Bitmap -> BitmapImage conversion that used to live
    // here was dead code and has been removed; see VideoSource_NewFrame for
    // the working conversion path, or restore from source control if needed.
}
private void NewVideoFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Lazily create the AVI video stream from the first frame, then append
    // every subsequent frame to the same stream.
    if (_aviStream == null)
    {
        _aviStream = AviManager.AddVideoStream(false, 5, eventArgs.Frame);
    }
    else
    {
        _aviStream.AddFrame(eventArgs.Frame);
    }

    // Voice-enabled capture modes also keep a running frame count
    // (presumably for audio/video alignment — confirm against the consumer).
    switch (CaptureType)
    {
        case CaptureTypeEnum.VideoCaptureWithVoice:
        case CaptureTypeEnum.ScreenCaptureWithVoice:
            BitmapsCount++;
            break;
    }
}
private void VideoSource_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    // Intentionally empty: the preview path is disabled. The old commented-out
    // implementation (clone the frame, marshal it to pictureBox1 via Invoke,
    // swallowing all exceptions) was dead code and has been removed; restore
    // from source control if it is needed again.
}
// New frame event handler, which is invoked on each new available video frame
private void video_NewFrame(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        // Clone first — the video source reuses eventArgs.Frame after this
        // handler returns. The clone is disposed once consumed; the original
        // leaked one bitmap per frame. (Assumes BitmapToImageSource and
        // WriteVideoFrame both copy the pixel data — TODO confirm.)
        using (Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone())
        {
            // The original also null-checked 'bitmap' here, but a cast either
            // succeeds or throws — it never yields null.
            VideoFrame = BitmapTools.BitmapToImageSource(bitmap);

            if (IsRecording)
            {
                VideoWriter.WriteVideoFrame(bitmap);
            }
        }
    }
    catch (Exception ex)
    {
        Stop();
        // Preserve the original failure as the inner exception instead of
        // silently discarding it, so the root cause is diagnosable.
        throw new Exception("error receiving frame!", ex);
    }
}
// New frame from nested video source.
// Hands the frame off to the processing thread: acquires the
// isProcessingThreadAvailable gate (released elsewhere by the processing
// thread — confirm against the consumer), stores a clone of the frame, and
// signals isNewFrameAvailable.
private void nestedVideoSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // don't even try doing something if there are no clients
    if (NewFrame == null)
        return;

    if (skipFramesIfBusy)
    {
        // Non-blocking probe of the gate: zero timeout.
        if (!isProcessingThreadAvailable.WaitOne(0, false))
        {
            // return in the case if image processing thread is still busy and
            // we are allowed to skip frames
            return;
        }
    }
    else
    {
        // make sure image processing thread is available in the case we cannot skip frames
        // (blocks this source's capture thread until the gate is released)
        isProcessingThreadAvailable.WaitOne();
    }

    // pass the image to processing frame and exit
    // (clone because the source reuses eventArgs.Frame after the handler returns)
    lastVideoFrame = CloneImage(eventArgs.Frame);
    isNewFrameAvailable.Set();
}
private void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (requestedToStop)
        return;
    if (!IsTracking && !IsDetecting)
        return;

    lock (syncObject)
    {
        // skip first frames during initialization
        if (skip < 10)
        {
            skip++;
            return;
        }

        Bitmap frame = eventArgs.Frame;
        int width = frame.Width;
        int height = frame.Height;

        BitmapData data = frame.LockBits(new Rectangle(0, 0, width, height),
            ImageLockMode.ReadWrite, frame.PixelFormat);

        try
        {
            UnmanagedImage image = new UnmanagedImage(data);

            if (IsDetecting)
            {
                // Reduce frame size to process it faster
                float xscale = (float)width / resize.NewWidth;
                float yscale = (float)height / resize.NewHeight;

                UnmanagedImage downsample = resize.Apply(image);

                // Process the face detector in the downsampled image
                Rectangle[] regions = detector.ProcessFrame(downsample);

                // Check if the face has been steady 5 frames in a row.
                // Also guard against an empty region list before indexing
                // regions[0] (the sibling nose-tracking handler checks
                // regions.Length too; the original would throw here).
                if (detector.Steady >= 5 && regions.Length > 0)
                {
                    // Yes, so track the face
                    Rectangle face = regions[0];

                    // Reduce the face size to avoid tracking background
                    Rectangle window = new Rectangle(
                        (int)((face.X + face.Width / 2f) * xscale),
                        (int)((face.Y + face.Height / 2f) * yscale),
                        1, 1);

                    window.Inflate(
                        (int)(0.25f * face.Width * xscale),
                        (int)(0.40f * face.Height * yscale));

                    // Re-initialize tracker
                    tracker.Reset();
                    tracker.SearchWindow = window;
                    tracker.ProcessFrame(image);

                    // Update initial position
                    computeCurrentPosition();

                    OnHeadEnter(new HeadEventArgs(currentX, currentY, currentAngle, currentScale));
                }
            }
            else if (IsTracking)
            {
                // Only extract the object's image if someone is listening.
                tracker.Extract = (NewFrame != null);

                // Track the object
                tracker.ProcessFrame(image);

                // Get the object position
                TrackingObject obj = tracker.TrackingObject;

                // Update current position
                computeCurrentPosition();

                if (obj.IsEmpty || !tracker.IsSteady)
                {
                    OnHeadLeave(EventArgs.Empty);
                }
                else
                {
                    OnHeadMove(new HeadEventArgs(currentX, currentY, currentAngle, currentScale));

                    if (NewFrame != null && obj.Image != null)
                    {
                        Bitmap headFrame = obj.Image.ToManagedImage();
                        NewFrame(this, new NewFrameEventArgs(headFrame));
                    }
                }
            }
        }
        finally
        {
            // Always unlock the bitmap, even if the detector/tracker throws —
            // the original would otherwise leave the frame permanently locked.
            frame.UnlockBits(data);
        }
    }
}
private void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (!IsRunning || (!IsTracking && !IsDetecting))
        return;

    lock (syncObject)
    {
        Bitmap frame = eventArgs.Frame;
        int width = frame.Width;
        int height = frame.Height;

        BitmapData data = frame.LockBits(new Rectangle(0, 0, width, height),
            ImageLockMode.ReadWrite, frame.PixelFormat);

        try
        {
            UnmanagedImage head = new UnmanagedImage(data);

            if (IsDetecting)
            {
                IsDetecting = false;

                // Process the nose detector in the head image
                Rectangle[] regions = detector.ProcessFrame(head);

                if (regions.Length >= 1)
                {
                    // Re-initialize tracker
                    tracker.Reset();
                    tracker.SearchWindow = regions[0];
                    tracker.ProcessFrame(head);

                    // Update initial position
                    computeCurrentPosition(width, height);

                    OnFaceEnter(new FaceEventArgs(currentX, currentY));
                }
                else
                {
                    // Nothing found yet — keep detecting on the next frame.
                    IsDetecting = true;
                }
            }
            else if (IsTracking)
            {
                // Track the object
                tracker.ProcessFrame(head);

                // Get the object position
                TrackingObject obj = tracker.TrackingObject;

                // Update current position
                computeCurrentPosition(width, height);

                if (obj.IsEmpty)
                {
                    OnFaceLeave(EventArgs.Empty);
                }
                else
                {
                    OnFaceMove(new FaceEventArgs(currentX, currentY));
                }
            }
        }
        finally
        {
            // Always unlock the bitmap, even if detection/tracking throws —
            // the original would otherwise leave the frame permanently locked.
            frame.UnlockBits(data);
        }
    }
}
void VideoPlayer_NewFrameReceived(object sender, Accord.Video.NewFrameEventArgs eventArgs)
{
    DateTime currentFrameTime = eventArgs.CaptureFinished;

    // Encode the last frame at the same time we prepare the new one
    Task.WaitAll(
        Task.Run(() =>
        {
            lock (syncObj) // Save the frame to the video file.
            {
                if (IsRecording)
                {
                    if (RecordingStartTime == DateTime.MinValue)
                    {
                        RecordingStartTime = DateTime.Now;
                    }

                    TimeSpan timestamp = currentFrameTime - RecordingStartTime;
                    if (timestamp > TimeSpan.Zero)
                    {
                        videoWriter.WriteVideoFrame(this.lastFrame, timestamp, this.lastFrameRegion);
                    }
                }
            }
        }),
        Task.Run(() =>
        {
            // Adjust the window according to the current capture
            // mode. Also adjusts to keep even widths and heights.
            CaptureRegion = AdjustWindow();

            // Crop the image if the mode requires it
            if (CaptureMode == CaptureRegionOption.Fixed || CaptureMode == CaptureRegionOption.Window)
            {
                crop.Rectangle = CaptureRegion;
                eventArgs.Frame = croppedImage = crop.Apply(eventArgs.Frame, croppedImage);
                eventArgs.FrameSize = crop.Rectangle.Size;
            }

            // Draw extra information on the screen
            bool captureMouse = Settings.Default.CaptureMouse;
            bool captureClick = Settings.Default.CaptureClick;
            bool captureKeys = Settings.Default.CaptureKeys;

            if (captureMouse || captureClick || captureKeys)
            {
                cursorCapture.CaptureRegion = CaptureRegion;
                clickCapture.CaptureRegion = CaptureRegion;
                keyCapture.Font = Settings.Default.KeyboardFont;

                using (Graphics g = Graphics.FromImage(eventArgs.Frame))
                {
                    g.CompositingQuality = CompositingQuality.HighSpeed;
                    g.SmoothingMode = SmoothingMode.HighSpeed;

                    float invWidth = 1;  // / widthScale;
                    float invHeight = 1; // / heightScale;

                    if (captureMouse)
                    {
                        cursorCapture.Draw(g, invWidth, invHeight);
                    }

                    if (captureClick)
                    {
                        clickCapture.Draw(g, invWidth, invHeight);
                    }

                    if (captureKeys)
                    {
                        keyCapture.Draw(g, invWidth, invHeight);
                    }
                }
            }
        })
    );

    // Save the just processed frame and mark
    // it to be encoded in the next iteration:
    lastFrame = eventArgs.Frame.Copy(lastFrame);
    //lastFrameTime = currentFrameTime;

    // BUG FIX: the height previously came from eventArgs.Frame while the
    // width came from eventArgs.FrameSize; use FrameSize for both so the
    // recorded region is consistent with the reported frame size.
    lastFrameRegion = new Rectangle(0, 0, eventArgs.FrameSize.Width, eventArgs.FrameSize.Height);
}
// On new frame ready
private void videoSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Ignore frames that arrive after a stop has been requested.
    if (requestedToStop)
        return;

    // Work on a private copy: the video source owns eventArgs.Frame.
    Bitmap incoming = (Bitmap)eventArgs.Frame.Clone();

    // Give subscribers a chance to process (and possibly replace) the frame first.
    if (NewFrame != null)
    {
        NewFrame(this, ref incoming);
    }

    // now update current frame of the control
    lock (sync)
    {
        // Release the previously shown frame, remembering whether the control
        // must adapt to a new frame size.
        if (currentFrame != null)
        {
            if (currentFrame.Size != eventArgs.Frame.Size)
            {
                needSizeUpdate = true;
            }
            currentFrame.Dispose();
            currentFrame = null;
        }
        if (convertedFrame != null)
        {
            convertedFrame.Dispose();
            convertedFrame = null;
        }

        currentFrame = incoming;
        frameSize = currentFrame.Size;
        lastMessage = null;

        // Pixel formats GDI+ cannot render directly are converted down to a
        // lower bit depth before display.
        switch (currentFrame.PixelFormat)
        {
            case PixelFormat.Format16bppGrayScale:
            case PixelFormat.Format48bppRgb:
            case PixelFormat.Format64bppArgb:
                convertedFrame = Accord.Imaging.Image.Convert16bppTo8bpp(currentFrame);
                break;
        }
    }

    // Ask the control to repaint with the new frame.
    Invalidate();
}
void controller_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Clone the frame: the video source reclaims eventArgs.Frame after this
    // handler returns, so handing it to the PictureBox directly would leave
    // the control holding a disposed bitmap.
    pictureBox2.Image = (Bitmap)eventArgs.Frame.Clone();
}
// New snapshot frame is available
private void videoDevice_SnapshotFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Log the snapshot dimensions, then hand the viewer a private copy —
    // the device reuses the frame buffer after this handler returns.
    Console.WriteLine(eventArgs.Frame.Size);

    Bitmap snapshot = (Bitmap)eventArgs.Frame.Clone();
    ShowSnapshot(snapshot);
}