private Bitmap FacePicDetect(Bitmap bitmap)
{
    // NOTE(review): lock(this) is discouraged (external code could lock on the same
    // instance); a private readonly gate field would be safer, but adding one requires
    // a change outside this method.
    lock (this)
    {
        // Keep an unmarked copy of the frame: if a snapshot is being taken, the
        // photo without the detection rectangles is the one that gets saved.
        // NOTE(review): the previous currentPicture is overwritten without Dispose();
        // verify no other thread still holds it before adding disposal.
        currentPicture = (Bitmap)bitmap.Clone();

        if (detector == null)
        {
            // Lazily build the Haar face detector (minimum window size 100 px).
            detector = new HaarObjectDetector(new FaceHaarCascade(), 100)
            {
                SearchMode = ObjectDetectorSearchMode.Single,             // stop after the first match
                ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller, // scan large windows first
                ScalingFactor = 1.5f,                                     // window rescale factor per pass
                UseParallelProcessing = true
            };
        }

        // Run detection on the incoming frame.
        // Fix: removed the pointless "regions = null;" assignments — the local
        // goes out of scope immediately and the GC does not need the hint.
        Rectangle[] regions = detector.ProcessFrame(bitmap);

        if (regions != null && regions.Length > 0)
        {
            // Draw orange rectangles around every detected face.
            RectanglesMarker marker = new RectanglesMarker(regions, Color.Orange);
            return marker.Apply(bitmap);
        }

        return bitmap;
    }
}
private void button1_Click(object sender, EventArgs e)
{
    // Push the UI-selected options into the detector before each run.
    detector.SearchMode = (ObjectDetectorSearchMode)cbMode.SelectedValue;
    detector.ScalingMode = (ObjectDetectorScalingMode)cbScaling.SelectedValue;
    detector.ScalingFactor = 1.5f;
    detector.MinSize = new Size(32, 32);
    detector.UseParallelProcessing = cbParallel.Checked;

    Stopwatch sw = Stopwatch.StartNew();

    // Convert the displayed image to 24bpp RGB for the detector.
    // Fix: the temporary Bitmap wrapper around pictureBox1.Image was never
    // disposed, leaking a GDI handle on every click.
    using (Bitmap source = new Bitmap(pictureBox1.Image))
    {
        picture = Accord.Imaging.Image.Clone(source, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    }

    // Process frame to detect objects.
    Rectangle[] objects = detector.ProcessFrame(picture);

    sw.Stop();

    if (objects.Length > 0)
    {
        RectanglesMarker marker = new RectanglesMarker(objects, Color.Red);
        pictureBox1.Image = marker.Apply(picture);
    }

    toolStripStatusLabel1.Text = string.Format("Completed detection of {0} objects in {1}.", objects.Length, sw.Elapsed);
}
void controller_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Mirror the incoming frame into the picture box, overlaying the face
    // tracker's current rectangle when the face form is alive.
    if (backproj)
    {
        return;
    }

    Bitmap image = eventArgs.Frame;
    if (image == null)
    {
        return;
    }

    if (parent.faceForm != null && !parent.faceForm.IsDisposed)
    {
        // Fix: guard both the 'as' cast and TrackingObject — either can be null
        // before the first face is acquired, which previously threw an NRE.
        // Also removed a Rectangle that was computed, intersected and then
        // never used.
        MatchingTracker matching = parent.faceForm.faceController.Tracker as MatchingTracker;
        if (matching != null && matching.TrackingObject != null)
        {
            marker.Rectangles = new[] { matching.TrackingObject.Rectangle };
            image = marker.Apply(image);
        }
    }

    pictureBox.Image = image;
}
private void FaceDetect_Click(object sender, EventArgs e)
{
    // Knowledge-based face detection over a thresholded grayscale image,
    // timed with the shared stopwatch and shown in the "Detected" box.
    if (!sw.IsRunning)
    {
        sw.Reset();
        sw.Start();
    }

    Process.Image = Image.FromFile(filepath, true);

    // Toggle the grid-overlay flag (was an if/else assigning true/false).
    bvis = !bvis;
    GridOn.GridPaint(bvis, (Bitmap)Process.Image); // draw the grid lines
    label3.Text = "网络线划分";

    try
    {
        Bitmap gray_img = Knowledge.ToGray(originalimg);   // original image -> grayscale
        Bitmap BW = Knowledge.Thresholding(gray_img);      // Otsu binarization
        Bitmap detectimg = Knowledge.FaceDetection(BW);    // knowledge-based detection

        // Connected-component analysis over the detection mask.
        AForge.Imaging.BlobCounter blobCounter = new AForge.Imaging.BlobCounter(detectimg);
        Rectangle[] rects = blobCounter.GetObjectsRectangles();

        // Keep plausible face candidates: area between minx and half the image,
        // height/width ratio in (0.8, 1.8).
        // Fix: collect into a List instead of a fixed-size array — the array
        // left default (0,0,0,0) rectangles at the tail, which were then handed
        // to the marker. Also removed an unused List<decimal> of ratios.
        List<Rectangle> candidates = new List<Rectangle>();
        int minx = 100;
        int maxx = (gray_img.Width * gray_img.Height) / 2;
        foreach (Rectangle r in rects)
        {
            int area = r.Width * r.Height;
            decimal ratio = (decimal)r.Height / (decimal)r.Width;
            if (area > minx && (double)ratio < 1.8 && (double)ratio > 0.8 && area < maxx)
            {
                candidates.Add(r);
            }
        }

        RectanglesMarker marker = new RectanglesMarker(candidates.ToArray(), Color.Red);
        this.Detected.Image = marker.Apply((Bitmap)Image.FromFile(filepath, true));
        label4.Text = "检测结果";
        ShowMsg("检测结束!");
    }
    catch (ArgumentException)
    {
        MessageBox.Show("Invalid usage!");
    }

    if (sw.IsRunning)
    {
        sw.Stop();
        timeElpse.Text = "运行时间:" + sw.Elapsed.ToString();
    }
}
public Bitmap FaceTracking(Bitmap _bitmap)
{
    // Route the frame to the active pipeline stage; detection wins over tracking.

    // Nothing to do while neither stage is active.
    if (!isDetecting && !isTracking)
    {
        return _bitmap;
    }

    if (this.isDetecting)
    {
        return this.Detecting(_bitmap);
    }

    if (this.isTracking)
    {
        return this.Tracking(_bitmap);
    }

    // Fallback: re-apply the last marker if one exists (unreachable given the
    // guard above; kept for parity with the original control flow).
    if (marker != null)
    {
        return marker.Apply(_bitmap);
    }

    return _bitmap;
}
#endregion

#region EVENT HANDLERS

// Runs Haar detection over the loaded bitmap with the modes chosen in the
// pickers and displays the marked-up result.
// (The large block of commented-out SURF/loader sample code that used to sit
// above this handler has been deleted as dead code.)
private void OnDetectButtonClicked(object sender, EventArgs args)
{
    // Read the search/scaling modes from the pickers; bail out silently if
    // either picker has no valid selection or its text is not a valid enum name.
    int searchModeIndex, scalingModeIndex;
    ObjectDetectorSearchMode searchMode;
    ObjectDetectorScalingMode scalingMode;
    if ((searchModeIndex = this.SearchModePicker.SelectedIndex) < 0 ||
        (scalingModeIndex = this.ScalingModePicker.SelectedIndex) < 0 ||
        !Enum.TryParse(this.SearchModePicker.Items[searchModeIndex], out searchMode) ||
        !Enum.TryParse(this.ScalingModePicker.Items[scalingModeIndex], out scalingMode))
    {
        return;
    }

    this.detector.SearchMode = searchMode;
    this.detector.ScalingMode = scalingMode;
    this.detector.ScalingFactor = 1.5f;
    this.detector.UseParallelProcessing = false;

    // Process frame to detect objects.
    var objects = this.detector.ProcessFrame(this.bitmap);

    if (objects.Length > 0)
    {
        var marker = new RectanglesMarker(objects, Color.FromArgb(0xff, 0xff, 0x00, 0xff));

        using (var stream = new MemoryStream())
        {
            marker.Apply(this.bitmap).Save(stream, ImageFormat.Jpeg);
            // Fix: rewind before reading — Save leaves the position at
            // end-of-stream, so the image source would otherwise be built from
            // an "empty" stream (the deleted loader code seeked to 0 before
            // reading for the same reason).
            stream.Seek(0, SeekOrigin.Begin);
            this.ImageView.Source = this.GetImageSourceFromStream(stream);
        }
    }
}
private void button1_Click(object sender, EventArgs e)
{
    // Configure the detector from the UI controls, run one detection pass over
    // the prepared frame, mark any hits, and report the elapsed time.
    detector.SearchMode = (ObjectDetectorSearchMode)cbMode.SelectedValue;
    detector.ScalingMode = (ObjectDetectorScalingMode)cbScaling.SelectedValue;
    detector.ScalingFactor = 1.5f;
    detector.UseParallelProcessing = cbParallel.Checked;
    detector.Suppression = 2;

    var stopwatch = Stopwatch.StartNew();

    Rectangle[] detections = detector.ProcessFrame(picture);

    stopwatch.Stop();

    if (detections.Length > 0)
    {
        var highlighter = new RectanglesMarker(detections, Color.Fuchsia);
        pictureBox1.Image = highlighter.Apply(picture);
    }

    toolStripStatusLabel1.Text = string.Format("Completed detection of {0} objects in {1}.",
        detections.Length, stopwatch.Elapsed);
}
private void btnTrainFace_Click(object sender, EventArgs e)
{
    // Detects the face in pictureBox1, crops it into pictureBox2, and prepares
    // the form fields for saving a new face record.
    try
    {
        // Build a Haar face detector (minimum window 30 px), single-hit mode.
        detector = new HaarObjectDetector(cascade, 30);
        detector.SearchMode = ObjectDetectorSearchMode.Single; //.NoOverlap;//.Default; //.Average;
        detector.ScalingFactor = 1.5f;
        detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
        detector.UseParallelProcessing = true;
        detector.Suppression = 3;

        Bitmap picture = new Bitmap(pictureBox1.Image);
        Image<Bgr, byte> Frame = new Image<Bgr, byte>(picture);

        Stopwatch sw = Stopwatch.StartNew();
        Rectangle[] faceObjects = detector.ProcessFrame(picture);
        sw.Stop();

        if (faceObjects.Length > 0)
        {
            // Highlight the detected face(s) on the main picture box.
            RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
            pictureBox1.Image = marker.Apply(picture);
            //Graphics g = Graphics.FromImage(pictureBox1.Image);

            // Crop each detected face into pictureBox2 (the last face wins).
            foreach (var face in faceObjects)
            {
                //g.DrawRectangle(Pens.DeepSkyBlue, face);
                Frame.Draw(face, new Bgr(Color.Red), 3);
                Bitmap c = Frame.ToBitmap();
                Bitmap bmp = new Bitmap(face.Width, face.Height);
                Graphics gg = Graphics.FromImage(bmp);
                // Copy the face region out of the full frame into the crop.
                gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                pictureBox2.Image = bmp;
                //bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
                gg.Dispose();
                // NOTE(review): 'c' (and 'picture') are never disposed — possible
                // GDI handle build-up on repeated clicks; verify before adding Dispose.
            }
            //g.Dispose();
            //label1.Text = "Completed operation!! " + faceObjects.Length.ToString() + " Face detected";

            MessageBox.Show("Train Face operation successful!!! " + faceObjects.Length.ToString() + " Face detected", "Train face", MessageBoxButtons.OK, MessageBoxIcon.Information);

            // Pre-fill the form for saving the new face record.
            txtFaceId.Text = genFaceId();
            txtUsername.Text = "User" + txtFaceId.Text;
            btnUpload.Enabled = false;
            btnTrainFace.Enabled = false;
            btnSave.Enabled = true;
        }
        else
        {
            MessageBox.Show("Image cannot be trained!!! No face detected in the current image", "Fail to Train face", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
    catch (Exception er)
    {
        MessageBox.Show(er.Message, "Face Detection and Recognition Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
private void timer1_Tick(object sender, EventArgs e)
{
    // Grab the next frame from the capture device.
    // Fix: removed the dead "new Mat()" that was immediately overwritten.
    Mat frame = cap.QueryFrame();

    // Fix: the null check must come before the frame is used — previously
    // frame.ToImage ran first, so a null frame threw an NRE and the
    // "if (frame != null)" guard was dead code.
    if (frame == null)
    {
        return;
    }

    Image<Bgr, byte> frameImage = frame.ToImage<Bgr, byte>();
    Image<Gray, byte> grayFrameImage = frameImage.Convert<Gray, byte>();

    // Detect faces on the grayscale frame and draw them on the color frame.
    Rectangle[] faces = detector.ProcessFrame(grayFrameImage.ToBitmap());
    RectanglesMarker marker = new RectanglesMarker(faces, Color.Fuchsia);
    imageViewer.Image = new Image<Bgr, byte>(marker.Apply(frame.Bitmap));
}
public Bitmap Detect()
{
    // Tune the detector, run one pass over the stored image, and highlight
    // any hits in yellow before returning the (possibly marked) image.
    _detector.ScalingFactor = 1.5f;
    _detector.UseParallelProcessing = true;

    Rectangle[] found = _detector.ProcessFrame(image);

    if (found.Length > 0)
    {
        var highlighter = new RectanglesMarker(found, Color.Yellow);
        image = highlighter.Apply(image);
    }

    return image;
}
private void button1_Click(object sender, EventArgs e)
{
    // Detects faces in pictureBox2, marks them on screen, and saves each
    // cropped face to disk, announcing every save with a message box.
    try
    {
        // Build a Haar face detector (minimum window 30 px), single-hit mode.
        detector = new HaarObjectDetector(cascade, 30);
        detector.SearchMode = ObjectDetectorSearchMode.Single; //.Default;//.NoOverlap;//
        detector.ScalingFactor = 2.5f;
        detector.ScalingMode = ObjectDetectorScalingMode.SmallerToGreater; //.GreaterToSmaller;
        detector.UseParallelProcessing = true;
        detector.Suppression = 3;

        Bitmap picture = new Bitmap(pictureBox2.Image);
        Image<Bgr, byte> Frame = new Image<Bgr, byte>(picture);

        Stopwatch sw = Stopwatch.StartNew();
        Rectangle[] faceObjects = detector.ProcessFrame(picture);
        sw.Stop();

        if (faceObjects.Length > 0)
        {
            RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
            pictureBox2.Image = marker.Apply(picture);
        }

        label1.Text = "Operation Completed!!! " + faceObjects.Length.ToString() + " Face detected";

        // Draw boxes on the displayed image and save each cropped face.
        Graphics g = Graphics.FromImage(pictureBox2.Image);
        foreach (var face in faceObjects)
        {
            g.DrawRectangle(Pens.DeepSkyBlue, face);
            Frame.Draw(face, new Bgr(Color.Red), 3);
            Bitmap c = Frame.ToBitmap();
            Bitmap bmp = new Bitmap(face.Width, face.Height);
            Graphics gg = Graphics.FromImage(bmp);
            // Copy the face region out of the full frame into the crop.
            gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
            //pictureBox2.Image = bmp;
            // NOTE(review): the file name has no extension and uses only the
            // current second, so repeated saves within the same second collide;
            // 'c', 'bmp', 'g' and 'gg' are never disposed. Verify intent before
            // changing.
            bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
            MessageBox.Show("Face Detected!!!, Face Save as:" + "myface(accord) " + DateTime.Now.Second.ToString(), "Face Detection Successfully", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        //g.Dispose()
    }
    catch (Exception er)
    {
        MessageBox.Show(er.Message, "Face Detection Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
private void skindetect_Click(object sender, EventArgs e)
{
    // Skin-color based face detection: lighting compensation -> skin mask ->
    // binarize -> blob analysis -> filter candidate rectangles -> mark.
    sw.Reset();
    sw.Start();

    Bitmap img = (Bitmap)Image.FromFile(filepath, true);   // load source image
    Bitmap normalizedImg = YCbCr.Lightingconpensate(img);  // lighting compensation
    Bitmap skin = YCbCr.SkinDetect1(normalizedImg);        // skin-color detection
    Bitmap BWskin = Knowledge.Thresholding(skin);          // binarize

    // Connected-component analysis over the binary skin mask.
    // Fix: removed an unused SusanCornersDetector, an unused grayscale
    // conversion and an unused List<decimal> of ratios (dead code).
    AForge.Imaging.BlobCounter blobCounter = new AForge.Imaging.BlobCounter(BWskin);
    Rectangle[] rects = blobCounter.GetObjectsRectangles();

    // Keep plausible face candidates: area between minx and half the image,
    // height/width ratio in (0.9, 1.8).
    // Fix: collect into a List instead of a fixed-size array — the array left
    // default (0,0,0,0) rectangles at the tail, which were handed to the marker.
    List<Rectangle> candidates = new List<Rectangle>();
    int minx = 150;
    int maxx = (img.Width * img.Height) / 2;
    foreach (Rectangle r in rects)
    {
        int area = r.Width * r.Height;
        decimal ratio = (decimal)r.Height / (decimal)r.Width;
        if (area > minx && (double)ratio < 1.8 && (double)ratio > 0.9 && area < maxx)
        {
            candidates.Add(r);
        }
    }

    RectanglesMarker marker = new RectanglesMarker(candidates.ToArray(), Color.Red);
    Process2.Image = img;
    Detected.Image = marker.Apply((Bitmap)Image.FromFile(filepath, true));

    sw.Stop();
    timeElpsed.Text = "运行时间:" + sw.Elapsed.ToString();
}
// Loads the image at 'path' into the shared 'picture' field, runs Haar face
// detection over it, marks any faces, writes the result to "file.png", and
// returns the original path.
public static string DetectFace(string path)
{
    GetPicture(path);

    // Build a fresh Haar face detector (minimum window 30 px) configured from
    // the shared mode fields.
    HaarCascade cascade = new FaceHaarCascade();
    HaarObjectDetector detector = new HaarObjectDetector(cascade, 30);
    detector.SearchMode = (ObjectDetectorSearchMode)cbMode;
    detector.ScalingMode = (ObjectDetectorScalingMode)cbScaling;
    detector.ScalingFactor = 1.5f;

    Stopwatch sw = Stopwatch.StartNew();
    Rectangle[] objects = detector.ProcessFrame(picture);
    sw.Stop();

    if (objects.Length > 0)
    {
        Console.WriteLine("here");
        RectanglesMarker marker = new RectanglesMarker(objects, Color.Fuchsia);
        picture = marker.Apply(picture);
    }

    if (picture != null)
    {
        Console.WriteLine("trying to print picture");
        // Fix: removed a stray empty statement (double semicolon) and the dead
        // commented-out JPEG-encoder code that used to sit here.
        picture.Save("file.png", ImageFormat.Png);
    }

    return path;
}
public static void Main(string[] args)
{
    // Detect faces in the bundled Lena image and save a marked-up copy.
    var picture = Resource.lena_color;

    HaarCascade cascade = new FaceHaarCascade();
    var detector = new HaarObjectDetector(cascade, 30);
    // (Removed the dead commented-out detector-tuning lines; defaults are used.)

    Rectangle[] objects = detector.ProcessFrame(picture);

    if (objects.Length > 0)
    {
        RectanglesMarker marker = new RectanglesMarker(objects, Color.Fuchsia);
        var markedup = marker.Apply(picture);

        // Fix: the path was built as {basedir}{sep}{sep} — a doubled directory
        // separator; a single separator is intended.
        markedup.Save($"{basedir}{sep}markedup.jpg");
    }
}
private void mytimer_Elapsed(object sender, ElapsedEventArgs e)
{
    // Snapshot the current video frame and try to highlight any faces on it;
    // on any failure, fall back to showing the raw frame.
    pic1 = this.videoSourcePlayer1.GetCurrentVideoFrame();
    try
    {
        Rectangle[] found = detector.ProcessFrame(pic1);
        if (found.Length > 0)
        {
            var highlighter = new RectanglesMarker(found, Color.Black);
            pic2 = highlighter.Apply(pic1);
            this.detectedFaceimg.Image = pic2;
        }
        else
        {
            this.detectedFaceimg.Image = pic1;
        }
    }
    catch
    {
        // Best-effort: detection failures just show the unmarked frame.
        this.detectedFaceimg.Image = pic1;
    }
}
/// <summary>
/// Runs Haar face detection over <paramref name="image"/> and returns the
/// marked-up image together with the detected rectangles.
/// </summary>
/// <param name="image">Original image</param>
/// <returns>Face detection result</returns>
public static FaceDetectionResult FaceDetection(Bitmap image)
{
    var result = new FaceDetectionResult();
    var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

    // Minimum face size of 100 px; NoOverlap avoids duplicate hits.
    var detector = new HaarObjectDetector(cascade, minSize: 100, searchMode: ObjectDetectorSearchMode.NoOverlap);

    // Fix: dispose the working clone — it was previously leaked on every call.
    Rectangle[] faces;
    using (var bmp = Accord.Imaging.Image.Clone(image))
    {
        // execute detection
        faces = detector.ProcessFrame(bmp);
    }

    // set rectangle color
    var objectMarker = new RectanglesMarker(Color.Red)
    {
        Rectangles = faces
    };

    // Apply the marker to the ORIGINAL image (detection ran on the clone).
    var resultImage = objectMarker.Apply(image);

    // overwrite the frame and fill in the result parameters
    result.ImageResult = resultImage;
    result.TotalFacesDetected = faces?.Length ?? 0;
    result.Rectangles = faces;

    return result;
}
// Runs one detection pass over the shared 'picture' with fixed settings and
// shows the marked result in the 'image' picture box.
public void DetectFaces()
{
    // Fix: removed redundant self-casts such as
    // (ObjectDetectorSearchMode)ObjectDetectorSearchMode.NoOverlap, and the
    // dead commented-out stopwatch/status-label code.
    detector.SearchMode = ObjectDetectorSearchMode.NoOverlap;
    detector.ScalingMode = ObjectDetectorScalingMode.SmallerToGreater;
    detector.ScalingFactor = 1.5f;
    detector.UseParallelProcessing = true;
    detector.Suppression = 2;

    // Process frame to detect objects.
    Rectangle[] objects = detector.ProcessFrame(picture);

    if (objects.Length > 0)
    {
        RectanglesMarker marker = new RectanglesMarker(objects, Color.Red);
        image.Image = marker.Apply(picture);
    }
}
// New frame received by the player.
// State machine: in the "detecting" phase a face is located on a 160x120
// downsample and handed to the tracker; in the "tracking" phase the tracker
// follows it frame-to-frame and overlays are drawn per the menu toggles.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    // Nothing to do unless we are in the detect or track phase.
    if (!detecting && !tracking)
    {
        return;
    }

    // NOTE(review): lock(this) is discouraged; a private gate object would be
    // safer, but changing it requires a field outside this method.
    lock (this)
    {
        if (detecting)
        {
            detecting = false;
            tracking = false;

            UnmanagedImage im = UnmanagedImage.FromManagedImage(image);

            // Scale factors mapping coordinates from the 160x120 detection
            // image back to the full-size frame.
            float xscale = image.Width / 160f;
            float yscale = image.Height / 120f;

            ResizeNearestNeighbor resize = new ResizeNearestNeighbor(160, 120);
            UnmanagedImage downsample = resize.Apply(im);

            Rectangle[] regions = detector.ProcessFrame(downsample);

            if (regions.Length > 0)
            {
                tracker.Reset();

                // Will track the first face found
                Rectangle face = regions[0];

                // Reduce the face size to avoid tracking background:
                // start from a 1x1 window at the face centre (full-frame
                // coordinates), then inflate it to a fraction of the face size.
                Rectangle window = new Rectangle(
                    (int)((regions[0].X + regions[0].Width / 2f) * xscale),
                    (int)((regions[0].Y + regions[0].Height / 2f) * yscale),
                    1, 1);

                window.Inflate(
                    (int)(0.2f * regions[0].Width * xscale),
                    (int)(0.4f * regions[0].Height * yscale));

                // Initialize tracker
                tracker.SearchWindow = window;
                tracker.ProcessFrame(im);

                // Draw the search window on the outgoing frame.
                marker = new RectanglesMarker(window);
                marker.ApplyInPlace(im);

                image = im.ToManagedImage();

                tracking = true;
                //detecting = true;
            }
            else
            {
                // No face found: stay in detection mode for the next frame.
                detecting = true;
            }
        }
        else if (tracking)
        {
            UnmanagedImage im = UnmanagedImage.FromManagedImage(image);

            // Track the object
            tracker.ProcessFrame(im);

            // Get the object position
            var obj = tracker.TrackingObject;
            var wnd = tracker.SearchWindow;

            // Optionally show the tracker's backprojection instead of the frame.
            if (displayBackprojectionToolStripMenuItem.Checked)
            {
                var backprojection = tracker.GetBackprojection(PixelFormat.Format24bppRgb);
                im = UnmanagedImage.FromManagedImage(backprojection);
            }

            if (drawObjectAxisToolStripMenuItem.Checked)
            {
                LineSegment axis = obj.GetAxis();

                // Draw X axis
                Drawing.Line(im, axis.Start.Round(), axis.End.Round(), Color.Red);
            }

            // Choose which rectangles to overlay based on the menu toggles.
            if (drawObjectBoxToolStripMenuItem.Checked && drawTrackingWindowToolStripMenuItem.Checked)
            {
                marker = new RectanglesMarker(new Rectangle[] { wnd, obj.Rectangle });
            }
            else if (drawObjectBoxToolStripMenuItem.Checked)
            {
                //InteractionPoints p = new InteractionPoints();
                //p.setHead(obj.Rectangle);
                marker = new RectanglesMarker(obj.Rectangle);
            }
            else if (drawTrackingWindowToolStripMenuItem.Checked)
            {
                marker = new RectanglesMarker(wnd);
            }
            else
            {
                marker = null;
            }

            if (marker != null)
            {
                marker.ApplyInPlace(im);
            }

            image = im.ToManagedImage();
        }
        else
        {
            // Neither detecting nor tracking this frame: re-apply the last
            // marker, if any, so the overlay stays visible.
            if (marker != null)
            {
                image = marker.Apply(image);
            }
        }
    }
}
// New frame received by the player.
// Detect/track state machine (as in the sibling handler) extended with servo
// steering: the tracked object's horizontal third of the frame is mapped to a
// direction (1 = left, 2 = centre, 3 = right) and sent over the serial port.
private void videoSourcePlayer_NewFrame(object sender, NewFrameEventArgs args)
{
    var direccion = "Centro";
    int direccionServo = 2;

    if (!detecting && !tracking)
    {
        return;
    }

    lock (this)
    {
        if (detecting)
        {
            detecting = false;
            tracking = false;

            UnmanagedImage im = UnmanagedImage.FromManagedImage(args.Frame);

            // Scale factors mapping the 160x120 detection image back to the
            // full-size frame.
            float xscale = im.Width / 160f;
            float yscale = im.Height / 120f;

            ResizeNearestNeighbor resize = new ResizeNearestNeighbor(160, 120);
            UnmanagedImage downsample = resize.Apply(im);

            Rectangle[] regions = detector.ProcessFrame(downsample);

            if (regions.Length > 0)
            {
                tracker.Reset();

                // Will track the first face found
                Rectangle face = regions[0];

                // Reduce the face size to avoid tracking background
                Rectangle window = new Rectangle(
                    (int)((regions[0].X + regions[0].Width / 2f) * xscale),
                    (int)((regions[0].Y + regions[0].Height / 2f) * yscale),
                    1, 1);

                Console.Write("x:" + (int)((regions[0].X + regions[0].Width / 2f) * xscale));
                // Fix: the y log previously reused X and xscale
                // ((regions[0].X + Height/2) * xscale); log the actual y centre.
                Console.Write("y:" + (int)((regions[0].Y + regions[0].Height / 2f) * yscale));

                window.Inflate(
                    (int)(0.2f * regions[0].Width * xscale),
                    (int)(0.4f * regions[0].Height * yscale));

                // Initialize tracker
                tracker.SearchWindow = window;
                tracker.ProcessFrame(im);

                marker = new RectanglesMarker(window);
                marker.ApplyInPlace(im);
                args.Frame = im.ToManagedImage();

                tracking = true;
            }
            else
            {
                detecting = true;
            }
        }
        else if (tracking)
        {
            UnmanagedImage im = UnmanagedImage.FromManagedImage(args.Frame);

            // Track the object
            tracker.ProcessFrame(im);

            // Get the object position
            var obj = tracker.TrackingObject;
            var wnd = tracker.SearchWindow;

            // Map the object's horizontal position to a servo direction.
            if (obj.Rectangle.Width < (args.Frame.Width / 3) * 2)
            {
                if (obj.Rectangle.X < args.Frame.Width / 3)
                {
                    direccion = "Izquierda";
                    direccionServo = 1;
                }
                else if (obj.Rectangle.X > (args.Frame.Width / 3) * 2)
                {
                    direccion = "Derecha";
                    direccionServo = 3;
                }
            }

            try
            {
                this.Invoke((MethodInvoker) delegate
                {
                    if (textBox1 != null)
                    {
                        textBox1.Text = obj.Rectangle.X.ToString();
                        textBox2.Text = obj.Rectangle.Y.ToString();
                        label1.Text = direccion;
                    }
                });
                ComunicacionPuertoSerie.Instance.enviarEvento(direccionServo.ToString());
            }
            catch (Exception)
            {
                // Best-effort: the UI may be disposed or the serial port
                // unavailable; tracking continues regardless.
            }

            // Fix: removed dead branches guarded by constant false/true
            // conditions (remnants of commented-out menu toggles); the object
            // rectangle is always the one drawn, so marker is never null here.
            marker = new RectanglesMarker(obj.Rectangle);
            marker.ApplyInPlace(im);

            args.Frame = im.ToManagedImage();
        }
        else
        {
            if (marker != null)
            {
                args.Frame = marker.Apply(args.Frame);
            }
        }
    }
}