private static void IVA_Mask_From_ROI(VisionImage image, Roi roi, bool invertMask, bool extractRegion)
{
    // Converts the ROI into a binary mask and applies it to the image,
    // optionally inverting the mask and/or cropping to the ROI bounds.
    using (VisionImage mask = new VisionImage(ImageType.U8, 7))
    {
        PixelValue white = new PixelValue(255);

        // Render the region of interest into the mask image.
        Algorithms.RoiToMask(mask, roi, white, image);

        if (invertMask)
        {
            // XOR against the fill value flips every mask pixel.
            Algorithms.Xor(mask, white, mask);
        }

        // Keep only the pixels covered by the mask.
        Algorithms.Mask(image, image, mask);

        if (extractRegion)
        {
            // Crop the image down to the ROI's bounding rectangle.
            Algorithms.Extract(image, image, roi.GetBoundingRectangle());
        }
    }
}
public static async Task <List <BarcodeResult> > ScanFromImage(byte[] imageArray)
{
    // Wraps the raw bytes in a UIImage and runs the Firebase ML barcode
    // detector over it. Returns an empty list (never null) when nothing is found.
    // FIX: removed the unused VisionImageMetadata local the original created.
    UIImage image = new UIImage(NSData.FromArray(imageArray));
    var visionImage = new VisionImage(image);
    VisionApi vision = VisionApi.Create();
    VisionBarcodeDetector barcodeDetector = vision.GetBarcodeDetector(Configuration.BarcodeDetectorSupportFormat);

    VisionBarcode[] barcodes = await barcodeDetector.DetectAsync(visionImage);
    if (barcodes == null || barcodes.Length == 0)
    {
        return new List <BarcodeResult>();
    }

    List <BarcodeResult> resultList = new List <BarcodeResult>();
    foreach (var barcode in barcodes)
    {
        resultList.Add(new BarcodeResult
        {
            BarcodeType = Methods.ConvertBarcodeResultTypes(barcode.ValueType),
            DisplayValue = barcode.DisplayValue,
            RawValue = barcode.RawValue
        });
    }
    return resultList;
}
public List <StepMeasurements> InspectEngine()
{
    // Runs the VBAI inspection once and collects every step's measurements.
    // Returns null when the engine throws (callers treat null as "failed").
    string guid = "";
    List <StepMeasurements> listMeasurements = new List <StepMeasurements>();
    try
    {
        bool newImageAvailable;
        VBAIDateTime timeStamp;
        vBAIEngine.EnableInspectionMeasurements();
        vBAIEngine.RunInspectionOnce(-1); // -1 timeout — presumably "wait forever"; TODO confirm against VBAI docs
        InspectionMeasurements[] measurements = vBAIEngine.GetInspectionMeasurements(null, out timeStamp);
        InspectionStep[] steps = vBAIEngine.GetInspectionSteps();

        // Locate the image-selection step so its image can be fetched below.
        foreach (var inspectionstep in steps)
        {
            if (inspectionstep.stepName == "Select Image 1")
            {
                guid = inspectionstep.stepGUID;
                break;
            }
        }

        // NOTE(review): if "Select Image 1" is not found, guid stays "" and
        // this call runs with an empty GUID — verify the engine tolerates it.
        VBAIImage = vBAIEngine.GetInspectionImage(guid, 1, 1, out newImageAvailable);

        foreach (var inspectMeasurements in measurements)
        {
            listMeasurements.AddRange(inspectMeasurements.measurements);
        }
        return listMeasurements;
    }
    catch (Exception)
    {
        // FIX: dropped the unused exception variable. Swallowing and returning
        // null preserves the original failure contract.
        return null;
    }
}
private void GetNextImage(VisionImage dest)
{
    // Fetches (or lazily loads and caches) the current example image, copies
    // it into dest, then advances the cyclic image counter over 0..4.
    VisionImage current;
    if (imageNumber < images.Count)
    {
        // Cached: reuse it, dropping overlays from a previous pass.
        current = images[imageNumber];
        current.Overlays.Default.Clear();
    }
    else
    {
        // Not cached yet: read it from the example-images folder.
        current = new VisionImage();
        string path = System.IO.Path.Combine(ExampleImagesFolder.GetExampleImagesFolder(), @"Parts\Parts0" + imageNumber + ".png");
        current.ReadFile(path);
        images.Add(current);
    }

    // Copy the image to the destination image.
    Algorithms.Copy(current, dest);

    // Advance to the next image, wrapping after image 4.
    imageNumber++;
    if (imageNumber > 4)
    {
        imageNumber = 0;
    }
}
private void btnFindTemp_Click(object sender, EventArgs e)
{
    // Runs a geometric template match against the current image and, whenever
    // an ROI exists, persists its rectangle to the INI file.
    CPKTools tools = new CPKTools();
    VisionImage align = this.image_Edit.Image;
    VisionImage image = Form_Main.Instance.imageSet.Image;
    Roi roi = Form_Main.Instance.imageSet.Roi;

    short minScore = 600;
    double minRotation = -10;
    double maxRotation = 10;
    Variable.CamReturn camReturn = new Variable.CamReturn();

    // Coarse template match.
    short status = Form_Main.Instance.CamDetect_SearchGeometric(
        image, align, roi, minScore, 1, minRotation, maxRotation,
        100, 100, 0, 25, ref camReturn, 0, 0);
    if (status != 0)
    {
        MessageBox.Show("寻找模板失败,请检查参数!!!");
    }

    // Record the ROI rectangle (top, left, width, height) in the INI file.
    if (roi.Count > 0)
    {
        RectangleContour rect = (RectangleContour)roi[0].Shape;
        string strRoi = string.Format("{0},{1},{2},{3}", rect.Top, rect.Left, rect.Width, rect.Height);
        this.cpkIni.IniWriteValue("vision", "TemplateRoi", strRoi);
    }
}
private void startButton_Click(object sender, EventArgs e)
{
    // Acquires a fixed-length sequence of images from the selected camera and
    // wires up the scroll bar so the user can browse them.
    try
    {
        imageScrollBar.Enabled = false;
        int numberOfImages = (int)numImages.Value;

        // Open camera.
        _session = new ImaqdxSession(cameraComboBox.Text);
        try
        {
            // Create array of destination images.
            images = new VisionImage[numberOfImages];
            for (int i = 0; i < images.Length; ++i)
            {
                images[i] = new VisionImage();
            }

            // Acquire the sequence of images.
            _session.Sequence(images, numberOfImages);
        }
        finally
        {
            // FIX: always close the camera, even when the acquisition throws;
            // the original leaked the session on error.
            _session.Close();
        }

        // Update UI controls.
        imageViewer.Attach(images[0]);
        imageScrollBar.Minimum = 0;
        imageScrollBar.Value = imageScrollBar.Minimum;
        imageScrollBar.Maximum = numberOfImages - 1;
        imageScrollBar.Enabled = true;
    }
    catch (ImaqdxException ex)
    {
        MessageBox.Show(ex.Message, "NI-IMAQdx Error");
    }
}
public CameraController(string cameraName)
{
    // Starts in the FREE state with a fresh image buffer and no window shown.
    this.cameraName = cameraName;
    state = CameraState.FREE;
    image = new VisionImage();
    windowShowing = false;
}
public CameraController(string cameraName)
{
    // Initial state: camera free, empty image buffer, viewer window hidden.
    this.cameraName = cameraName;
    windowShowing = false;
    state = CameraState.FREE;
    image = new VisionImage();
}
/// <summary>
/// Called when an XYR move completes: captures a mark photo with the top
/// camera and dispatches it to the vision pipeline.
/// </summary>
/// <param name="param">Move parameters forwarded to the base handler.</param>
public override void MoveXYRFinished(MoveParam param)
{
    if (this.machine.PCBReach)
    {
        if (this.MarkIndex == 0 && this.PCBIndex == 0)
        {
            // Extra settle delay before the very first mark of the first PCB.
            Thread.Sleep(SystemConfig.Instance.General.ReachAfterDelay);
        }
        // Wait before capturing with the up-looking camera.
        Thread.Sleep(SystemConfig.Instance.General.UpCamDelay);
        var item = new Tool.ResultItem();
        item.Camera = Camera.Top;
        item.funcName = this.markParam.VisionName;
        item.CaptruePos = this.markParam.Pos;
        VisionImage image = entiy.GrabImage(Camera.Top);
        item.Key = ResultKey.Mark;
        item.PCBIndex = PCBIndex;
        item.PCSIndex = MarkIndex;
        item.Mark = this.machine.Program.PasteInfos[PCBIndex].MarkPtList[MarkIndex].MarkID;
        // Run mark detection and mirror the frame to the display.
        VisionCalHelper.Instance.VisionDetect(entiy.Module, item, image);
        VisionCalHelper.Instance.ImageShow(this.entiy.Module, Camera.Top, image);
        MarkIndex++;
        // NOTE(review): the base handler only runs when PCBReach is set —
        // confirm that skipping it otherwise is intended.
        base.MoveXYRFinished(param);
    }
}
void UseTextRecognitionModel()
{
    // Picks the on-device or cloud text recognizer, runs it over the sample
    // image, and shows the recognized text (or the error) in TxtData.
    VisionTextRecognizer textRecognizer;
    if (currentApiResource == ApiResource.OnDevice)
    {
        textRecognizer = vision.GetOnDeviceTextRecognizer();
    }
    else
    {
        // Cloud recognizer with a Spanish language hint.
        // See https://cloud.google.com/vision/docs/languages for supported languages.
        var options = new VisionCloudTextRecognizerOptions
        {
            LanguageHints = new [] { "es" }
        };
        textRecognizer = vision.GetCloudTextRecognizer(options);
    }

    var image = new VisionImage(ImgSample.Image);
    textRecognizer.ProcessImage(image, HandleVisionTextRecognitionCallback);

    // Displays either the error description or the recognized text.
    void HandleVisionTextRecognitionCallback(VisionText text, NSError error)
    {
        TxtData.Text = error?.Description ?? text?.Text;
    }
}
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    // Per-frame capture callback: throttles barcode analysis so at most one
    // frame per scanIntervalInMs is analyzed while scanning is enabled.
    lastRunTime = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    if (lastRunTime - lastAnalysisTime > scanIntervalInMs && Configuration.IsScanning)
    {
        lastAnalysisTime = lastRunTime;
        try
        {
            var image = GetImageFromSampleBuffer(sampleBuffer);
            if (image == null)
            {
                // NOTE(review): this early return skips the final
                // releaseSampleBuffer call below — confirm the buffer is not
                // leaked on this path.
                return;
            }
            Width = (float)image.Size.Width;
            Height = (float)image.Size.Height;
            var visionImage = new VisionImage(image) { Metadata = metadata };
            // NOTE(review): the buffer is released here AND again at the end
            // of the method on this path — confirm releaseSampleBuffer
            // tolerates a second call on the same buffer.
            releaseSampleBuffer(sampleBuffer);
            DetectBarcodeActionAsync(visionImage);
        }
        catch (Exception exception)
        {
            System.Diagnostics.Debug.WriteLine(exception.Message);
        }
    }
    releaseSampleBuffer(sampleBuffer);
}
public HttpResponseMessage Post()
{
    // Accepts multipart file uploads, saves each under ~/images/<yyyyMMdd>/,
    // runs the grain-type calculation on it, and returns the results of the
    // LAST processed file (each loop iteration overwrites calc).
    HttpResponseMessage result = null;
    var httpRequest = HttpContext.Current.Request;
    var form = httpRequest.Form;
    if (form.Count == 2)
    {
        // NOTE(review): these two form values are read but never used —
        // confirm whether they were meant to parameterize the calculation.
        string param1 = form.Get(0);
        string param2 = form.Get(1);
    }
    VisCalculation calc = new VisCalculation();
    //VisCalculationSize calcSize = new VisCalculationSize();
    if (httpRequest.Files.Count > 0)
    {
        var docfiles = new List <string>();
        foreach (string file in httpRequest.Files)
        {
            var postedFile = httpRequest.Files[file];
            //string myFile = Server.MapPath("images") + "\" + FileUpload1.PostedFile.FileName;
            string fileName = Path.GetFileName(postedFile.FileName);
            string foderDate = DateTime.Now.ToString("yyyyMMdd");
            var filePath = HttpContext.Current.Server.MapPath("~/images/" + foderDate + "/" + fileName); //postedFile.FileName);
            string fullPath = Path.GetDirectoryName(filePath);
            if (!Directory.Exists(fullPath))
            {
                Directory.CreateDirectory(fullPath);
            }
            postedFile.SaveAs(filePath);
            //Image image = Image.FromFile(HttpContext.Current.Server.MapPath("~/images/" + foderDate + "/" + fileName));
            // Load the saved file into an NI Vision image of the matching type.
            FileInformation fileinfo = Algorithms.GetFileInformation(filePath);
            VisionImage image = new VisionImage();
            image.Type = fileinfo.ImageType;
            image.ReadFile(filePath);
            // Calculation of grain type (grain size is disabled below).
            calc = new VisCalculation(image);
            //calcSize = new VisCalculationSize(image);
            docfiles.Add(filePath);
        }
        //result = Request.CreateResponse(HttpStatusCode.Created, docfiles);
        result = Request.CreateResponse(HttpStatusCode.OK, calc.GrainResults);
    }
    else
    {
        //result = Request.CreateResponse(HttpStatusCode.BadRequest);
        // No files: reports "BadRequest" in the payload but still returns 200 OK.
        VisGrainTypeCollection cols = new VisGrainTypeCollection();
        cols.Message = "BadRequest";
        result = Request.CreateResponse(HttpStatusCode.OK, cols);
    }
    return result;
}
public void RightPosProcess(VisionImage visionImage, Bitmap bitmap)
{
    // Runs the right-position routine that matches the configured product
    // type, then performs the position check and housekeeping.
    string productType = Config.Instance.CurrentProductType;
    if (productType == "Special_Shape")
    {
        RightPos.ProcessImage(visionImage, frmAAVision.RightOffset);
    }
    else if (productType == "Rectangle")
    {
        RightPos.RectRightPos(visionImage, frmAAVision.RightOffset);
    }

    SendCmd = RightPos.RightCali;
    PosCheck_C(bitmap, frmAAVision.acq.hWindowControl1.HalconWindow, RightPos.RightCaliArrary);
    ClearOtherBmp();

    // Optionally persist the annotated result image.
    if (frmAAVision.acq.SaveImage)
    {
        SaveImage.SaveResult(frmAAVision.acq.hWindowControl1.HalconWindow, "C1");
    }
    ReceiveCmd = "";
}
private void bGetImage_Click(object sender, EventArgs e)
{
    // Pulls a fresh image via the GetImage delegate, converts it to a Halcon
    // image, fits the display window to it, and resets the zoom region.
    try
    {
        VisionImage img = GetImage?.Invoke();
        if (img == null)
        {
            return;
        }

        m_hoModedImage?.Dispose();
        m_hoModedImage = HalconHelper.NI2HImage(img);

        HTuple htWidth = new HTuple();
        HTuple htHeight = new HTuple();
        HOperatorSet.GetImageSize(m_hoModedImage, out htWidth, out htHeight);
        HOperatorSet.SetPart(m_htWindowHandle, 0, 0, htHeight, htWidth);
        HOperatorSet.DispObj(m_hoModedImage, m_htWindowHandle);

        // Reset the zoom window to cover the whole image.
        zoom_beginRow = 0;
        zoom_beginCol = 0;
        zoom_endCol = htWidth;
        zoom_endRow = htHeight;
        isLoadImage = true;
    }
    catch
    {
        // Deliberately swallowed: a failed grab must not crash the UI.
    }
}
public static void LearnPattern(ImageViewer SourceImage, float fUpper = 0, float fLower = 0)
{
    // Learns a grayscale pattern-matching template from the viewer's image,
    // restricted by an ROI mask, over the rotation range [fLower, fUpper].

    // For color images, reduce to the green plane first.
    using (VisionImage plane = new VisionImage(ImageType.U8, 7))
    {
        if (SourceImage.Image.Type == ImageType.Rgb32)
        {
            Algorithms.ExtractColorPlanes(SourceImage.Image, NationalInstruments.Vision.ColorMode.Rgb, null, plane, null);
            Algorithms.Copy(plane, SourceImage.Image);
        }
    }

    // NOTE(review): the oval is created with zero size, so the resulting mask
    // covers no area — confirm this is the intended setup.
    OvalContour vaRect2 = new OvalContour(0, 0, 0, 0);
    // FIX: Roi is IDisposable and was leaked when LearnPattern2 threw; a
    // using block guarantees disposal on every path.
    using (Roi roi = new Roi())
    {
        roi.Add(vaRect2);
        using (VisionImage imageMask = new VisionImage(ImageType.U8, 7))
        {
            RotationAngleRange ra = new RotationAngleRange(fLower, fUpper);
            PixelValue fillValue = new PixelValue(255);
            Algorithms.RoiToMask(imageMask, roi, fillValue, SourceImage.Image);
            Algorithms.LearnPattern2(SourceImage.Image, imageMask, MatchingAlgorithm.MatchGrayValuePyramid, ra);
        }
    }
}
private async void DetectBarcodeActionAsync(VisionImage image)
{
    // Fire-and-forget barcode detection; raises OnDetected with the results.
    // async void is tolerated here only because this is an event-style callback.
    if (!Configuration.IsScanning)
    {
        return;
    }
    try
    {
        VisionBarcode[] barcodes = await barcodeDetector.DetectAsync(image);
        if (barcodes == null || barcodes.Length == 0)
        {
            return;
        }

        // First hit stops further scanning; optionally give haptic feedback.
        Configuration.IsScanning = false;
        if (Configuration.IsVibrate)
        {
            SystemSound.Vibrate.PlayAlertSound();
        }

        List <BarcodeResult> resultList = new List <BarcodeResult>();
        foreach (var barcode in barcodes)
        {
            resultList.Add(new BarcodeResult
            {
                BarcodeType = Methods.ConvertBarcodeResultTypes(barcode.ValueType),
                DisplayValue = barcode.DisplayValue
            });
        }
        OnDetected?.Invoke(resultList);
    }
    catch (Exception)
    {
        // FIX: dropped the unused exception variable. Failures on a single
        // frame are non-fatal — the next frame retries (original behavior).
    }
}
/// <summary>
/// Finds a single circle in the image within the given rectangular ROI.
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="rect">Search region.</param>
/// <param name="minr">Minimum circle radius.</param>
/// <param name="maxr">Maximum circle radius.</param>
/// <param name="center">Receives the circle center.</param>
/// <param name="radius">Receives the circle radius.</param>
/// <param name="gain">Preprocessing gain.</param>
/// <param name="offset">Preprocessing offset.</param>
/// <param name="cycle">Number of gain/offset preprocessing passes.</param>
/// <returns>true when a circle was found; false otherwise.</returns>
public static bool FindCircle(VisionImage image, RectangleContour rect, short minr, short maxr, ref PointContour center, ref double radius, double gain, double offset, int cycle)
{
    if (image == null)
    {
        return false;
    }

    // Apply the gain/offset preprocessing the requested number of times.
    for (int pass = 0; pass < cycle; pass++)
    {
        image = Form_Main.Instance.GainOffset(image, gain, offset);
    }

    Roi roi = rect.ConvertToRoi();
    try
    {
        // A non-zero return code means the detector found no circle.
        if (Form_Main.Instance.CamDetect_Circle(image, roi, minr, maxr, ref center, ref radius) != 0)
        {
            return false;
        }
    }
    catch
    {
        // Best-effort: any detector failure is reported as "not found".
        return false;
    }
    return true;
}
public string InitCamera()
{
    // Opens the IMAQdx session, performs one throwaway snap, then programs
    // the timeout and FOV attributes. Returns "" on success, otherwise the
    // exception message.
    string rtn = string.Empty;
    try
    {
        _Session = new ImaqdxSession(this.Config.DevName);
        using (VisionImage A = new VisionImage())
        {
            // One unconfigure + snap cycle — presumably to flush/validate the
            // session before setting attributes; TODO confirm why it's needed.
            this._Session.Acquisition.Unconfigure();
            this._Session.Snap(A);
        }
        //this._session.Attributes["CameraAttributes::UserSetControl::UserSetSelector"].SetValue(1);//68
        //this._session.Attributes["CameraAttributes::UserSetControl::UserSetLoad"].SetValue(1);//69
        this._Session.Attributes["AcquisitionAttributes::Timeout"].SetValue(1000);
        // Program the region of interest (FOV) from the configuration.
        this._Session.Attributes["CameraAttributes::ImageFormatControl::Width"].SetValue(this.Config.FOV.Width);
        this._Session.Attributes["CameraAttributes::ImageFormatControl::Height"].SetValue(this.Config.FOV.Height);
        this._Session.Attributes["CameraAttributes::ImageFormatControl::OffsetX"].SetValue(this.Config.FOV.Left);
        this._Session.Attributes["CameraAttributes::ImageFormatControl::OffsetY"].SetValue(this.Config.FOV.Top);
        // Apply the configured exposure via the Shutter property.
        this.Shutter = this.Config.Expouse;
    }
    catch (Exception ex)
    {
        rtn = ex.Message;
    }
    return rtn;
}
private VisionImage GetNextImage()
{
    // Returns the image for the current index, loading and caching it on
    // first use, then advances the index cyclically over images 0..6.
    VisionImage nextImage;
    if (imageNumber < images.Count)
    {
        nextImage = images[imageNumber];
        // Drop overlays left over from a previous display.
        nextImage.Overlays.Default.Clear();
    }
    else
    {
        nextImage = new VisionImage();
        string file = "Image" + String.Format("{0:00}", imageNumber) + ".jpg";
        nextImage.ReadFile(System.IO.Path.Combine(imagePath, file));
        images.Add(nextImage);
    }

    // Advance the image number, wrapping after the last image.
    imageNumber++;
    if (imageNumber > 6)
    {
        imageNumber = 0;
    }
    return nextImage;
}
private static Collection <double> IVA_GetDistance(VisionImage image, IVA_Data ivaData, int stepIndex, int stepIndex1, int resultIndex1, int stepIndex2, int resultIndex2)
{
    // Measures the distance between two points taken from earlier step
    // results and records it (pixel-space and, when the image is calibrated,
    // real-world) into ivaData under stepIndex. Returns the distances.
    Collection <PointContour> points = new Collection <PointContour>();
    points.Add(Functions.IVA_GetPoint(ivaData, stepIndex1, resultIndex1));
    points.Add(Functions.IVA_GetPoint(ivaData, stepIndex2, resultIndex2));
    // Computes the distance between the points.
    Collection <double> distances = Algorithms.FindPointDistances(points);
    // Store the pixel-space result in the data structure.
    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Pix.)", distances[0]));
    // If the image is calibrated, also compute the real-world distance.
    if ((image.InfoTypes & InfoTypes.Calibration) != 0)
    {
        CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, points);
        Collection <double> calibratedDistance = Algorithms.FindPointDistances(realWorldPosition.Points);
        ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Calibrated)", calibratedDistance[0]));
        distances.Add(calibratedDistance[0]);
    }
    return (distances);
}
// GetDataMatrixCodeSettings initializes the settings for the given barcode.
// The settings are stored in the custom data of each image.
// Since these settings are stored in Visual Basic 6 format, we use the VBCustomData class.
private void GetDataMatrixCodeSettings(VisionImage barcode, ref DataMatrixDescriptionOptions descOptions, ref DataMatrixSizeOptions sizeOptions, ref DataMatrixSearchOptions searchOptions)
{
    // Each custom-data key holds one (or two) numeric values in VB6 format;
    // these helpers remove the repeated unwrap boilerplate of the original.
    Func<string, VBCustomData> data = key => new VBCustomData(barcode.CustomData.GetDataRawBytes(key));
    Func<string, double> num = key => data(key).Numeric[0];

    // Description options.
    descOptions.AspectRatio = num("DMDescriptionAspectRatio");
    VBCustomData sizeData = data("DMDescriptionRowsColumns");
    descOptions.Rows = (uint)sizeData.Numeric[0];
    descOptions.Columns = (uint)sizeData.Numeric[1];
    descOptions.Rectangle = num("DMDescriptionRectangle") != 0.0;
    descOptions.Ecc = (DataMatrixEcc)num("DMDescriptionECC");
    descOptions.Polarity = (DataMatrixPolarity)num("DMDescriptionPolarity");
    descOptions.CellFill = (DataMatrixCellFillMode)num("DMDescriptionCellFill");
    descOptions.MinimumBorderIntegrity = num("DMDescriptionMinimumBorderIntegrity");
    descOptions.MirrorMode = (DataMatrixMirrorMode)num("DMDescriptionMirror");

    // Size options (min/max stored as a pair under one key).
    sizeData = data("DMSizeBarcode");
    sizeOptions.MinimumSize = (uint)sizeData.Numeric[0];
    sizeOptions.MaximumSize = (uint)sizeData.Numeric[1];
    sizeOptions.QuietZoneWidth = (uint)num("DMSizeQuietZoneWidth");

    // Search options.
    searchOptions.RotationMode = (DataMatrixRotationMode)num("DMSearchRotation");
    searchOptions.SkipLocation = num("DMSearchSkipLocation") != 0.0;
    searchOptions.EdgeThreshold = (uint)num("DMSearchEdgeThreshold");
    searchOptions.DemodulationMode = (DataMatrixDemodulationMode)num("DMSearchDemodulation");
    searchOptions.CellSampleSize = (DataMatrixCellSampleSize)num("DMSearchCellSampleSize");
    searchOptions.CellFilterMode = (DataMatrixCellFilterMode)num("DMSearchCellFilter");
    searchOptions.SkewDegreesAllowed = (uint)num("DMSearchSkewDegrees");
    searchOptions.MaximumIterations = (uint)num("DMSearchMaxIterations");
    searchOptions.InitialSearchVectorWidth = (uint)num("DMSearchInitialSearchVectorWidth");
}
private VisionImage GetNextImage()
{
    // Serves the cached image for the current index, or loads and caches it
    // on first access; the index cycles over images 0..14.
    VisionImage toReturn;
    if (imageNumber >= images.Count)
    {
        // First time at this index: read the file from disk and cache it.
        toReturn = new VisionImage();
        string file = "Image" + String.Format("{0:00}", imageNumber) + ".jpg";
        toReturn.ReadFile(System.IO.Path.Combine(imagePath, file));
        images.Add(toReturn);
    }
    else
    {
        toReturn = images[imageNumber];
        toReturn.Overlays.Default.Clear(); // remove any stale overlays
    }

    // Advance the index, wrapping after image 14.
    imageNumber++;
    if (imageNumber > 14)
    {
        imageNumber = 0;
    }
    return toReturn;
}
public Form1()
{
    InitializeComponent();
    // Set up initial button states.
    // NOTE(review): startButton is enabled and then immediately disabled two
    // statements later — the first assignment is dead; confirm which startup
    // state is intended.
    startButton.Enabled = true;
    stopButton.Enabled = false;
    startButton.Enabled = false;
    // Set up the image viewer with an empty image.
    VisionImage image = new VisionImage();
    imageViewer.Attach(image);
    // Enumerate cameras and populate the camera combo box.
    ImaqdxCameraInformation[] cameras = ImaqdxSystem.GetCameraInformation(true);
    foreach (ImaqdxCameraInformation camera in cameras)
    {
        cameraName.Items.Add(camera.Name);
    }
    // Select the first camera when at least one is present.
    cameraName.SelectedIndex = cameraName.Items.Count > 0 ? 0 : -1;
    // Set up the background acquisition worker.
    acquisitionWorker = new System.ComponentModel.BackgroundWorker();
    acquisitionWorker.DoWork += new DoWorkEventHandler(acquisitionWorker_DoWork);
    acquisitionWorker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(acquisitionWorker_RunWorkerCompleted);
    acquisitionWorker.ProgressChanged += new ProgressChangedEventHandler(acquisitionWorker_ProgressChanged);
    acquisitionWorker.WorkerReportsProgress = true;
    acquisitionWorker.WorkerSupportsCancellation = true;
}
// The function that does the actual circle computation: thresholds the
// viewer image, measures each particle's Heywood circularity factor and
// center of mass, keeps the near-circular particles, and displays the
// distances between their centers.
private void ComputeCircles()
{
    // Perform the operation only if an image has been loaded.
    if (imageViewer1.Image.Width > 0)
    {
        // Automatic threshold of the image.
        using (VisionImage image = new VisionImage())
        {
            Algorithms.AutoThreshold(imageViewer1.Image, image, 2, ThresholdMethod.Entropy);
            // Extract the shape descriptors. Columns, in the order requested:
            // 0 = Heywood circularity factor, 1 = center X, 2 = center Y.
            double[,] particleMeasurements = Algorithms.ParticleMeasurements(image, new Collection <MeasurementType>(new MeasurementType[] { MeasurementType.HeywoodCircularityFactor, MeasurementType.CenterOfMassX, MeasurementType.CenterOfMassY })).PixelMeasurements;
            // Keep circular parts using the Heywood circularity factor.
            // NOTE(review): "< minimumHeywoodValue" keeps particles BELOW the
            // threshold — presumably correct since a perfect circle scores ~1
            // and larger values are less circular, but the control name
            // "minimumHeywood" then really acts as a maximum; confirm.
            Collection <PointContour> centers = new Collection <PointContour>();
            double minimumHeywoodValue = (double)minimumHeywood.Value;
            for (int i = 0; i < particleMeasurements.GetLength(0); ++i)
            {
                if (particleMeasurements[i, 0] < minimumHeywoodValue)
                {
                    centers.Add(new PointContour(particleMeasurements[i, 1], particleMeasurements[i, 2]));
                }
            }
            // Compute and draw distances between circular parts.
            DisplayResults(centers);
        }
    }
}
public override VisionResult Detected(VisionImage image, Dictionary <string, VisionResult> Result = null, VisionFlow parent = null, Shape newRoi = null)
{
    // Builds a line from two previously detected points (the StartPt/EndPt
    // keys in Result), overlays it on the image, and records its angle.
    VisionResult rtn = new VisionResult();
    rtn.State = VisionResultState.WaitCal;
    if (parent != null && Result != null)
    {
        if (Result.ContainsKey(this.StartPt) && Result.ContainsKey(this.EndPt))
        {
            rtn.Line = new LineContour(Result[this.StartPt].Point, Result[this.EndPt].Point);
            image.Overlays.Default.AddLine(rtn.Line, Rgb32Value.GreenColor);
            rtn.Angle = Common.MathHelper.GetAngle(rtn.Line.Start.X, rtn.Line.Start.Y, rtn.Line.End.X, rtn.Line.End.Y);
        }
        // NOTE(review): success ("拟合直线成功") and State=OK are recorded even
        // when the two point keys are missing and no line was built — confirm
        // this is intended.
        this.AddVisionResc(rtn, "拟合直线成功");
        rtn.State = VisionResultState.OK;
    }
    else
    {
        this.AddVisionResc(rtn, "拟合直线失败");
        rtn.State = VisionResultState.NG;
    }
    return (rtn);
}
/// <summary>
/// Runs <see cref="Detect"/> on a worker task so callers can await the
/// result without blocking the main logic.
/// </summary>
/// <param name="image">Image to process.</param>
/// <param name="roi">Optional region of interest.</param>
/// <returns>A task that completes with the detection result.</returns>
public Task <VisionResult> DetectAsync(VisionImage image, Shape roi = null)
{
    return Task <VisionResult> .Factory.StartNew(() => this.Detect(image, roi));
}
private async void DetectBarcodeActionAsync(VisionImage image)
{
    // Fire-and-forget barcode detection: on the first hit it stops scanning,
    // optionally vibrates, and raises OnDetected with results whose corner
    // points are normalized by the frame Width/Height.
    // NOTE: this block is truncated in the visible source (the try has no
    // visible catch); the remainder lies outside this chunk.
    if (Configuration.IsScanning)
    {
        try
        {
            VisionBarcode[] barcodes = await barcodeDetector.DetectAsync(image);
            if (barcodes == null || barcodes.Length == 0)
            {
                return;
            }
            Console.WriteLine($"Successfully read barcode");
            Configuration.IsScanning = false;
            if (_vibrationOnDetected)
            {
                SystemSound.Vibrate.PlayAlertSound();
            }
            List <BarcodeResult> resultList = new List <BarcodeResult>();
            foreach (var barcode in barcodes)
            {
                // Corner points come back as NSValue-wrapped points.
                var points = barcode.CornerPoints.ToList().ConvertAll(nsvalue => nsvalue.PointFValue);
                resultList.Add(new BarcodeResult
                {
                    BarcodeType = Methods.ConvertBarcodeResultTypes(barcode.ValueType),
                    DisplayValue = barcode.DisplayValue,
                    // Normalize to fractions of the captured frame size.
                    Points = points.Select(p => (p.X / (double)Width, p.Y / (double)Height)).ToList()
                });
            }
            OnDetected?.Invoke(resultList);
        }
private void btnHandleImage_Click(object sender, EventArgs e)
{
    // Applies the gain/offset preprocessing to the main image the configured
    // number of times, then persists the parameters to the INI file.
    try
    {
        double gain = double.Parse(this.tGainValue.Text);
        double offset = double.Parse(this.tOffsetValue.Text);
        // FIX: the cycle count was hard-coded to 1 even though the UI value is
        // persisted below under "Cycle"; now it is read from the text box like
        // the other parameters.
        double cycle = double.Parse(this.tHanldeCycle.Text);

        VisionImage image = Form_Main.Instance.imageSet.Image;
        for (int i = 0; i < cycle; ++i)
        {
            image = Form_Main.Instance.GainOffset(image, gain, offset);
        }
        Algorithms.Copy(image, Form_Main.Instance.imageSet.Image);

        this.cpkIni.IniWriteValue("vision", "Gain", this.tGainValue.Text);
        this.cpkIni.IniWriteValue("vision", "Offset", this.tOffsetValue.Text);
        this.cpkIni.IniWriteValue("vision", "Cycle", this.tHanldeCycle.Text);
    }
    catch
    {
        // Any parse/processing failure is reported as invalid numeric input.
        MessageBox.Show("请输入正确数字", "提示");
    }
}
public override VisionResult Detected(VisionImage image, Dictionary <string, VisionResult> Result = null, VisionFlow parent = null, Shape newRoi = null)
{
    // Computes the intersection point of two previously fitted lines (the
    // Line1ID/Line2ID keys in Result) and overlays it on the image.
    VisionResult rtn = new VisionResult();
    rtn.State = VisionResultState.WaitCal;
    try
    {
        // NOTE(review): parent is dereferenced without a null check; a null
        // parent lands in the catch below and yields NG.
        if (parent.Detects.Count > 0 && Result != null)
        {
            if (Result.ContainsKey(this.Line1ID) && Result.ContainsKey(this.Line2ID))
            {
                rtn.Point = Algorithms.FindIntersectionPoint(Result[this.Line1ID].Line, Result[this.Line2ID].Line);
                image.Overlays.Default.AddPoint(rtn.Point, Rgb32Value.RedColor, new PointSymbol(PointSymbolType.Cross));
                this.AddVisionResc(rtn, $"焦点({rtn.Point.X:n2},{rtn.Point.Y:n2})");
                rtn.State = VisionResultState.OK;
            }
            // NOTE(review): when the keys are missing the state stays WaitCal
            // rather than being set to NG — confirm this is intended.
        }
        else
        {
            rtn.State = VisionResultState.NG;
        }
    }
    catch (Exception ex)
    {
        this.AddVisionResc(rtn, ex.Message);
        rtn.State = VisionResultState.NG;
    }
    return (rtn);
}
////////////////////////////////////////////////////////////////////////////////
//
// Function Name: IVA_Classification_Extract_Particles
//
// Description : Builds one ROI per particle in the mask, covering the
//               particle's bounding rectangle expanded by 5 pixels and
//               clamped to the input image bounds.
//
// Parameters : image - Input image
// imageMask - Image mask
//
// Return Value : collection of per-particle ROIs
//
////////////////////////////////////////////////////////////////////////////////
public static Collection <Roi> IVA_Classification_Extract_Particles(VisionImage image, VisionImage imageMask)
{
    // Bounding-rectangle measurements, indexed below in this order:
    // 0 = left, 1 = top, 2 = right, 3 = bottom.
    Collection <MeasurementType> measurements = new Collection <MeasurementType>
    {
        MeasurementType.BoundingRectLeft,
        MeasurementType.BoundingRectTop,
        MeasurementType.BoundingRectRight,
        MeasurementType.BoundingRectBottom
    };

    // Compute the bounding rectangle of each particle in the mask.
    ParticleMeasurementsReport particleReport = Algorithms.ParticleMeasurements(imageMask, measurements, Connectivity.Connectivity8, ParticleMeasurementsCalibrationMode.Pixel);

    Collection <Roi> rois = new Collection <Roi>();
    int particleCount = particleReport.PixelMeasurements.GetLength(0);
    for (int i = 0; i < particleCount; i++)
    {
        // Expand by 5 pixels on every side, then clamp to the image bounds.
        double left = Math.Max(0, particleReport.PixelMeasurements[i, 0] + imageMask.MaskOffset.X - 5);
        double top = Math.Max(0, particleReport.PixelMeasurements[i, 1] + imageMask.MaskOffset.Y - 5);
        double right = Math.Min(image.Width - 1, particleReport.PixelMeasurements[i, 2] + imageMask.MaskOffset.X + 5);
        double bottom = Math.Min(image.Height - 1, particleReport.PixelMeasurements[i, 3] + imageMask.MaskOffset.Y + 5);

        Roi particleROI = new Roi();
        particleROI.Add(new RectangleContour(left, top, right - left + 1, bottom - top + 1));
        rois.Add(particleROI);
    }
    return rois;
}
private void CalNz(Nozzle nz, string funcName, VisionImage image)
{
    // Queues a down-camera vision detection for a nozzle that has picked a
    // part: nozzles 1/2 use bottom camera 1, nozzles 3/4 use bottom camera 2.
    if (this.machine.RunData.RUN_NzData[nz].State == NZ_State.Sucked)
    {
        var item = new Tool.ResultItem();
        if (nz == Nozzle.Nz1 || nz == Nozzle.Nz2)
        {
            item.Camera = Camera.Bottom1;
        }
        else
        {
            item.Camera = Camera.Bottom2;
        }
        // Nozzles 2 and 4 are offset by 800 within their camera's view —
        // unit (pixels?) unconfirmed; TODO verify.
        int offset = 0;
        if (nz == Nozzle.Nz2 || nz == Nozzle.Nz4)
        {
            offset = 800;
        }
        item.Key = ResultKey.DownVision;
        item.ROI = this.machine.MachineEntiy.MachineConfig[nz].ViewRoi;
        item.NZIndex = nz;
        item.funcName = funcName;
        item.PCBIndex = this.machine.RunData.RUN_NzData[nz].PCBIndex;
        item.PCSIndex = this.machine.RunData.RUN_NzData[nz].PCSIndex;
        // NOTE(review): the capture position always uses Nz1's rotate-camera
        // point regardless of nz — confirm this is intentional.
        item.CaptruePos = this.entiy.MachineConfig[Nozzle.Nz1].RotateCamPoint;
        VisionCalHelper.Instance.VisionDetect(entiy.Module, item, image, offset);
    }
}
public MainDlg()
{
    InitializeComponent();

    camera = new MikrotronCamera(settings);

    // Configure the image display: the viewer shows an image whose type
    // matches what the camera session will produce.
    var imageType = (ImageType)camera.Session.Attributes[ImaqStandardAttribute.ImageType].GetValue();
    displayImage = new VisionImage(imageType);
    imageViewer.Attach(displayImage);

    // Background worker that runs the acquisition loop off the UI thread.
    acquisitionWorker = new BackgroundWorker();
    acquisitionWorker.WorkerSupportsCancellation = true;
    acquisitionWorker.DoWork += acquisitionWorker_DoWork;
    acquisitionWorker.RunWorkerCompleted += acquisitionWorker_RunWorkerCompleted;
    acquisitionWorker.ProgressChanged += acquisitionWorker_ProgressChanged;
}
public static VisionRequestBody Default(byte[] content = null)
{
    // Builds a single-request body asking for the first feature type
    // (FeatureType value 0) over the optionally supplied image bytes.
    VisionImage image = new VisionImage();
    if (content != null)
    {
        image.EncodeContent(content);
    }

    VisionFeature feature = new VisionFeature
    {
        type = Enum.GetName(typeof(VisionFeature.FeatureType), 0)
    };

    VisionRequest request = new VisionRequest
    {
        features = new VisionFeature[] { feature },
        image = image
    };

    return new VisionRequestBody
    {
        requests = new VisionRequest[] { request }
    };
}
public byte[,] SingleSnapshot(string attributesPath, bool addToImageList)
{
    // Takes one snapshot with the given camera attributes and returns the
    // frame as a U8 byte array, or null when the camera is busy / times out.
    imageWindow.WriteToConsole("Taking snapshot");
    imageWindow.WriteToConsole("Applied camera attributes from " + attributesPath);
    SetCameraAttributes(attributesPath);
    try
    {
        if (state == CameraState.FREE || state == CameraState.READY_FOR_ACQUISITION)
        {
            image = new VisionImage();
            state = CameraState.READY_FOR_ACQUISITION;
            try
            {
                imaqdxSession.Snap(image);
                if (windowShowing)
                {
                    imageWindow.AttachToViewer(image);
                }
                if (addToImageList)
                {
                    imageList.Add(image);
                }
                // NOTE(review): looks like a debug artifact — every snapshot
                // is written to "test.bmp" in the working directory; confirm
                // this is wanted before removing.
                image.WriteFile("test.bmp");
                PixelValue2D pval = image.ImageToArray();
                byte[,] u8array = Getthearray.convertToU8(pval.Rgb32);
                double max = Getthearray.Findthemaximum(u8array);
                imageWindow.WriteToConsole(max.ToString("F6"));
                state = CameraState.FREE;
                return u8array;
            }
            catch (ObjectDisposedException e)
            {
                MessageBox.Show(e.Message);
                throw new ObjectDisposedException("");
            }
            catch (ImaqdxException e)
            {
                MessageBox.Show(e.Message);
                throw new ImaqdxException();
            }
            catch (VisionException e)
            {
                MessageBox.Show(e.VisionErrorText);
                // FIX: was "throw e;", which resets the stack trace;
                // "throw;" rethrows the original exception intact.
                throw;
            }
        }
        else
        {
            return null;
        }
    }
    catch (TimeoutException)
    {
        return null;
    }
}
// Threadsafe wrapper: marshals the Attach call onto the viewer's UI thread.
private void attachToViewer(NationalInstruments.Vision.WindowsForms.ImageViewer viewer, VisionImage image)
{
    var helper = new AttachImageToViewerDelegate(AttachImageHelper);
    viewer.Invoke(helper, new object[] { viewer, image });
}
private void startButton_Click(object sender, EventArgs e)
{
    // Starts the volume acquisition: configures the DAQ task and the IMAQ
    // ring buffer (only when ACQDATA is defined), then launches the
    // acquisition and render background workers.
    try
    {
        // Update the UI.
        startButton.Enabled = false;
        stopButton.Enabled = true;
        bufNumTextBox.Text = "";
        //pixelValTextBox.Text = "";
        interfaceTextBox.Enabled = false;
        numImages.Enabled = false;
        volumeDepthTextBox.Enabled = false;
        thresholdDeltaVoltageTextBox.Enabled = false;
#if ACQDATA
        // TODO: Params from UI
        // Create a new task
        myTask = new Task();
        physicalChannelText = "Dev1/ai0";
        minimumValue = -10.00;
        maximumValue = 10.00;
        rateValue = 10000.00;
        samplesPerChannelValue = 1000;
        // Create a virtual channel
        myTask.AIChannels.CreateVoltageChannel(physicalChannelText, "", (AITerminalConfiguration)(-1), Convert.ToDouble(minimumValue), Convert.ToDouble(maximumValue), AIVoltageUnits.Volts);
        analogInReader = new AnalogMultiChannelReader(myTask.Stream);
        // Verify the Task
        myTask.Control(TaskAction.Verify);
        // Create a session.
        _session = new ImaqSession(interfaceTextBox.Text);
        // Configure the image viewer to match the session's image type.
        displayImage = new VisionImage((ImageType)_session.Attributes[ImaqStandardAttribute.ImageType].GetValue());
        imageViewer.Attach(displayImage);
        // Create a buffer collection for the acquisition with the requested
        // number of images, and configure the buffers to loop continuously.
        int numberOfImages = (int)numImages.Value;
        bufList = _session.CreateBufferCollection(numberOfImages, ImaqBufferCollectionType.PixelValue2D);
        for (int i = 0; i < bufList.Count; ++i)
        {
            // Last buffer loops back to the first; all others advance.
            bufList[i].Command = (i == bufList.Count - 1) ? ImaqBufferCommand.Loop : ImaqBufferCommand.Next;
        }
        // Configure and start the acquisition.
        _session.Acquisition.Configure(bufList);
        _session.Acquisition.AcquireAsync();
        _thresholdDeltaVoltage = Convert.ToDouble(thresholdDeltaVoltageTextBox.Text);
        _volumeDepth = Convert.ToInt32(volumeDepthTextBox.Text);
#endif
        // Bundle the render-related UI controls for the render worker.
        RenderUIArgs renderUIArgs;
        renderUIArgs.rotxTextBox = rotxTextBox;
        renderUIArgs.rotyTextBox = rotyTextBox;
        renderUIArgs.rotzTextBox = rotzTextBox;
        renderUIArgs.transxTextBox = transxTextBox;
        renderUIArgs.transyTextBox = transyTextBox;
        renderUIArgs.transzTextBox = transzTextBox;
        renderUIArgs.densityTextBox = densityTextBox;
        renderUIArgs.brightnessTextBox = brightnessTextBox;
        renderUIArgs.transoffsetTextBox = transoffsetTextBox;
        renderUIArgs.transscaleTextBox = transscaleTextBox;
        renderUIArgs.linfilterCheckBox = linfilterCheckBox;
        // Start the background worker threads.
        acquisitionWorker.RunWorkerAsync(subCheckBox);
        renderWorker.RunWorkerAsync(renderUIArgs);
    }
    catch (ImaqException ex)
    {
        MessageBox.Show(ex.Message, "NI-IMAQ Error");
        Cleanup();
    }
    catch (FormatException ex)
    {
        MessageBox.Show(ex.Message, "Format Error");
        Cleanup();
    }
}
public void AttachToViewer(VisionImage image) { attachToViewer(imageViewer, image); }
private void AttachImageHelper(NationalInstruments.Vision.WindowsForms.ImageViewer viewer, VisionImage image) { viewer.Attach(image); }
public void AttachImagesToViewer(List<VisionImage> images, int frame) { disImage=images[frame]; attachToViewer(imageViewer, disImage); }
public byte[][,] MultipleSnapshot(string attributesPath, int numberOfShots) { SetCameraAttributes(attributesPath); VisionImage[] images = new VisionImage[numberOfShots]; Stopwatch watch = new Stopwatch(); try { watch.Start(); state = CameraState.READY_FOR_ACQUISITION; imaqdxSession.Sequence(images, numberOfShots); watch.Stop(); if (windowShowing) { long interval = watch.ElapsedMilliseconds; imageWindow.WriteToConsole(interval.ToString()); } List<byte[,]> byteList = new List<byte[,]>(); foreach (VisionImage i in images) { byteList.Add((i.ImageToArray()).U8); // if (windowShowing) //{ // imageWindow.AttachToViewer(i); // } } state = CameraState.FREE; return byteList.ToArray(); } catch (ImaqdxException e) { MessageBox.Show(e.Message); state = CameraState.FREE; throw new TimeoutException(); } }
public byte[,] SingleSnapshot(string attributesPath, bool addToImageList) { imageWindow.WriteToConsole("Taking snapshot"); imageWindow.WriteToConsole("Applied camera attributes from " + attributesPath); SetCameraAttributes(attributesPath); try { if (state == CameraState.FREE) { image = new VisionImage(); state = CameraState.READY_FOR_ACQUISITION; try { imaqdxSession.Snap(image); if (windowShowing) { imageWindow.AttachToViewer(image); } if (addToImageList) { imageList.Add(image); } PixelValue2D pval = image.ImageToArray(); state = CameraState.FREE; return pval.U8; } catch (ObjectDisposedException e) { MessageBox.Show(e.Message); throw new ObjectDisposedException(""); } catch (ImaqdxException e) { MessageBox.Show(e.Message); throw new ImaqdxException(); } } else return null; } catch (TimeoutException) { return null; } }
private void stream() { image = new VisionImage(); try { imaqdxSession.ConfigureGrab(); } catch (ObjectDisposedException e) { MessageBox.Show(e.Message); return; } for (; ; ) { lock (streamStopLock) { try { imaqdxSession.Grab(image, true); } catch (InvalidOperationException e) { MessageBox.Show("Something bad happened. Stopping the image stream.\n" + e.Message); state = CameraState.FREE; return; } try { if (windowShowing) { imageWindow.AttachToViewer(image); } } catch (InvalidOperationException e) { MessageBox.Show("I have a leftover image without anywhere to display it. Dumping...\n\n" + e.Message); imaqdxSession.Acquisition.Stop(); state = CameraState.FREE; return; } if (state != CameraState.STREAMING) { imaqdxSession.Acquisition.Stop(); state = CameraState.FREE; return; } } } }
private void stream() { image = new VisionImage(); try { imaqdxSession.ConfigureGrab(); } catch (ObjectDisposedException e) { MessageBox.Show(e.Message); return; } for (; ; ) { lock (streamStopLock) { try { imaqdxSession.Grab(image, true); if (analyse) { PixelValue2D pval = image.ImageToArray(); byte[,] u8array = Getthearray.convertToU8(pval.Rgb32); max = Getthearray.Findthemaximum(u8array); imageWindow.WriteToConsole(max.ToString("F6")); } } catch (InvalidOperationException e) { MessageBox.Show("Something bad happened. Stopping the image stream.\n" + e.Message); state = CameraState.FREE; return; } try { if (windowShowing) { imageWindow.AttachToViewer(image); } } catch (InvalidOperationException e) { MessageBox.Show("I have a leftover image without anywhere to display it. Dumping...\n\n" + e.Message); imaqdxSession.Acquisition.Stop(); state = CameraState.FREE; return; } if (state != CameraState.STREAMING) { imaqdxSession.Acquisition.Stop(); state = CameraState.FREE; return; } } } }