Code Example #1
        private void HandleSignal(ref MC.SIGNALINFO signalInfo)
        {
            UInt32 hSurface;

            switch (signalInfo.Signal)
            {
            // Handles captured image
            case MC.SIG_SURFACE_FILLED:
                ProcessingCallback(signalInfo);
                hSurface = signalInfo.SignalInfo;
                MC.SetParam(hSurface, MC.SurfaceState, MC.SURFACESTATE_FREE);
                break;

            // Handle Acquisition errors
            case MC.SIG_ACQUISITION_FAILURE:
                AcqFailureCallback(signalInfo);
                break;

            // Terminate live thread gracefully
            case MC.SIG_END_CHANNEL_ACTIVITY:
                m_RunLoop = false;
                break;

            default:
                throw new Euresys.MultiCamException("Unknown signal");
            }
        }
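
HandleSignal only ever receives signals that have been enabled on the channel beforehand. The following is a minimal setup sketch, assuming the constant names used in the Euresys MultiCam .NET samples (MC.SignalEnable, MC.SIGNALENABLE_ON, MC.CHANNELSTATE_ACTIVE); verify them against the MultiCam.NET assembly actually in use.

        // Minimal setup sketch (constant names assumed from the Euresys MultiCam .NET samples):
        // enable the three signals consumed by HandleSignal, then start acquisition.
        private void EnableSignalsAndStart()
        {
            MC.SetParam(channel, MC.SignalEnable + MC.SIG_SURFACE_FILLED, MC.SIGNALENABLE_ON);        // a surface holds a new image
            MC.SetParam(channel, MC.SignalEnable + MC.SIG_ACQUISITION_FAILURE, MC.SIGNALENABLE_ON);   // acquisition error
            MC.SetParam(channel, MC.SignalEnable + MC.SIG_END_CHANNEL_ACTIVITY, MC.SIGNALENABLE_ON);  // channel has stopped

            // Start the channel
            MC.SetParam(channel, MC.ChannelState, MC.CHANNELSTATE_ACTIVE);
        }
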
Code Example #2
        private void ProcessingCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            statusBar.Text = "Processing";
            currentSurface = signalInfo.SignalInfo;

            // + PicoloVideoTrigger Sample Program

            try
            {
                // Update the image with the acquired image buffer data
                Int32  width, height, bufferPitch;
                IntPtr bufferAddress;
                MC.GetParam(currentChannel, "ImageSizeX", out width);
                MC.GetParam(currentChannel, "ImageSizeY", out height);
                MC.GetParam(currentChannel, "BufferPitch", out bufferPitch);
                MC.GetParam(currentSurface, "SurfaceAddr", out bufferAddress);

                try
                {
                    imageMutex.WaitOne();

                    image = new Bitmap(width, height, bufferPitch, PixelFormat.Format24bppRgb, bufferAddress);

                    /* Insert image analysis and processing code here */
                }
                finally
                {
                    imageMutex.ReleaseMutex();
                }

                // Retrieve the frame rate
                Double frameRate_Hz;
                MC.GetParam(channel, "PerSecond_Fr", out frameRate_Hz);

                // Retrieve the channel state
                String channelState;
                MC.GetParam(channel, "ChannelState", out channelState);

                // Display frame rate and channel state
                statusBar.Text = String.Format("Frame Rate: {0:f2}, Channel State: {1}", frameRate_Hz, channelState);

                // Display the new image
                this.BeginInvoke(new PaintDelegate(Redraw), new object[1] {
                    CreateGraphics()
                });
            }
            catch (Euresys.MultiCamException exc)
            {
                MessageBox.Show(exc.Message, "MultiCam Exception");
            }
            catch (System.Exception exc)
            {
                MessageBox.Show(exc.Message, "System Exception");
            }
        }
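
Note that the Bitmap created above wraps the unmanaged surface memory directly; once HandleSignal (Code Example #1) marks the surface FREE, that memory may be overwritten by the next acquisition. If the pixels are needed beyond the callback, copy them into managed memory first, as Code Example #5 does. A minimal sketch (the 24-bit layout and the bufferPitch * height size are assumptions based on the pixel format used above):

        // Sketch: snapshot the surface contents into managed memory before the surface is recycled.
        byte[] frameCopy = new byte[bufferPitch * height];
        System.Runtime.InteropServices.Marshal.Copy(bufferAddress, frameCopy, 0, frameCopy.Length);
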
Code Example #3
        public void MultiCamCallback(ref MC.SIGNALINFO signalInfo)
        {
            lock (lockerMultiCam)
            {
                switch (signalInfo.Signal)
                {
                case MC.SIG_START_ACQUISITION_SEQUENCE:
                    watchTakeImg.Reset();
                    watchTakeImg.Start();
                    #region // If the product specification sent from machine #1 has changed: stop acquisition, set the camera's acquisition line count, restart the cameras, and derive the number of acquired lines from the glass length  // also reloads all parameters (by cxx)
                    if (FrmMain.comm.productInfoChanged)
                    {
                        // Stop and restart the left camera
                        Stop(true);
                        SetPageLines(true);
                        GrabImage(true);
                        // Stop and restart the right camera
                        Stop(false);
                        SetPageLines(false);
                        GrabImage(false);

                        // Reload the parameters for the new glass specification
                        FrmMain.ini.ReadParam(Convert.ToInt32(FrmMain.frmMain.tboxGlassLength), Convert.ToInt32(FrmMain.frmMain.tboxGlassWidth), Convert.ToDouble(FrmMain.frmMain.tboxGlassThick));
                        //int length = (int)(double.Parse(comm.StrMsgRec[1]) / FrmMain.systemP.mmPerPix + FrmMain.cameraP.correctLine);
                        //FrmMain.cameraP.pageLength_Ln = length;
                        //FrmMain.cameraP.seqLength_Ln = length;
                    }
                    #endregion
                    FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + "下幅图采像开始");   // log: "acquisition of the next image started"
                    break;

                case MC.SIG_SURFACE_PROCESSING:
                    //ProcessingCallback(signalInfo);
                    break;

                case MC.SIG_ACQUISITION_FAILURE:
                    AcqFailureCallback(signalInfo);
                    FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + "采像失败");   // log: "acquisition failed"
                    break;

                case MC.SIG_SURFACE_FILLED:
                    ProcessingCallback(signalInfo);
                    break;

                case MC.SIG_END_ACQUISITION_SEQUENCE:
                    watchTakeImg.Stop();
                    FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + "采像结束,采像时间:" + watchTakeImg.ElapsedMilliseconds.ToString() + "ms");   // log: "acquisition finished, elapsed time: ...ms"
                    break;

                default:
                    throw new DefectDetection.MultiCamException("Unknown signal");
                }
            }
        }
Code Example #4
        public void ProcessingCallback(MC.SIGNALINFO signalInfo)
        {
            lock (lockerProcess)
            {
                UInt32 currentChannel = (UInt32)signalInfo.Context;
                //statusBar.Text = "Processing";
                currentSurface = signalInfo.SignalInfo;
                try
                {
                    // Update the image with the acquired image buffer data
                    Int32  width, height, bufferPitch, gElapse;
                    IntPtr bufferAddress;
                    MC.GetParam(currentChannel, "ImageSizeX", out width);
                    MC.GetParam(currentChannel, "ImageSizeY", out height);
                    ////MC.GetParam(currentChannel, "BufferPitch", out bufferPitch);
                    MC.GetParam(currentSurface, "SurfaceAddr", out bufferAddress);
                    MC.GetParam(currentChannel, "Elapsed_Ln", out gElapse);
                    //try
                    //{
                    //imageMutex.WaitOne();
                    //if (tmpImage != null)
                    //{
                    //    tmpImage.Dispose();
                    //    tmpImage = null;
                    //}
                    //// Build the color image; NOTE: this call is slow and could be moved into the delegate
                    //HOperatorSet.GenImageInterleaved(out tmpImage, bufferAddress, "bgr", width, gElapse, -1, "byte", width, gElapse, 0, 0, -1, 0);

                    //}
                    //finally
                    //{
                    //    imageMutex.ReleaseMutex();
                    //    //Stop();
                    //}

                    if (ImageReadyEvent != null)
                    {
                        ImageReadyEvent("Pass", bufferAddress, gElapse, width, height);   // hand off to image analysis; the actual handler is m_ImageReadyEvent_GrabImage
                    }
                }

                catch (DefectDetection.MultiCamException exc)
                {
                    FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + exc.Message + "ProcessingCallback");
                }
                catch (System.Exception exc)
                {
                    FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + exc.Message + "ProcessingCallback");
                }
                // - GrablinkSnapshotTrigger Sample Program
            }
        }
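
ImageReadyEvent and m_ImageReadyEvent_GrabImage are project-specific members that are not shown here. Inferred purely from the argument list of the call above, their declaration might look like this hypothetical sketch:

        // Hypothetical declarations matching the call ImageReadyEvent("Pass", bufferAddress, gElapse, width, height) above.
        public delegate void ImageReadyEventHandler(string result, IntPtr bufferAddress, int acquiredLines, int width, int height);
        public event ImageReadyEventHandler ImageReadyEvent;
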
Code Example #5
        private void ProcessingCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            currentSurface = signalInfo.SignalInfo;

            // + GrablinkSnapshot Sample Program

            try
            {
                // Update the image with the acquired image buffer data
                Int32  width, height, bufferPitch;
                IntPtr bufferAddress;
                MC.GetParam(currentChannel, "ImageSizeX", out width);
                MC.GetParam(currentChannel, "ImageSizeY", out height);
                MC.GetParam(currentChannel, "BufferPitch", out bufferPitch);
                MC.GetParam(currentSurface, "SurfaceAddr", out bufferAddress);

                byte[] GrabImage = new byte[width * height];
                Marshal.Copy(bufferAddress, GrabImage, 0, GrabImage.Length);

                try
                {
                    imageMutex.WaitOne();

                    var _EuresysGrabEvent = EuresysGrabEvent;
                    _EuresysGrabEvent?.Invoke(GrabImage);
                }
                finally
                {
                    imageMutex.ReleaseMutex();
                }

                // Retrieve the frame rate
                Double frameRate_Hz;
                MC.GetParam(channel, "PerSecond_Fr", out frameRate_Hz);

                // Retrieve the channel state
                String channelState;
                MC.GetParam(channel, "ChannelState", out channelState);
            }
            catch (Euresys.MultiCamException exc)
            {
                CLogManager.AddInspectionLog(CLogManager.LOG_TYPE.ERR, "cEuresysIOTAManager() ProcessingCallback Exception!! : MultiCamException : " + exc.Message, CLogManager.LOG_LEVEL.LOW);
            }
            catch (System.Exception exc)
            {
                CLogManager.AddInspectionLog(CLogManager.LOG_TYPE.ERR, "cEuresysIOTAManager() ProcessingCallback Exception!! : SystemException : " + exc.Message, CLogManager.LOG_LEVEL.LOW);
            }
            // - GrablinkSnapshot Sample Program
        }
Code Example #6
        private void AcqFailureCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            // + DominoSnapshot Sample Program

            try
            {
            }
            catch (System.Exception exc)
            {
                CLogManager.AddInspectionLog(CLogManager.LOG_TYPE.ERR, "CEuresysManager AcqFailureCallback Exception!!", CLogManager.LOG_LEVEL.LOW);
            }
        }
Code Example #7
        private static void AcqFailureCallback(MC.SIGNALINFO signalInfo)
        {
            uint currentChannel = (uint)signalInfo.Context;

            try
            {
                isImageReady = false;
            }
            catch (System.Exception ex)
            {
                VisionLogger.Log(WaftechLibraries.Log.LogType.Exception, "E2VCameraHelper", ex);
                VisionNotifier.AddNotification("System Exception: " + ex.Message);
                errorMessage = ex.Message;
            }
        }
Code Example #8
        private void MultiCamCallback(ref MC.SIGNALINFO signalInfo)
        {
            switch (signalInfo.Signal)
            {
            case MC.SIG_SURFACE_PROCESSING:
                ProcessingCallback(signalInfo);
                break;

            case MC.SIG_ACQUISITION_FAILURE:
                AcqFailureCallback(signalInfo);
                break;

            default:
                throw new Euresys.MultiCamException("Unknown signal");
            }
        }
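
For this callback-driven variant, the delegate must be registered with the channel before acquisition starts, and a reference to it must be kept so the garbage collector cannot reclaim it while the driver still holds the function pointer. A minimal sketch, assuming the MC.CALLBACK delegate type and MC.RegisterCallback wrapper used in the Euresys MultiCam .NET samples:

        // Sketch (names assumed from the MultiCam .NET samples): keep the delegate in a field so it is
        // not garbage-collected while the driver can still invoke it, register it, and enable the
        // signals that MultiCamCallback handles.
        private MC.CALLBACK multiCamCallback;

        private void RegisterMultiCamCallback()
        {
            multiCamCallback = new MC.CALLBACK(MultiCamCallback);
            MC.RegisterCallback(channel, multiCamCallback, channel);   // context = channel, read back via signalInfo.Context

            MC.SetParam(channel, MC.SignalEnable + MC.SIG_SURFACE_PROCESSING, MC.SIGNALENABLE_ON);
            MC.SetParam(channel, MC.SignalEnable + MC.SIG_ACQUISITION_FAILURE, MC.SIGNALENABLE_ON);
        }
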
Code Example #9
        private void AcqFailureCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            // + GrablinkSnapshot Sample Program

            try
            {
                // Display frame rate and channel state
                ProcessStatus = String.Format("Acquisition Failure, Channel State: IDLE");
                //this.BeginInvoke(new PaintDelegate(Redraw), new object[1] { CreateGraphics() });
            }
            catch (System.Exception)
            {
                throw;   // rethrow without resetting the stack trace
            }

            // - GrablinkSnapshot Sample Program
        }
Code Example #10
 private void multiCamProcessingCallback(ref MC.SIGNALINFO signalInfo)
 {
     if (signalInfo.Signal == MC.SIG_SURFACE_PROCESSING)
     {
         int      width;
         int      height;
         IntPtr   bufferAddress;
         uint     currentChannel = signalInfo.Instance;
         uint     currentSurface = signalInfo.SignalInfo;
         IplImage output;
         unsafe
         {
             MC.GetParam(currentChannel, "ImageSizeX", out width);
             MC.GetParam(currentChannel, "ImageSizeY", out height);
             MC.GetParam(currentSurface, "SurfaceAddr:0", out bufferAddress);
             output = new IplImage(new Size(width, height), IplDepth.U8, 1, bufferAddress).Clone();
         }
         global_observer.OnNext(output);
     }
 }
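
global_observer is declared elsewhere; the callback only pushes a cloned IplImage into it, so the frame stays valid after the surface is recycled. If it is a Reactive Extensions subject, a consumer could look like the following sketch (Subject<IplImage>, the subscription, and ProcessFrame are assumptions, not part of the original code):

     // Sketch of a possible consumer for the frames pushed via global_observer.OnNext(output).
     Subject<IplImage> global_observer = new Subject<IplImage>();   // System.Reactive.Subjects

     IDisposable subscription = global_observer.Subscribe(frame =>
     {
         // Each frame is an independent clone, so it can be processed after the callback returns.
         ProcessFrame(frame);   // hypothetical processing routine
         frame.Dispose();
     });
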
Code Example #11
        private void AcqFailureCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            // + GrablinkSnapshotTrigger Sample Program

            try
            {
                // Display frame rate and channel state
                //statusBar.Text = String.Format("Acquisition Failure, Channel State: IDLE");
                //   this.BeginInvoke(new ImageReadyEventHandler(m_ImageReadyEvent_GrabImage));// new object[1] { CreateGraphics() });
            }
            catch (System.Exception exc)
            {
                msg = DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + exc.Message + "System Exception";
                // FrmMain.T.queueIn(DateTime.Now.ToString("yy-MM-dd HH:mm:ss fff-") + exc.Message + "System Exception");
            }

            // - GrablinkSnapshotTrigger Sample Program
        }
Code Example #12
        private void AcqFailureCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            // + DominoSnapshotTrigger Sample Program

            try
            {
                // Display frame rate and channel state
                statusBar.Text = String.Format("Acquisition Failure, Channel State: IDLE");
                this.BeginInvoke(new PaintDelegate(Redraw), new object[1] {
                    CreateGraphics()
                });
            }
            catch (System.Exception exc)
            {
                MessageBox.Show(exc.Message, "System Exception");
            }

            // - DominoSnapshotTrigger Sample Program
        }
Code Example #13
        public void LiveThreadProc()
        {
            MC.SIGNALINFO sigInfo = new MC.SIGNALINFO();

            while (m_RunLoop)
            {
                //Get Signal Information
                try
                {
                    MC.WaitSignal(channel, MC.SIG_ANY, 10000, out sigInfo);
                    HandleSignal(ref sigInfo);
                }
                catch (Euresys.MultiCamException exc)
                {
                    // An exception has occurred in the try {...} block.
                    // Retrieve its description and display it in a message box.
                    MessageBox.Show(exc.Message, "MultiCam Exception");
                    Close();
                }
            }
            Console.WriteLine("worker thread: terminating gracefully.");
        }
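
LiveThreadProc is intended to run on a dedicated worker thread and exits once m_RunLoop is cleared (which the SIG_END_CHANNEL_ACTIVITY case in Code Example #1 does). A minimal start/stop sketch using System.Threading; the thread field name is an assumption:

        // Sketch: run the polling loop on a background thread and wait for it to finish on shutdown.
        private Thread m_LiveThread;        // assumed field name (System.Threading)
        private volatile bool m_RunLoop;    // same flag tested by LiveThreadProc and cleared by HandleSignal

        private void StartLiveThread()
        {
            m_RunLoop = true;
            m_LiveThread = new Thread(LiveThreadProc) { IsBackground = true };
            m_LiveThread.Start();
        }

        private void StopLiveThread()
        {
            m_RunLoop = false;      // fallback in case SIG_END_CHANNEL_ACTIVITY never arrives
            m_LiveThread?.Join();
        }
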
Code Example #14
        private static void MultiCamCallback(ref MC.SIGNALINFO signalInfo)
        {
            try
            {
                switch (signalInfo.Signal)
                {
                case MC.SIG_SURFACE_PROCESSING:
                    ProcessingCallback(signalInfo);
                    break;

                case MC.SIG_ACQUISITION_FAILURE:
                    AcqFailureCallback(signalInfo);
                    break;

                default:
                    throw new Euresys.MultiCamException("Unknown signal");
                }
            }
            catch (Exception ex)
            {
                errorMessage = ex.Message;
                throw;   // rethrow without resetting the stack trace
            }
        }
Code Example #15
        private void ProcessingCallback(MC.SIGNALINFO signalInfo)
        {
            UInt32 currentChannel = (UInt32)signalInfo.Context;

            ProcessStatus  = "Processing";
            currentSurface = signalInfo.SignalInfo;

            // + GrablinkSnapshot Sample Program

            try
            {
                // Update the image with the acquired image buffer data
                Int32  width, height, bufferPitch;
                IntPtr bufferAddress;
                MC.GetParam(currentChannel, "ImageSizeX", out width);
                MC.GetParam(currentChannel, "ImageSizeY", out height);
                MC.GetParam(currentChannel, "BufferPitch", out bufferPitch);
                MC.GetParam(currentSurface, "SurfaceAddr", out bufferAddress);

                try
                {
                    imageMutex.WaitOne();

                    image = new Bitmap(width, height, bufferPitch, PixelFormat.Format8bppIndexed, bufferAddress);

                    imgpal = image.Palette;

                    // Build bitmap palette Y8
                    for (uint i = 0; i < 256; i++)
                    {
                        imgpal.Entries[i] = Color.FromArgb(
                            (byte)0xFF,
                            (byte)i,
                            (byte)i,
                            (byte)i);
                    }

                    image.Palette = imgpal;

                    /* Insert image analysis and processing code here */
                }
                finally
                {
                    imageMutex.ReleaseMutex();
                }

                // Retrieve the frame rate
                Double frameRate_Hz;
                MC.GetParam(channel, "PerSecond_Fr", out frameRate_Hz);

                // Retrieve the channel state
                String channelState;
                MC.GetParam(channel, "ChannelState", out channelState);

                // Display frame rate and channel state
                ProcessStatus = String.Format("Frame Rate: {0:f2}, Channel State: {1}", frameRate_Hz, channelState);

                // Display the new image
                //this.BeginInvoke(new PaintDelegate(Redraw), new object[1] { CreateGraphics() });
            }
            catch (Euresys.MultiCamException)
            {
                throw;   // rethrow without resetting the stack trace
            }
            catch (System.Exception)
            {
                throw;
            }
            // - GrablinkSnapshot Sample Program
        }
Code Example #16
        private static void ProcessingCallback(MC.SIGNALINFO signalInfo)
        {
            isImageReady = false;

            UInt32 currentChannel = (UInt32)signalInfo.Context;

            currentSurface = signalInfo.SignalInfo;

            // + GrablinkSnapshotTrigger Sample Program

            try
            {
                // Update the image with the acquired image buffer data
                Int32  width, height, bufferPitch;
                IntPtr bufferAddress;
                MC.GetParam(currentChannel, "ImageSizeX", out width);
                MC.GetParam(currentChannel, "ImageSizeY", out height);
                MC.GetParam(currentChannel, "BufferPitch", out bufferPitch);
                MC.GetParam(currentSurface, "SurfaceAddr", out bufferAddress);

                try
                {
                    imageMutex.WaitOne();

                    image  = new System.Drawing.Bitmap(width, height, bufferPitch, PixelFormat.Format8bppIndexed, bufferAddress);
                    imgpal = image.Palette;

                    // Build bitmap palette Y8
                    for (uint i = 0; i < 256; i++)
                    {
                        imgpal.Entries[i] = Color.FromArgb(
                            (byte)0xFF,
                            (byte)i,
                            (byte)i,
                            (byte)i);
                    }

                    image.Palette = imgpal;

                    string path_directory = @"D:\Waftech\BDMVision\Log\Temp\";
                    System.IO.Directory.CreateDirectory(path_directory);
                    string fullPath = path_directory + "test.jpg";

                    image.Save(fullPath);
                    eImage = new EImageBW8();
                    eImage.SetSize(image.Width, image.Height);
                    eImage.Load(fullPath);
                }
                finally
                {
                    imageMutex.ReleaseMutex();
                }

                isImageReady = true;

                // Retrieve the frame rate
                double frameRate_Hz;
                MC.GetParam(channel, "PerSecond_Fr", out frameRate_Hz);

                // Retrieve the channel state
                string channelState;
                MC.GetParam(channel, "ChannelState", out channelState);

                // Log frame rate and channel state
                VisionLogger.Log(WaftechLibraries.Log.LogType.Log, "E2VCameraHelper", string.Format("Frame Rate: {0:f2}, Channel State: {1}", frameRate_Hz, channelState));
            }
            catch (Euresys.MultiCamException ex)
            {
                VisionLogger.Log(WaftechLibraries.Log.LogType.Exception, "E2VCameraHelper", ex);
                VisionNotifier.AddNotification("MultiCam Exception: " + ex.Message);
                errorMessage = "MultiCam Exception: " + ex.Message;
            }
            catch (System.Exception ex)
            {
                VisionLogger.Log(WaftechLibraries.Log.LogType.Exception, "E2VCameraHelper", ex);
                VisionNotifier.AddNotification("System Exception: " + ex.Message);
                errorMessage = "System Exception: " + ex.Message;
            }
        }
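
The JPEG round-trip above (Save to test.jpg, then EImageBW8.Load) is lossy and adds disk I/O on every frame. If the Open eVision version in use accepts uncompressed files, a BMP round-trip at least preserves the Y8 pixel values; a sketch under that assumption:

        // Hedged alternative to the JPEG round-trip: BMP is lossless, so the Y8 pixel values
        // survive intact (assumes this Open eVision build's EImageBW8.Load accepts BMP files).
        string bmpPath = path_directory + "test.bmp";
        image.Save(bmpPath, System.Drawing.Imaging.ImageFormat.Bmp);
        eImage = new EImageBW8();
        eImage.SetSize(image.Width, image.Height);
        eImage.Load(bmpPath);
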
Code Example #17
        public void LiveThreadProc()
        {
            MC.SIGNALINFO sigInfo = new MC.SIGNALINFO();
            Microsoft.Win32.SafeHandles.SafeWaitHandle pHandle;
            int signalledHandle;

            // Define an array with 3 ManualResetEvent WaitHandles.
            WaitHandle[] waitHandles = new WaitHandle[]
            {
                new ManualResetEvent(false),
                new ManualResetEvent(false),
                new ManualResetEvent(false)
            };

            // Assignment of WaitHandles
            MC.GetParam(channel, MC.SignalEvent + MC.SIG_SURFACE_FILLED, out pHandle);
            waitHandles[0].SafeWaitHandle = pHandle;

            MC.GetParam(channel, MC.SignalEvent + MC.SIG_ACQUISITION_FAILURE, out pHandle);
            waitHandles[1].SafeWaitHandle = pHandle;

            MC.GetParam(channel, MC.SignalEvent + MC.SIG_END_CHANNEL_ACTIVITY, out pHandle);
            waitHandles[2].SafeWaitHandle = pHandle;

            while (m_RunLoop)
            {
                try
                {
                    // Wait for any of the 3 signals
                    signalledHandle = WaitHandle.WaitAny(waitHandles, 5000);

                    // Get the signal information
                    switch (signalledHandle)
                    {
                    case 0:
                        MC.GetSignalInfo(channel, MC.SIG_SURFACE_FILLED, out sigInfo);
                        break;

                    case 1:
                        MC.GetSignalInfo(channel, MC.SIG_ACQUISITION_FAILURE, out sigInfo);
                        break;

                    case 2:
                        MC.GetSignalInfo(channel, MC.SIG_END_CHANNEL_ACTIVITY, out sigInfo);
                        break;

                    case WaitHandle.WaitTimeout:
                        throw new Euresys.MultiCamException("Timeout");

                    default:
                        throw new Euresys.MultiCamException("Unknown signal");
                    }

                    HandleSignal(ref sigInfo);
                }
                catch (Euresys.MultiCamException exc)
                {
                    // An exception has occurred in the try {...} block.
                    // Retrieve its description and display it in a message box.
                    MessageBox.Show(exc.Message, "MultiCam Exception");
                    Close();
                }
            }
        }