Example No. 1
        public ulong GetImageTimeStamp()
        {
            try
            {
                managedCamera.BeginAcquisition();
                while (true)
                {
                    using (IManagedImage rawImage = managedCamera.GetNextImage())
                    {
                        if (!rawImage.IsIncomplete)
                        {
                            return(rawImage.TimeStamp);
                        }
                    }
                }
            }
            catch (SpinnakerException ex)
            {
                Debug.WriteLine("Error: {0}", ex.Message);
            }
            finally
            {
                managedCamera.EndAcquisition();
            }

            return(0);
        }
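
GetImageTimeStamp() above loops until a complete frame arrives, so it can block indefinitely if the camera never delivers one. Below is a minimal sketch of a bounded variant that uses only the calls shown above; the maxAttempts parameter and the overload itself are hypothetical additions, not part of the original class.

        // Sketch: same idea as GetImageTimeStamp(), but gives up after a fixed
        // number of incomplete frames instead of looping forever.
        // maxAttempts is a hypothetical parameter, not part of the original code.
        public ulong GetImageTimeStamp(int maxAttempts)
        {
            try
            {
                managedCamera.BeginAcquisition();
                for (int attempt = 0; attempt < maxAttempts; attempt++)
                {
                    using (IManagedImage rawImage = managedCamera.GetNextImage())
                    {
                        if (!rawImage.IsIncomplete)
                        {
                            return rawImage.TimeStamp;
                        }
                    }
                }
            }
            catch (SpinnakerException ex)
            {
                Debug.WriteLine("Error: {0}", ex.Message);
            }
            finally
            {
                managedCamera.EndAcquisition();
            }

            // No complete frame was received within maxAttempts.
            return 0;
        }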
Example No. 2
        public HImage Snap()
        {
            HImage hImage;

            cam.BeginAcquisition();
            using (IManagedImage rawImage = cam.GetNextImage())
            {
                hImage = new HImage("byte", (int)rawImage.Width, (int)rawImage.Height, rawImage.DataPtr);
            }
            cam.EndAcquisition();
            return(hImage);
        }
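
Snap() above does not call EndAcquisition() if GetNextImage() throws, and it builds the HImage even from an incomplete frame. A small sketch of a guarded variant follows, assuming the same cam field and HImage constructor; the SnapSafe name is hypothetical and returning null for an incomplete frame is only one possible policy.

        // Sketch: Snap() with a try/finally so EndAcquisition() runs even if
        // GetNextImage() throws, plus a guard against incomplete frames.
        public HImage SnapSafe()
        {
            cam.BeginAcquisition();
            try
            {
                using (IManagedImage rawImage = cam.GetNextImage())
                {
                    if (rawImage.IsIncomplete)
                    {
                        // Caller decides how to handle a dropped frame.
                        return null;
                    }
                    return new HImage("byte", (int)rawImage.Width, (int)rawImage.Height, rawImage.DataPtr);
                }
            }
            finally
            {
                cam.EndAcquisition();
            }
        }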
Example No. 3
        public override Bitmap GrabImageBitmap(PixelType pixelType)
        {
            Bitmap image = null;

            try
            {
                ClearBeforeNewOne();
                IManagedImage m_rawImage = m_Camera.GetNextImage();
                if (m_rawImage.ImageStatus != ImageStatus.IMAGE_NO_ERROR)
                {
                    LogHelper.AppLoger.Error("grab img error,error:" + m_rawImage.ImageStatus);
                }
                lock (this)
                {
                    if (pixelType == PixelType.BGR8)
                    {
                        using (IManagedImage convertedImage = m_rawImage.Convert(PixelFormatEnums.BGR8))
                        {
                            image = convertedImage.bitmap.Clone() as Bitmap;
                        }
                    }
                    else if (pixelType == PixelType.Mono8)
                    {
                        using (IManagedImage convertedImage = m_rawImage.Convert(PixelFormatEnums.Mono8))
                        {
                            image = convertedImage.bitmap.Clone() as Bitmap;
                        }
                    }
                    m_rawImage.Release();
                }
            }
            catch (Exception ex)
            {
                LogHelper.AppLoger.Error(ex);
            }
            return(image);
        }
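
Because the raw image in GrabImageBitmap() is released manually, an exception thrown before Release() leaves the camera buffer unreleased. Below is a sketch of the same flow with the raw image wrapped in a using block, assuming the same m_Camera field, PixelType enum, and logging helper; unlike the original's else-if, any PixelType other than BGR8 falls back to Mono8 here.

        // Sketch: same flow as GrabImageBitmap(), but the raw image is disposed
        // by a using block so the buffer is released even if Convert() or Clone() throws.
        public Bitmap GrabImageBitmapSafe(PixelType pixelType)
        {
            Bitmap image = null;
            try
            {
                ClearBeforeNewOne();
                using (IManagedImage rawImage = m_Camera.GetNextImage())
                {
                    if (rawImage.ImageStatus != ImageStatus.IMAGE_NO_ERROR)
                    {
                        LogHelper.AppLoger.Error("grab img error,error:" + rawImage.ImageStatus);
                    }

                    lock (this)
                    {
                        // Unlike the original, any PixelType other than BGR8 falls back to Mono8.
                        PixelFormatEnums target =
                            pixelType == PixelType.BGR8 ? PixelFormatEnums.BGR8 : PixelFormatEnums.Mono8;
                        using (IManagedImage convertedImage = rawImage.Convert(target))
                        {
                            image = convertedImage.bitmap.Clone() as Bitmap;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                LogHelper.AppLoger.Error(ex);
            }
            return image;
        }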
Example No. 4
        public IManagedImage RetrieveMonoImage()
        {
            IManagedImage imgResult = null;

            // Retrieve singleton reference to system object
            ManagedSystem system = new ManagedSystem();

            // Retrieve list of cameras from the system
            IList <IManagedCamera> camList = system.GetCameras();

            if (camList.Count < 1)
            {
                writeLog(String.Format("No camera detected. Aborted.\n\n"));
                return(null);
            }
            else
            {
                writeLog(String.Format("Number of cameras detected: {0}\n\n", camList.Count));
            }
            // Use the first camera
            using (camList[0])
            {
                writeLog(String.Format("Running example for the 1st camera...\n"));

                IManagedCamera cam = camList[0];

                try
                {
                    // Run for a camera

                    // Retrieve TL device nodemap and print device information
                    INodeMap nodeMapTLDevice = cam.GetTLDeviceNodeMap();

                    PrintDeviceInfo(nodeMapTLDevice);

                    // Initialize camera
                    cam.Init();

                    // Retrieve GenICam nodemap
                    INodeMap nodeMap = cam.GetNodeMap();

                    /*****  Acquire single BW image from the camera  *****/

                    writeLog(String.Format("\n*** BW IMAGE ACQUISITION ***\n\n"));
                    SetNodeMapItem(nodeMap, "AcquisitionMode", "Continuous");
                    cam.BeginAcquisition();
                    using (IManagedImage rawImage = cam.GetNextImage())
                    {
                        if (rawImage.IsIncomplete)
                        {
                            writeLog(String.Format(
                                         "Image incomplete with image status {0}...\n", rawImage.ImageStatus));
                            imgResult = null;
                        }
                        else
                        {
                            // TODO: Need to return the acquired rawImage here.
                            //IManagedImage monoImage = rawImage.Convert(
                            //    PixelFormatEnums.Mono16, ColorProcessingAlgorithm.EDGE_SENSING);
                            IManagedImage monoImage = rawImage.Convert(PixelFormatEnums.Mono8);
                            imgResult = monoImage;
                        }
                    }
                    cam.EndAcquisition();

                    /*****  Acquiring Complete  *****/

                    // Deinitialize camera
                    cam.DeInit();
                }
                catch (SpinnakerException ex)
                {
                    writeLog(String.Format("Error: {0}\n", ex.Message));
                    imgResult = null;
                }
                writeLog(String.Format("Camera example complete...\n"));
            }

            // Clear camera list before releasing system
            camList.Clear();

            // Release system
            system.Dispose();

            writeLog(String.Format("Done!\n"));

            return(imgResult);
        }
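
RetrieveMonoImage() calls SetNodeMapItem(nodeMap, "AcquisitionMode", "Continuous"), which is not shown here. A plausible sketch of such a helper for enumeration nodes follows, assuming it uses the two-node pattern described in Example No. 5 (retrieve the IEnum, retrieve the entry by name, assign its Symbolic value); the real helper may differ.

        // Hypothetical sketch of SetNodeMapItem for enumeration nodes; the actual
        // helper used above is not shown and may behave differently.
        private bool SetNodeMapItem(INodeMap nodeMap, string nodeName, string entryName)
        {
            // Retrieve the enumeration node from the nodemap.
            IEnum iEnumNode = nodeMap.GetNode<IEnum>(nodeName);
            if (iEnumNode == null || !iEnumNode.IsWritable)
            {
                writeLog(String.Format("Unable to set {0} (node retrieval).\n", nodeName));
                return false;
            }

            // Retrieve the entry node from the enumeration node.
            IEnumEntry iEntry = iEnumNode.GetEntryByName(entryName);
            if (iEntry == null || !iEntry.IsReadable)
            {
                writeLog(String.Format("Unable to set {0} to {1} (entry retrieval).\n", nodeName, entryName));
                return false;
            }

            // Set the symbolic value of the entry as the new value of the node.
            iEnumNode.Value = iEntry.Symbolic;
            return true;
        }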
Example No. 5
        // Code below is directly copied from example_acquisition

        // This function acquires and saves 10 images from a device.
        public int AcquireImages(IManagedCamera cam, INodeMap nodeMap, INodeMap nodeMapTLDevice)
        {
            int result = 0;

            writeLog(String.Format("\n*** IMAGE ACQUISITION ***\n\n"));

            try
            {
                //
                // Set acquisition mode to continuous
                //
                // *** NOTES ***
                // Because the example acquires and saves 10 images, setting
                // acquisition mode to continuous lets the example finish. If
                // set to single frame or multiframe (at a lower number of
                // images), the example would just hang. This is because the
                // example has been written to acquire 10 images while the
                // camera would have been programmed to retrieve less than that.
                //
                // Setting the value of an enumeration node is slightly more
                // complicated than other node types. Two nodes are required:
                // first, the enumeration node is retrieved from the nodemap and
                // second, the entry node is retrieved from the enumeration node.
                // The symbolic value of the entry node is then set as the new value
                // of the enumeration node.
                //
                // Notice that both the enumeration and entry nodes are checked
                // for availability and readability/writability. Enumeration
                // nodes are generally readable and writable whereas entry
                // nodes are only ever readable.
                //
                // Retrieve enumeration node from nodemap
                IEnum iAcquisitionMode = nodeMap.GetNode <IEnum>("AcquisitionMode");
                if (iAcquisitionMode == null || !iAcquisitionMode.IsWritable)
                {
                    writeLog(String.Format(
                                 "Unable to set acquisition mode to continuous (node retrieval). Aborting...\n\n"));
                    return(-1);
                }

                // Retrieve entry node from enumeration node
                IEnumEntry iAcquisitionModeContinuous = iAcquisitionMode.GetEntryByName("Continuous");
                if (iAcquisitionModeContinuous == null || !iAcquisitionModeContinuous.IsReadable)
                {
                    writeLog(String.Format(
                                 "Unable to set acquisition mode to continuous (enum entry retrieval). Aborting...\n\n"));
                    return(-1);
                }

                // Set symbolic from entry node as new value for enumeration node
                iAcquisitionMode.Value = iAcquisitionModeContinuous.Symbolic;

                writeLog(String.Format("Acquisition mode set to continuous...\n"));

                //
                // Begin acquiring images
                //
                // *** NOTES ***
                // What happens when the camera begins acquiring images depends
                // on which acquisition mode has been set. Single frame captures
                // only a single image, multiframe captures a set number of
                // images, and continuous captures a continuous stream of images.
                // Because the example calls for the retrieval of 10 images,
                // continuous mode has been set for the example.
                //
                // *** LATER ***
                // Image acquisition must be ended when no more images are needed.
                //
                cam.BeginAcquisition();

                writeLog(String.Format("Acquiring images...\n"));

                //
                // Retrieve device serial number for filename
                //
                // *** NOTES ***
                // The device serial number is retrieved in order to keep
                // different cameras from overwriting each other's images.
                // Grabbing image IDs and frame IDs makes a good alternative for
                // this purpose.
                //
                String deviceSerialNumber = "";

                IString iDeviceSerialNumber = nodeMapTLDevice.GetNode <IString>("DeviceSerialNumber");
                if (iDeviceSerialNumber != null && iDeviceSerialNumber.IsReadable)
                {
                    deviceSerialNumber = iDeviceSerialNumber.Value;

                    writeLog(String.Format(
                                 "Device serial number retrieved as {0}...\n", deviceSerialNumber));
                }
                writeLog(String.Format("\n"));

                // Retrieve, convert, and save images
                const int NumImages = 10;

                for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
                {
                    try
                    {
                        //
                        // Retrieve next received image
                        //
                        // *** NOTES ***
                        // Capturing an image houses images on the camera buffer.
                        // Trying to capture an image that does not exist will
                        // hang the camera.
                        //
                        // Using-statements help ensure that images are released.
                        // If too many images remain unreleased, the buffer will
                        // fill, causing the camera to hang. Images can also be
                        // released manually by calling Release().
                        //
                        using (IManagedImage rawImage = cam.GetNextImage())
                        {
                            //
                            // Ensure image completion
                            //
                            // *** NOTES ***
                            // Images can easily be checked for completion. This
                            // should be done whenever a complete image is
                            // expected or required. Alternatively, check image
                            // status for a little more insight into what
                            // happened.
                            //
                            if (rawImage.IsIncomplete)
                            {
                                writeLog(String.Format(
                                             "Image incomplete with image status {0}...\n", rawImage.ImageStatus));
                            }
                            else
                            {
                                //
                                // Print image information; width and height
                                // recorded in pixels
                                //
                                // *** NOTES ***
                                // Images have quite a bit of available metadata
                                // including CRC, image status, and offset
                                // values to name a few.
                                //
                                uint width = rawImage.Width;

                                uint height = rawImage.Height;

                                writeLog(String.Format(
                                             "Grabbed image {0}, width = {1}, height = {1}\n", imageCnt, width, height));
                                writeLog(String.Format(
                                             "Pixel format is {0}\n", rawImage.PixelFormatName));

                                //
                                // Convert image to mono 8
                                //
                                // *** NOTES ***
                                // Images can be converted between pixel formats
                                // by using the appropriate enumeration value.
                                // Unlike the original image, the converted one
                                // does not need to be released as it does not
                                // affect the camera buffer.
                                //
                                // Using statements are a great way to ensure code
                                // stays clean and avoids memory leaks.
                                //
                                using (IManagedImage convertedImage = rawImage.Convert(PixelFormatEnums.Mono8))
                                {
                                    // Create a unique filename
                                    String filename = "Acquisition-CSharp-";
                                    if (deviceSerialNumber != "")
                                    {
                                        filename = filename + deviceSerialNumber + "-";
                                    }
                                    filename = filename + imageCnt + ".jpg";

                                    //
                                    // Save image
                                    //
                                    // *** NOTES ***
                                    // The standard practice of the examples is
                                    // to use device serial numbers to keep
                                    // images of one device from overwriting
                                    // those of another.
                                    //
                                    convertedImage.Save(filename);

                                    writeLog(String.Format("Image saved at {0}\n\n", filename));
                                }
                            }
                        }
                    }
                    catch (SpinnakerException ex)
                    {
                        writeLog(String.Format("Error: {0}\n", ex.Message));
                        result = -1;
                    }
                }

                //
                // End acquisition
                //
                // *** NOTES ***
                // Ending acquisition appropriately helps ensure that devices
                // clean up properly and do not need to be power-cycled to
                // maintain integrity.
                //
                cam.EndAcquisition();
            }
            catch (SpinnakerException ex)
            {
                writeLog(String.Format("Error: {0}\n", ex.Message));
                result = -1;
            }

            return(result);
        }
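
AcquireImages() above expects an initialized camera and its nodemaps. Below is a sketch of how it might be driven end to end, following the same system and camera lifecycle shown in Example No. 4; the RunAcquisition name is hypothetical.

        // Sketch: drive AcquireImages() using the system/camera lifecycle
        // from Example No. 4. RunAcquisition is a hypothetical wrapper name.
        public int RunAcquisition()
        {
            int result = 0;

            // Retrieve singleton reference to system object and camera list.
            ManagedSystem system = new ManagedSystem();
            IList<IManagedCamera> camList = system.GetCameras();

            if (camList.Count < 1)
            {
                writeLog(String.Format("No camera detected. Aborted.\n\n"));
                camList.Clear();
                system.Dispose();
                return -1;
            }

            // Use the first camera.
            using (IManagedCamera cam = camList[0])
            {
                try
                {
                    INodeMap nodeMapTLDevice = cam.GetTLDeviceNodeMap();
                    cam.Init();
                    INodeMap nodeMap = cam.GetNodeMap();

                    result = AcquireImages(cam, nodeMap, nodeMapTLDevice);

                    cam.DeInit();
                }
                catch (SpinnakerException ex)
                {
                    writeLog(String.Format("Error: {0}\n", ex.Message));
                    result = -1;
                }
            }

            // Clear camera list before releasing the system.
            camList.Clear();
            system.Dispose();
            return result;
        }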
Example No. 6
        // This function acquires and saves 10 images from a device; please see
        // Acquisition_CSharp example for more in-depth comments on the
        // acquisition of images.
        int AcquireImages(IManagedCamera cam, INodeMap nodeMap, ref List <IManagedImage> images)
        {
            int result = 0;

            Console.WriteLine("\n*** IMAGE ACQUISITION ***\n");

            try {
                // Set acquisition mode to continuous
                IEnum iAcquisitionMode = nodeMap.GetNode <IEnum>("AcquisitionMode");
                if (iAcquisitionMode == null || !iAcquisitionMode.IsWritable)
                {
                    Console.WriteLine("Unable to set acquisition mode to continuous (node retrieval). Aborting...\n");
                    return(-1);
                }

                IEnumEntry iAcquisitionModeContinuous = iAcquisitionMode.GetEntryByName("Continuous");
                if (iAcquisitionModeContinuous == null || !iAcquisitionModeContinuous.IsReadable)
                {
                    Console.WriteLine("Unable to set acquisition mode to continuous (entry retrieval). Aborting...\n");
                    return(-1);
                }

                iAcquisitionMode.Value = iAcquisitionModeContinuous.Value;

                Console.WriteLine("Acquisition mode set to continuous...");

                // Begin acquiring images
                cam.BeginAcquisition();

                Console.WriteLine("Acquiring images...\n");

                // Retrieve and convert images
                const int NumImages = 10;

                for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
                {
                    // Retrieve the next received image
                    using (IManagedImage rawImage = cam.GetNextImage()) {
                        try {
                            if (rawImage.IsIncomplete)
                            {
                                Console.WriteLine("Image incomplete with image status {0}...\n", rawImage.ImageStatus);
                            }
                            else
                            {
                                // Print image information
                                Console.WriteLine("Grabbed image {0}, width = {1}, height {2}", imageCnt, rawImage.Width, rawImage.Height);

                                // Deep copy image into list
                                images.Add(rawImage.Convert(PixelFormatEnums.BayerBG8));
                            }
                        } catch (SpinnakerException ex) {
                            Console.WriteLine("Error: {0}", ex.Message);
                            result = -1;
                        }
                    }
                }

                cam.EndAcquisition();
            } catch (SpinnakerException ex) {
                Console.WriteLine("Error: {0}", ex.Message);
                result = -1;
            }

            return(result);
        }
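
The converted images added to the list are deep copies that are never wrapped in using blocks, so the caller should dispose them when done. A sketch of a possible caller follows, assuming the AcquireImages signature above; the AcquireAndRelease name is hypothetical.

        // Sketch: call AcquireImages() and explicitly dispose the deep-copied
        // images once they are no longer needed.
        void AcquireAndRelease(IManagedCamera cam, INodeMap nodeMap)
        {
            List<IManagedImage> images = new List<IManagedImage>();

            int result = AcquireImages(cam, nodeMap, ref images);
            if (result == 0)
            {
                Console.WriteLine("Acquired {0} images.", images.Count);
            }

            // Release the converted copies; they are not tied to the camera buffer.
            foreach (IManagedImage image in images)
            {
                image.Dispose();
            }
            images.Clear();
        }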
Example No. 7
        private void DisplayLoop()
        {
            // Setup EncodeQueue and EncodeThread
            ConcurrentQueue <byte[]> encodeQueue = new ConcurrentQueue <byte[]>();
            Thread encodeThread = null;

            // Use enum to indicate loop state and which DisplayLoop block to execute
            DisplayLoopState loopState = DisplayLoopState.WaitingForMessagesWhileNotStreaming;

            int         streamImageCtr      = 0;
            List <long> skipEvents          = new List <long>();
            int         rawImageSizeInBytes = frameSize.Width * frameSize.Height;

            bool isMessageDequeueSuccess;
            bool go = true;

            while (go)
            {
                if (loopState == DisplayLoopState.WaitingForMessagesWhileNotStreaming)
                {
                    isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                    if (isMessageDequeueSuccess)
                    {
                        if (message == ButtonCommands.BeginAcquisition)
                        {
                            Console.WriteLine("{0} message received in WaitForMessagesWhileNotStreaming block on camera {1}. Press BeginStreaming Button after memory allocation complete.", message, camNumber.ToString());
                            managedCamera.BeginAcquisition();
                            continue;
                        }
                        else if (message == ButtonCommands.BeginStreaming)
                        {
                            loopState = DisplayLoopState.BeginStreaming;
                            continue;
                        }
                        else if (message == ButtonCommands.PlayInitiateTrialTone || message == ButtonCommands.PlayRewardTone)
                        {
                            continue;
                        }
                        else if (message == ButtonCommands.Exit)
                        {
                            loopState = DisplayLoopState.Exit;
                            continue;
                        }
                        else
                        {
                            Console.WriteLine("Invalid message ({0}) received in WaitForMessagesWhileNotStreaming on camera {1}", message, camNumber.ToString());
                            continue;
                        }
                    }
                    Thread.Sleep(50);
                    continue;
                }

                else if (loopState == DisplayLoopState.BeginStreaming)
                {
                    loopState = DisplayLoopState.StreamingAndWaitingForMessages;

                    if (!managedCamera.IsStreaming())
                    {
                        managedCamera.BeginAcquisition();
                    }

                    streamImageCtr = 0;

                    using (IManagedImage rawImage = managedCamera.GetNextImage())
                    {
                        streamImageCtr++;
                        long currFrameID        = rawImage.ChunkData.FrameID;
                        long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                    }
                    continue;
                }

                else if (loopState == DisplayLoopState.StreamingAndWaitingForMessages)
                {
                    try
                    {
                        using (IManagedImage rawImage = managedCamera.GetNextImage())
                        {
                            streamImageCtr++;
                            long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                            long currFrameID        = rawImage.ChunkData.FrameID;

                            if (streamImageCtr % streamEnqueueDutyCycle == 0)
                            {
                                Mat streamImageMat        = new Mat(rows: frameSize.Height, cols: frameSize.Width, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1, data: rawImage.DataPtr, step: frameSize.Width);
                                Mat streamImageMatResized = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                                CvInvoke.Resize(src: streamImageMat, dst: streamImageMatResized, dsize: streamFrameSize, interpolation: Emgu.CV.CvEnum.Inter.Linear);

                                RawMat matWithMetaData = new RawMat(frameID: currFrameID, frameTimestamp: currFrameTimestamp,
                                                                    isNewBackgroundImage: false, closeForm: false, rawMat: streamImageMatResized);

                                streamOutputQueue.Enqueue(matWithMetaData);
                                streamImageMat.Dispose();
                            }
                        }
                    }
                    catch (SpinnakerException ex)
                    {
                        Console.WriteLine("Error in DisplayLoop block on camera {0}:   {1}", camNumber.ToString(), ex.Message);
                    }

                    if (streamImageCtr % 10 == 0)
                    {
                        isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                        if (isMessageDequeueSuccess)
                        {
                            if (message == ButtonCommands.StartRecording)
                            {
                                loopState = DisplayLoopState.InitiateFFProcessAndRecord;
                                continue;
                            }
                            else if (message == ButtonCommands.EndStreaming)
                            {
                                loopState = DisplayLoopState.EndAcquisition;
                                continue;
                            }
                            else
                            {
                                Console.WriteLine("{0} message invalid: LoopState = Streaming.", message);
                                continue;
                            }
                        }
                    }
                }

                else if (loopState == DisplayLoopState.InitiateFFProcessAndRecord)
                {
                    loopState    = DisplayLoopState.StreamingAndRecordingWhileWaitingForMessages;
                    encodeThread = new Thread(() => EncodeThreadInit(_camNumber: camNumber, _encodePipeName: encodePipeName, _sessionPath: sessionPath, _count: 7057600, _encodeQueue: encodeQueue));
                    encodeThread.Start();
                }

                else if (loopState == DisplayLoopState.StreamingAndRecordingWhileWaitingForMessages)
                {
                    try
                    {
                        using (IManagedImage rawImage = managedCamera.GetNextImage())
                        {
                            streamImageCtr++;
                            long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                            long currFrameID        = rawImage.ChunkData.FrameID;

                            // Write image to pipe for encoding:
                            byte[] encodeImageCopy = new byte[rawImageSizeInBytes];
                            Marshal.Copy(source: rawImage.DataPtr, destination: encodeImageCopy, startIndex: 0, length: rawImageSizeInBytes);
                            encodeQueue.Enqueue(item: encodeImageCopy);

                            if (streamImageCtr % streamEnqueueDutyCycle == 0)
                            {
                                Mat streamImageMat        = new Mat(rows: frameSize.Height, cols: frameSize.Width, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1, data: rawImage.DataPtr, step: frameSize.Width);
                                Mat streamImageMatResized = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                                CvInvoke.Resize(src: streamImageMat, dst: streamImageMatResized, dsize: streamFrameSize, interpolation: Emgu.CV.CvEnum.Inter.Linear);

                                RawMat matWithMetaData = new RawMat(frameID: currFrameID, frameTimestamp: currFrameTimestamp,
                                                                    isNewBackgroundImage: false, closeForm: false, rawMat: streamImageMatResized);

                                streamOutputQueue.Enqueue(matWithMetaData);
                                streamImageMat.Dispose();
                            }
                        }
                    }
                    catch (SpinnakerException ex)
                    {
                        Console.WriteLine("Error in DisplayLoop block on camera {0}:   {1}", camNumber.ToString(), ex.Message);
                    }

                    if (streamImageCtr % 10 == 0)
                    {
                        isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                        if (isMessageDequeueSuccess)
                        {
                            if (message == ButtonCommands.StopRecording)
                            {
                                loopState = DisplayLoopState.InitiateProcessTermination;
                                continue;
                            }
                            else if (message == ButtonCommands.EndStreaming)
                            {
                                loopState = DisplayLoopState.EndAcquisition;
                                continue;
                            }
                            else
                            {
                                Console.WriteLine("{0} message invalid: LoopState = StreamingAndRecording.", message);
                                continue;
                            }
                        }
                    }
                }

                else if (loopState == DisplayLoopState.InitiateProcessTermination)
                {
                    Console.WriteLine("InitiateProcessTermination Block reached. Back to StreamingAndWaitingForMessages.");



                    loopState = DisplayLoopState.StreamingAndWaitingForMessages;
                }

                else if (loopState == DisplayLoopState.EndAcquisition)
                {
                    Mat    emptyMat        = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                    RawMat matWithMetaData = new RawMat(frameID: 0, frameTimestamp: 0,
                                                        isNewBackgroundImage: false, closeForm: true, rawMat: emptyMat);

                    managedCamera.EndAcquisition();
                    loopState = DisplayLoopState.WaitingForMessagesWhileNotStreaming;
                    Console.WriteLine("Acquisition ended on camera {0}. LoopState = WaitingForMessagesWhileNotStreaming.", camNumber.ToString());
                }

                else if (loopState == DisplayLoopState.Exit)
                {
                    go = false;
                    CloseOryxCamera(Util.CloseCameraMethod.DeInitAndDeviceReset);
                }
            }
        }
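
DisplayLoop() declares a skipEvents list that is never populated. Below is a sketch of one way it might be used to record dropped frames from the ChunkData.FrameID values already read in the loop, assuming FrameIDs of consecutively delivered frames differ by 1 when nothing is dropped; the helper and its name are hypothetical.

        // Hypothetical sketch: record the FrameIDs of frames that were never
        // delivered, based on the gap between two consecutively received frames.
        private static void RecordSkips(long prevFrameID, long currFrameID, List<long> skipEvents)
        {
            // A gap larger than 1 between consecutive FrameIDs means frames were
            // dropped between the previous and current image.
            if (prevFrameID >= 0 && currFrameID - prevFrameID > 1)
            {
                for (long missing = prevFrameID + 1; missing < currFrameID; missing++)
                {
                    skipEvents.Add(missing);
                }
            }
        }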
Example No. 8
        void AcquireImages(AcquireMode acquireMode)
        {
            try
            {
                try
                {
                    if (acquireMode.Equals(AcquireMode.CONTINUOUS))
                    {
                        cam.AcquisitionMode.Value = AcquisitionModeEnums.Continuous.ToString();
                    }
                    else if (acquireMode.Equals(AcquireMode.MULTIFRAME))
                    {
                        cam.AcquisitionMode.Value       = AcquisitionModeEnums.MultiFrame.ToString();
                        cam.AcquisitionFrameCount.Value = NumImages;
                    }
                    else if (acquireMode.Equals(AcquireMode.SINGLEFRAME))
                    {
                        cam.AcquisitionMode.Value = AcquisitionModeEnums.SingleFrame.ToString();
                    }
                    //Console.WriteLine("Acquisition mode set - success!");
                }
                catch (Exception)
                {
                    Console.WriteLine("Failed to set acquisition mode!");
                }

                //
                // Before acquiring images
                //
                // *** NOTES ***
                // What happens when acquisition begins depends on the acquisition mode:
                // single frame captures only one image,
                // multiframe captures a set number of images,
                // and continuous captures a continuous stream of images.
                //
                // *** LATER ***
                // Image acquisition must be ended when no more images are needed.
                //
                cam.BeginAcquisition();

                //Console.WriteLine("Acquiring images...");

                switch (acquireMode)
                {
                case AcquireMode.CONTINUOUS:
                    stopwatch2.Start();
                    int miss = 0;
                    for (int imageCnt = 0; imageCnt < 100; imageCnt++)
                    {
                        try
                        {
                            //
                            // Retrieve the next received image
                            //
                            // *** NOTES ***
                            // Images are retrieved from the camera buffer; trying to retrieve an image that does not exist hangs the camera.
                            //
                            // The using keyword ensures that images are released properly.
                            // If too many unreleased images fill the buffer the camera hangs; Release() can also be called to release an image manually.
                            //
                            using (IManagedImage rawImage = cam.GetNextImage())
                            {
                                //
                                // Ensure image completion
                                //
                                // *** NOTES ***
                                // Check the image for completeness and inspect its status for errors.
                                //
                                if (rawImage.IsIncomplete)
                                {
                                    miss++;
                                    Console.WriteLine("Image incomplete with image status {0}...", rawImage.ImageStatus);
                                    Console.WriteLine("{1} miss={0}", miss, imageCnt);
                                }
                                else
                                {
                                    //
                                    // Print image width and height
                                    //
                                    // *** NOTES ***
                                    // Images carry plenty of metadata, including CRC, image status, and offset values.
                                    //
                                    //uint width = rawImage.Width;

                                    //uint height = rawImage.Height;

                                    //Console.WriteLine("Current image {0}, width = {1}, height = {2}", imageCnt, width, height);

                                    //
                                    // Convert image to 8-bit mono
                                    //
                                    // *** NOTES ***
                                    // Images can be converted between the available pixel format enumerations.
                                    // Unlike the original image, the converted image does not need to be released and does not affect the camera buffer.
                                    //
                                    // The using statement avoids memory leaks.
                                    //
                                    using (IManagedImage convertedImage = rawImage.Convert(PixelFormatEnums.Mono8))
                                    {
                                        // Display
                                        //ImageBox.Source = ToBitmapSource(convertedImage.bitmap);
                                    }
                                }
                            }
                        }
                        catch (SpinnakerException ex)
                        {
                            Console.WriteLine("Error: {0}", ex.Message);
                        }
                    }
                    Console.WriteLine("miss:{0} in 1000", miss);
                    stopwatch2.Stop();
                    Console.WriteLine("time:{0}", stopwatch2.ElapsedMilliseconds / 100.0);
                    stopwatch2.Reset();
                    break;

                case AcquireMode.MULTIFRAME:
                    for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
                    {
                        try
                        {
                            using (IManagedImage rawImage = cam.GetNextImage())
                            {
                                if (rawImage.IsIncomplete)
                                {
                                    Console.WriteLine("Image incomplete with image status {0}...", rawImage.ImageStatus);
                                }
                                else
                                {
                                    uint width = rawImage.Width;

                                    uint height = rawImage.Height;

                                    Console.WriteLine("Current image {0}, width = {1}, height = {2}", imageCnt, width, height);

                                    using (IManagedImage convertedImage = rawImage.Convert(PixelFormatEnums.Mono8))
                                    {
                                        ImageBox.Source = ToBitmapSource(convertedImage.bitmap);
                                        String filename = SavePath + "/" + "Acquisition-CSharp-";
                                        if (deviceSerialNumber != "")
                                        {
                                            filename = filename + deviceSerialNumber + "-";
                                        }
                                        filename = filename + imageCnt + ".jpg";
                                        convertedImage.Save(filename);
                                        //Console.WriteLine("Image {0} saved\n", filename);
                                    }
                                }
                            }
                        }
                        catch (SpinnakerException ex)
                        {
                            Console.WriteLine("Error: {0}", ex.Message);
                        }
                    }
                    break;

                case AcquireMode.SINGLEFRAME:
                    try
                    {
                        //stopwatch2.Start();
                        using (IManagedImage rawImage = cam.GetNextImage())
                        {
                            if (rawImage.IsIncomplete)
                            {
                                Console.WriteLine("Image incomplete with image status {0}...", rawImage.ImageStatus);
                            }
                            else
                            {
                                int width  = (int)rawImage.Width;
                                int height = (int)rawImage.Height;
                                Console.WriteLine("Current image {0}, width = {1}, height = {2}", 0, width, height);


                                //Matrix<double> cameraMat = new Matrix<double>(3, 3);
                                //cameraMat.Data[0, 0] = 2578.42;  cameraMat.Data[0, 1] = 0;         cameraMat.Data[0, 2] = 1266.60;
                                //cameraMat.Data[1, 0] = 0;        cameraMat.Data[1, 1] = 2491.45;   cameraMat.Data[1, 2] = 1111.35;
                                //cameraMat.Data[2, 0] = 0;        cameraMat.Data[2, 1] = 0;         cameraMat.Data[2, 2] = 1;
                                //Matrix<double> distortionMat = new Matrix<double>(1, 4);
                                //distortionMat.Data[0, 0] = -0.078470991609759;
                                //distortionMat.Data[0, 1] = 0.109106631527471;
                                //distortionMat.Data[0, 2] = 0;
                                //distortionMat.Data[0, 3] = 0;
                                //Matrix<double> mapxMat = new Matrix<double>(height, width);
                                //Matrix<double> mapyMat = new Matrix<double>(height, width);
                                //CvInvoke.InitUndistortRectifyMap(cameraMat, distortionMat, null, cameraMat, new System.Drawing.Size((int)width, (int)height), DepthType.Cv8U, mapxMat,mapyMat);

                                using (IManagedImage convertedImage = rawImage.Convert(PixelFormatEnums.Mono8))
                                {
                                    //ManagedImage currentImage = new ManagedImage(convertedImage);
                                    // Distortion correction
                                    //CvInvoke.Remap(convertedImage, currentImage, mapxMat, mapyMat, Inter.Linear);


                                    // Display
                                    ImageBox.Source = ToBitmapSource(convertedImage.bitmap);
                                    // Detect the brightest spot in each row
                                    IntPtr imgPtr = convertedImage.DataPtr;
                                    byte   maxPiexl;
                                    double X_ind, Y_ind;

                                    List <double[, ]> list = new List <double[, ]>();
                                    for (int y = 0; y < height; y++)
                                    {
                                        // Mono8 format; ignore two pixels at the image edges
                                        maxPiexl = Marshal.ReadByte(imgPtr, (int)(y * width + 2));
                                        X_ind    = 2;
                                        Y_ind    = y;



                                        for (int x = 3; x < width - 2; x++)
                                        {
                                            byte currentPiexl = Marshal.ReadByte(imgPtr, (int)(y * width + x));
                                            if (currentPiexl > maxPiexl)
                                            {
                                                maxPiexl = currentPiexl;
                                                X_ind    = x;
                                                Y_ind    = y;
                                            }
                                        }

                                        if (maxPiexl < acceptPointValue.slider.Value)
                                        {
                                            continue;
                                        }

                                        // Center-of-gravity method for the spot center, using a neighborhood of 2 pixels on each side
                                        byte piexel0 = Marshal.ReadByte(imgPtr, (int)(Y_ind * width + X_ind - 2));
                                        byte piexel1 = Marshal.ReadByte(imgPtr, (int)(Y_ind * width + X_ind - 1));
                                        byte piexel2 = Marshal.ReadByte(imgPtr, (int)(Y_ind * width + X_ind + 1));
                                        byte piexel3 = Marshal.ReadByte(imgPtr, (int)(Y_ind * width + X_ind + 2));

                                        X_ind = (piexel0 * (X_ind - 2) + piexel1 * (X_ind - 1) + maxPiexl * X_ind + piexel2 * (X_ind + 1) + piexel3 * (X_ind + 2)) / (piexel0 + piexel1 + maxPiexl + piexel2 + piexel3);
                                        //Console.WriteLine("(x,y)=>({0},{1})", X_ind, Y_ind);

                                        // Distance calculation
                                        double halfHeightPic = 1111.35;
                                        double halfWidthPic  = 1266.60;
                                        // All values converted to mm
                                        double perPiexl = 3.45 / 1000.0;      // size of one pixel
                                        double f        = 8.596;              //8.596; // focal length
                                        double s        = 265;                // baseline length
                                        double beta0    = 90 * Math.PI / 180; // initial angle
                                        //double epsilon = 0 * Math.PI / 180;
                                        //double offset0 = -halfWidthPic* perPiexl; // to be fitted
                                        double beta1;

                                        double _d, _f, d, offset;

                                        _f = f / Math.Cos(Math.Atan(Math.Abs(Y_ind - halfHeightPic) * perPiexl / f));
                                        //offset = offset0 + f * (Math.Tan(epsilon + Math.PI/2 - beta0) - Math.Tan(Math.PI/2 - beta0));
                                        offset = f / Math.Tan(beta0);
                                        //beta1 = Math.Atan(_f / (halfWidthPic*perPiexl + offset));
                                        beta1 = Math.Atan(_f / offset);
                                        _d    = s * _f / ((X_ind - halfWidthPic) * perPiexl + offset);
                                        d     = f * s / ((X_ind - halfWidthPic) * perPiexl + offset);

                                        double xx, yy, zz;
                                        xx = _d * Math.Tan(Math.PI / 2 - beta1);
                                        yy = _d * Math.Sin(Math.Atan(Math.Abs(Y_ind - halfHeightPic) * perPiexl / f));
                                        zz = d;

                                        double[,] xyz = new double[1, 3];
                                        xyz[0, 0]     = beta0 > Math.PI / 2 ? xx : -xx;
                                        xyz[0, 1]     = Y_ind > halfHeightPic ? yy : -yy;
                                        xyz[0, 2]     = zz;
                                        list.Add(xyz);
                                        Console.WriteLine("[{3}\t,{4}\t](x,y,z)=>({0}\t,{1}\t,{2}\t)", Math.Round(xyz[0, 0], 3), Math.Round(xyz[0, 1], 3), Math.Round(xyz[0, 2], 3), Y_ind, Math.Round(X_ind, 3));//math.round(x,3)保留3为小数
                                    }
                                    // Save the list
                                    Util.PLYUtil ply = new Util.PLYUtil(".");
                                    ply.PLYWriter(list, "raw.ply");
                                }
                            }
                        }

                        Console.WriteLine("Single-frame acquire/display/compute time: {0}", stopwatch2.ElapsedMilliseconds);
                    }
                    catch (SpinnakerException ex)
                    {
                        Console.WriteLine("Error: {0}", ex.Message);
                    }
                    break;
                }

                //
                // End acquisition
                //
                // *** NOTES ***
                // Ending acquisition keeps the device in a clean state so it does not need to be power-cycled.
                //
                cam.EndAcquisition();
            }
            catch (SpinnakerException ex)
            {
                Console.WriteLine("Error: {0}", ex.Message);
            }
        }
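
The single-frame branch above finds the brightest pixel in each row and refines its column with a five-pixel center-of-gravity average. Below is a sketch that isolates that step into a helper, assuming Mono8 data laid out row-major at the image DataPtr; the helper name and the -1 "no spot" return value are hypothetical.

        // Hypothetical helper isolating the per-row peak search and the
        // five-pixel center-of-gravity refinement used in Example No. 8.
        private static double FindRowPeakSubpixel(IntPtr imgPtr, int width, int y, byte threshold)
        {
            // Find the brightest pixel in the row, ignoring two pixels at each edge.
            byte maxPixel = Marshal.ReadByte(imgPtr, y * width + 2);
            int xInd = 2;
            for (int x = 3; x < width - 2; x++)
            {
                byte current = Marshal.ReadByte(imgPtr, y * width + x);
                if (current > maxPixel)
                {
                    maxPixel = current;
                    xInd = x;
                }
            }

            // Peak too dark: treat the row as containing no spot.
            if (maxPixel < threshold)
            {
                return -1;
            }

            // Center-of-gravity over a 5-pixel neighborhood gives a subpixel column.
            byte p0 = Marshal.ReadByte(imgPtr, y * width + xInd - 2);
            byte p1 = Marshal.ReadByte(imgPtr, y * width + xInd - 1);
            byte p2 = Marshal.ReadByte(imgPtr, y * width + xInd + 1);
            byte p3 = Marshal.ReadByte(imgPtr, y * width + xInd + 2);

            return (p0 * (xInd - 2) + p1 * (xInd - 1) + maxPixel * xInd
                    + p2 * (xInd + 1) + p3 * (xInd + 2))
                   / (double)(p0 + p1 + maxPixel + p2 + p3);
        }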