Example #1
0
 /// <summary>
 /// Reports whether the camera is currently streaming.
 /// Returns false when no camera instance is attached.
 /// </summary>
 public override bool AcquisitionStatus()
 {
     return m_Camera != null && m_Camera.IsStreaming();
 }
Example #2
0
        /// <summary>
        /// Shuts down a camera: stops any active acquisition, then applies the
        /// requested close method (plain DeInit, DeviceReset, or FactoryReset).
        /// Reset commands are issued through the transport-layer node map.
        /// </summary>
        /// <param name="managedCamera">Camera to close.</param>
        /// <param name="nodeMap">Node map used to execute reset commands.</param>
        /// <param name="camNumber">Camera index, used only for console output.</param>
        /// <param name="closeMethod">Which shutdown path to take.</param>
        public static void CloseOryxCamera(IManagedCamera managedCamera, INodeMap nodeMap, int camNumber, CloseCameraMethod closeMethod)
        {
            // Reset commands require an initialized camera; bail out early otherwise.
            if (!managedCamera.IsInitialized())
            {
                Console.WriteLine("Camera number {0} not initialized. Cannot execute DeviceReset or FactoryReset command", camNumber.ToString());
                return;
            }

            // Acquisition must be stopped before the camera can be deinitialized or reset.
            if (managedCamera.IsStreaming())
            {
                managedCamera.EndAcquisition();
                Console.WriteLine("EndAcquisition executed from CloseOryxCamera block on camera {0}", camNumber.ToString());
            }

            switch (closeMethod)
            {
                case CloseCameraMethod.DeInit:
                    managedCamera.DeInit();
                    Console.WriteLine("Camera number {0} deinitialized.", camNumber.ToString());
                    break;

                case CloseCameraMethod.DeInitAndDeviceReset:
                    nodeMap.GetNode<ICommand>("DeviceReset").Execute();
                    Console.WriteLine("DeviceReset command executed on camera number {0}.", camNumber.ToString());
                    break;

                case CloseCameraMethod.DeInitAndFactoryReset:
                    nodeMap.GetNode<ICommand>("FactoryReset").Execute();
                    Console.WriteLine("FactoryReset command executed on camera number {0}.", camNumber.ToString());
                    break;
            }
        }
Example #3
0
        /// <summary>
        /// State-machine loop driving camera acquisition, live streaming, recording, and shutdown.
        /// Dequeues <c>ButtonCommands</c> messages from <c>camControlMessageQueue</c> and transitions
        /// between <c>DisplayLoopState</c> values. Streamed frames are downscaled and pushed to
        /// <c>streamOutputQueue</c>; while recording, raw frame bytes are additionally copied into
        /// an encode queue consumed by a dedicated encoder thread.
        /// </summary>
        private void DisplayLoop()
        {
            // Setup EncodeQueue and EncodeThread
            ConcurrentQueue<byte[]> encodeQueue = new ConcurrentQueue<byte[]>();
            Thread encodeThread = null;

            // Use enum to indicate loop state and which DisplayLoop block to execute
            DisplayLoopState loopState = DisplayLoopState.WaitingForMessagesWhileNotStreaming;

            int streamImageCtr      = 0;
            // Frames are 8-bit single-channel, so byte count equals pixel count.
            int rawImageSizeInBytes = frameSize.Width * frameSize.Height;

            bool isMessageDequeueSuccess;
            bool go = true;

            while (go)
            {
                if (loopState == DisplayLoopState.WaitingForMessagesWhileNotStreaming)
                {
                    // Idle state: poll the control queue, sleeping between polls to avoid spinning.
                    isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                    if (isMessageDequeueSuccess)
                    {
                        if (message == ButtonCommands.BeginAcquisition)
                        {
                            Console.WriteLine("{0} message received in WaitForMessagesWhileNotStreaming block on camera {1}. Press BeginStreaming Button after memory allocation complete.", message, camNumber.ToString());
                            managedCamera.BeginAcquisition();
                            continue;
                        }
                        else if (message == ButtonCommands.BeginStreaming)
                        {
                            loopState = DisplayLoopState.BeginStreaming;
                            continue;
                        }
                        else if (message == ButtonCommands.PlayInitiateTrialTone || message == ButtonCommands.PlayRewardTone)
                        {
                            // Tone messages are handled elsewhere; ignore them here.
                            continue;
                        }
                        else if (message == ButtonCommands.Exit)
                        {
                            loopState = DisplayLoopState.Exit;
                            continue;
                        }
                        else
                        {
                            Console.WriteLine("Invalid message ({0}) received in WaitForMessagesWhileNotStreaming on camera {1}", message, camNumber.ToString());
                            continue;
                        }
                    }
                    Thread.Sleep(50);
                    continue;
                }

                else if (loopState == DisplayLoopState.BeginStreaming)
                {
                    // Transition state: ensure acquisition is running, reset the frame counter,
                    // and consume one frame before entering the steady streaming state.
                    loopState = DisplayLoopState.StreamingAndWaitingForMessages;

                    if (!managedCamera.IsStreaming())
                    {
                        managedCamera.BeginAcquisition();
                    }

                    streamImageCtr = 0;

                    using (IManagedImage rawImage = managedCamera.GetNextImage())
                    {
                        streamImageCtr++;
                        long currFrameID        = rawImage.ChunkData.FrameID;
                        long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                    }
                    continue;
                }

                else if (loopState == DisplayLoopState.StreamingAndWaitingForMessages)
                {
                    try
                    {
                        using (IManagedImage rawImage = managedCamera.GetNextImage())
                        {
                            streamImageCtr++;
                            long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                            long currFrameID        = rawImage.ChunkData.FrameID;

                            // Only every streamEnqueueDutyCycle-th frame is forwarded to the display.
                            if (streamImageCtr % streamEnqueueDutyCycle == 0)
                            {
                                // Wrap the raw buffer without copying, then resize into a new Mat
                                // so the enqueued image does not alias the camera buffer.
                                Mat streamImageMat        = new Mat(rows: frameSize.Height, cols: frameSize.Width, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1, data: rawImage.DataPtr, step: frameSize.Width);
                                Mat streamImageMatResized = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                                CvInvoke.Resize(src: streamImageMat, dst: streamImageMatResized, dsize: streamFrameSize, interpolation: Emgu.CV.CvEnum.Inter.Linear);

                                RawMat matWithMetaData = new RawMat(frameID: currFrameID, frameTimestamp: currFrameTimestamp,
                                                                    isNewBackgroundImage: false, closeForm: false, rawMat: streamImageMatResized);

                                streamOutputQueue.Enqueue(matWithMetaData);
                                streamImageMat.Dispose();
                            }
                        }
                    }
                    catch (SpinnakerException ex)
                    {
                        Console.WriteLine("Error in DisplayLoop block on camera {0}:   {1}", camNumber.ToString(), ex.Message);
                    }

                    // Poll the control queue only every 10th frame to keep the hot path cheap.
                    if (streamImageCtr % 10 == 0)
                    {
                        isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                        if (isMessageDequeueSuccess)
                        {
                            if (message == ButtonCommands.StartRecording)
                            {
                                loopState = DisplayLoopState.InitiateFFProcessAndRecord;
                                continue;
                            }
                            else if (message == ButtonCommands.EndStreaming)
                            {
                                loopState = DisplayLoopState.EndAcquisition;
                                continue;
                            }
                            else
                            {
                                Console.WriteLine("{0} message invalid: LoopState = Streaming.", message);
                                continue;
                            }
                        }
                    }
                }

                else if (loopState == DisplayLoopState.InitiateFFProcessAndRecord)
                {
                    // Spin up the encoder thread, then fall into the streaming-and-recording state.
                    loopState    = DisplayLoopState.StreamingAndRecordingWhileWaitingForMessages;
                    encodeThread = new Thread(() => EncodeThreadInit(_camNumber: camNumber, _encodePipeName: encodePipeName, _sessionPath: sessionPath, _count: 7057600, _encodeQueue: encodeQueue));
                    encodeThread.Start();
                }

                else if (loopState == DisplayLoopState.StreamingAndRecordingWhileWaitingForMessages)
                {
                    try
                    {
                        using (IManagedImage rawImage = managedCamera.GetNextImage())
                        {
                            streamImageCtr++;
                            long currFrameTimestamp = rawImage.ChunkData.Timestamp;
                            long currFrameID        = rawImage.ChunkData.FrameID;

                            // Write image to pipe for encoding: every frame gets a private copy
                            // because rawImage is disposed at the end of this using block.
                            byte[] encodeImageCopy = new byte[rawImageSizeInBytes];
                            Marshal.Copy(source: rawImage.DataPtr, destination: encodeImageCopy, startIndex: 0, length: rawImageSizeInBytes);
                            encodeQueue.Enqueue(item: encodeImageCopy);

                            if (streamImageCtr % streamEnqueueDutyCycle == 0)
                            {
                                Mat streamImageMat        = new Mat(rows: frameSize.Height, cols: frameSize.Width, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1, data: rawImage.DataPtr, step: frameSize.Width);
                                Mat streamImageMatResized = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                                CvInvoke.Resize(src: streamImageMat, dst: streamImageMatResized, dsize: streamFrameSize, interpolation: Emgu.CV.CvEnum.Inter.Linear);

                                RawMat matWithMetaData = new RawMat(frameID: currFrameID, frameTimestamp: currFrameTimestamp,
                                                                    isNewBackgroundImage: false, closeForm: false, rawMat: streamImageMatResized);

                                streamOutputQueue.Enqueue(matWithMetaData);
                                streamImageMat.Dispose();
                            }
                        }
                    }
                    catch (SpinnakerException ex)
                    {
                        Console.WriteLine("Error in DisplayLoop block on camera {0}:   {1}", camNumber.ToString(), ex.Message);
                    }

                    if (streamImageCtr % 10 == 0)
                    {
                        isMessageDequeueSuccess = camControlMessageQueue.TryDequeue(out ButtonCommands message);
                        if (isMessageDequeueSuccess)
                        {
                            if (message == ButtonCommands.StopRecording)
                            {
                                loopState = DisplayLoopState.InitiateProcessTermination;
                                continue;
                            }
                            else if (message == ButtonCommands.EndStreaming)
                            {
                                loopState = DisplayLoopState.EndAcquisition;
                                continue;
                            }
                            // FIX: this fallback block was missing its 'else' and executed only by
                            // accident (both branches above 'continue'). Mirrors the dispatch in the
                            // StreamingAndWaitingForMessages state.
                            else
                            {
                                Console.WriteLine("{0} message invalid: LoopState = StreamingAndRecording.", message);
                                continue;
                            }
                        }
                    }
                }

                else if (loopState == DisplayLoopState.InitiateProcessTermination)
                {
                    // NOTE(review): nothing here signals encodeThread to stop or drains encodeQueue —
                    // confirm the encoder thread terminates via the pipe/its own logic.
                    Console.WriteLine("InitiateProcessTermination Block reached. Back to StreamingAndWaitingForMessages.");

                    loopState = DisplayLoopState.StreamingAndWaitingForMessages;
                }

                else if (loopState == DisplayLoopState.EndAcquisition)
                {
                    // NOTE(review): matWithMetaData (closeForm: true) is built but never enqueued to
                    // streamOutputQueue — the display form likely never receives the close signal.
                    // Left as-is pending confirmation of intended behavior; emptyMat is also never disposed.
                    Mat    emptyMat        = new Mat(size: streamFrameSize, type: Emgu.CV.CvEnum.DepthType.Cv8U, channels: 1);
                    RawMat matWithMetaData = new RawMat(frameID: 0, frameTimestamp: 0,
                                                        isNewBackgroundImage: false, closeForm: true, rawMat: emptyMat);

                    managedCamera.EndAcquisition();
                    loopState = DisplayLoopState.WaitingForMessagesWhileNotStreaming;
                    Console.WriteLine("Acquisition ended on camera {0}. LoopState = WaitingForMessagesWhileNotStreaming.", camNumber.ToString());
                }

                else if (loopState == DisplayLoopState.Exit)
                {
                    go = false;
                    CloseOryxCamera(Util.CloseCameraMethod.DeInitAndDeviceReset);
                }
            }
        }