Example #1
        static void Main(string[] args)
        {
            tErr err = 0;

            // Initialize the API.
            if ((err = Pv.InitializeNoDiscovery()) == 0)
            {
                System.Net.IPAddress IP = System.Net.IPAddress.Parse(args[0]);
                tCamera Camera          = new tCamera();

                Camera.S_addr = (uint)IP.Address;

                // Open the camera.
                if (CameraOpen(ref Camera))
                {
                    // List some information.
                    DisplayInfo(ref Camera);

                    // Unsetup the camera.
                    CameraClose(ref Camera);
                }
                else
                {
                    Console.WriteLine("Failed to open camera.");
                }

                // Uninitialize the API.
                Pv.UnInitialize();
            }
            else
            {
                Console.WriteLine("Failed to initalize.");
            }
        }
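All of these snippets rely on a tCamera helper structure that each sample defines elsewhere and that is not part of this excerpt. For orientation only, here is a minimal sketch assembled from the fields the examples actually touch; the field names and types are assumptions, not the layout shipped with the samples, and individual samples add further fields (Serial, DisplayName, lErr, sAddr, ...).

        // Hypothetical sketch of the tCamera helper structure used throughout these samples.
        // Everything here is inferred from the calls in the excerpts and may differ from the real samples.
        struct tCamera
        {
            public UInt32      UID;     // Unique ID returned by Pv.CameraList().
            public UInt32      Handle;  // Handle returned by Pv.CameraOpen()/Pv.CameraOpenByAddr().
            public uint        S_addr;  // Camera IP address.
            public tCameraInfo Info;    // Camera information (serial string, display name, ...).
            public tIpSettings Conf;    // IP configuration (assumed type name).
            public tFrame      Frame;   // Frame descriptor used for capture.
            public byte[]      Buffer;  // Managed image buffer.
            public GCHandle    GC;      // Pinning handle for Buffer.
        }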
Example #2
        // Setup the camera for capture.
        static bool CameraSetup(ref tCamera Camera)
        {
            UInt32 FrameSize = 0;

            if (Pv.CameraOpen(Camera.UID, tAccessFlags.eAccessMaster, out Camera.Handle) == 0)
            {
                // Get the byte size of the buffer.
                if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
                {
                    Camera.Buffer = new byte[FrameSize];
                    Camera.GC     = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
                    // Address of the pinned buffer.
                    Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
                    // Buffer size.
                    Camera.Frame.ImageBufferSize = FrameSize;
                    return(true);
                }
                else
                {
                    // The attribute could not be read; close the camera and reset the handle before failing.
                    Pv.CameraClose(Camera.Handle);
                    Camera.Handle = 0;
                    return(false);
                }
            }
            else
            {
                return(false);
            }
        }
Example #3
 // Close the camera.
 static void CameraClose(ref tCamera Camera)
 {
     // Reset the trigger mode.
     Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
 }
Example #4
        // Display information about the detected camera.
        static void DisplayInfo(ref tCamera Camera)
        {
            // Display the serial number and the display name.
            Console.WriteLine("-> {0} - {1}", Camera.Info.SerialString, Camera.Info.DisplayName);

            Console.Write("Mode supported: ");

            if ((Camera.Conf.ConfigModeSupport & (uint)tIpConfig.eIpConfigPersistent.GetTypeCode()) != 0)
            {
                Console.Write("Fixed ");
            }
            if ((Camera.Conf.ConfigModeSupport & (uint)tIpConfig.eIpConfigDhcp.GetTypeCode()) != 0)
            {
                Console.Write("DHCP&AutoIP ");
            }
            if ((Camera.Conf.ConfigModeSupport & (uint)tIpConfig.eIpConfigAutoIp.GetTypeCode()) != 0)
            {
                Console.Write("AutoIP");
            }

            Console.WriteLine();

            // Check to see which mode the camera is in.
            Console.Write("Current mode: ");

            if (Camera.Conf.ConfigMode == tIpConfig.eIpConfigPersistent)
            {
                Console.WriteLine("Fixed");
            }
            else
            if (Camera.Conf.ConfigMode == tIpConfig.eIpConfigDhcp)
            {
                Console.WriteLine("DHCP&AutoIP");
            }
            else
            if (Camera.Conf.ConfigMode == tIpConfig.eIpConfigAutoIp)
            {
                Console.WriteLine("AutoIP");
            }
            else
            {
                Console.WriteLine("none");
            }

            // Display the IP address.
            Camera.S_addr = Camera.Conf.CurrentIpAddress;
            System.Net.IPAddress Address = new IPAddress(Camera.S_addr);
            Console.WriteLine("Current address: {0}", Address);

            // Display the subnet.
            Camera.S_addr = Camera.Conf.CurrentIpSubnet;
            System.Net.IPAddress Subnet = new IPAddress(Camera.S_addr);
            Console.WriteLine("Current subnet: {0}", Subnet);

            // Display the gateway.
            Camera.S_addr = Camera.Conf.CurrentIpGateway;
            System.Net.IPAddress GateWay = new IPAddress(Camera.S_addr);
            Console.WriteLine("Current gateway: {0}", GateWay);
        }
Example #5
 // Unsetup the camera.
 static void CameraUnsetup(ref tCamera Camera)
 {
     Console.WriteLine("Closing the camera now...");
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
     Console.WriteLine("Closed.");
     // Reset the handle.
     Camera.Handle = 0;
 }
Example #6
        // frame callback dummy for the stream process.
        static void FrameDummyCBStream(IntPtr pFrame)
        {
            tCamera Camera = new tCamera();

            // Copy the frame structure back into our frame.
            Camera.Frame = (tFrame)Marshal.PtrToStructure(pFrame, typeof(tFrame));

            UInt32 status = (UInt32)(Camera.Frame.Status);

            Console.WriteLine("Frame returned with status = {0:0} ", status);
        }
Example #7
 // Close the camera.
 static void CameraClose(ref tCamera Camera)
 {
     // Reset the trigger mode.
     Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
     // Delete the allocated buffer.
     Camera.GC.Free();
     // Reset the handle.
     Camera.Handle = 0;
 }
Example #8
        // Callback called when a frame is done.
        static void FrameDoneCB(ref tFrame pFrame, ref tCamera Camera)
        {
            GCHandle pFrame1 = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);

            tFrameCallback FrameCB = new tFrameCallback(FrameDummyCB);

            // If the frame was completed, re-enqueue it.
            if (pFrame.Status != tErr.eErrUnplugged && pFrame.Status != tErr.eErrCancelled)
            {
                Pv.CaptureQueueFrame(Camera.Handle, pFrame1.AddrOfPinnedObject(), FrameCB);
            }
        }
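FrameDoneCB above, as well as the CameraSnap examples further down, pass a FrameDummyCB delegate that is not shown in these excerpts. Assuming it mirrors FrameDummyCBStream from Example #6, a minimal placeholder could look like this:

        // Hypothetical minimal frame callback, assumed to mirror FrameDummyCBStream in Example #6:
        // it only reports the status of the returned frame.
        static void FrameDummyCB(IntPtr pFrame)
        {
            // Copy the frame structure back from the unmanaged pointer.
            tFrame Frame = (tFrame)Marshal.PtrToStructure(pFrame, typeof(tFrame));

            Console.WriteLine("Frame returned with status = {0}", Frame.Status);
        }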
Example #9
 // Open the Camera by IP.
 static bool CameraOpen(ref tCamera Camera)
 {
     // Open the camera by its address.
     if ((Pv.CameraOpenByAddr(Camera.S_addr, tAccessFlags.eAccessMaster, out Camera.Handle) == tErr.eErrSuccess))
     {
         Pv.CameraInfoByAddr(Camera.S_addr, ref Camera.Info, ref Camera.Conf);
         return(true);
     }
     else
     {
         return(false);
     }
 }
Example #10
        // Reset the camera.
        static void CameraReset(ref tCamera Camera)
        {
            UInt32 [] Address = new UInt32 [1];
            // Register address.
            Address[0] = 0x10008;
            UInt32 [] Value = new UInt32 [1];
            // Hard-reset value.
            Value[0] = 2;
            UInt32 NumComplete = 0;

            Pv.RegisterWrite(Camera.Handle, 1, Address, Value, ref NumComplete);

            Console.WriteLine("The camera: {0} was reset.", Camera.DisplayName);
        }
Example #11
 // Close the camera.
 static void CameraClose(ref tCamera Camera)
 {
     // Dequeue all the frames still queued (this will block until they have all been dequeued).
     Pv.CaptureQueueClear(Camera.Handle);
     // Reset the trigger mode.
     Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
     Console.WriteLine("camera is closed, deleting the frames now");
     // Delete the allocated buffer.
     Camera.GC.Free();
     // Reset the handle.
     Camera.Handle = 0;
 }
Example #12
        // Setup the camera for capture.
        static bool CameraSetup(ref tCamera Camera)
        {
            UInt32 FrameSize  = 0;
            UInt32 FrameCount = 600;

            // Get the byte size of the buffer.
            if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
            {
                Camera.Buffer = new byte[FrameSize];
                Camera.GC     = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
                // Address of the pinned buffer
                Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
                // Buffer size.
                Camera.Frame.ImageBufferSize = FrameSize;

                Console.WriteLine("We have allocated {0} frames of {1} bytes each.", FrameCount, FrameSize);

                // Start the capture mode.
                if (Pv.CaptureStart(Camera.Handle) == 0)
                {
                    // Set the camera in software acquisition mode.
                    if (Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software") == 0)
                    {
                        // Set the acquisition mode to continuous.
                        if (Pv.CommandRun(Camera.Handle, "AcquisitionStart") != 0)
                        {
                            // If that fails, reset the camera to non capture mode.
                            Pv.CaptureEnd(Camera.Handle);
                            return(false);
                        }
                        else
                        {
                            return(true);
                        }
                    }
                    else
                    {
                        return(false);
                    }
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                return(false);
            }
        }
Example #13
 // Close the camera.
 static void CameraClose(ref tCamera Camera)
 {
     // Reset the trigger mode.
     Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
     // Clear the queue.
     Console.WriteLine("Clearing the queue...");
     Pv.CaptureQueueClear(Camera.Handle);
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
     Console.WriteLine("Closing the camera.");
     // Delete the allocated buffer.
     Camera.GC.Free();
     // Reset the handle.
     Camera.Handle = 0;
 }
Example #14
        // Get the UID and Serial Number of the first camera listed.
        static bool CameraGet(ref tCamera Camera)
        {
            UInt32 count, connected = 0;

            tCameraInfo[] list = new tCameraInfo[1];

            if ((count = Pv.CameraList(list, 1, ref connected)) != 0)
            {
                Camera.UID = list[0].UniqueId;
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #15
        static void Main(string[] args)
        {
            tErr err = 0;

            tCameraInfo[] List = new tCameraInfo[10];

            // Initialize the API.
            if ((err = Pv.Initialize()) == 0)
            {
                tCamera Camera = new tCamera();

                // Wait for a camera to be detected.
                WaitForCamera();

                // Get info from the camera.
                CameraGet(ref Camera);

                if (CameraGrab())
                {
                    // Open the camera.
                    if (CameraOpen(ref Camera))
                    {
                        // Change some attributes.
                        CameraAdjust(ref Camera);

                        // Unsetup the camera.
                        CameraClose(ref Camera);
                    }

                    else
                    {
                        Console.WriteLine("Failed to open camera.");
                    }
                }
                else
                {
                    Console.WriteLine("Failed to grab camera.");
                }

                // Uninitialize the API.
                Pv.UnInitialize();
            }
            else
            {
                Console.WriteLine("Failed to initalize.");
            }
        }
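The Main routines in these samples call a WaitForCamera helper that is not included in the excerpts. A plausible sketch, assuming the wrapper exposes Pv.CameraCount() like the underlying PvAPI, simply polls until at least one camera has been detected:

        // Hypothetical WaitForCamera helper: poll until the API reports at least one camera.
        // Assumes Pv.CameraCount() exists in the wrapper, mirroring PvCameraCount() in the C API.
        static void WaitForCamera()
        {
            Console.Write("Waiting for a camera ");

            while (Pv.CameraCount() == 0)
            {
                Console.Write(".");
                Thread.Sleep(250);
            }

            Console.WriteLine();
        }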
Example #16
        // Snap a frame from the camera.
        static bool CameraSnap(ref tCamera Camera)
        {
            bool           result;
            GCHandle       pFrame  = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
            tFrameCallback FrameCB = new tFrameCallback(FrameDummyCB);

            // Queue the frame.
            if (Pv.CaptureQueueFrame(Camera.Handle, pFrame.AddrOfPinnedObject(), FrameCB) == 0)
            {
                Console.WriteLine("Triggering the camera ...");

                // Trigger the capture.
                if (Pv.CommandRun(Camera.Handle, "FrameStartTriggerSoftware") == 0)
                {
                    Console.WriteLine("Waiting for the frame to be done ...");

                    // Wait for the frame to be returned.
                    while (Pv.CaptureWaitForFrameDone(Camera.Handle, pFrame.AddrOfPinnedObject(), 10) == tErr.eErrTimeout)
                    {
                        Console.WriteLine("Still waiting ...");
                    }

                    // Copy the frame structure back into our frame.
                    Camera.Frame = (tFrame)Marshal.PtrToStructure(pFrame.AddrOfPinnedObject(), typeof(tFrame));

                    // Check the status flag.
                    if (!(result = (Camera.Frame.Status == tErr.eErrSuccess)))
                    {
                        Console.WriteLine("Frame captured un-succesfully {0}", Camera.Frame.Status);
                    }
                }
                else
                {
                    Pv.CaptureQueueClear(Camera.Handle);
                    result = false;
                }
            }
            else
            {
                result = false;
            }

            pFrame.Free();

            return(result);
        }
Example #17
        // Start streaming frames.
        static bool DoStream(ref tCamera Camera)
        {
            tErr           Err;
            UInt32         FrameCount = 0;
            GCHandle       pFrame     = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
            tFrameCallback FrameCB    = new tFrameCallback(FrameDummyCBStream);
            uint           timeout    = 1000;

            // Adjust the packet size for optimal performance.
            Pv.CaptureAdjustPacketSize(Camera.Handle, 8228);

            Console.WriteLine("Streaming has started...");

            // Set the camera in acquisition mode.
            if (((Err = Pv.CaptureStart(Camera.Handle)) == 0))
            {
                if ((Pv.CommandRun(Camera.Handle, "AcquisitionStart")) != 0)
                {
                    // If that fails, reset the camera to non capture mode.
                    Pv.CaptureEnd(Camera.Handle);
                    Console.WriteLine("PvCommandRun() failed!");
                    return(false);
                }
                else
                {
                    // Stream for a total of 100 frames.
                    while (FrameCount < 100)
                    {
                        if (Pv.CaptureQueueFrame(Camera.Handle, pFrame.AddrOfPinnedObject(), FrameCB) == 0)
                        {
                            Pv.CaptureWaitForFrameDone(Camera.Handle, pFrame.AddrOfPinnedObject(), timeout);
                            FrameCount++;
                        }
                    }
                    // After the 100 frames, stop streaming.
                    CameraStop(ref Camera);
                    Console.WriteLine("Streaming stopped.");
                    return(true);
                }
            }
            else
            {
                Console.WriteLine("PvCaptureStart() failed with error code {0}.", Err);
                return(false);
            }
        }
Example #18
        // Snap a frame from the camera.
        static bool CameraSnap(ref tCamera Camera)
        {
            bool           result;
            GCHandle       pFrame  = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
            tFrameCallback FrameCB = new tFrameCallback(FrameDummyCB);

            // Adjust the packet size for optimal performance.
            Pv.CaptureAdjustPacketSize(Camera.Handle, 8228);

            // Queue the frame.
            if (Pv.CaptureQueueFrame(Camera.Handle, pFrame.AddrOfPinnedObject(), FrameCB) == 0)
            {
                // Trigger the capture.
                if (Pv.CommandRun(Camera.Handle, "FrameStartTriggerSoftware") == 0)
                {
                    // Then wait for the frame to be returned.
                    if (Pv.CaptureWaitForFrameDone(Camera.Handle, pFrame.AddrOfPinnedObject(), 0) == 0)
                    {
                        // Check the status flag.
                        if (!(result = (Camera.Frame.Status == tErr.eErrSuccess)))
                        {
                            Console.WriteLine("Frame captured un-succesfully {0}.", Camera.Frame.Status);
                        }
                    }
                    else
                    {
                        Pv.CaptureQueueClear(Camera.Handle);
                        result = false;
                    }
                }
                else
                {
                    Pv.CaptureQueueClear(Camera.Handle);
                    result = false;
                }
            }
            else
            {
                result = false;
            }

            pFrame.Free();

            return(result);
        }
Example #19
        // Get the UID and Serial Number of the first camera listed.
        static bool CameraGrab(ref tCamera Camera)
        {
            UInt32 count, connected = 0;

            tCameraInfo[] list = new tCameraInfo[1];

            if ((count = Pv.CameraList(list, 1, ref connected)) != 0)
            {
                Camera.UID    = list[0].UniqueId;
                Camera.Serial = list[0].SerialString;
                Console.WriteLine("Grabbing camera: {0}", Camera.Serial);
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #20
        // Save a frame to a file.
        static bool FrameSave(ref tCamera Camera, string file)
        {
            Bitmap     lBitmap = new Bitmap((int)Camera.Frame.Width, (int)Camera.Frame.Height, PixelFormat.Format24bppRgb);
            Rectangle  lRect   = new Rectangle(new Point(0, 0), new Size((int)Camera.Frame.Width, (int)Camera.Frame.Height));
            BitmapData lData   = lBitmap.LockBits(lRect, ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);

            if (Frame2Data(ref Camera, ref lData))
            {
                lBitmap.UnlockBits(lData);
                lBitmap.Save(file);
                return(true);
            }
            else
            {
                lBitmap.UnlockBits(lData);
                return(false);
            }
        }
Example #21
        // Opens the camera by IP.
        static bool CameraOpen(ref tCamera Camera)
        {
            // Get the camera's IP configuration.
            if ((Camera.lErr = Pv.CameraIpSettingsGet(Camera.UID, ref Camera.Conf)) == tErr.eErrSuccess)
            {
                Camera.sAddr = Camera.Conf.CurrentIpAddress;
            }

            // Open the camera by its address.
            if ((Pv.CameraOpenByAddr(Camera.sAddr, tAccessFlags.eAccessMaster, out Camera.Handle) == tErr.eErrSuccess))
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #22
        // Adjust the packet size.
        static void CameraAdjust(ref tCamera Camera)
        {
            UInt32 MaxSize = 16456;

            Console.WriteLine("Adjusting...");

            if ((Pv.CaptureAdjustPacketSize(Camera.Handle, MaxSize) == 0))
            {
                UInt32 Size = 0;

                Pv.AttrUint32Get(Camera.Handle, "PacketSize", ref Size);

                Console.WriteLine("The best packet size is " + Size + " bytes.");
            }
            else
            {
                Console.WriteLine("Sorry, there was an error while trying to adjust the packet size.");
            }
        }
Example #23
        // Snap a frame.
        static bool DoSnap(ref tCamera Camera)
        {
            GCHandle       pFrame  = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
            tFrameCallback FrameCB = new tFrameCallback(FrameDummyCBSnap);
            uint           timeout = 1000;

            // Set the camera in acquisition mode.
            if ((Pv.CaptureStart(Camera.Handle)) == 0)
            {
                // Set the acquisition mode to continuous.
                if ((Pv.CommandRun(Camera.Handle, "AcquisitionStart")) != 0)
                {
                    // If that fails, reset the camera to non-capture mode.
                    Pv.CaptureEnd(Camera.Handle);
                    return(false);
                }
                else
                {
                    bool failed = false;

                    // Enqueue the single frame.
                    if (Pv.CaptureQueueFrame(Camera.Handle, pFrame.AddrOfPinnedObject(), FrameCB) == 0)
                    {
                        Console.WriteLine("Waiting for the frame...");
                        failed = Pv.CaptureWaitForFrameDone(Camera.Handle, pFrame.AddrOfPinnedObject(), timeout) != 0;
                        Console.WriteLine("Frame is done...");
                        Console.WriteLine("Snapping done.");
                    }
                    else
                    {
                        failed = true;
                    }

                    Pv.CommandRun(Camera.Handle, "AcquisitionStop");
                    Pv.CaptureEnd(Camera.Handle);
                    return(!failed);
                }
            }
            else
            {
                return(false);
            }
        }
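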
Example #24
        // Setup the camera for capture.
        static bool CameraStart(ref tCamera Camera)
        {
            UInt32 FrameSize = 0;

            // Adjust the packet size for optimal performance.
            Pv.CaptureAdjustPacketSize(Camera.Handle, 8228);

            // Get the byte size of the buffer.
            if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
            {
                Camera.Buffer = new byte[FrameSize];
                Camera.GC     = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
                // Address of the pinned buffer.
                Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
                // Buffer size.
                Camera.Frame.ImageBufferSize = FrameSize;

                // Start the capture mode.
                if (Pv.CaptureStart(Camera.Handle) == 0)
                {
                    // Set the acquisition mode to continuous.
                    if (Pv.CommandRun(Camera.Handle, "AcquisitionStart") != 0)
                    {
                        // If that fails, reset the camera to non capture mode.
                        Pv.CaptureEnd(Camera.Handle);
                        return(false);
                    }
                    else
                    {
                        return(true);
                    }
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                return(false);
            }
        }
Example #25
        static void Main(string[] args)
        {
            tErr err = 0;

            // Initialize the API.
            if ((err = Pv.Initialize()) == 0)
            {
                tCamera       Camera  = new tCamera();
                tLinkCallback lLinkCB = new tLinkCallback(LinkCB);


                // Add callbacks for camera events.
                if (Pv.LinkCallbackRegister(lLinkCB, tLinkEvent.eLinkAdd, IntPtr.Zero) != 0)
                {
                    Console.WriteLine("Failed to register the callback.");
                }
                if (Pv.LinkCallbackRegister(lLinkCB, tLinkEvent.eLinkRemove, IntPtr.Zero) != 0)
                {
                    Console.WriteLine("Failed to register the callback.");
                }

                // Wait for a camera to be plugged in.
                WaitForCamera();

                // Grab a camera from the list.
                if (CameraGrab(ref Camera))
                {
                    // Setup the camera.
                    if (CameraSetup(ref Camera))
                    {
                        DoSnap(ref Camera);
                        DoSnap(ref Camera);
                        DoStream(ref Camera);
                        DoStream(ref Camera);
                        DoSnap(ref Camera);
                        DoStream(ref Camera);
                        DoStream(ref Camera);

                        // Unsetup the camera.
                        CameraUnsetup(ref Camera);
                    }

                    else
                    {
                        Console.WriteLine("Camera {0} failed to be setup.", Camera.UID);
                    }
                }
                else
                {
                    Console.WriteLine("Failed to find a camera.");
                }

                // Remove callbacks.
                if (Pv.LinkCallbackUnregister(lLinkCB, tLinkEvent.eLinkAdd) != 0)
                {
                    Console.WriteLine("Failed to unregister the callback.");
                }
                if (Pv.LinkCallbackUnregister(lLinkCB, tLinkEvent.eLinkRemove) != 0)
                {
                    Console.WriteLine("Failed to unregister the callback.");
                }

                // Uninitialize the API.
                Pv.UnInitialize();
            }
            else
            {
                Console.Write("Failed to initialize the API.");
                Console.WriteLine(err);
            }
        }
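The LinkCB callback registered in this Main is not part of the excerpt either. Its parameter list depends on the wrapper's tLinkCallback delegate; assuming it mirrors the C API's PvLinkCallback (context, interface, event, unique ID), a minimal version might be:

        // Hypothetical plug/unplug callback. The parameter list is an assumption based on the
        // C API's PvLinkCallback and may not match the wrapper's actual tLinkCallback delegate.
        static void LinkCB(IntPtr Context, tInterface Interface, tLinkEvent Event, UInt32 UniqueId)
        {
            if (Event == tLinkEvent.eLinkAdd)
            {
                Console.WriteLine("Camera {0} was plugged in.", UniqueId);
            }
            else if (Event == tLinkEvent.eLinkRemove)
            {
                Console.WriteLine("Camera {0} was unplugged.", UniqueId);
            }
        }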
Example #26
 // Stop the streaming.
 static void CameraStop(ref tCamera Camera)
 {
     Pv.CommandRun(Camera.Handle, "AcquisitionStop");
     Pv.CaptureEnd(Camera.Handle);
     Pv.CaptureQueueClear(Camera.Handle);
 }
Example #27
        static void Main(string[] args)
        {
            tErr err = 0;

            // Initialize the API.
            if ((err = Pv.Initialize()) == 0)
            {
                tCamera Camera = new tCamera();

                // Wait for a camera to be detected.
                WaitForCamera();

                if (CameraGet(ref Camera))
                {
                    // Open the first camera detected.
                    if (CameraOpen(ref Camera))
                    {
                        // Set the camera up for triggering.
                        if (CameraSetup(ref Camera))
                        {
                            UInt32 Count = 0;
                            String filename;

                            Console.WriteLine("The camera is ready now. Press Q to quit or S to take a picture.");

                            while (WaitForUserToQuitOrSnap())
                            {
                                if (CameraSnap(ref Camera))
                                {
                                    filename  = "Snap";
                                    filename += Count++;
                                    filename += ".bmp";

                                    if (FrameSave(ref Camera, filename) == false)
                                    {
                                        Console.WriteLine("Failed to save the image.");
                                    }
                                    else
                                    {
                                        Console.WriteLine("Frame saved.");
                                    }
                                    Console.WriteLine("The camera is waiting for a trigger. Press Q to quit or S to take a picture.");
                                }
                                else
                                {
                                    Console.WriteLine("Failed to snap an image.");
                                }
                            }
                        }
                        else
                        {
                            Console.WriteLine("Failed to setup the camera.");
                        }

                        CameraClose(ref Camera);
                    }
                    else
                    {
                        Console.WriteLine("Camera {0} failed to be open.", Camera.UID);
                    }
                }
                else
                {
                    Console.WriteLine("Failed to get a camera.");
                }

                Pv.UnInitialize();
            }
            else
            {
                Console.Write("Failed to initialize the API : ");
                Console.WriteLine(err);
            }

            Thread.Sleep(800);
        }
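WaitForUserToQuitOrSnap is another helper that is not shown here. Given the prompt printed above ("Press Q to quit or S to take a picture"), a simple sketch is to block on console input and return false only when the user presses Q:

        // Hypothetical helper: returns true when the user presses S (snap) and false on Q (quit).
        static bool WaitForUserToQuitOrSnap()
        {
            ConsoleKey key;

            do
            {
                key = Console.ReadKey(true).Key;
            } while (key != ConsoleKey.Q && key != ConsoleKey.S);

            return key == ConsoleKey.S;
        }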
Example #28
        public static int LuminanceAdjust(ref tCamera Camera, double mean)
        {
            int result = 0;

            if (mean < lowIlluminance)
            {
                result = -1;
            }
            if (mean > highIlluminance)
            {
                result = 1;
            }
            if (result * lastStatus == -1) // If the previous frame was too dark and this one is too bright (or vice versa)
            {
                if (gain == 0)
                {
                    //exposuretime = (exposuretime + lastExposuretime) / 2;
                    if ((int)(exposuretime * scale * 0.5) > 0)
                    {
                        scale *= 0.5;
                    }
                }
                else
                {
                    result = 0; // Gain is an integer and cannot be adjusted more finely, so skip the adjustment.
                }
            }
            if (result == -1)
            {
                // Low-illuminance case.
                isNoon = false;
                //if (frame<=200)
                {
                    // Automatically increase the exposure time or gain to brighten the image.
                    if (exposuretime >= 40000)
                    {
                        exposuretime = 40000;
                        if (gain < 10)
                        {
                            gain++;
                        }
                        else
                        {
                            gain = 10;
                        }
                    }
                    else if (exposuretime <= 10)
                    {
                        exposuretime = 10;
                    }
                    else if (10000 <= exposuretime && exposuretime < 40000)
                    {
                        exposuretime = exposuretime + (uint)(10000 * scale);
                        gain         = 0;
                    }
                    else if (1000 <= exposuretime && exposuretime < 10000)
                    {
                        exposuretime = exposuretime + (uint)(1000 * scale);
                        gain         = 0;
                    }
                    else if (exposuretime >= 100 && exposuretime < 1000)
                    {
                        exposuretime = exposuretime + (uint)(100 * scale);
                        gain         = 0;
                    }
                    else if (exposuretime >= 10 && exposuretime < 100)
                    {
                        exposuretime = exposuretime + (uint)(10 * scale);
                        gain         = 0;
                    }
                }
                if (exposuretime == 40000 && gain == 10)
                {
                    // Once the exposure time and gain reach their maximums, apply a nonlinear transform to the low-light image to improve its quality.
                    if (isNight == false)
                    {
                        isNight = true;
                    }
                    else
                    {
                        result = 0;
                    }
                }
            }
            else if (result == 1)
            {
                // Strong-illuminance case.
                isNight = false;
                //if (frame<=200)
                {
                    // Automatically decrease the exposure time and gain to darken the image.
                    if (gain > 0)
                    {
                        gain = 0;
                    }
                    else
                    {
                        if (exposuretime > 40000)
                        {
                            exposuretime = 40000;
                        }
                        else if (exposuretime <= 10)
                        {
                            exposuretime = 10;
                        }
                        else if (10000 < exposuretime && exposuretime <= 40000)
                        {
                            exposuretime = exposuretime - (uint)(10000 * scale);
                        }
                        else if (1000 < exposuretime && exposuretime <= 10000)
                        {
                            exposuretime = exposuretime - (uint)(1000 * scale);
                        }
                        else if (exposuretime > 100 && exposuretime <= 1000)
                        {
                            exposuretime = exposuretime - (uint)(100 * scale);
                        }
                        else if (exposuretime > 10 && exposuretime <= 100)
                        {
                            exposuretime = exposuretime - (uint)(10 * scale);
                        }
                    }
                }
                if (exposuretime == 10 && gain == 0)
                {
                    // When the exposure time and gain can no longer be adjusted, apply a gamma transform to adjust the image brightness.
                    if (isNoon == false)
                    {
                        isNoon = true;
                    }
                    else
                    {
                        result = 0;
                    }
                }
            }
            else if (mean < highIlluminance && mean >= lowIlluminance)
            {
                // Normal illuminance; leave the image untouched.
            }
            if (result != 0)
            {
                Pv.AttrUint32Set(Camera.Handle, "ExposureValue", exposuretime);
                Pv.AttrUint32Set(Camera.Handle, "GainValue", gain);
            }
            lastStatus = result;
            Console.WriteLine("Brightness: {0}, ExposureValue: {1}, GainValue: {2}", mean, exposuretime, gain);
            return(result);
        }
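LuminanceAdjust reads and writes a number of class-level fields (lowIlluminance, highIlluminance, exposuretime, gain, scale, lastStatus, isNight, isNoon) that are declared elsewhere in the sample, as are the illumiance field and the lookup tables used by Frame2Data below. A sketch of plausible declarations follows; the types are inferred from how the fields are used, and the initial values are purely illustrative.

        // Hypothetical declarations for the state shared by LuminanceAdjust and Frame2Data.
        // Types are inferred from usage; the initial values are illustrative assumptions only.
        static double lowIlluminance  = 80;     // Lower bound of the acceptable mean brightness.
        static double highIlluminance = 180;    // Upper bound of the acceptable mean brightness.
        static uint   exposuretime    = 10000;  // Current exposure time ("ExposureValue" attribute).
        static uint   gain            = 0;      // Current gain ("GainValue" attribute).
        static double scale           = 1.0;    // Step scale, halved when the adjustment oscillates.
        static int    lastStatus      = 0;      // Result of the previous adjustment (-1, 0 or 1).
        static bool   isNight         = false;  // Apply the low-light LUT in Frame2Data.
        static bool   isNoon          = false;  // Apply the gamma LUT in Frame2Data.
        static double illumiance      = 0;      // Last measured mean brightness.
        static Byte[] NightLUT;                 // Lookup table built lazily in Frame2Data.
        static Byte[] NoonLUT;                  // Lookup table built lazily in Frame2Data.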
Example #29
        // Convert the raw data in the frame's buffer into the bitmap's data. This version handles the
        // eFmtMono8, eFmtMono16, eFmtBayer8, eFmtBayer16, eFmtRgb24, eFmtRgb48, eFmtYuv411, eFmtYuv422 and eFmtYuv444 pixel formats.
        static unsafe bool Frame2Data(ref tCamera Camera, ref BitmapData Data)
        {
            double PixelMean = 0;

            if (NightLUT == null || NightLUT.Length == 0)
            {
                NightLUT = new Byte[256];
                for (int i = 0; i < 256; i++)
                {
                    double t = i + 0.01 * i * (255 - i);
                    if (t > 255)
                    {
                        t = 255;
                    }
                    if (t < 0)
                    {
                        t = 0;
                    }
                    NightLUT[i] = (byte)t;
                }
            }
            if (NoonLUT == null || NoonLUT.Length == 0)
            {
                NoonLUT = new Byte[256];
                double gamma_r = 1.5;
                double gamma_b = 2;
                double gamma_c = 0.9;
                for (int i = 0; i < 256; i++)
                {
                    double t = (gamma_c * Math.Pow(i / 255.0, gamma_r) * 255.0 - gamma_b);
                    if (t > 255)
                    {
                        t = 255;
                    }
                    if (t < 0)
                    {
                        t = 0;
                    }
                    NoonLUT[i] = (byte)t;
                }
            }
            switch (Camera.Frame.Format)
            {
            case tImageFormat.eFmtMono8:
            {
                UInt32 lOffset = 0;
                UInt32 lPos    = 0;
                byte * lDst    = (byte *)Data.Scan0;
                byte * lSrc    = (byte *)Camera.Frame.ImageBuffer;


                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    try
                    {
                        byte thisPixel = lSrc[lOffset];
                        if (isNight)
                        {
                            thisPixel = NightLUT[thisPixel];
                        }
                        else if (isNoon)
                        {
                            thisPixel = NoonLUT[thisPixel];
                        }
                        lDst[lPos]     = thisPixel;
                        lDst[lPos + 1] = thisPixel;
                        lDst[lPos + 2] = thisPixel;
                        PixelMean     += lSrc[lOffset];
                    }
                    catch (Exception e)
                    {
                        //System.Windows.MessageBox.Show(e.Message);
                        return(false);
                    }
                    lOffset++;
                    lPos += 3;
                    try
                    {
                        // Take care of the padding in the destination bitmap.
                        if ((lOffset % Camera.Frame.Width) == 0)
                        {
                            lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                        }
                    }
                    catch (Exception e)
                    {
                        //System.Windows.MessageBox.Show(e.Message);
                        return(false);
                    }
                }
                PixelMean /= (double)Camera.Frame.ImageBufferSize;
                illumiance = PixelMean;
                //System.Windows.MessageBox.Show(PixelMean.ToString());
                LuminanceAdjust(ref Camera, PixelMean);
                return(true);
            }

            case tImageFormat.eFmtMono16:
            {
                UInt32  lOffset  = 0;
                UInt32  lPos     = 0;
                byte *  lDst     = (byte *)Data.Scan0;
                byte    bitshift = (byte)((int)Camera.Frame.BitDepth - 8);
                UInt16 *lSrc     = (UInt16 *)Camera.Frame.ImageBuffer;

                while (lOffset < Camera.Frame.Width * Camera.Frame.Height)
                {
                    lDst[lPos]     = (byte)(lSrc[lOffset] >> bitshift);
                    lDst[lPos + 1] = lDst[lPos];
                    lDst[lPos + 2] = lDst[lPos];

                    lOffset++;
                    lPos += 3;

                    // Take care of the padding in the destination bitmap.
                    if ((lOffset % Camera.Frame.Width) == 0)
                    {
                        lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                    }
                }

                return(true);
            }

            case tImageFormat.eFmtBayer8:
            {
                UInt32   WidthSize = Camera.Frame.Width * 3;
                GCHandle pFrame    = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
                UInt32   remainder = (((WidthSize + 3U) & ~3U) - WidthSize);

                // Interpolate the colors.
                IntPtr pRed   = (IntPtr)((byte *)Data.Scan0 + 2);
                IntPtr pGreen = (IntPtr)((byte *)Data.Scan0 + 1);
                IntPtr pBlue  = (IntPtr)((byte *)Data.Scan0);
                Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
                UInt32 lOffset = 0;
                byte * lSrc    = (byte *)Camera.Frame.ImageBuffer;
                //double pixelMean2 = 0;
                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    PixelMean += ((double)lSrc[lOffset + 2] + (double)lSrc[lOffset + 1] + (double)lSrc[lOffset]) / 3;

                    if (isNight)
                    {
                        lSrc[lOffset + 2] = NightLUT[lSrc[lOffset + 2]];
                        lSrc[lOffset + 1] = NightLUT[lSrc[lOffset + 1]];
                        lSrc[lOffset]     = NightLUT[lSrc[lOffset]];
                    }
                    if (isNoon)
                    {
                        lSrc[lOffset + 2] = NoonLUT[lSrc[lOffset + 2]];
                        lSrc[lOffset + 1] = NoonLUT[lSrc[lOffset + 1]];
                        lSrc[lOffset]     = NoonLUT[lSrc[lOffset]];
                    }
                    //pixelMean2 += ((double)lSrc[lOffset + 2] + (double)lSrc[lOffset + 1] + (double)lSrc[lOffset]) / 3;
                    lOffset += 3;
                }
                PixelMean /= (double)Camera.Frame.ImageBufferSize / 3;
                illumiance = PixelMean;
                //System.Windows.MessageBox.Show(PixelMean.ToString());
                LuminanceAdjust(ref Camera, PixelMean);
                pFrame.Free();

                return(true);
            }

            case tImageFormat.eFmtBayer16:
            {
                UInt32   WidthSize = Camera.Frame.Width * 3;
                UInt32   lOffset   = 0;
                byte     bitshift  = (byte)((int)Camera.Frame.BitDepth - 8);
                UInt16 * lSrc      = (UInt16 *)Camera.Frame.ImageBuffer;
                byte *   lDst      = (byte *)Camera.Frame.ImageBuffer;
                UInt32   remainder = (((WidthSize + 3U) & ~3U) - WidthSize);
                GCHandle pFrame;

                Camera.Frame.Format = tImageFormat.eFmtBayer8;

                pFrame = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);

                // Shift the pixel.
                while (lOffset < Camera.Frame.Width * Camera.Frame.Height)
                {
                    lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);
                }

                // Interpolate the colors.
                IntPtr pRed   = (IntPtr)((byte *)Data.Scan0 + 2);
                IntPtr pGreen = (IntPtr)((byte *)Data.Scan0 + 1);
                IntPtr pBlue  = (IntPtr)((byte *)Data.Scan0);
                Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);

                pFrame.Free();

                return(true);
            }

            case tImageFormat.eFmtRgb24:
            {
                UInt32 lOffset = 0;
                UInt32 lPos    = 0;
                byte * lDst    = (byte *)Data.Scan0;

                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    // Copy the data.
                    lDst[lPos]     = Camera.Buffer[lOffset + 2];
                    lDst[lPos + 1] = Camera.Buffer[lOffset + 1];
                    lDst[lPos + 2] = Camera.Buffer[lOffset];
                    PixelMean     += ((double)Camera.Buffer[lOffset + 2] + (double)Camera.Buffer[lOffset + 1] + (double)Camera.Buffer[lOffset]) / 3;

                    lOffset += 3;
                    lPos    += 3;

                    // Take care of the padding in the destination bitmap.
                    if ((lOffset % (Camera.Frame.Width * 3)) == 0)
                    {
                        lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                    }
                }
                PixelMean /= (double)Camera.Frame.ImageBufferSize / 3;
                illumiance = PixelMean;
                //System.Windows.MessageBox.Show(PixelMean.ToString());
                LuminanceAdjust(ref Camera, PixelMean);
                return(true);
            }

            case tImageFormat.eFmtRgb48:
            {
                UInt32  lOffset  = 0;
                UInt32  lPos     = 0;
                UInt32  lLength  = Camera.Frame.ImageBufferSize / sizeof(UInt16);
                UInt16 *lSrc     = (UInt16 *)Camera.Frame.ImageBuffer;
                byte *  lDst     = (byte *)Data.Scan0;
                byte    bitshift = (byte)((int)Camera.Frame.BitDepth - 8);

                while (lOffset < lLength)
                {
                    // Copy the data.
                    lDst[lPos]     = (byte)(lSrc[lOffset + 2] >> bitshift);
                    lDst[lPos + 1] = (byte)(lSrc[lOffset + 1] >> bitshift);
                    lDst[lPos + 2] = (byte)(lSrc[lOffset] >> bitshift);

                    lOffset += 3;
                    lPos    += 3;

                    // Take care of the padding in the destination bitmap.
                    if ((lOffset % (Camera.Frame.Width * 3)) == 0)
                    {
                        lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                    }
                }

                return(true);
            }

            case tImageFormat.eFmtYuv411:
            {
                UInt32 lOffset = 0;
                UInt32 lPos = 0;
                byte * lDst = (byte *)Data.Scan0;
                int    y1, y2, y3, y4, u, v;
                int    r, g, b;

                r = g = b = 0;

                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    u  = Camera.Buffer[lOffset++];
                    y1 = Camera.Buffer[lOffset++];
                    y2 = Camera.Buffer[lOffset++];
                    v  = Camera.Buffer[lOffset++];
                    y3 = Camera.Buffer[lOffset++];
                    y4 = Camera.Buffer[lOffset++];

                    YUV2RGB(y1, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                    YUV2RGB(y2, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                    YUV2RGB(y3, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                    YUV2RGB(y4, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                }

                return(true);
            }

            case tImageFormat.eFmtYuv422:
            {
                UInt32 lOffset = 0;
                UInt32 lPos = 0;
                byte * lDst = (byte *)Data.Scan0;
                int    y1, y2, u, v;
                int    r, g, b;

                r = g = b = 0;

                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    u  = Camera.Buffer[lOffset++];
                    y1 = Camera.Buffer[lOffset++];
                    v  = Camera.Buffer[lOffset++];
                    y2 = Camera.Buffer[lOffset++];

                    YUV2RGB(y1, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                    YUV2RGB(y2, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                }

                return(true);
            }

            case tImageFormat.eFmtYuv444:
            {
                UInt32 lOffset = 0;
                UInt32 lPos = 0;
                byte * lDst = (byte *)Data.Scan0;
                int    y1, y2, u, v;
                int    r, g, b;

                r = g = b = 0;

                while (lOffset < Camera.Frame.ImageBufferSize)
                {
                    u  = Camera.Buffer[lOffset++];
                    y1 = Camera.Buffer[lOffset++];
                    v  = Camera.Buffer[lOffset++];
                    lOffset++;
                    y2 = Camera.Buffer[lOffset++];
                    lOffset++;

                    YUV2RGB(y1, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                    YUV2RGB(y2, u, v, ref r, ref g, ref b);
                    lDst[lPos++] = (byte)b;
                    lDst[lPos++] = (byte)g;
                    lDst[lPos++] = (byte)r;
                }

                return(true);
            }

            default:
                return(false);
            }
        }
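The YUV cases above rely on a YUV2RGB helper that is not part of this excerpt. A minimal sketch using the common ITU-R BT.601 conversion with clamping follows; the exact coefficients in the original sample may differ.

        // Hypothetical YUV to RGB conversion (ITU-R BT.601 style coefficients, clamped to 0..255).
        static void YUV2RGB(int y, int u, int v, ref int r, ref int g, ref int b)
        {
            // Center the chroma components around zero.
            int cu = u - 128;
            int cv = v - 128;

            r = (int)(y + 1.402 * cv);
            g = (int)(y - 0.344 * cu - 0.714 * cv);
            b = (int)(y + 1.772 * cu);

            // Clamp to the valid byte range.
            r = Math.Min(255, Math.Max(0, r));
            g = Math.Min(255, Math.Max(0, g));
            b = Math.Min(255, Math.Max(0, b));
        }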
Example #30
 // Close the camera.
 static void CameraClose(ref tCamera Camera)
 {
     // Close the camera.
     Pv.CameraClose(Camera.Handle);
 }