// Set up the camera for capture.
static bool CameraSetup(ref tCamera Camera)
{
    UInt32 FrameSize = 0;

    if (Pv.CameraOpen(Camera.UID, tAccessFlags.eAccessMaster, out Camera.Handle) == 0)
    {
        // Get the byte size of the buffer.
        if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
        {
            Camera.Buffer = new byte[FrameSize];
            Camera.GC = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
            // Address of the pinned buffer.
            Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
            // Buffer size.
            Camera.Frame.ImageBufferSize = FrameSize;

            return true;
        }
        else
        {
            // The buffer size could not be read, so close the camera we just opened.
            Pv.CameraClose(Camera.Handle);
            Camera.Handle = 0;
            return false;
        }
    }
    else
    {
        return false;
    }
}
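// A minimal teardown counterpart to the setup above (a sketch, not part of the
// original sample): it assumes the same tCamera structure and simply releases
// the pinned buffer and closes the handle that CameraSetup acquired.
static void CameraUnsetup(ref tCamera Camera)
{
    // Close the camera if it was opened.
    if (Camera.Handle != 0)
    {
        Pv.CameraClose(Camera.Handle);
        Camera.Handle = 0;
    }

    // Release the pinned buffer so the GC can reclaim and move it again.
    if (Camera.GC.IsAllocated)
    {
        Camera.GC.Free();
    }

    Camera.Buffer = null;
}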
// Keeps track of data being streamed from the camera.
static void ThreadFunc()
{
    tErr Err;
    UInt32 Dropped, Done, Completed;
    UInt32 Total, Missed, Errs;
    int Before, Now, Elapsed;
    double Fps;
    float Rate;

    Dropped = 0;
    Missed = 0;
    Completed = 0;
    Errs = 0;
    Rate = 0;
    Fps = 0;
    Elapsed = 0;
    Total = 0;
    Done = 0;

    // Start timing.
    Before = GetTickCount();

    // Get some information about the streaming process.
    while (Close == false &&
           ((Err = Pv.AttrUint32Get(GCamera.Handle, "StatFramesCompleted", ref Completed)) == 0) &&
           ((Err = Pv.AttrUint32Get(GCamera.Handle, "StatFramesDropped", ref Dropped)) == 0) &&
           ((Err = Pv.AttrUint32Get(GCamera.Handle, "StatPacketsMissed", ref Missed)) == 0) &&
           ((Err = Pv.AttrUint32Get(GCamera.Handle, "StatPacketsErroneous", ref Errs)) == 0) &&
           ((Err = Pv.AttrFloat32Get(GCamera.Handle, "StatFrameRate", ref Rate)) == 0))
    {
        // Store the elapsed time.
        Now = GetTickCount();
        // Keep track of the total number of frames completed.
        Total += (Completed - Done);
        // Keep track of the total time that has elapsed.
        Elapsed += (Now - Before);

        // Update the FPS value every 500 milliseconds.
        if (Elapsed >= 500)
        {
            Fps = (double)(Total * 1000.0) / (double)Elapsed;
            Elapsed = 0;
            Total = 0;
        }

        // Display the current information.
        //Console.WriteLine("Completed : {0} Dropped : {1} Missed : {2} Err {3} Rate: {4:.00} <{5:.00}>", Completed, Dropped, Missed, Errs, Rate, Fps);

        Before = GetTickCount();
        Done = Completed;

        Thread.Sleep(20);
    }

    Console.WriteLine();
}
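// A sketch of how the statistics loop above might be driven; the Close flag and
// GCamera global are the same names ThreadFunc already uses, everything else here
// is an assumption rather than part of the original sample.
static void RunStatsThread()
{
    Thread StatsThread = new Thread(new ThreadStart(ThreadFunc));

    Close = false;
    StatsThread.Start();

    // Stream until the user presses Enter.
    Console.WriteLine("Streaming; press Enter to stop.");
    Console.ReadLine();

    // Signal the loop in ThreadFunc to exit, then wait for the thread to finish.
    Close = true;
    StatsThread.Join();
}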
// Set up the camera for capture.
static bool CameraSetup()
{
    UInt32 FrameSize = 0;

    // Get the byte size of the buffer.
    if (Pv.AttrUint32Get(GCamera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        GCamera.Buffer = new byte[FrameSize];
        GCamera.GC = GCHandle.Alloc(GCamera.Buffer, GCHandleType.Pinned);
        // Address of the pinned buffer.
        GCamera.Frame.ImageBuffer = GCamera.GC.AddrOfPinnedObject();
        // Buffer size.
        GCamera.Frame.ImageBufferSize = FrameSize;

        // Start the capture mode.
        if (Pv.CaptureStart(GCamera.Handle) == 0)
        {
            // Set the camera in hardware trigger mode, using SyncIn2 (non-isolated).
            if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "SyncIn2") == 0)
            // Or use SyncIn1 (isolated) instead:
            // if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "SyncIn1") == 0)
            {
                // Set the acquisition mode to continuous.
                if (Pv.CommandRun(GCamera.Handle, "AcquisitionStart") != 0)
                {
                    // If that fails, reset the camera to non-capture mode.
                    Pv.CaptureEnd(GCamera.Handle);
                    return false;
                }
                else
                {
                    return true;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
}
// Set up the camera for capture.
static bool CameraSetup(ref tCamera Camera)
{
    UInt32 FrameSize = 0;
    UInt32 FrameCount = 600;

    // Get the byte size of the buffer.
    if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        Camera.Buffer = new byte[FrameSize];
        Camera.GC = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
        // Address of the pinned buffer.
        Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
        // Buffer size.
        Camera.Frame.ImageBufferSize = FrameSize;

        Console.WriteLine("We have allocated {0} frames of {1} bytes each.", FrameCount, FrameSize);

        // Start the capture mode.
        if (Pv.CaptureStart(Camera.Handle) == 0)
        {
            // Set the camera in software trigger mode.
            if (Pv.AttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software") == 0)
            {
                // Set the acquisition mode to continuous.
                if (Pv.CommandRun(Camera.Handle, "AcquisitionStart") != 0)
                {
                    // If that fails, reset the camera to non-capture mode.
                    Pv.CaptureEnd(Camera.Handle);
                    return false;
                }
                else
                {
                    return true;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
}
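// A sketch of how a frame could then be grabbed with the software trigger
// configured above. The names here are assumptions: it relies on the wrapper
// exposing Pv.CaptureWaitForFrameDone with the usual (handle, frame pointer,
// timeout in ms) signature, and on the standard "FrameStartTriggerSoftware"
// command; it is not part of the original sample.
static bool CameraSnap(ref tCamera Camera)
{
    // Pin a copy of the frame structure so the driver can write into it.
    GCHandle pFrame = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
    bool Result = false;

    // Queue the frame with no callback; we wait for it synchronously below.
    if (Pv.CaptureQueueFrame(Camera.Handle, pFrame.AddrOfPinnedObject(), null) == 0)
    {
        // Fire the software trigger.
        if (Pv.CommandRun(Camera.Handle, "FrameStartTriggerSoftware") == 0)
        {
            // Poll until the frame completes (10 ms timeout per call).
            while (Pv.CaptureWaitForFrameDone(Camera.Handle, pFrame.AddrOfPinnedObject(), 10) != 0)
            {
            }
            Result = true;
        }
    }

    pFrame.Free();
    return Result;
}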
// Adjust the packet size.
static void CameraAdjust(ref tCamera Camera)
{
    UInt32 MaxSize = 16456;

    Console.WriteLine("Adjusting...");

    if (Pv.CaptureAdjustPacketSize(Camera.Handle, MaxSize) == 0)
    {
        UInt32 Size = 0;

        Pv.AttrUint32Get(Camera.Handle, "PacketSize", ref Size);
        Console.WriteLine("The best packet size is " + Size + " bytes.");
    }
    else
    {
        Console.WriteLine("Sorry, there was an error while trying to adjust the packet size.");
    }
}
// Set up the camera for capture.
static bool CameraStart(ref tCamera Camera)
{
    UInt32 FrameSize = 0;

    // Adjust the packet size for optimal performance.
    Pv.CaptureAdjustPacketSize(Camera.Handle, 8228);

    // Get the byte size of the buffer.
    if (Pv.AttrUint32Get(Camera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        Camera.Buffer = new byte[FrameSize];
        Camera.GC = GCHandle.Alloc(Camera.Buffer, GCHandleType.Pinned);
        // Address of the pinned buffer.
        Camera.Frame.ImageBuffer = Camera.GC.AddrOfPinnedObject();
        // Buffer size.
        Camera.Frame.ImageBufferSize = FrameSize;

        // Start the capture mode.
        if (Pv.CaptureStart(Camera.Handle) == 0)
        {
            // Set the acquisition mode to continuous.
            if (Pv.CommandRun(Camera.Handle, "AcquisitionStart") != 0)
            {
                // If that fails, reset the camera to non-capture mode.
                Pv.CaptureEnd(Camera.Handle);
                return false;
            }
            else
            {
                return true;
            }
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
}
// Set the camera up for streaming.
static bool CameraStart()
{
    UInt32 FrameSize = 0;

    // Adjust the packet size for optimal performance.
    Pv.CaptureAdjustPacketSize(GCamera.Handle, 8228);

    // Determine how big the frame buffers should be.
    if (Pv.AttrUint32Get(GCamera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        tFrame Frame = new tFrame();
        byte[] Buffer = new byte[FrameSize];
        GCHandle pBuffer = GCHandle.Alloc(Buffer, GCHandleType.Pinned);

        // Set the frame's fields.
        // Handle to the camera.
        Frame.Context.Field0 = new IntPtr(GCamera.Handle);
        // Address of the pinned object.
        Frame.ImageBuffer = pBuffer.AddrOfPinnedObject();
        // Buffer size.
        Frame.ImageBufferSize = FrameSize;

        // Start the capture mode.
        if (Pv.CaptureStart(GCamera.Handle) == 0)
        {
            // Set the camera in continuous acquisition mode, and in "Freerun".
            if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "Freerun") == 0)
            {
                // Set the acquisition mode to continuous.
                if (Pv.CommandRun(GCamera.Handle, "AcquisitionStart") != 0)
                {
                    // If that fails, reset the camera to non-capture mode.
                    Pv.CaptureEnd(GCamera.Handle);
                    Console.WriteLine("Failed to start.");
                    return false;
                }
                else
                {
                    // Pin down a copy of the frame structure.
                    GCHandle pFrame = GCHandle.Alloc(Frame, GCHandleType.Pinned);

                    // Enqueue the frame.
                    Pv.CaptureQueueFrame(GCamera.Handle, pFrame.AddrOfPinnedObject(), FrameCB);
                    return true;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
}
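// A sketch of what the FrameCB callback referenced above typically does; the
// callback body is not shown in the sample, so the single-argument tFrameCallback
// signature and the Context.Field0 usage are assumptions carried over from
// CameraStart(), not the sample's actual code.
static void FrameDoneCB(IntPtr pFrame)
{
    // Marshal the native frame structure back into a managed tFrame.
    tFrame Frame = (tFrame)Marshal.PtrToStructure(pFrame, typeof(tFrame));

    // Recover the camera handle that CameraStart() stored in Context.Field0.
    UInt32 Handle = (UInt32)Frame.Context.Field0.ToInt64();

    // Re-enqueue the same frame buffer so streaming keeps going. A real callback
    // would normally inspect the frame's status first and stop re-queueing once
    // capture has been cancelled.
    Pv.CaptureQueueFrame(Handle, pFrame, FrameCB);
}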
// Set the camera up for streaming.
static bool CameraStart()
{
    tFrameCallback lFrameCB = new tFrameCallback(FrameDoneCB);
    tCameraEventCallback lEventCB = new tCameraEventCallback(EventDone);
    UInt32 FrameSize = 0;
    IntPtr Context = IntPtr.Zero;

    if (Pv.AttrExists(GCamera.Handle, "EventsEnable1") == tErr.eErrNotFound)
    {
        Console.WriteLine("This camera does not support event notifications.");
        return false;
    }

    // Adjust the packet size for optimal performance.
    Pv.CaptureAdjustPacketSize(GCamera.Handle, 8228);

    // Determine how big the frame buffers should be.
    if (Pv.AttrUint32Get(GCamera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        tFrame Frame = new tFrame();
        byte[] Buffer = new byte[FrameSize];
        GCHandle pBuffer = GCHandle.Alloc(Buffer, GCHandleType.Pinned);

        // Set the frame's fields.
        // Handle to the camera.
        Frame.Context.Field0 = new IntPtr(GCamera.Handle);
        // Address of the pinned object.
        Frame.ImageBuffer = pBuffer.AddrOfPinnedObject();
        // Buffer size.
        Frame.ImageBufferSize = FrameSize;

        // Set up the event channel: clear any previously enabled events, then turn
        // on notification for the events of interest.
        Pv.AttrUint32Set(GCamera.Handle, "EventsEnable1", 0);
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "AcquisitionStart");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "AcquisitionEnd");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "FrameTrigger");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");

        if (Pv.CameraEventCallbackRegister(GCamera.Handle, lEventCB, Context) != tErr.eErrSuccess)
        {
            Console.WriteLine("There was an error accessing the driver.");
            return false;
        }

        // Start the capture mode.
        if (Pv.CaptureStart(GCamera.Handle) == 0)
        {
            if (Pv.AttrFloat32Set(GCamera.Handle, "FrameRate", 5) == 0)
            {
                // Set the camera in continuous acquisition mode, triggered at a fixed rate.
                if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "FixedRate") == 0)
                {
                    // Set the acquisition mode to continuous.
                    if (Pv.CommandRun(GCamera.Handle, "AcquisitionStart") != 0)
                    {
                        // If that fails, reset the camera to non-capture mode.
                        Pv.CaptureEnd(GCamera.Handle);
                        Console.WriteLine("Failed to start.");
                        return false;
                    }
                    else
                    {
                        // Pin down a copy of the frame structure.
                        GCHandle pFrame = GCHandle.Alloc(Frame, GCHandleType.Pinned);

                        // Enqueue the frame.
                        Pv.CaptureQueueFrame(GCamera.Handle, pFrame.AddrOfPinnedObject(), lFrameCB);
                        return true;
                    }
                }
                else
                {
                    return false;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
}
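// A minimal stop/cleanup sketch for the streaming setup above; it is not part of
// the original sample and assumes the wrapper exposes CaptureQueueClear like the
// underlying C API. A full cleanup would also unregister the event callback.
static void CameraStop()
{
    // Stop the camera from acquiring images.
    Pv.CommandRun(GCamera.Handle, "AcquisitionStop");
    // Leave capture mode.
    Pv.CaptureEnd(GCamera.Handle);
    // Discard any frames still waiting in the driver's queue.
    Pv.CaptureQueueClear(GCamera.Handle);
    // Turn the event notifications back off.
    Pv.AttrUint32Set(GCamera.Handle, "EventsEnable1", 0);
}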
public void BeginCapture(tImageFormat fmt)
{
    tErr error;

    if (!camera.HasValue)
    {
        error = tErr.eErrUnavailable;
        throw new PvException(error);
    }

    ImageFormat.pixelformat = fmt;

    error = Pv.CaptureStart(camera.Value);
    if (error != tErr.eErrSuccess)
    {
        goto error;
    }

    frameBufferHandles = new GCHandle[FRAME_POOL_SIZE];
    framePoolHandles = new GCHandle[FRAME_POOL_SIZE];
    frames = new tFrame[FRAME_POOL_SIZE];

    uint bufferSize = 0;
    error = Pv.AttrUint32Get(camera.Value, "TotalBytesPerFrame", ref bufferSize);
    if (error != tErr.eErrSuccess)
    {
        goto error;
    }

    for (int count = FRAME_POOL_SIZE - 1; count >= 0; count--)
    {
        byte[] buffer = new byte[bufferSize];
        GCHandle bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        frameBufferHandles[count] = bufferHandle;

        tFrame frame = new tFrame
        {
            ImageBuffer = bufferHandle.AddrOfPinnedObject(),
            ImageBufferSize = bufferSize,
            AncillaryBufferSize = 0
        };
        frames[count] = frame;

        GCHandle frameHandle = GCHandle.Alloc(frame, GCHandleType.Pinned);
        framePoolHandles[count] = frameHandle;

        IntPtr framePointer = frameHandle.AddrOfPinnedObject();
        buffers.Add(framePointer, buffer);

        if (!communicationManager.QueueFrame(framePointer, callback))
        {
            goto error;
        }
    }

    this.FrameRate = 30;

    error = Pv.AttrEnumSet(this.camera.Value, "FrameStartTriggerMode", "FixedRate");
    if (error != tErr.eErrSuccess)
    {
        goto error;
    }

    error = Pv.AttrEnumSet(this.camera.Value, "AcquisitionMode", "Continuous");
    if (error != tErr.eErrSuccess)
    {
        goto error;
    }

    error = Pv.CommandRun(this.camera.Value, "AcquisitionStart");
    if (error != tErr.eErrSuccess)
    {
        goto error;
    }

    return;

error:
    EndCapture();
    throw new PvException(error);
}
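// A sketch of the EndCapture() cleanup that BeginCapture falls back to. The
// class's real implementation is not shown, so the field names are assumed to be
// the same ones used above, and CaptureQueueClear is assumed to be exposed by the
// wrapper like the underlying C API call.
public void EndCapture()
{
    if (!camera.HasValue)
    {
        return;
    }

    // Stop acquisition and leave capture mode; errors are ignored because this may
    // run while cleaning up after a partially failed BeginCapture.
    Pv.CommandRun(camera.Value, "AcquisitionStop");
    Pv.CaptureEnd(camera.Value);
    Pv.CaptureQueueClear(camera.Value);

    // Release the pinned frame and buffer handles so the GC can move them again.
    if (framePoolHandles != null)
    {
        foreach (GCHandle handle in framePoolHandles)
        {
            if (handle.IsAllocated)
            {
                handle.Free();
            }
        }
        framePoolHandles = null;
    }

    if (frameBufferHandles != null)
    {
        foreach (GCHandle handle in frameBufferHandles)
        {
            if (handle.IsAllocated)
            {
                handle.Free();
            }
        }
        frameBufferHandles = null;
    }

    if (buffers != null)
    {
        buffers.Clear();
    }
    frames = null;
}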