/// <summary>
/// Wraps the raw image buffer returned by Device.Stream_GetFrame() in a new frame.
/// </summary>
/// <param name="data">raw image data received from the camera</param>
/// <param name="cam">owning camera instance</param>
/// <param name="log">logger taken from the camera instance</param>
public FrameData(byte[] data, Camera cam, MetriLog log)
{
    this.cam = cam;
    this.log = log;
    imageData = data;

    // Populate the frame's fields with their defaults before any parsing happens.
    DefaultValues();
}
/// <summary>
/// Wraps the raw image buffer returned by Device.Stream_GetFrame() in a new frame
/// and immediately parses it.
/// </summary>
/// <param name="data">raw image data received from the camera</param>
/// <param name="cam">owning camera instance</param>
/// <param name="log">logger taken from the camera instance</param>
internal FrameData(byte[] data, VisionaryT cam, MetriLog log)
{
    this.cam = cam;
    this.log = log;
    ImageBuffer = data;

    // Reset all frame state first, then decode the buffer.
    SetDefaultValues();
    Parse();
}
/// <summary>
/// Creates a new Device instance which can be used to handle the low level TCP
/// communication between camera and client.
/// </summary>
/// <param name="ip">IP address of the device to connect to</param>
/// <param name="cam">MetriCam2 camera object used for exceptions</param>
/// <param name="log">MetriLog</param>
public Device(string ip, Camera cam, MetriLog log)
{
    ipAddress = ip;
    this.cam = cam;
    this.log = log;

    // No connection is opened yet; sockets and streams stay unset until needed.
    sockControl = null;
    streamControl = null;
    sockData = null;
    streamData = null;
}
/// <summary>
/// Creates the main form and lays out the four preview panels as a 2x2 grid:
/// two equally wide columns, each column split into two equally tall rows.
/// </summary>
public Form1()
{
    InitializeComponent();
    log = new MetriLog();
    this.FormClosing += Form1_FormClosing;

    const int margin = 20; // gap reserved between the panels, in pixels

    // Split the container area horizontally into two equally wide columns.
    int halfWidth = (panelContainerLeft.Parent.Width - margin) / 2;
    panelContainerLeft.Width = halfWidth;
    panelContainerRight.Width = halfWidth;

    // Split each column vertically. The helper removes the four-fold
    // duplication of the "(parent height - margin) / 2" formula.
    panelTL.Height = HalfOfParentHeight(panelTL, margin);
    panelTR.Height = HalfOfParentHeight(panelTR, margin);
    panelBL.Height = HalfOfParentHeight(panelBL, margin);
    panelBR.Height = HalfOfParentHeight(panelBR, margin);
}

/// <summary>
/// Half of <paramref name="panel"/>'s parent height, less the layout margin.
/// </summary>
/// <param name="panel">panel whose parent provides the available height</param>
/// <param name="margin">vertical gap to subtract before halving</param>
private static int HalfOfParentHeight(System.Windows.Forms.Control panel, int margin)
{
    return (panel.Parent.Height - margin) / 2;
}
/// <summary>
/// Opens the SOPAS control connection to the given IP address and initializes
/// the control stream.
/// </summary>
/// <param name="log">logger used for error reporting</param>
/// <param name="ipAddress">IP address of the device to connect to</param>
/// <exception cref="Exceptions.ConnectionFailedException">thrown when the TCP connection cannot be established</exception>
public Control(MetriLog log, string ipAddress)
{
    this.log = log;

    try
    {
        sockControl = new TcpClient(ipAddress, TCP_PORT_SOPAS);
        streamControl = sockControl.GetStream();
    }
    catch (Exception ex)
    {
        string message = string.Format("Failed to connect to IP={0}, reasons={1}", ipAddress, ex.Message);
        log.Error(message);
        throw new Exceptions.ConnectionFailedException(message, ex);
    }

    // Query the current access level, then bring the stream into a usable state.
    _accessMode = GetAccessMode();
    InitStream();
}
/// <summary>
/// Creates a new Device instance which can be used to handle the low level TCP
/// communication between camera and client.
/// </summary>
/// <param name="ipAddress">IP address of the device to connect to</param>
/// <param name="cam">MetriCam2 camera object used for exceptions</param>
/// <param name="log">MetriLog</param>
/// <exception cref="Exceptions.ConnectionFailedException">thrown when the TCP connection cannot be established</exception>
internal Device(string ipAddress, VisionaryT cam, MetriLog log)
{
    this.cam = cam;
    this.log = log;

    try
    {
        sockData = new TcpClient(ipAddress, TCP_PORT_BLOBSERVER);
    }
    catch (Exception ex)
    {
        string message = string.Format("{0}: Failed to connect to IP {1}{2}Reason: {3}", cam.Name, ipAddress, Environment.NewLine, ex.Message);
        log.Error(message);
        throw new Exceptions.ConnectionFailedException(message, ex);
    }

    streamData = sockData.GetStream();

    // say "hello" to camera
    byte[] heartbeat = Encoding.ASCII.GetBytes(HEARTBEAT_MSG);
    streamData.Write(heartbeat, 0, heartbeat.Length);
}
/// <summary>
/// Stress test for a triggered stereo camera pair (TheImagingSource): captures
/// image pairs in an endless loop, auto-adjusts exposure, and compares each new
/// image against the previous one to detect a frozen stream.
/// NOTE(review): `running` is never set to false, so the test only ends when the
/// process is killed — presumably intentional for a long-running stress test.
/// </summary>
/// <param name="args">unused command line arguments</param>
static void Main(string[] args)
{
    MetriLog log = new MetriLog();
    Random rand = new Random();
    const int MAX_DELAY_IN_MIN = 20; // upper bound for the random pause between rounds
    Camera leftCam = new TheImagingSource();
    Camera rightCam = new TheImagingSource();
    // Hardware trigger via serial port — port name/baud rate are hard-coded for this rig.
    SerialPort triggerPort = new SerialPort("COM7", 9600);
    TriggeredStereoCamera cam = new TriggeredStereoCamera(leftCam, rightCam, triggerPort);
    // leftOld/rightOld hold the previous pair; null means "no valid reference yet".
    FloatImage left, leftOld = null, right, rightOld = null;
    float thres;
    // thres = 1000.0f; // uEye
    thres = 100000.0f; // TIS
    int cnt = 0;
    const float MAX_EXPOSURE = 10f;
    const float DARK_THRES = 50f; // uEye: 10f, TIS: 50f
    const int NUM_WARMUP_IMAGES = 50;
    ConfigureLogging(StressTests.Freeze.Resources.LoggingConfigInfo);
    log.SetLogFile(@"D:\temp\stress_test_TIS.log");
    cam.Connect();
    cam.Exposure = 4;
    log.Info("Warming up.");
    // Discard the first frames so the sensor settles before comparisons start.
    for (int i = 0; i < NUM_WARMUP_IMAGES; i++)
    {
        Capture(cam, out left, out right, ref cnt);
    }
    log.Info("Starting test.");
    bool running = true;
    while (running)
    {
        log.Debug("Another round starts.");
        for (int i = 0; i < 10; i++)
        {
            // Clamp exposure; reset the reference images because the
            // brightness change would trigger a false "frozen" alarm.
            if (cam.Exposure > MAX_EXPOSURE)
            {
                cam.Exposure = MAX_EXPOSURE;
                leftOld = null;
                rightOld = null;
                continue;
            }
            Capture(cam, out left, out right, ref cnt);
            float minL, maxL, minR, maxR;
            left.GetMinMax(out minL, out maxL);
            right.GetMinMax(out minR, out maxR);
            log.Debug("MAX = " + maxL + " " + maxR);
            // Saturated pixels in either image: back off exposure by 25%.
            if (maxL == 255f || maxR == 255f)
            {
                log.Info("Overexposed, reducing exposure time.");
                cam.Exposure = cam.Exposure * (3f / 4f);
                leftOld = null;
                rightOld = null;
                continue;
            }
            // Both images too dark: raise exposure, or — if already at the
            // limit — assume it is night and sleep for an hour.
            if (maxL < DARK_THRES && maxR < DARK_THRES)
            {
                if (cam.Exposure < MAX_EXPOSURE)
                {
                    log.Info("Underexposed, increasing exposure time.");
                    cam.Exposure = cam.Exposure * (4f / 3f);
                    leftOld = null;
                    rightOld = null;
                    continue;
                }
                log.Info("seems to be dark, let's sleep an hour.");
                Thread.Sleep(1000 * 60 * 60);
                leftOld = null;
                rightOld = null;
                continue;
            }
            // Compare against the previous pair; Compare returns null when the
            // difference is suspiciously small (possible frozen stream).
            rightOld = Compare(right, rightOld, thres, cnt, "R", log);
            leftOld = Compare(left, leftOld, thres, cnt, "L", log);
            if (null == leftOld || null == rightOld)
            {
                // Abort this round early; the outer loop continues with a
                // fresh reference after the random delay.
                break;
            }
        }
        // Random pause of up to MAX_DELAY_IN_MIN minutes between rounds.
        int random = rand.Next(100);
        float delayInMinutes = (float)random / 100f * (float)MAX_DELAY_IN_MIN;
        log.Debug("Sleeping for " + delayInMinutes + " minutes");
        Thread.Sleep((int)(1000 * 60 * delayInMinutes));
        //Thread.Sleep(500);
    }
    cam.Disconnect();
}
/// <summary>
/// Compares the current image against the previous one using the sum of absolute
/// differences (SAD). Returns the current image to serve as the next reference,
/// or null when the SAD fell below the threshold (stream appears frozen).
/// </summary>
/// <param name="currentImg">freshly captured image</param>
/// <param name="oldImg">previous reference image, or null if none exists yet</param>
/// <param name="thres">minimum SAD two consecutive live images must exhibit</param>
/// <param name="cnt">frame counter, used only for the error message</param>
/// <param name="channel">channel tag ("L"/"R"), used only for the error message</param>
/// <param name="log">logger for debug/error output</param>
private static FloatImage Compare(FloatImage currentImg, FloatImage oldImg, float thres, int cnt, string channel, MetriLog log)
{
    // No reference yet: nothing to compare, current image becomes the reference.
    if (oldImg == null)
    {
        return currentImg;
    }

    FloatImage diff = oldImg - currentImg;
    float sad = diff.Abs().Sum();
    log.Debug("SAD = " + sad);

    if (sad < thres)
    {
        log.ErrorFormat("Image {0}{1}: SAD ({2}) was below the threshold of {3}.", cnt, channel, sad, thres);
        return null;
    }

    return currentImg;
}
/// <summary>
/// Static constructor.
/// Creates the class-level logger and registers all camera types from the
/// currently loaded assemblies.
/// </summary>
static CameraManagement()
{
    log = new MetriLog(LoggerName);
    ScanLoadedAssemblies();
}
/// <summary>
/// Manual test program for the Kinect2 camera: connects, queries intrinsics and
/// extrinsics (repeatedly, presumably to exercise caching — TODO confirm), then
/// disconnects.
/// NOTE(review): `running` is initialized to false, so the Update() loop never
/// executes — looks like a debugging leftover; confirm whether it should be true.
/// NOTE(review): `pt` is assigned but never read; the GetIntrinsics calls appear
/// to be made only for their side effects / exceptions.
/// </summary>
/// <param name="args">unused command line arguments</param>
static void Main(string[] args)
{
    MetriLog log = new MetriLog();
    Kinect2 cam = new Kinect2();
    try
    {
        cam.Connect();
    }
    catch (MetriCam2.Exceptions.ConnectionFailedException)
    {
        log.Error("Connection failed. Closing window in 5 sec.");
        Thread.Sleep(5 * 1000);
        return;
    }
    cam.ActivateChannel(ChannelNames.Color);
    // Dead loop: running is false, so cam.Update() is never called here.
    bool running = false;
    while (running)
    {
        cam.Update();
    }
    ProjectiveTransformationZhang pt;
    // Intrinsics are requested three times in a row — each call is guarded
    // separately because GetIntrinsics throws when no calibration file exists.
    try
    {
        pt = (ProjectiveTransformationZhang)cam.GetIntrinsics(ChannelNames.Color);
    }
    catch (FileNotFoundException)
    {
        log.Warn("No PT found.");
    }
    try
    {
        pt = (ProjectiveTransformationZhang)cam.GetIntrinsics(ChannelNames.Color);
    }
    catch (FileNotFoundException)
    {
        log.Warn("No PT found.");
    }
    try
    {
        pt = (ProjectiveTransformationZhang)cam.GetIntrinsics(ChannelNames.Color);
    }
    catch (FileNotFoundException)
    {
        log.Warn("No PT found.");
    }
    // Forward extrinsics: color -> z-image.
    try
    {
        RigidBodyTransformation rbt = cam.GetExtrinsics(ChannelNames.Color, ChannelNames.ZImage);
    }
    catch (FileNotFoundException)
    {
        log.Warn("No fwd RBT found.");
    }
    // Inverse extrinsics: z-image -> color.
    try
    {
        RigidBodyTransformation rbtInverse = cam.GetExtrinsics(ChannelNames.ZImage, ChannelNames.Color);
    }
    catch (FileNotFoundException)
    {
        log.Warn("No inverse RBT found.");
    }
    cam.Disconnect();
    log.Info("Program ended. Closing window in 5 sec.");
    Thread.Sleep(5 * 1000);
}