/// <summary>
/// Find all camera implementations in the currently loaded assemblies (DLLs).
/// </summary>
public static void ScanLoadedAssemblies()
{
    log.Debug("CameraManagement: Scanning all loaded assemblies");
    Assembly[] loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies();
    foreach (var loadedAssembly in loadedAssemblies)
    {
        // "memory" marks the origin of the assembly as already-loaded (vs. a disk scan).
        InspectAssembly(loadedAssembly, "memory");
    }
}
/// <summary>
/// Background acquisition loop: repeatedly fetches frames from the camera,
/// converts the selected channel to a bitmap, and pushes it to the UI until
/// cancellation is requested. Also maintains an fps counter and handles
/// one-shot snapshot requests (<c>saveSnapshot</c> flag).
/// </summary>
/// <param name="sender">Standard BackgroundWorker event argument (unused).</param>
/// <param name="e">Standard BackgroundWorker event argument (unused).</param>
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    DateTime lastSecond = DateTime.Now;
    int fps = 0; // frames rendered since lastSecond
    while (!backgroundWorker.CancellationPending)
    {
        try
        {
            cam.Update();
        }
        catch (Exception ex)
        {
            // Best-effort: a single failed update is logged and skipped, not fatal.
            log.Debug("Update failed", ex);
            continue;
        }
        ImageBase camImg = cam.CalcSelectedChannel();
        if (null == camImg)
        {
            // ignore errors. However, this might be a hint that something is wrong in your application.
            continue;
        }
        // Depth-like float channels are clamped to the configured display range
        // before conversion, so the bitmap contrast stays usable.
        if (camImg is FloatImage && (cam.SelectedChannel == ChannelNames.Distance || cam.SelectedChannel == ChannelNames.ZImage))
        {
            TrimImage((FloatImage)camImg, Properties.Settings.Default.MinDepthToDisplay, Properties.Settings.Default.MaxDepthToDisplay);
        }
        Bitmap bmp = camImg.ToBitmap();
        if (saveSnapshot)
        {
            // One-shot: flag is set elsewhere (UI), cleared here after saving to %TEMP%.
            string snapName = "MetriCam 2 Snapshot.png";
            string snapFilename = Path.GetTempPath() + snapName;
            bmp.Save(snapFilename);
            MessageBox.Show(string.Format("Snapshot saved as '{0}'.", snapFilename), "Snapshot saved", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
            saveSnapshot = false;
        }
        // Marshal the bitmap assignment onto the UI thread.
        this.BeginInvokeEx(f => pictureBox.Image = bmp);
        fps++;
        DateTime now = DateTime.Now;
        if (now - lastSecond > new TimeSpan(0, 0, 1))
        {
            // Copy fps before capturing in the lambda so the async UI update
            // does not observe the reset below.
            int fpsCopy = fps;
            this.BeginInvokeEx(f => labelFps.Text = $"{fpsCopy} fps");
            lastSecond = now;
            fps = 0;
        }
    }
    // Loop exited due to cancellation: release the camera and signal completion.
    DisconnectCamera();
    isBgwFinished.Set();
}
/// <summary>
/// Gets the raw frame data from camera.
/// </summary>
/// <remarks>Data is checked for correct protocol version and packet type.</remarks>
/// <returns>Raw frame</returns>
internal byte[] GetFrameData()
{
    if (!Utils.SyncCoLa(streamData))
    {
        string msg = string.Format("{0}: Could not sync to CoLa bus", cam.Name);
        log.Error(msg);
        throw new IOException(msg);
    }

    log.Debug("Start getting frame");

    // First read the 4-byte big-endian payload length, then the payload itself.
    byte[] data = new byte[0];
    if (!Utils.Receive(streamData, ref data, 4))
    {
        string msg = string.Format("{0}: Could not read package length", cam.Name);
        log.Error(msg);
        throw new IOException(msg);
    }
    uint payloadLength = Utils.FromBigEndianUInt32(data, 0);
    if (!Utils.Receive(streamData, ref data, (int)payloadLength))
    {
        string msg = string.Format("{0}: Could not read package payload", cam.Name);
        log.Error(msg);
        throw new IOException(msg);
    }

    // Validate the 3-byte header: protocol version (2 bytes) + packet type (1 byte).
    int pos = 0;
    ushort version = Utils.FromBigEndianUInt16(data, pos);
    pos += 2;
    byte type = data[pos];
    pos += 1;
    if (version != 0x0001)
    {
        string msg = string.Format("{0}: The protocol version is not 0x0001 as expected: {1:X4}", cam.Name, version);
        log.Error(msg);
        throw new InvalidDataException(msg);
    }
    if (type != 0x62)
    {
        string msg = string.Format("{0}: The packet type is not 0x62 as expected: {1:X2}", cam.Name, type);
        log.Error(msg);
        throw new InvalidDataException(msg);
    }

    // Return everything after the validated header.
    byte[] frame = new byte[data.Length - pos];
    Array.Copy(data, pos, frame, 0, frame.Length);
    return frame;
}
/// <summary>
/// Tells the device that there is a streaming channel.
/// </summary>
private void InitStream()
{
    log.Debug("InitStream");
    SendCommand("sMN GetBlobClientConfig");
    // Payload and checksum of the response are not evaluated; only success matters here.
    if (!ReceiveResponse(out byte[] payload, out byte checkSum))
    {
        throw new InvalidOperationException("Failed to init control stream.");
    }
}
/// <summary>
/// Freeze detection: compares the current image against the previous one via the
/// sum of absolute differences (SAD). A SAD below <paramref name="thres"/> means
/// the image did not change enough — an error is logged and null is returned so
/// the caller can reset its reference image. Otherwise the current image is
/// returned as the new reference.
/// </summary>
private static FloatImage Compare(FloatImage currentImg, FloatImage oldImg, float thres, int cnt, string channel, MetriLog log)
{
    if (null != oldImg)
    {
        float sad = (oldImg - currentImg).Abs().Sum();
        log.Debug("SAD = " + sad);
        if (sad < thres)
        {
            // Too similar to the previous frame: likely a frozen camera.
            log.ErrorFormat("Image {0}{1}: SAD ({2}) was below the threshold of {3}.", cnt, channel, sad, thres);
            return null;
        }
    }
    return currentImg;
}
/// <summary>
/// Long-running stress test for a triggered stereo camera pair (freeze detection).
/// Captures image pairs, auto-adjusts exposure, compares consecutive frames via
/// SAD and stops the inner loop when a frozen image is detected; sleeps a random
/// delay between rounds. NOTE(review): runs indefinitely — `running` is never set
/// to false, so `cam.Disconnect()` is effectively unreachable; confirm intended.
/// </summary>
static void Main(string[] args)
{
    MetriLog log = new MetriLog();
    Random rand = new Random();
    const int MAX_DELAY_IN_MIN = 20; // upper bound for the random inter-round sleep
    Camera leftCam = new TheImagingSource();
    Camera rightCam = new TheImagingSource();
    SerialPort triggerPort = new SerialPort("COM7", 9600); // hardware trigger line — TODO confirm port
    TriggeredStereoCamera cam = new TriggeredStereoCamera(leftCam, rightCam, triggerPort);
    FloatImage left, leftOld = null, right, rightOld = null;
    float thres; // thres = 1000.0f; // uEye
    thres = 100000.0f; // TIS
    int cnt = 0;
    const float MAX_EXPOSURE = 10f;
    const float DARK_THRES = 50f; // uEye: 10f, TIS: 50f
    const int NUM_WARMUP_IMAGES = 50;
    ConfigureLogging(StressTests.Freeze.Resources.LoggingConfigInfo);
    log.SetLogFile(@"D:\temp\stress_test_TIS.log");
    cam.Connect();
    cam.Exposure = 4;
    log.Info("Warming up.");
    // Discard initial frames so sensor/exposure can settle before testing.
    for (int i = 0; i < NUM_WARMUP_IMAGES; i++)
    {
        Capture(cam, out left, out right, ref cnt);
    }
    log.Info("Starting test.");
    bool running = true;
    while (running)
    {
        log.Debug("Another round starts.");
        for (int i = 0; i < 10; i++)
        {
            // Clamp exposure; reference images are invalidated on every exposure change
            // because SAD against a differently-exposed frame would be meaningless.
            if (cam.Exposure > MAX_EXPOSURE)
            {
                cam.Exposure = MAX_EXPOSURE;
                leftOld = null;
                rightOld = null;
                continue;
            }
            Capture(cam, out left, out right, ref cnt);
            float minL, maxL, minR, maxR;
            left.GetMinMax(out minL, out maxL);
            right.GetMinMax(out minR, out maxR);
            log.Debug("MAX = " + maxL + " " + maxR);
            // 255 = saturated 8-bit pixel: overexposed.
            if (maxL == 255f || maxR == 255f)
            {
                log.Info("Overexposed, reducing exposure time.");
                cam.Exposure = cam.Exposure * (3f / 4f);
                leftOld = null;
                rightOld = null;
                continue;
            }
            if (maxL < DARK_THRES && maxR < DARK_THRES)
            {
                if (cam.Exposure < MAX_EXPOSURE)
                {
                    log.Info("Underexposed, increasing exposure time.");
                    cam.Exposure = cam.Exposure * (4f / 3f);
                    leftOld = null;
                    rightOld = null;
                    continue;
                }
                // Exposure is maxed and the scene is still dark — presumably night time.
                log.Info("seems to be dark, let's sleep an hour.");
                Thread.Sleep(1000 * 60 * 60);
                leftOld = null;
                rightOld = null;
                continue;
            }
            // Compare returns null when a frozen image is detected; abort the round.
            rightOld = Compare(right, rightOld, thres, cnt, "R", log);
            leftOld = Compare(left, leftOld, thres, cnt, "L", log);
            if (null == leftOld || null == rightOld)
            {
                break;
            }
        }
        // Random pause (0 .. MAX_DELAY_IN_MIN minutes) between rounds.
        int random = rand.Next(100);
        float delayInMinutes = (float)random / 100f * (float)MAX_DELAY_IN_MIN;
        log.Debug("Sleeping for " + delayInMinutes + " minutes");
        Thread.Sleep((int)(1000 * 60 * delayInMinutes));
        //Thread.Sleep(500);
    }
    cam.Disconnect();
}
/// <summary>
/// Applies the channel selection from the checked list box to the camera
/// (deactivate unchecked, then activate checked), tries to select a sensible
/// channel afterwards, reports activation/deactivation failures to the user,
/// applies the remaining camera parameters, and refreshes the channel panel
/// if the configuration changed the set of available channels.
/// </summary>
private void ApplyConfiguration()
{
    log.EnterMethod();
    int nrChannelsBeforeConfigurationChange = camera.Channels.Count;
    List<string> channelsNotDeactivated = new List<string>();
    List<string> channelsNotActivated = new List<string>();
    // BUG: If currently selected channel will be deactivated, then we are in trouble
    // NOTE(review): the tasks below read WinForms controls (checkedListBoxChannels)
    // from a worker thread — presumably safe only because the UI is blocked by
    // Wait() below; confirm before refactoring.
    Task channelsTask = Task.Factory.StartNew(() =>
    {
        log.Debug("Deactivate unchecked channels");
        for (int i = 0; i < checkedListBoxChannels.Items.Count; i++)
        {
            var item = checkedListBoxChannels.Items[i];
            string channel = item.ToString();
            if (!checkedListBoxChannels.CheckedItems.Contains(item))
            {
                try
                {
                    camera.DeactivateChannel(channel);
                }
                catch (Exception ex)
                {
                    // Collect failures; they are reported to the user in one dialog below.
                    log.ErrorFormat("Could not deactivate channel '{0}': {1}", channel, ex.Message);
                    channelsNotDeactivated.Add(channel);
                }
            }
        }
    }).ContinueWith((t) =>
    {
        log.Debug("Activate checked channels");
        for (int i = 0; i < checkedListBoxChannels.CheckedItems.Count; i++)
        {
            var item = checkedListBoxChannels.CheckedItems[i];
            string channel = item.ToString();
            try
            {
                camera.ActivateChannel(channel);
            }
            catch (Exception ex)
            {
                log.ErrorFormat("Could not activate channel '{0}': {1}", channel, ex.Message);
                channelsNotActivated.Add(channel);
            }
        }
    });
    channelsTask.Wait();

    log.Debug("Try to select a channel");
    if (1 == camera.ActiveChannels.Count)
    {
        // Only one active channel: select it unconditionally.
        camera.SelectChannel(camera.ActiveChannels[0].Name);
    }
    else if (1 == checkedListBoxChannels.SelectedItems.Count && checkedListBoxChannels.CheckedItems.Contains(checkedListBoxChannels.SelectedItem))
    {
        // Otherwise prefer the highlighted item, if it is among the checked ones.
        camera.SelectChannel(checkedListBoxChannels.SelectedItem.ToString());
    }

    if (channelsNotDeactivated.Count + channelsNotActivated.Count > 0)
    {
        StringBuilder sb = new StringBuilder();
        if (channelsNotDeactivated.Count > 0)
        {
            sb.AppendLine(string.Format("Could not deactivate the channels '{0}'", string.Join("', '", channelsNotDeactivated)));
        }
        // FIX: this previously tested channelsNotDeactivated.Count, so activation
        // failures were never reported (and the message could appear twice for
        // deactivation failures).
        if (channelsNotActivated.Count > 0)
        {
            sb.AppendLine(string.Format("Could not activate the channels '{0}'", string.Join("', '", channelsNotActivated)));
        }
        MessageBox.Show(sb.ToString(), "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    log.Debug("Apply camera parameters");
    cameraSettingsControl.ApplyCameraSettings();

    //If configuring the camera involves an automatic change of the available channels, we should update the channel panel size.
    //TODO: Perform a deep comparison of all channels instead of just comparing the number of elements.
    if (camera.Channels.Count != nrChannelsBeforeConfigurationChange)
    {
        LoadChannels();
        AdjustLayout();
    }
}
/// <summary>
/// Tells the device that there is a streaming channel.
/// </summary>
/// <remarks>
/// Sends a framed "sMN GetBlobClientConfig" command over the control stream and
/// logs the response. Throws (via ExceptionBuilder) when the device returns no data.
/// </remarks>
public void Control_InitStream()
{
    log.Debug("Initializing streaming");
    byte[] toSend = AddFraming("sMN GetBlobClientConfig");
    byte[] receive = new byte[50];

    // send ctrl message
    streamControl.Write(toSend, 0, toSend.Length);

    // get response
    int bytesRead = streamControl.Read(receive, 0, receive.Length);
    if (bytesRead == 0)
    {
        log.Error("Got no answer from camera");
        ExceptionBuilder.Throw(typeof(InvalidOperationException), cam, "error_setParameter", "Failed to init stream.");
    }
    else
    {
        // FIX: decode only the bytes actually read. Previously the whole 50-byte
        // buffer was decoded, so the logged response contained trailing NUL bytes
        // (or stale data from earlier reads). Note Read() may return fewer bytes
        // than requested — only bytesRead of them are valid.
        string response = Encoding.ASCII.GetString(receive, 0, bytesRead);
        log.DebugFormat("Got response: {0}", response);
    }
    log.Debug("Done: Initializing streaming");
}