public void ProcessFrame(VideoFrameWrapper frame, Bitmap bmp)
{
    // TODO: Make this processing asynchronous so the painting is not delayed unnecessarily (is this actually possible?)
    m_TargetSignalMonitor.ProcessFrame(frame);
    m_PlateSolveManager.ProcessFrameAsync(frame, bmp);
}
public void ProcessFrame(VideoFrameWrapper frame, LastTrackedPosition locatedGuidingStar)
{
    if (m_RunAutoFocusNow && locatedGuidingStar != null && locatedGuidingStar.IsLocated && m_ObservatoryController.IsConnectedToTelescope())
    {
        // TODO: Start the auto focusing and monitor it. Do we want to use a state machine for this?
    }
    else if (m_IsAutoFocusing && m_ObservatoryController.IsConnectedToTelescope())
    {
        // TODO: Check the effect of the last focuser movement and issue a correction or end the focusing.
        // TODO: Use a state machine to manage this.
    }
}
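// A minimal sketch of the state machine contemplated in the TODOs above. All names here
// (AutoFocusState, Step, the measurement parameter) are hypothetical and not part of the
// existing code base; this only illustrates a hill-climbing shape the auto-focus loop could take.
internal enum AutoFocusState
{
    Idle,           // waiting for m_RunAutoFocusNow
    MovingFocuser,  // a focuser move has been issued and has not completed yet
    Measuring,      // evaluating the star sharpness at the current focuser position
    Completed       // best focus found; return to Idle
}

internal sealed class AutoFocusStateMachine
{
    private AutoFocusState m_State = AutoFocusState.Idle;
    private double m_BestMeasurement = double.MaxValue;

    // Called once per frame, mirroring how ProcessFrame() above is driven.
    // currentFocusMeasurement is assumed to be lower-is-better (e.g. star FWHM).
    internal void Step(double currentFocusMeasurement)
    {
        switch (m_State)
        {
            case AutoFocusState.Idle:
                m_State = AutoFocusState.MovingFocuser;
                break;

            case AutoFocusState.MovingFocuser:
                // A real implementation would wait here for the focuser-move callback.
                m_State = AutoFocusState.Measuring;
                break;

            case AutoFocusState.Measuring:
                if (currentFocusMeasurement < m_BestMeasurement)
                {
                    // Focus improved; keep moving in the same direction
                    m_BestMeasurement = currentFocusMeasurement;
                    m_State = AutoFocusState.MovingFocuser;
                }
                else
                    // Focus got worse; the previous position was the best one
                    m_State = AutoFocusState.Completed;
                break;

            case AutoFocusState.Completed:
                m_State = AutoFocusState.Idle;
                break;
        }
    }
}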
internal override void ProcessFrame(VideoFrameWrapper frame, LastTrackedPosition locatedGuidingStar)
{
    if (!m_FirstFrameReceived)
    {
        // TODO: Record the starting position
        m_FirstFrameReceived = true;

        m_StateMachine.ObservatoryController.TelescopePulseGuide(
            GuideDirections.guideEast,
            PulseRate.Fast,
            CallType.Async,
            m_ObservatoryControlLock,
            ObservatoryController_SlewCompleted);

        m_PulseIssued = true;
    }
    else if (m_PulseFinished)
    {
        // TODO: Record the last position
    }
}
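// A possible way to fill in the two TODOs above, assuming LastTrackedPosition exposes X/Y pixel
// coordinates (the actual property names may differ). Record the guiding star's position before
// and after the calibration pulse, then derive the on-sensor direction of a guideEast pulse so
// later RA/Dec corrections can be mapped onto pixel offsets.
private float m_StartX, m_StartY;

private void RecordStartingPosition(LastTrackedPosition star)
{
    m_StartX = star.X;
    m_StartY = star.Y;
}

private void ComputeEastVector(LastTrackedPosition star, out double angleDeg, out double pixelsMoved)
{
    double dx = star.X - m_StartX;
    double dy = star.Y - m_StartY;

    pixelsMoved = Math.Sqrt(dx * dx + dy * dy);

    // Angle of the East direction in the camera frame, in degrees
    angleDeg = Math.Atan2(dy, dx) * 180.0 / Math.PI;
}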
public MinimalVideoFrame(VideoFrameWrapper source, Bitmap previewBitmap)
{
    FrameNumber = source.FrameNumber;
    ExposureDuration = double.NaN;
    ExposureStartTime = null;
    ImageInfo = source.ImageInfo;
    MaxSignalValue = (uint)(source.IntegrationRate.HasValue ? 255 * source.IntegrationRate.Value : 255);

    if (source.ImageArray is int[,])
        ImageArray = source.ImageArray;
    else if (source.ImageArray != null)
        throw new NotSupportedException("Unsupported ImageArray Format.");
    else if (previewBitmap != null)
        ConstructFromBitmap(previewBitmap);
    else
        throw new NotSupportedException("Both ImageArray and PreviewBitmap are null!");
}
public void ProcessFrameAsync(VideoFrameWrapper frame, Bitmap bmp)
{
    lock (m_SyncLock)
    {
        if (m_WaitingForFrameToSolve && m_TelescopeIsConnected && m_TelescopePositionIsKnown)
        {
            var mvf = new MinimalVideoFrame(frame, bmp);
            m_CurrentFramePixels = mvf.ImageArray as int[,];

            if (!m_FOVKnown)
            {
                // http://www.wilmslowastro.com/software/formulae.htm
                // arcsec per pixel = pixel size [um] * 206.3 / focal length [mm]
                int width = m_CurrentFramePixels.GetLength(0);
                int height = m_CurrentFramePixels.GetLength(1);

                m_FieldOfViewDegrees = (Math.Sqrt(width * width + height * height) * 206.3 * ASSUMED_MAX_PIXEL_SIZE_MICRONS / m_FocalLengthMillimeters) / 3600;
            }

            m_WaitingForFrameToSolve = false;
        }
    }
}
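// A worked instance of the formula above, with illustrative numbers only (not project constants):
// 8.6 um pixels behind a 1000 mm focal length give 8.6 * 206.3 / 1000 ~= 1.77 arcsec/pixel.
// A 720x576 sensor has a diagonal of sqrt(720*720 + 576*576) ~= 922 px, so the diagonal
// field of view is 922 * 1.77 / 3600 ~= 0.45 degrees, which is what m_FieldOfViewDegrees holds.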
public void ProcessFrame(VideoFrameWrapper frame)
{
    if (TrackingContext.Current.TargetStar != null && TrackingContext.Current.GuidingStar != null)
    {
        if (TrackingContext.Current.TargetStar.IsLocated && TrackingContext.Current.GuidingStar.IsLocated)
        {
            float normalizedMeasurement = TrackingContext.Current.TargetStar.Measurement / TrackingContext.Current.GuidingStar.Measurement;

            while (m_AllMeasurements.Count > 200)
            {
                m_AllMeasurements.RemoveAt(0);
            }

            var mea = new TargetMeasurement()
            {
                NormalizedMeasurement = normalizedMeasurement,
                FrameNumber = frame.IntegratedFrameNo
            };
            m_AllMeasurements.Add(mea);
        }

        if (TrackingContext.Current.GuidingStar.IsLocated)
        {
            if (m_AutoPulseGuiding)
            {
                // m_PositionTimeStamps
                // TODO: Check the average value and error during the past 5 seconds. Make sure wind doesn't trigger auto-guiding corrections.
                //       If the position over the past 5 sec is consistent, check whether it is too far away from the expected position and issue a
                //       pulse guiding command.
                // TODO: Check if a correction needs to be made in the current thread.
                // TODO: Issue any pulse guiding commands on a separate thread (or asynchronously).
                // TODO: There will be an issue with the orientation of the video camera field.
                //       Will need to find the correct directions (or have already found them) during the current session.
                //       Probably a button on the Telescope Control screen?
            }

            m_AutoFocusingManager.ProcessFrame(frame, TrackingContext.Current.GuidingStar);
        }
    }
    else if (m_AllMeasurements.Count > 0)
    {
        m_AllMeasurements.Clear();
    }
}
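// A minimal sketch of the 5-second drift check described in the TODOs above. All names here
// (GuidingSample, DEADBAND_PIXELS, DriftExceedsDeadband) are hypothetical; requires System.Linq.
// The idea: average the guiding star's position over a ~5 second window so a single wind-shaken
// frame cannot trigger a correction, and only pulse-guide when the averaged position leaves a
// deadband around the expected position.
private const double DEADBAND_PIXELS = 2.0;

private struct GuidingSample
{
    public DateTime Timestamp;
    public double X;
    public double Y;
}

private readonly List<GuidingSample> m_RecentSamples = new List<GuidingSample>();

private bool DriftExceedsDeadband(double expectedX, double expectedY, out double driftX, out double driftY)
{
    // Keep only the last 5 seconds of samples
    DateTime cutoff = DateTime.UtcNow.AddSeconds(-5);
    m_RecentSamples.RemoveAll(s => s.Timestamp < cutoff);

    driftX = 0;
    driftY = 0;

    if (m_RecentSamples.Count < 10)
        return false; // not enough data for a stable average

    driftX = m_RecentSamples.Average(s => s.X) - expectedX;
    driftY = m_RecentSamples.Average(s => s.Y) - expectedY;

    return Math.Sqrt(driftX * driftX + driftY * driftY) > DEADBAND_PIXELS;
}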
internal abstract void ProcessFrame(VideoFrameWrapper frame, LastTrackedPosition locatedGuidingStar);
internal override void ProcessFrame(VideoFrameWrapper frame, LastTrackedPosition locatedGuidingStar)
{
    // This is the idle state. Nothing to do.
}
public void ProcessFrame(VideoFrameWrapper frame, LastTrackedPosition locatedGuidingStar)
{
    m_CurrentState.ProcessFrame(frame, locatedGuidingStar);
}
internal void PaintVideoFrame(VideoFrameWrapper frame, Bitmap bmp)
{
    bool isEmptyFrame = frame == null || bmp == null;
    if (isEmptyFrame)
    {
        if (picVideoFrame.Image != null)
        {
            using (Graphics g = Graphics.FromImage(picVideoFrame.Image))
            {
                if (bmp == null)
                    g.Clear(Color.Green);
                else
                    g.DrawImage(bmp, 0, 0);

                if (m_OverlayManager != null)
                    m_OverlayManager.ProcessFrame(g, frame);

                g.Save();
            }
        }

        picVideoFrame.Invalidate();
        return;
    }

    currentFrameNo = frame.FrameNumber;

    UpdateState(frame);

    renderedFrameCounter++;
    if (renderedFrameCounter == 20)
    {
        renderedFrameCounter = 0;
        endTicks = DateTime.Now.Ticks;
        if (startTicks != 0)
        {
            renderFps = 20.0 / new TimeSpan(endTicks - startTicks).TotalSeconds;
        }
        startTicks = DateTime.Now.Ticks;
    }

    if (picVideoFrame.Image != null)
    {
        using (Graphics g = Graphics.FromImage(picVideoFrame.Image))
        {
            g.DrawImage(bmp, 0, 0);

            if (m_OverlayManager != null)
                m_OverlayManager.ProcessFrame(g, frame);

            g.Save();
        }
    }

    picVideoFrame.Invalidate();

    bmp.Dispose();

    if (framesBeforeUpdatingCameraVideoFormat >= 0)
        framesBeforeUpdatingCameraVideoFormat--;

    if (framesBeforeUpdatingCameraVideoFormat == 0)
    {
        lblVideoFormat.Text = videoObject.CameraVideoFormat;
    }

    if (m_StateManager.ProvidesOcredTimestamps)
    {
        if (m_StateManager.OcrErrors > 0)
        {
            if (tssOcrErr.Tag == null || (int)tssOcrErr.Tag != 2)
            {
                tssOcrErr.ForeColor = Color.Red;
                tssOcrErr.Tag = (int)2;
            }
            tssOcrErr.Text = string.Format("OCR ERR {0}", m_StateManager.OcrErrors);
        }
        else
        {
            if (tssOcrErr.Tag == null || (int)tssOcrErr.Tag != 1)
            {
                tssOcrErr.Text = "OCR";
                tssOcrErr.ForeColor = Color.Green;
                tssOcrErr.Tag = (int)1;
            }
        }

        if (!tssOcrErr.Visible)
            tssOcrErr.Visible = true;
    }
    else
    {
        if (tssOcrErr.Visible)
            tssOcrErr.Visible = false;
    }

    if (m_StateManager.DroppedFrames != 0)
    {
        tssDroppedFrames.Text = string.Format("{0} Dropped", m_StateManager.DroppedFrames);
        if (!tssDroppedFrames.Visible)
            tssDroppedFrames.Visible = true;
    }
    else
    {
        if (tssDroppedFrames.Visible)
            tssDroppedFrames.Visible = false;
    }

    if (frame.PerformedAction.HasValue && frame.PerformedAction.Value > 0 && frame.PerformedActionProgress.HasValue)
    {
        ttsProgressBar.Value = (int)Math.Max(0, Math.Min(100, 100 * frame.PerformedActionProgress.Value));
        ttsProgressBar.Visible = true;
    }
    else
        ttsProgressBar.Visible = false;
}
private void UpdateState(VideoFrameWrapper frame)
{
    if (IsDisposed)
        // It is possible for this method to be called during disposal, and we don't need to do anything in that case
        return;

    // TODO: Many of the things below only change their state when something changes. Rather than always resetting
    //       their state with each rendered frame, we should really use events to update the state!

    if (ChangedToDisconnectedState())
    {
        tssCameraState.Text = "Disconnected";
        tssFrameNo.Text = string.Empty;
        tssDisplayRate.Text = string.Empty;
        tssFrameNo.Visible = false;
        tssDisplayRate.Visible = false;
        tssIntegrationRate.Visible = false;
        tssDroppedFrames.Visible = false;
        tssOcrErr.Visible = false;

        tsbConnectDisconnect.ToolTipText = "Connect";
        tsbConnectDisconnect.Image = imageListToolbar.Images[0];

        tbsAddTarget.Visible = false;
        tsbAddGuidingStar.Visible = false;
        tbsInsertSpectra.Visible = false;
        tbsClearTargets.Visible = false;
        tssToolBorder.Visible = false;

        //tsbCamControl.Enabled = false;
    }
    else if (ChangedToConnectedState())
    {
        tbsAddTarget.Visible = videoObject.SupportsTargetTracking;
        tbsClearTargets.Visible = videoObject.SupportsTargetTracking;
        tsbAddGuidingStar.Visible = videoObject.SupportsTargetTracking;
        tbsInsertSpectra.Visible = videoObject.SupportsTargetTracking && Settings.Default.SpectraUseAid;
        tssToolBorder.Visible = videoObject.SupportsTargetTracking;

        tbsAddTarget.Enabled = false;
        tbsClearTargets.Enabled = false;
        tsbAddGuidingStar.Enabled = true;
        tbsInsertSpectra.Enabled = true;

        TrackingContext.Current.Reset();
        TrackingContext.Current.ReConfigureNativeTracking(videoObject.Width, videoObject.Height);

        //tsbCamControl.Enabled = CameraSupportsSoftwareControl();

        tsbConnectDisconnect.ToolTipText = "Disconnect";
        tsbConnectDisconnect.Image = imageListToolbar.Images[1];
    }

    if (videoObject != null)
    {
        UpdateApplicationStateFromCameraState();

        if (frame != null)
        {
            if (!tssFrameNo.Visible)
                tssFrameNo.Visible = true;

            if (m_StateManager.IsIntegrationLocked)
                tssFrameNo.Text = frame.IntegratedFrameNo.ToString("Integrated Frame: 0", CultureInfo.InvariantCulture);
            else
                tssFrameNo.Text = frame.FrameNumber.ToString("Current Frame: 0", CultureInfo.InvariantCulture);

            if (m_StateManager.IsIntegrationLocked)
            {
                if (!string.IsNullOrEmpty(frame.ImageInfo))
                {
                    if (frame.IntegrationRate != null)
                        tssIntegrationRate.Text = string.Format("Integration Rate: x{0}", frame.IntegrationRate);
                    else
                        tssIntegrationRate.Text = "Integration Rate: ...";

                    if (!tssIntegrationRate.Visible)
                        tssIntegrationRate.Visible = true;
                }
                else
                {
                    if (tssIntegrationRate.Visible)
                        tssIntegrationRate.Visible = false;
                }
            }
            else
            {
                if (tssIntegrationRate.Visible)
                    tssIntegrationRate.Visible = false;
            }
        }

        tbsAddTarget.Enabled = TrackingContext.Current.GuidingStar != null;
        tbsClearTargets.Enabled = TrackingContext.Current.GuidingStar != null;
        tsbAddGuidingStar.Enabled = true;
        tbsInsertSpectra.Enabled = true;

#if DEBUG
        if (!double.IsNaN(renderFps))
        {
            if (!tssDisplayRate.Visible)
                tssDisplayRate.Visible = true;
            tssDisplayRate.Text = renderFps.ToString("Display Rate: 0 fps");
        }
        else
            tssDisplayRate.Text = "Display Rate: N/A";
#endif

        if (videoObject.State == VideoCameraState.videoCameraRecording && File.Exists(recordingfileName))
        {
            var fi = new FileInfo(recordingfileName);
            tssRecordingFile.Text = string.Format("{0} ({1:0.0} Mb)", fi.Name, 1.0 * fi.Length / (1024 * 1024));

            if (Settings.Default.WarnForFileSystemIssues && Settings.Default.WarnOnFreeDiskSpaceLeft && fi.Directory != null)
            {
                ulong freeBytes;
                NativeHelpers.GetDriveFreeBytes(fi.Directory.FullName, out freeBytes);

                if (freeBytes < ((ulong)1024 * (ulong)1024 * (ulong)1024 * (ulong)Settings.Default.WarnMinDiskFreeSpaceGb))
                {
                    tssFreeDiskSpace.Visible = true;
                    tssFreeDiskSpace.Text = string.Format("{0:0.0} Gb free", 1.0 * freeBytes / (1024 * 1024 * 1024));
                }
                else
                    tssFreeDiskSpace.Visible = false;
            }
            else
                tssFreeDiskSpace.Visible = false;

            tssRecordingFile.Visible = true;
            btnStopRecording.Enabled = true;
            btnRecord.Enabled = false;
        }
        else if (
            videoObject.State == VideoCameraState.videoCameraRunning &&
            lbSchedule.Items.Count == 0 &&
            (m_StateManager.CanStartRecording || Settings.Default.IntegrationDetectionTuning))
        {
            tssRecordingFile.Visible = false;
            tssFreeDiskSpace.Visible = false;
            btnStopRecording.Enabled = false;
            btnRecord.Enabled = true;
        }
        else
        {
            tssRecordingFile.Visible = false;
            tssFreeDiskSpace.Visible = false;
            btnRecord.Enabled = false;
            btnStopRecording.Enabled = false;
        }

        if (videoObject.State == VideoCameraState.videoCameraRunning && OccuRecContext.Current.IsAAV)
        {
            if (m_StateManager.VtiOsdPositionUnknown)
            {
                if (pnlAAV.Visible)
                    pnlAAV.Visible = false;

                if (!pnlVtiOsd.Visible)
                {
                    pnlVtiOsd.Visible = true;
                    if (!Settings.Default.PreserveVTIUserSpecifiedValues)
                    {
                        Settings.Default.PreserveVTIFirstRow = videoObject.Height - 28;
                        Settings.Default.PreserveVTILastRow = videoObject.Height;
                    }
                }

                if (tssVTIOSD.Visible ^ !Settings.Default.PreserveVTIUserSpecifiedValues)
                    tssVTIOSD.Visible = !Settings.Default.PreserveVTIUserSpecifiedValues;
            }
            else
            {
                if (!pnlAAV.Visible)
                    pnlAAV.Visible = true;
                if (pnlVtiOsd.Visible)
                    pnlVtiOsd.Visible = false;
                if (tssVTIOSD.Visible)
                    tssVTIOSD.Visible = false;
            }

            if (btnConfirmUserVtiOsd.Enabled ^ Settings.Default.PreserveVTIUserSpecifiedValues)
            {
                btnConfirmUserVtiOsd.Enabled = Settings.Default.PreserveVTIUserSpecifiedValues;
            }
        }
        else
        {
            if (pnlAAV.Visible)
                pnlAAV.Visible = false;
            if (pnlVtiOsd.Visible)
                pnlVtiOsd.Visible = false;
            if (tssVTIOSD.Visible)
                tssVTIOSD.Visible = false;
        }

        btnLockIntegration.Enabled =
            (
                m_StateManager.CanLockIntegrationNow &&
                m_StateManager.IntegrationRate > 0 &&
                m_StateManager.IsValidIntegrationRate &&
                (frame == null || !frame.PerformedAction.HasValue || frame.PerformedAction.Value == 0)
            ) ||
            m_StateManager.IsIntegrationLocked;

        btnCalibrateIntegration.Visible = !m_StateManager.IsIntegrationLocked;
        btnManualIntegration.Visible = !m_StateManager.IsIntegrationLocked;

        if (!m_StateManager.IsIntegrationLocked && m_StateManager.PercentDoneDetectingIntegration < 100)
        {
            pbarIntDetPercentDone.Value = m_StateManager.PercentDoneDetectingIntegration;
            if (!pbarIntDetPercentDone.Visible)
                pbarIntDetPercentDone.Visible = true;
        }
        else if (pbarIntDetPercentDone.Visible)
            pbarIntDetPercentDone.Visible = false;

        pnlOneStacking.Visible = m_StateManager.IsIntegrationLocked && m_ManualIntegration == 1;

        if (frame != null && frame.PerformedAction.HasValue && frame.PerformedAction.Value > 0)
        {
            // When there is an action in progress, don't show anything
            btnLockIntegration.Text = "Busy ...";
        }
        else if (m_StateManager.IntegrationRate > 0 &&
                 m_StateManager.IsValidIntegrationRate &&
                 !m_StateManager.IsIntegrationLocked &&
                 m_StateManager.CanLockIntegrationNow)
            btnLockIntegration.Text = string.Format("Lock at x{0} Frames", m_StateManager.IntegrationRate);
        else if (m_StateManager.IsIntegrationLocked)
            btnLockIntegration.Text = "Unlock";
        else
            btnLockIntegration.Text = "Checking Integration ...";

        if (m_StateManager.IsCalibratingIntegration)
        {
            btnCalibrateIntegration.Text = "Cancel Calibration";
            tssCameraState.Text = "Calibrating";
            EnsureSchedulesState(false);
        }
        else if (frame != null && frame.PerformedAction.HasValue)
        {
            if (frame.PerformedAction.Value == 2)
            {
                // Checking a manually entered integration rate
                tssCameraState.Text = "Busy";
            }
        }
        else
        {
            btnCalibrateIntegration.Text = "Calibrate";
            btnOcrTesting.Text = "Run OCR Testing";
            UpdateApplicationStateFromCameraState();
            EnsureSchedulesState(true);
        }

        if (m_StateManager.IsUsingManualIntegration)
            btnManualIntegration.Text = "Automatic";
        else
            btnManualIntegration.Text = "Manual";
    }
}
private void PaintVideoFrameCallback(VideoFrameWrapper frame, Bitmap bmp)
{
    if (m_VideoFrameCopyRequested)
    {
        try
        {
            m_LastRenderedFrame = new MinimalVideoFrame(frame, new Bitmap(bmp));
        }
        catch (Exception ex)
        {
            Trace.WriteLine(ex.GetFullStackTrace());
        }
        finally
        {
            m_VideoFrameCopyRequested = false;
            m_GetNextVideoFrameSignal.Set();
        }
    }

    m_MainForm.PaintVideoFrame(frame, bmp);
}
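// The requesting side of the frame-copy handshake above is not shown in this section. A minimal
// sketch of how it could look, assuming m_GetNextVideoFrameSignal is a ManualResetEvent (the
// method name GetNextVideoFrame and the timeout parameter are assumptions): raise the flag, then
// block until PaintVideoFrameCallback() has captured a copy of the next rendered frame.
private MinimalVideoFrame GetNextVideoFrame(int timeoutMs)
{
    m_GetNextVideoFrameSignal.Reset();
    m_VideoFrameCopyRequested = true;

    // PaintVideoFrameCallback() sets the signal in its finally block, so we wake up even if
    // the copy failed; m_LastRenderedFrame may then still hold an older frame.
    if (!m_GetNextVideoFrameSignal.WaitOne(timeoutMs))
        return null; // no frame arrived within the timeout

    return m_LastRenderedFrame;
}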
private void DisplayVideoFrames(object state)
{
    while (running)
    {
        if (videoObject != null && videoObject.IsConnected && previewOn)
        {
            try
            {
                IVideoFrame frame = videoObject.LastVideoFrame;
                if (frame != null)
                {
                    var frameWrapper = new VideoFrameWrapper(frame);

                    if (frameWrapper.UniqueFrameId == -1 || frameWrapper.UniqueFrameId != lastDisplayedVideoFrameNumber)
                    {
                        lastDisplayedVideoFrameNumber = frameWrapper.UniqueFrameId;

                        Bitmap bmp = frame.PreviewBitmap;

                        if (bmp == null)
                        {
                            cameraImage.SetImageArray(frame.ImageArray, imageWidth, imageHeight, videoObject.SensorType);
                            bmp = cameraImage.GetDisplayBitmap();
                        }

                        if (frame.ImageArray == null)
                        {
                            frameWrapper.ImageArray = cameraImage.GetImageArray(
                                bmp,
                                SensorType.Monochrome,
                                LumaConversionMode.R,
                                Settings.Default.HorizontalFlip,
                                Settings.Default.VerticalFlip);
                        }

                        stateManager.ProcessFrame(frameWrapper);
                        analysisManager.ProcessFrame(frameWrapper, bmp);

                        if (m_DisplayIntensifyMode != DisplayIntensifyMode.Off || m_DisplayInvertedMode || m_DisplayHueIntensityMode)
                        {
                            using (var memStr = new MemoryStream())
                            {
                                bmp.Save(memStr, ImageFormat.Bmp);
                                memStr.Seek(0, SeekOrigin.Begin);
                                bmp = new Bitmap(memStr);
                            }

                            // For display purposes only we apply display gamma and/or invert when requested by the user
                            if (m_DisplayIntensifyMode != DisplayIntensifyMode.Off)
                                BitmapFilter.ApplyGamma(bmp, m_DisplayIntensifyMode == DisplayIntensifyMode.Hi, m_DisplayInvertedMode, m_DisplayHueIntensityMode);
                            else if (m_DisplayInvertedMode || m_DisplayHueIntensityMode)
                                BitmapFilter.ProcessInvertAndHueIntensity(bmp, m_DisplayInvertedMode, m_DisplayHueIntensityMode);
                        }

                        try
                        {
                            m_MainForm.Invoke(new PaintVideoFrameDelegate(PaintVideoFrameCallback), new object[] { frameWrapper, bmp });
                        }
                        catch (InvalidOperationException)
                        { }
                        catch (IndexOutOfRangeException)
                        { }
                    }
                }
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex.GetFullStackTrace());

                Bitmap errorBmp = new Bitmap(m_MainForm.picVideoFrame.Width, m_MainForm.picVideoFrame.Height);
                using (Graphics g = Graphics.FromImage(errorBmp))
                {
                    g.Clear(Color.MidnightBlue);
                    //g.DrawString(ex.Message, debugTextFont, Brushes.Black, 10, 10);
                    g.Save();
                }

                try
                {
                    m_MainForm.Invoke(new PaintVideoFrameDelegate(m_MainForm.PaintVideoFrame), new object[] { null, errorBmp });
                }
                catch (InvalidOperationException)
                { }
                catch (IndexOutOfRangeException)
                { }
            }
        }

        Thread.Sleep(1);
        Application.DoEvents();
    }
}
public void ProcessFrame(VideoFrameWrapper frame)
{
    if (currentState != null)
        currentState.ProcessFrame(this, frame);

    if ((Settings.Default.SimulatorRunOCR && Settings.Default.OcrSimulatorNativeCode) || Settings.Default.AavOcrEnabled)
    {
        if (frame.OcrErrorsSinceReset.HasValue)
        {
            ocrErrors = frame.OcrErrorsSinceReset.Value;
            providesOcredTimestamps = true;
        }
        else
        {
            ocrErrors = 0;
            providesOcredTimestamps = false;
        }
    }

    isUsingManualIntegration = frame.ManualIntegrationRateHint > 0;
}