/// <summary>
/// Tears down the light curve form: notifies listeners (unless suppressed via
/// <c>m_NoSendMessage</c>), closes and disposes the child inspection forms
/// (zoomed pixels, PSF fits, background histograms) and releases all loaded
/// light curve data so the (potentially very large) readings can be collected.
/// </summary>
private void frmLightCurve_FormClosed(object sender, FormClosedEventArgs e)
{
    if (!m_NoSendMessage)
        NotificationManager.Instance.NotifyLightCurveFormClosed();

    HideZoomedAreas();
    if (m_frmZoomedPixels != null)
    {
        m_frmZoomedPixels.Close();
        m_frmZoomedPixels.Dispose();
        m_frmZoomedPixels = null;
    }

    HidePSFFits();
    if (m_frmPSFFits != null)
    {
        m_frmPSFFits.Close();
        m_frmPSFFits.Dispose();
        m_frmPSFFits = null;
    }

    HideBackgroundHistograms();
    if (m_frmBackgroundHistograms != null)
    {
        m_frmBackgroundHistograms.Close();
        m_frmBackgroundHistograms.Dispose();
        m_frmBackgroundHistograms = null;
    }

    if (m_LightCurveController.Context != null)
        CleanUpAllReadings(m_LightCurveController.Context.AllReadings);

    foreach (List<BinnedValue> list in m_AllBinnedReadings)
        list.Clear();

    // BUGFIX: guard against a null field. The field is nulled a few lines below, so a
    // second close (or a close before the file finished loading) would otherwise throw
    // a NullReferenceException during teardown.
    if (m_FrameTiming != null)
        m_FrameTiming.Clear();

    m_LCFile = null;
    m_Header = new LCMeasurementHeader();
    m_Footer = new LCMeasurementFooter();
    m_FrameTiming = null;

    m_LightCurveController.ClearContext();

    // NOTE(review): explicit GC.Collect() is generally discouraged in production code;
    // kept here because the original deliberately forces an immediate release of the
    // large per-frame reading buffers when the form closes.
    GC.Collect();
}
/// <summary>
/// Initializes the form after a new .LC file has been loaded: configures the
/// pixel-reprocessing menus, copies the reduction settings stored in the file
/// header/footer into the light curve context, sets up the per-object menu items,
/// validates the frame timestamps (warning the user about discrepancies), and
/// (re)creates the child inspection forms.
/// </summary>
private void OnNewLCFile()
{
    SetupPixelProcessingMenus();

    m_Header.MinAdjustedReading = 0;

    LoadContextFromHeaderAndFooter();
    SetupObjectMenuItems();

    m_LightCurveController.Context.ProcessingType = ProcessingType.SignalMinusBackground;

    m_MinDisplayedFrame = m_Header.MinFrame;
    m_MaxDisplayedFrame = m_Header.MaxFrame;
    m_ZoomLevel = 1;
    sbZoomStartFrame.Minimum = (int)m_Header.MinFrame;
    sbZoomStartFrame.Maximum = (int)m_Header.MaxFrame;

    // Mark it dirty so the values are computed for the first time
    m_LightCurveController.Context.MarkDirtyNoFullReprocessing();

    SetupTimestampsAndAxes();

    m_LightCurveController.Context.YAxisLabels = LightCurveContext.YAxisMode.Flux;
    m_LightCurveController.Context.ChartType = LightCurveContext.LightCurveMode.Line;

    UpdateContextDisplays();
    UpdateFormTitle();

    RecreateChildForms();
    UpdateInstrumentalDelayWarning();

    m_AddinsController.SetLightCurveDataProvider(this);

    // If the current .LC file has NTP timestamps then make the menu for exporting them visible
    // NOTE: This is for debugging purposes only!
    miExportNTPDebugData.Visible = m_LCFile.FrameTiming.Count > 0 && m_LCFile.FrameTiming[0].FrameMidTimeNTPRaw.HasValue;
}

// Enables or disables the pixel-reprocessing menu items depending on whether the
// light curve pixels can be reprocessed, hiding the inspection panels when they can't.
private void SetupPixelProcessingMenus()
{
    bool canProcessPixels = TangraContext.Current.CanProcessLightCurvePixels;

    miReprocess.Enabled = canProcessPixels;
    miShowPSFFits.Enabled = canProcessPixels;
    miShowZoomedAreas.Enabled = canProcessPixels;
    miBackgroundHistograms.Enabled = canProcessPixels;

    if (!canProcessPixels)
    {
        miShowPSFFits.Checked = false;
        miShowZoomedAreas.Checked = false;
        miBackgroundHistograms.Checked = false;
        HideZoomedAreas();
        HideBackgroundHistograms();
        HidePSFFits();
    }
}

// Copies the reduction settings recorded in the .LC file header and footer into the
// light curve context, and picks a default binning for large mutual-event files.
private void LoadContextFromHeaderAndFooter()
{
    // If there are a lot of measurements choose an appropriate binning value
    // But only when this is a mutual event
    if (m_Footer.ReductionContext.LightCurveReductionType == LightCurveReductionType.MutualEvent)
    {
        if (m_Header.MeasuredFrames > 20000)
            m_LightCurveController.Context.Binning = 32;
        else if (m_Header.MeasuredFrames > 10000)
            m_LightCurveController.Context.Binning = 16;
        else if (m_Header.MeasuredFrames > 5000)
            m_LightCurveController.Context.Binning = 8;
        else if (m_Header.MeasuredFrames > 2500)
            m_LightCurveController.Context.Binning = 4;
        else
            m_LightCurveController.Context.Binning = 0;
    }
    else
        m_LightCurveController.Context.Binning = 0;

    m_LightCurveController.Context.BackgroundMethod = (TangraConfig.BackgroundMethod)m_Header.BackgroundType;
    m_LightCurveController.Context.SignalMethod = m_Footer.ReductionContext.ReductionMethod;
    m_LightCurveController.Context.PsfFittingMethod = m_Footer.ProcessedWithTangraConfig.Photometry.PsfFittingMethod;
    m_LightCurveController.Context.PsfQuadratureMethod = m_Footer.ProcessedWithTangraConfig.Photometry.PsfQuadrature;
    m_LightCurveController.Context.ManualAverageFWHM = m_Footer.ProcessedWithTangraConfig.Photometry.UseUserSpecifiedFWHM
        ? m_Footer.ProcessedWithTangraConfig.Photometry.UserSpecifiedFWHM
        : float.NaN;
    m_LightCurveController.Context.EncodingGamma = m_Footer.ProcessedWithTangraConfig.Photometry.EncodingGamma;
    m_LightCurveController.Context.ReverseCameraResponse = m_Footer.ProcessedWithTangraConfig.Photometry.KnownCameraResponse;
    m_LightCurveController.Context.UseClipping = m_Footer.ReductionContext.UseClipping;
    m_LightCurveController.Context.UseStretching = m_Footer.ReductionContext.UseStretching;
    m_LightCurveController.Context.UseBrightnessContrast = m_Footer.ReductionContext.UseBrightnessContrast;
    m_LightCurveController.Context.FromByte = m_Footer.ReductionContext.FromByte;
    m_LightCurveController.Context.ToByte = m_Footer.ReductionContext.ToByte;
    m_LightCurveController.Context.Brightness = m_Footer.ReductionContext.Brightness;
    m_LightCurveController.Context.Contrast = m_Footer.ReductionContext.Contrast;
    m_LightCurveController.Context.BitPix = m_Footer.ReductionContext.BitPix;
    m_LightCurveController.Context.InitFrameBytePreProcessors();
    m_LightCurveController.Context.MaxPixelValue = m_Footer.ReductionContext.MaxPixelValue;
    m_LightCurveController.Context.DisplayBitmapConverter = m_Footer.ReductionContext.DisplayBitmapConverterImpl;

    m_LightCurveController.Context.Filter = LightCurveContext.FilterType.NoFilter;
    if (m_Header.FilterType == 1)
        m_LightCurveController.Context.Filter = LightCurveContext.FilterType.LowPass;
    else if (m_Header.FilterType == 2)
        m_LightCurveController.Context.Filter = LightCurveContext.FilterType.LowPassDifference;

    m_LightCurveController.Context.InstrumentalDelayConfigName = m_Footer.InstrumentalDelayConfigName;
    m_LightCurveController.Context.CameraName = m_Footer.CameraName;
    m_LightCurveController.Context.AAVFrameIntegration = m_Footer.AAVFrameIntegration;
    m_LightCurveController.Context.InstrumentalDelayCorrectionsNotRequired = m_Header.InstrumentalDelayCorrectionsNotRequired();
    m_LightCurveController.Context.TimingType = m_Header.TimingType;
    m_LightCurveController.Context.MinFrame = m_Header.MinFrame;
    m_LightCurveController.Context.MaxFrame = m_Header.MaxFrame;
    m_LightCurveController.Context.ObjectCount = m_Header.ObjectCount;
}

// Resets all four per-object menu items, then enables and labels one for each object
// actually measured in the file.
private void SetupObjectMenuItems()
{
    ToolStripMenuItem[] allObjMenuItems = new ToolStripMenuItem[]
    {
        miIncludeObj1, miIncludeObj2, miIncludeObj3, miIncludeObj4
    };

    for (int i = 0; i < allObjMenuItems.Length; i++)
    {
        allObjMenuItems[i].Checked = false;
        allObjMenuItems[i].Visible = false;
        m_IncludeObjects[i] = false;
        DrawColoredRectangleWithCheckBox(allObjMenuItems[i], i);
    }

    // BUGFIX: clamp to the number of available menu items so a file claiming more than
    // four measured objects cannot cause an IndexOutOfRangeException.
    int objectCount = Math.Min((int)m_Header.ObjectCount, allObjMenuItems.Length);
    for (int i = 0; i < objectCount; i++)
    {
        allObjMenuItems[i].Checked = true;
        allObjMenuItems[i].Visible = true;
        m_IncludeObjects[i] = true;
        allObjMenuItems[i].Text = string.Format("Object {0} ({1})", i + 1, ExplainTrackingType(m_Footer.TrackedObjects[i].TrackingType));
    }
}

// Validates the file's frame timing, warns the user about timestamp discrepancies
// (colouring the frame time label accordingly) and picks the X axis mode:
// time labels when timing information is available, frame numbers otherwise.
private void SetupTimestampsAndAxes()
{
    bool hasEmbeddedTimeStamps = m_Footer.ReductionContext.HasEmbeddedTimeStamps;

    m_CameraCorrectionsHaveBeenAppliedFlag =
        !string.IsNullOrEmpty(m_LightCurveController.Context.InstrumentalDelayConfigName) ||
        m_LightCurveController.Context.InstrumentalDelayCorrectionsNotRequired;

    m_HasEmbeddedTimeStamps = hasEmbeddedTimeStamps;

    if (m_Header.SecondTimedFrameTime != DateTime.MinValue || hasEmbeddedTimeStamps)
    {
        if (!hasEmbeddedTimeStamps)
        {
            string videoSystem;
            double timeDelta = m_Header.GetAbsoluteTimeDeltaInMilliseconds(out videoSystem);

            m_TimestampDiscrepencyFlag = Math.Abs(timeDelta) > TangraConfig.Settings.Special.MaxAllowedTimestampShiftInMs;

            if (m_TimestampDiscrepencyFlag)
            {
                if (videoSystem == null)
                {
                    MessageBox.Show(this,
                        "This video has an unusual frame rate.\r\n\r\nPlease use the timestamps on the corresponding video frames when timing events from this video.",
                        "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    lblFrameTime.ForeColor = Color.Red;
                }
                else
                {
                    // SER files are exempt from this warning (their timing is not derived
                    // from entered frame times).
                    if (m_LCFile.Header.GetVideoFileFormat() != VideoFileFormat.SER)
                    {
                        string extraMessage = timeDelta < 0
                            ? "This may indicate that video frames have been duplicated by the recording software. "
                            : "This may indicate that video frames have been dropped by the recording software. ";

                        MessageBox.Show(this,
                            string.Format(
                                "The time derived from entered frame times in this {1} video shows an error of {0} ms.\r\n\r\n{2}\r\n\r\nPlease use the timestamps on the corresponding video frames when timing events from this video.",
                                timeDelta.ToString("0.0"), videoSystem, extraMessage),
                            "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning);
                    }

                    lblFrameTime.ForeColor = Color.Red;
                }
            }
            else
            {
                lblFrameTime.ForeColor = SystemColors.WindowText;
            }
        }
        else
        {
            lblFrameTime.ForeColor = Color.DarkGreen;
            m_TimestampDiscrepencyFlag = false;
        }

        m_LightCurveController.Context.XAxisLabels = LightCurveContext.XAxisMode.Time;
        miXAxisUserFrameNumbers.Checked = false;
        miXAxisUserTime.Checked = true;
    }
    else
    {
        lblFrameTime.ForeColor = Color.Red;
        m_LightCurveController.Context.XAxisLabels = LightCurveContext.XAxisMode.FrameNo;
        miXAxisUserFrameNumbers.Checked = true;
        miXAxisUserTime.Checked = false;
    }
}

// Disposes any previously created child inspection forms and creates fresh ones bound
// to the newly loaded file.
private void RecreateChildForms()
{
    //TODO: Remember the last shown form: PSF or Zoom
    if (m_frmZoomedPixels != null)
    {
        m_frmZoomedPixels.Close();
        m_frmZoomedPixels.Dispose();
    }

    m_frmZoomedPixels = new frmZoomedPixels(m_LightCurveController.Context, m_LCFile, m_DisplaySettings, m_LightCurveController);

    if (TangraContext.Current.CanProcessLightCurvePixels)
    {
        miShowZoomedAreas.Checked = true;
        ShowZoomedAreas();
    }

    // BUGFIX: the previous PSF fits and background histogram forms were leaked on every
    // new file load (only the zoomed pixels form was disposed); close and dispose them
    // too before replacing the references.
    if (m_frmPSFFits != null)
    {
        m_frmPSFFits.Close();
        m_frmPSFFits.Dispose();
    }

    m_frmPSFFits = new frmPSFFits(m_LightCurveController.Context, m_LCFile, m_DisplaySettings);
    miShowPSFFits.Checked = false;
    HidePSFFits();

    if (m_frmBackgroundHistograms != null)
    {
        m_frmBackgroundHistograms.Close();
        m_frmBackgroundHistograms.Dispose();
    }

    m_frmBackgroundHistograms = new frmBackgroundHistograms(m_LightCurveController.Context, m_LCFile, m_DisplaySettings);
    miBackgroundHistograms.Checked = false;
    HideBackgroundHistograms();
}

// Shows the instrumental delay warning label only for timing types where the displayed
// times may still need a delay correction applied.
private void UpdateInstrumentalDelayWarning()
{
    Regex regexAavSourceInfo = new Regex("^Video \\(AAV\\.\\d+\\)$");

    lblFrameTime.BackColor = SystemColors.Control;
    lblInstDelayWarning.SendToBack();
    lblInstDelayWarning.Visible =
        // Can determine the frame times
        m_LCFile.CanDetermineFrameTimes &&
        (
            // AAV file with embedded timestamps (The given time is a central exposure time)
            (m_Header.TimingType == MeasurementTimingType.EmbeddedTimeForEachFrame && regexAavSourceInfo.IsMatch(m_Header.SourceInfo)) ||
            // or timestamps read off the screen
            m_Header.TimingType == MeasurementTimingType.OCRedTimeForEachFrame ||
            // or user entered start/end times from the VTI OSD
            m_Header.TimingType == MeasurementTimingType.UserEnteredFrameReferences
        );
}