Example No. 1
        internal frmNoiseDistribution(
            LCMeasurementHeader header,
            List<List<LCMeasurement>> allReadings,
            List<frmLightCurve.BinnedValue>[] allBinnedReadings,
            Brush[] allBrushes,
            Color[] allColors,
            Color background)
        {
            InitializeComponent();

            m_Header = header;
            m_AllReadings = allReadings;
            m_AllBinnedReadings = allBinnedReadings;
            m_AllBrushes = allBrushes;
            m_AllColors = allColors;
            m_BackgroundColor = background;

            pictureBox.Image = new Bitmap(pictureBox.Width, pictureBox.Height);

            pb1.BackColor = m_AllColors[0];
            pb2.BackColor = m_AllColors[1];
            pb3.BackColor = m_AllColors[2];
            pb4.BackColor = m_AllColors[3];

            rbTarget1.Enabled = header.ObjectCount > 0;
            rbTarget2.Enabled = header.ObjectCount > 1;
            rbTarget3.Enabled = header.ObjectCount > 2;
            rbTarget4.Enabled = header.ObjectCount > 3;

            if (header.ObjectCount > 0)
                DisplayDistributionForObject(0);
        }
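The four rbTargetN.Enabled assignments follow an "enable the first N controls" pattern. A minimal sketch of the same idea, written as a loop; the EnableFirstN helper is hypothetical and not part of the Tangra source:

        // Hypothetical helper (not part of the Tangra source): the same
        // "enable the first N targets" idea as the four rbTargetN assignments
        // above, expressed as a loop over an array of radio buttons.
        internal static void EnableFirstN(System.Windows.Forms.RadioButton[] targets, int objectCount)
        {
            for (int i = 0; i < targets.Length; i++)
                targets[i].Enabled = objectCount > i;
        }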
Example No. 2
        internal frmNoiseDistribution(
            LCMeasurementHeader header,
            List<List<LCMeasurement>> allReadings,
            List<frmLightCurve.BinnedValue>[] allBinnedReadings,
            Brush[] allBrushes,
            Color[] allColors,
            Color background)
        {
            InitializeComponent();

            m_Header            = header;
            m_AllReadings       = allReadings;
            m_AllBinnedReadings = allBinnedReadings;
            m_AllBrushes        = allBrushes;
            m_AllColors         = allColors;
            m_BackgroundColor   = background;

            pictureBox.Image = new Bitmap(pictureBox.Width, pictureBox.Height);

            pb1.BackColor = m_AllColors[0];
            pb2.BackColor = m_AllColors[1];
            pb3.BackColor = m_AllColors[2];
            pb4.BackColor = m_AllColors[3];

            rbTarget1.Enabled = header.ObjectCount > 0;
            rbTarget2.Enabled = header.ObjectCount > 1;
            rbTarget3.Enabled = header.ObjectCount > 2;
            rbTarget4.Enabled = header.ObjectCount > 3;

            if (header.ObjectCount > 0)
            {
                DisplayDistributionForObject(0);
            }
        }
Example No. 3
        private static bool TestWhetherVideoFileMatchesLcHeader(string fileName, LCMeasurementHeader header)
        {
            //using (FramePlayer player = new FramePlayer())
            //{
            //    player.OpenVideo(fileName);
            //    if (header.FirstFrameInVideoFile == player.Video.FirstFrame &&
            //        header.CountFrames == player.Video.CountFrames)
            //    {
            //        return true;
            //    }
            //}

            return true;
        }
Example No. 4
        internal frmConfigureReprocessing(
            LCMeasurementHeader header,
            LCMeasurementFooter footer,
            LightCurveContext context,
            Color[] allColors,
            Brush[] allBrushes,
            Pen[] allPens)
        {
            InitializeComponent();

            m_Context    = context;
            m_Header     = header;
            m_Footer     = footer;
            m_AllColors  = allColors;
            m_AllBrushes = allBrushes;
            m_AllPens    = allPens;

            SetComboboxIndexFromBackgroundMethod(m_Context.BackgroundMethod);
            SetComboboxIndexFromPhotometryReductionMethod(m_Context.SignalMethod);
            SetComboboxIndexFromPsfQuadratureMethod(m_Context.PsfQuadratureMethod);
            cbxDigitalFilter.SelectedIndex = (int)m_Context.Filter;
            nudGamma.Value = (decimal)m_Context.EncodingGamma;
            cbxKnownResponse.SetCBXIndex((int)m_Context.ReverseCameraResponse);

            int maxApertureSize = m_Footer.ReductionContext.GetMaxApertureSize();

            nudAperture1.Maximum = maxApertureSize;
            nudAperture2.Maximum = maxApertureSize;
            nudAperture3.Maximum = maxApertureSize;
            nudAperture4.Maximum = maxApertureSize;

            m_Saturation = TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(context.BitPix, context.MaxPixelValue);

            if (TangraContext.Current.CanProcessLightCurvePixels)
            {
                SetupControls();
            }
        }
Example No. 5
        internal frmConfigureReprocessing(
            LCMeasurementHeader header,
            LCMeasurementFooter footer,
            LightCurveContext context,
            Color[] allColors,
            Brush[] allBrushes,
            Pen[] allPens)
        {
            InitializeComponent();

            m_Context = context;
            m_Header = header;
            m_Footer = footer;
            m_AllColors = allColors;
            m_AllBrushes = allBrushes;
            m_AllPens = allPens;

            SetComboboxIndexFromBackgroundMethod(m_Context.BackgroundMethod);
            SetComboboxIndexFromPhotometryReductionMethod(m_Context.SignalMethod);
            SetComboboxIndexFromPsfQuadratureMethod(m_Context.PsfQuadratureMethod);
            cbxDigitalFilter.SelectedIndex = (int)m_Context.Filter;
            nudGamma.Value = (decimal)m_Context.EncodingGamma;
            cbxKnownResponse.SetCBXIndex((int)m_Context.ReverseCameraResponse);

            int maxApertureSize = m_Footer.ReductionContext.GetMaxApertureSize();

            nudAperture1.Maximum = maxApertureSize;
            nudAperture2.Maximum = maxApertureSize;
            nudAperture3.Maximum = maxApertureSize;
            nudAperture4.Maximum = maxApertureSize;

            m_Saturation = TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(context.BitPix, context.MaxPixelValue);

            if (TangraContext.Current.CanProcessLightCurvePixels)
                SetupControls();
        }
Example No. 6
        private void frmLightCurve_FormClosed(object sender, FormClosedEventArgs e)
        {
            if (!m_NoSendMessage)
                NotificationManager.Instance.NotifyLightCurveFormClosed();

            HideZoomedAreas();
            if (m_frmZoomedPixels != null)
            {
                m_frmZoomedPixels.Close();
                m_frmZoomedPixels.Dispose();
                m_frmZoomedPixels = null;
            }

            HidePSFFits();
            if (m_frmPSFFits != null)
            {
                m_frmPSFFits.Close();
                m_frmPSFFits.Dispose();
                m_frmPSFFits = null;
            }

            HideBackgroundHistograms();
            if (m_frmBackgroundHistograms != null)
            {
                m_frmBackgroundHistograms.Close();
                m_frmBackgroundHistograms.Dispose();
                m_frmBackgroundHistograms = null;
            }

            if (m_LightCurveController.Context != null)
                CleanUpAllReadings(m_LightCurveController.Context.AllReadings);

            foreach (List<BinnedValue> list in m_AllBinnedReadings)
                list.Clear();
            m_FrameTiming.Clear();

            m_LCFile = null;
            m_Header = new LCMeasurementHeader();
            m_Footer = new LCMeasurementFooter();
            m_FrameTiming = null;
            m_LightCurveController.ClearContext();

            GC.Collect();
        }
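The handler repeats the same close/dispose/clear sequence for three child forms. A minimal refactoring sketch, assuming a generic helper that is not part of the Tangra source:

        // Hypothetical helper (not part of the Tangra source): closes and disposes
        // a child form and returns null, so a field can be cleared in one line,
        // e.g. m_frmPSFFits = CloseAndDispose(m_frmPSFFits);
        private static T CloseAndDispose<T>(T form) where T : System.Windows.Forms.Form
        {
            if (form != null)
            {
                form.Close();
                form.Dispose();
            }
            return null;
        }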
Example No. 7
        private static bool TestWhetherVideoFileMatchesLcHeader(string fileName, LCMeasurementHeader header)
        {
            //using (FramePlayer player = new FramePlayer())
            //{
            //    player.OpenVideo(fileName);
            //    if (header.FirstFrameInVideoFile == player.Video.FirstFrame &&
            //        header.CountFrames == player.Video.CountFrames)
            //    {
            //        return true;
            //    }
            //}

            return true;
        }
Example No. 8
        internal LCFile FlushLightCurveFile()
        {
            var matrixSizes = new List<int>();
            var apertures = new List<float>();
            var fixedFlags = new List<bool>();
            var psfGroupIds = new List<int>();

            m_Tracker.TrackedObjects.ForEach(
                delegate(ITrackedObject o)
                {
                    matrixSizes.Add(o.OriginalObject.PsfFitMatrixSize);
                    apertures.Add(o.OriginalObject.ApertureInPixels);
                    fixedFlags.Add(o.OriginalObject.IsWeakSignalObject);
                    psfGroupIds.Add(o.OriginalObject.GroupId);
                }
            );

            // Prefer embedded per-frame timestamps, then OCR-ed timestamps, and
            // otherwise fall back to user-entered frame references.
            MeasurementTimingType measurementTimingType = MeasurementTimingType.UserEnteredFrameReferences;
            if (m_VideoController.HasEmbeddedTimeStamps())
                measurementTimingType = MeasurementTimingType.EmbeddedTimeForEachFrame;
            else if (m_VideoController.HasTimestampOCR())
                measurementTimingType = MeasurementTimingType.OCRedTimeForEachFrame;

            SerUseTimeStamp serTimingType = SerUseTimeStamp.None;
            if (m_VideoController.IsSerVideo)
                serTimingType = m_VideoController.GetSerTimingType();

            LCMeasurementHeader finalHeader = new LCMeasurementHeader(
                m_VideoController.CurrentVideoFileName,
                string.Format("Video ({0})", m_VideoController.CurrentVideoFileType),
                m_VideoController.VideoFirstFrame,
                m_VideoController.VideoCountFrames,
                m_VideoController.VideoFrameRate,
                m_MinFrame,
                m_MaxFrame,
                (uint)m_TotalFrames,
                (uint)m_MeasurementInterval,
                (byte)m_Tracker.TrackedObjects.Count,
                LightCurveReductionContext.Instance.LightCurveReductionType,
                measurementTimingType,
                serTimingType,
                (int)LightCurveReductionContext.Instance.NoiseMethod,
                (int)LightCurveReductionContext.Instance.DigitalFilter,
                matrixSizes.ToArray(), apertures.ToArray(), fixedFlags.ToArray(), psfGroupIds.ToArray(), (float)m_Tracker.PositionTolerance);

            finalHeader.FirstTimedFrameTime = m_StartFrameTime;
            finalHeader.SecondTimedFrameTime = m_EndFrameTime;

            finalHeader.FirstTimedFrameNo = m_StartTimeFrame;
            finalHeader.LastTimedFrameNo = m_EndTimeFrame;

            // If no averaged frame was produced during measuring, build one from the stacked image.
            if (m_AveragedFrame == null)
            {
                if (m_StackedAstroImage == null) EnsureStackedAstroImage();
                m_AveragedFrame = new AveragedFrame(m_StackedAstroImage);
            }

            LCMeasurementFooter footer = new LCMeasurementFooter(
                m_AveragedFrame.Pixelmap,
                TangraConfig.Settings,
                LightCurveReductionContext.Instance,
                m_StateMachine.MeasuringStars,
                m_Tracker,
                m_VideoController.GetTimestampOCRNameAndVersion(),
                null,
                m_InstumentalDelaySelectedConfig,
                m_InstumentalDelaySelectedCamera,
                m_CameraName,
                m_AavNativeVideoFormat,
                m_AavFrameIntegration,
                m_AavNtpTimestampError,
                m_AavNtpFitOneSigmaError,
                TangraConfig.Settings.AAV.NtpTimeUseDirectTimestamps,
                PSFFit.BitPix,
                PSFFit.NormVal,
                m_AavStackedFrameRate);

            return LCFile.FlushOnTheFlyOutputFile(finalHeader, footer, m_VideoController);
        }
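The ForEach call at the top of this method gathers four parallel lists from the tracked objects. The same collection step could also be written with LINQ; the fragment below is a sketch rather than code from the Tangra source, and assumes the same ITrackedObject/OriginalObject members used above:

            // Hypothetical LINQ variant (not in the Tangra source) of the
            // anonymous-delegate ForEach at the top of this method; it reads
            // the same OriginalObject properties and requires using System.Linq.
            var trackedObjects = m_Tracker.TrackedObjects;
            int[] matrixSizes = trackedObjects.Select(o => o.OriginalObject.PsfFitMatrixSize).ToArray();
            float[] apertures = trackedObjects.Select(o => o.OriginalObject.ApertureInPixels).ToArray();
            bool[] fixedFlags = trackedObjects.Select(o => o.OriginalObject.IsWeakSignalObject).ToArray();
            int[] psfGroupIds = trackedObjects.Select(o => o.OriginalObject.GroupId).ToArray();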