Example #1
        public static bool AddAviVideoFrame(Pixelmap pixmap, double addedGamma, int? adv16NormalisationValue)
        {
            int[,] pixels = new int[pixmap.Height, pixmap.Width];
            bool usesGamma = Math.Abs(addedGamma - 1) > 0.01;
            bool needsBitPixConversion = pixmap.BitPixCamera > 8 || (adv16NormalisationValue != null && adv16NormalisationValue.Value > 0);

            double maxVal = pixmap.BitPixCamera.GetMaxValueForBitPix();
            if (adv16NormalisationValue != null && adv16NormalisationValue.Value > 0)
                maxVal = adv16NormalisationValue.Value;

            // Rebuild the cached scaling table only when the bit depth or max value has changed
            if (Math.Abs(s_BitPixTableBitPixVal - pixmap.BitPixCamera) > 0.01 || Math.Abs(s_BitPixTableMaxVal - maxVal) > 0.01)
            {
                for (int i = 0; i <= maxVal; i++)
                {
                    double convVal = 255.0 * i / maxVal;

                    s_BitPixTable[i] = Math.Max(0, Math.Min(255, (int)Math.Round(convVal)));
                }

                s_BitPixTableBitPixVal = pixmap.BitPixCamera;
                s_BitPixTableMaxVal = maxVal;
            }

            if (Math.Abs(s_GammaTableGammaVal - addedGamma) > 0.01)
            {
                double gammaFactor = usesGamma ? (255.0 / Math.Pow(255.0, addedGamma)) : 0; // the table is only consulted when usesGamma is true

                for (int i = 0; i < 256; i++)
                {
                    double convVal = gammaFactor * Math.Pow(i, addedGamma);

                    s_GammaTable[i] = Math.Max(0, Math.Min(255, (int) Math.Round(convVal)));
                }

                s_GammaTableGammaVal = addedGamma;
            }

            for (int x = 0; x < pixmap.Width; x++)
            {
                for (int y = 0; y < pixmap.Height; y++)
                {
                    uint conv8BitVal = pixmap.Pixels[x + y*pixmap.Width];
                    if (needsBitPixConversion)
                        conv8BitVal = (uint)s_BitPixTable[conv8BitVal];

                    if (usesGamma)
                        pixels[y, x] = s_GammaTable[conv8BitVal];
                    else
                        pixels[y, x] = Math.Max(0, Math.Min(255, (int)conv8BitVal));
                }
            }

            if (TangraAviFileAddFrame(pixels) != 0)
            {
                TraceLastNativeError();
                return false;
            }

            return true;
        }
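
The conversion above is driven by two cached lookup tables: one scaling raw camera values down to 8 bits and one applying the gamma curve, so the per-pixel work is just two array reads. A minimal standalone sketch of the same idea (the 12-bit depth and gamma value are illustrative):

        // A sketch of the cached lookup-table conversion, assuming a 12-bit
        // camera (maxVal = 4095) and an added gamma of 0.45
        int maxVal = 4095;
        double gamma = 0.45;

        // Scale [0..maxVal] down to [0..255]
        var bitPixTable = new int[maxVal + 1];
        for (int i = 0; i <= maxVal; i++)
            bitPixTable[i] = Math.Max(0, Math.Min(255, (int)Math.Round(255.0 * i / maxVal)));

        // Map [0..255] through the gamma curve, normalised so that 255 maps to 255
        var gammaTable = new int[256];
        double gammaFactor = 255.0 / Math.Pow(255.0, gamma);
        for (int i = 0; i < 256; i++)
            gammaTable[i] = Math.Max(0, Math.Min(255, (int)Math.Round(gammaFactor * Math.Pow(i, gamma))));

        // Converting a pixel is then two lookups instead of two Math.Pow calls
        int displayValue = gammaTable[bitPixTable[3000]];
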
Example #2
        private static Pixelmap ConstructForLCFileAveragedFrame(uint[] data, int width, int height, int bitpixCamera, byte[] bytes)
        {
            Bitmap   bmp = ConstructBitmapFromBitmapPixels(bytes, width, height);
            Pixelmap rv  = new Pixelmap(width, height, bitpixCamera, data, bmp, bytes);

            return(rv);
        }
Example #3
        public static Pixelmap ConstructForLCFile32bppArgbAveragedFrame(byte[] bytes, int width, int height, int bitpixCamera)
        {
            Bitmap   bmp = ConstructBitmapFrom32bppArgbBitmapPixels(bytes, width, height);
            Pixelmap rv  = new Pixelmap(width, height, bitpixCamera, new uint[bytes.Length], bmp, bytes);

            return(rv);
        }
Example #4
        public Pixelmap CreatePixelmap(uint[] pixels)
        {
            byte[] displayBitmapBytes = new byte[m_AdvFile.Width * m_AdvFile.Height];
            for (int y = 0; y < m_AdvFile.Height; y++)
            {
                for (int x = 0; x < m_AdvFile.Width; x++)
                {
                    int index = x + y * m_AdvFile.Width;

                    if (m_AdvFile.DataBpp == 8)
                        displayBitmapBytes[index] = (byte)(pixels[index] & 0xFF);
                    else if (m_AdvFile.DataBpp == 12)
                        displayBitmapBytes[index] = (byte)(pixels[index] >> 4);
                    else if (m_AdvFile.DataBpp == 14)
                        displayBitmapBytes[index] = (byte)(pixels[index] >> 6);
                    else if (m_AdvFile.DataBpp == 16)
                    {
                        // Rescale against the known maximum pixel value when one is
                        // available, otherwise just drop the low byte
                        if (m_AdvFile.MaxPixelValue > 0)
                            displayBitmapBytes[index] = (byte)(255.0 * pixels[index] / m_AdvFile.MaxPixelValue);
                        else
                            displayBitmapBytes[index] = (byte)(pixels[index] >> 8);
                    }
                    else
                        displayBitmapBytes[index] = (byte)(pixels[index] >> (m_AdvFile.DataBpp - 8));
                }
            }

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, (int)m_AdvFile.Width, (int)m_AdvFile.Height);

            Pixelmap rv = new Pixelmap((int)m_AdvFile.Width, (int)m_AdvFile.Height, m_AdvFile.DataBpp, pixels, displayBitmap, displayBitmapBytes);

            return rv;
        }
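
All branches above reduce a higher-bit-depth sample to a display byte by dropping its low bits: a 12-bit value is shifted right by 4, a 14-bit one by 6, and in general an N-bit value by N - 8. The 16-bit case prefers rescaling against the known maximum pixel value when one is available. A quick check of the rule (the values are illustrative):

        // An N-bit sample keeps its top 8 bits when shifted right by (N - 8)
        uint sample12 = 4095;                    // full-scale 12-bit value
        byte display12 = (byte)(sample12 >> 4);  // 255

        uint sample14 = 8192;                    // mid-range 14-bit value
        byte display14 = (byte)(sample14 >> 6);  // 128

        // Rescaling against a known maximum preserves contrast for data
        // that never reaches full scale
        uint sample16 = 30000, maxPixelValue = 60000;
        byte display16 = (byte)(255.0 * sample16 / maxPixelValue);  // 127
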
Example #5
 public Pixelmap(int width, int height, int bitPix, uint[] pixels, Bitmap bmp, byte[] displayBitmapBytes)
 {
     Width                 = width;
     Height                = height;
     BitPixCamera          = bitPix;
     m_Pixels              = pixels;
     m_DisplayBitmapPixels = displayBitmapBytes;
     if (bmp != null && bmp.PixelFormat != PixelFormat.Format24bppRgb)
     {
         if (m_DisplayBitmapPixels != null && m_DisplayBitmapPixels.Length == Width * Height)
         {
             m_Bitmap = Pixelmap.ConstructBitmapFromBitmapPixels(m_DisplayBitmapPixels, Width, Height);
         }
         else if (Width * Height > 0)
         {
             m_Bitmap = new Bitmap(Width, Height, PixelFormat.Format24bppRgb);
             using (Graphics g = Graphics.FromImage(m_Bitmap))
             {
                 g.DrawImage(bmp, 0, 0);
             }
         }
         else
         {
             m_Bitmap = null;
         }
     }
     else
     {
         m_Bitmap = bmp;
     }
 }
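
A minimal construction call for an in-memory 8-bit frame, matching how Examples #16 and #37 build Pixelmap instances with null bitmap arguments (the dimensions here are illustrative):

        int width = 640, height = 480;
        var pixels = new uint[width * height];        // raw camera values
        var displayBytes = new byte[width * height];  // 8-bit display copy

        var pixelmap = new Pixelmap(width, height, 8, pixels, null, displayBytes);
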
Example #6
        public static void AddVideoFrame(DateTime startTimeStamp, DateTime endTimeStamp, Pixelmap pixmap)
        {
            long timeStamp = WindowsTicksToAavTicks((startTimeStamp.Ticks + endTimeStamp.Ticks) / 2);

            // DateTime ticks are 100 ns units, so 1000 ticks = 0.1 ms
            uint exposureIn10thMilliseconds = (uint)(endTimeStamp.Ticks - startTimeStamp.Ticks) / 1000;

            if (s_FirstRecordedFrameTimestamp == 0)
                s_FirstRecordedFrameTimestamp = timeStamp;

            // Elapsed time since the first recorded frame was taken, truncated to 32 bits
            uint elapsedTimeMilliseconds = (uint)((timeStamp - s_FirstRecordedFrameTimestamp) & 0xFFFFFFFF);

            AdvLib.Obsolete.AdvVer1.BeginFrame(timeStamp, elapsedTimeMilliseconds, exposureIn10thMilliseconds);

            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag16(STATUS_TAG_NUMBER_INTEGRATED_FRAMES, (ushort)s_IntegrationRate);

            ulong currentSystemTime = (ulong)WindowsTicksToAavTicks(DateTime.Now.Ticks);
            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(STATUS_TAG_SYSTEM_TIME, currentSystemTime);

            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(STATUS_TAG_NTP_START_TIMESTAMP, (ulong)WindowsTicksToAavTicks(startTimeStamp.Ticks));
            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(STATUS_TAG_NTP_END_TIMESTAMP, (ulong)WindowsTicksToAavTicks(endTimeStamp.Ticks));
            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag16(STATUS_TAG_NTP_TIME_ERROR, 320);

            AdvLib.Obsolete.AdvVer1.FrameAddStatusTag(STATUS_TAG_EXPOSURE, string.Format("x{0}", s_IntegrationRate * 2));

            ushort[] pixels = new ushort[pixmap.Pixels.Length];
            for (int i = 0; i < pixmap.Pixels.Length; i++) pixels[i] = (ushort)pixmap.Pixels[i];
            AdvLib.Obsolete.AdvVer1.FrameAddImage(1, pixels, 16);

            AdvLib.Obsolete.AdvVer1.EndFrame();
        }
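
The timestamp arithmetic above relies on DateTime.Ticks being 100 ns units, so 1000 ticks equal one tenth of a millisecond and the division by 1000 yields the exposure in those units. A small sketch with an assumed 40 ms exposure:

        DateTime start = DateTime.UtcNow;
        DateTime end = start.AddMilliseconds(40);

        // 1 tick = 100 ns, so 10 000 ticks = 1 ms and 1 000 ticks = 0.1 ms
        uint exposureIn10thMilliseconds = (uint)(end.Ticks - start.Ticks) / 1000;
        Console.WriteLine(exposureIn10thMilliseconds);  // 400

        // The mid-exposure instant is the average of the two tick counts
        long midTicks = (start.Ticks + end.Ticks) / 2;
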
Example #7
        public static void EnsureFullPreviewVisible(Pixelmap currFrame, Form parentForm)
        {
            lock(s_SyncRoot)
            {
                if (s_FullPreviewForm != null)
                {
                    try
                    {
                        // Accessing Handle throws ObjectDisposedException once the form
                        // has been disposed; the catch below handles that case
                        if (!s_FullPreviewForm.Visible && s_FullPreviewForm.Handle == IntPtr.Zero)
                            s_FullPreviewForm = null;
                    }
                    catch(ObjectDisposedException)
                    {
                        s_FullPreviewForm = null;
                    }
                }
                if (s_FullPreviewForm == null)
                {
                    s_FullPreviewForm = new frmFullSizePreview();
                    s_FullPreviewForm.Width = currFrame.Width + (s_FullPreviewForm.Width - s_FullPreviewForm.pictureBox.Width);
                    s_FullPreviewForm.Height = currFrame.Height + (s_FullPreviewForm.Height - s_FullPreviewForm.pictureBox.Height);
                    s_FullPreviewForm.Top = parentForm.Top;
                    s_FullPreviewForm.Left = parentForm.Right;
                    s_FullPreviewForm.StartPosition = FormStartPosition.Manual;
                }

                s_FullPreviewForm.pictureBox.Image = GetPreviewImage(currFrame);

                if (!s_FullPreviewForm.Visible)
                    s_FullPreviewForm.Show(parentForm);
                s_FullPreviewForm.Refresh();
            }
        }
Example #8
        public bool ArePixelmapsTheSame(Pixelmap bmp1, Pixelmap bmp2)
        {
            if (bmp1 == null && bmp2 == null) return true;
            if (bmp1 == null || bmp2 == null) return false;
            if (bmp1.Width != bmp2.Width || bmp1.Height != bmp2.Height) return false;

            // Two cheap random spot checks before the full scan
            int x = m_Randomizer.Next(bmp1.Width);
            int y = m_Randomizer.Next(bmp1.Height);

            if (bmp1[x, y] != bmp2[x, y])
                return false;

            x = m_Randomizer.Next(bmp1.Width);
            y = m_Randomizer.Next(bmp1.Height);

            if (bmp1[x, y] != bmp2[x, y])
                return false;

            // Check all pixels
            int width = bmp1.Width;
            int height = bmp1.Height;

            for (y = 0; y < height; ++y)
            {
                for (x = 0; x < width; ++x)
                {
                    if (bmp1[x, y] != bmp2[x, y])
                        return false;
                }
            }

            return true;
        }
Example #9
 public bool AddAviVideoFrame(Pixelmap pixmap, double addedGamma, int? adv16NormalisationValue)
 {
     if (!TangraVideo.AddAviVideoFrame(pixmap, addedGamma, adv16NormalisationValue))
     {
         MessageBox.Show("There was an error calling AddAviVideoFrame:\r\n\r\n" + TangraVideo.GetLastAviErrorMessage(), "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
         return false;
     }
     return true;
 }
Example #10
 public void OnPreProcess(Pixelmap newFrame)
 {
     if (m_PreProcessors.Count > 0)
     {
         foreach (IFramePreProcessor preProcessor in m_PreProcessors)
         {
             preProcessor.OnPreProcess(newFrame);
         }
     }
 }
Example #11
 public void OnPreProcess(Pixelmap newFrame)
 {
     if (m_PreProcessFrames &&
         newFrame != null)
     {
         if (m_Filter == TangraConfig.PreProcessingFilter.LowPassFilter)
             BitmapFilter.LowPassFilter(newFrame);
         else if (m_Filter == TangraConfig.PreProcessingFilter.LowPassDifferenceFilter)
             BitmapFilter.LowPassDifference(newFrame);
     }
 }
Example #12
        public SingleBitmapFileFrameStream(LCFile lightCurveFile)
        {
            m_lcFile = lightCurveFile;

            if (lightCurveFile.LcFileFormatVersion < 4 && m_lcFile.Footer.AveragedFrameBytes.Length == 4 * m_lcFile.Footer.AveragedFrameWidth * m_lcFile.Footer.AveragedFrameHeight)
                m_Pixelmap = Pixelmap.ConstructForLCFile32bppArgbAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
            else
                m_Pixelmap = Pixelmap.ConstructForLCFileAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);

            m_FirstFrame = (int)m_lcFile.Header.MinFrame;
            m_LastFrame = (int)m_lcFile.Header.MaxFrame;
            m_NumFrames = (int)m_lcFile.Header.MeasuredFrames;
        }
Example #13
        public static Bitmap ConstructBitmapFrom8BitPixelmap(Pixelmap image)
        {
            float background     = 0;
            float range          = 255;
            bool  hasRangeChange = false;

            var bmp = new Bitmap(image.Width, image.Height, PixelFormat.Format24bppRgb);

            // GDI+ still lies to us - the return format is BGR, NOT RGB.
            BitmapData bmData = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);

            int stride = bmData.Stride;

            System.IntPtr Scan0 = bmData.Scan0;

            unsafe
            {
                byte *p = (byte *)(void *)Scan0;

                int nOffset = stride - bmp.Width * 3;

                for (int y = 0; y < bmp.Height; ++y)
                {
                    for (int x = 0; x < bmp.Width; ++x)
                    {
                        byte color;

                        if (hasRangeChange)
                        {
                            float displayValue = (image[x, y] - background) * 255.0f / range;
                            color = (byte)Math.Max(0, Math.Min(255, Math.Round(displayValue)));
                        }
                        else
                        {
                            color = (byte)(image[x, y] & 0xFF);
                        }

                        p[0] = color;
                        p[1] = color;
                        p[2] = color;

                        p += 3;
                    }
                    p += nOffset;
                }
            }

            bmp.UnlockBits(bmData);

            return(bmp);
        }
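
The nOffset bookkeeping exists because GDI+ pads each 24bpp scanline to a 4-byte boundary; the pointer has to skip that padding at the end of every row. The relationship in isolation (the 101-pixel width is illustrative):

        // For Format24bppRgb the stride is the payload rounded up to a multiple of 4
        int width = 101;
        int payloadBytesPerRow = width * 3;          // 303
        int stride = (payloadBytesPerRow + 3) & ~3;  // 304
        int nOffset = stride - payloadBytesPerRow;   // 1 padding byte to skip per row
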
Example #14
        public MeasurementsHelper(
            int bitPix,
            TangraConfig.BackgroundMethod backgroundMethod,
            int subPixelSquare,
            uint saturationValue)
        {
            m_BitPix           = bitPix;
            m_MaxPixelValue    = Pixelmap.GetMaxValueForBitPix(m_BitPix);
            m_BackgroundMethod = backgroundMethod;
            m_SubPixelSquare   = subPixelSquare;
            m_SaturationValue  = saturationValue;

            m_TimesHigherPositionToleranceForFullyOccultedStars =
                TangraConfig.Settings.Special.TimesHigherPositionToleranceForFullyOccultedStars;
        }
Example #15
        public static void GenerateNoise(Pixelmap pixelmap, int[,] simulatedBackground, int mean, int stdDev)
        {
            uint maxSignalValue = pixelmap.MaxSignalValue;

            for (int y = 0; y < pixelmap.Height; ++y)
            {
                for (int x = 0; x < pixelmap.Width; ++x)
                {
                    double bgPixel = Math.Min(maxSignalValue, Math.Max(0, simulatedBackground[x, y] + Math.Abs(Random(mean, stdDev))));
                    uint val = (uint)Math.Round(bgPixel);

                    pixelmap[x, y] = val;
                }
            }
        }
Example #16
        public void PlateSolve(PlateSolveTesterConfig config)
        {
            var pixelMap = new Pixelmap(config.Width, config.Height, config.BitPix, config.Pixels, null, null);
            var image = new AstroImage(pixelMap);

            StarMap starMap = new StarMap(
                TangraConfig.Settings.Astrometry.PyramidRemoveNonStellarObject,
                TangraConfig.Settings.Astrometry.MinReferenceStarFWHM,
                TangraConfig.Settings.Astrometry.MaxReferenceStarFWHM,
                TangraConfig.Settings.Astrometry.MaximumPSFElongation,
                TangraConfig.Settings.Astrometry.LimitReferenceStarDetection);

            Rectangle excludeRect = new Rectangle(config.OSDRectToExclude.X, config.OSDRectToExclude.Y, config.OSDRectToExclude.Width, config.OSDRectToExclude.Height);
            Rectangle includeRect = new Rectangle(config.RectToInclude.X, config.RectToInclude.Y, config.RectToInclude.Width, config.RectToInclude.Height);

            starMap.FindBestMap(
                StarMapInternalConfig.Default,
                image,
                excludeRect,
                includeRect,
                config.LimitByInclusion);

            var facade = new StarCatalogueFacade(TangraConfig.Settings.StarCatalogue);
            var catalogueStars = facade.GetStarsInRegion(
                config.RADeg,
                config.DEDeg,
                (config.ErrFoVs + 1.0) * config.PlateConfig.GetMaxFOVInArcSec() / 3600.0,
                config.LimitMagn,
                config.Epoch);

            var distBasedMatcher = new DistanceBasedAstrometrySolver(
                new MockedOperationNotifier(),
                config.PlateConfig,
                TangraConfig.Settings.Astrometry,
                catalogueStars, config.DetermineAutoLimitMagnitude);

            distBasedMatcher.SetMinMaxMagOfStarsForAstrometry(config.PyramidMinMag, config.LimitMagn);
            distBasedMatcher.SetMinMaxMagOfStarsForPyramidAlignment(config.PyramidMinMag, config.PyramidMaxMag);

            distBasedMatcher.InitNewMatch(starMap, PyramidMatchType.PlateSolve, null);

            distBasedMatcher.InitNewFrame(starMap);

            distBasedMatcher.SetManuallyIdentifiedHints(new Dictionary<PSFFit, IStar>());

            LeastSquareFittedAstrometry astrometricFit;
            PerformMatchResult result = distBasedMatcher.PerformMatch(out astrometricFit);
        }
Example #17
        public void OnPreProcess(Pixelmap newFrame)
        {
            if (m_PreProcessFrames &&
                newFrame != null)
            {
                // Null-check first to avoid dereferencing a null frame in the assert
                Trace.Assert(newFrame.BitPixCamera <= 16);

                for (int y = 0; y < newFrame.Height; ++y)
                {
                    for (int x = 0; x < newFrame.Width; ++x)
                    {
                        newFrame[x, y] = m_MappedBytes[newFrame[x, y]];
                    }
                }
            }
        }
Example #18
 public Pixelmap(Pixelmap cloneFrom)
 {
     Width                 = cloneFrom.Width;
     Height                = cloneFrom.Height;
     BitPixCamera          = cloneFrom.m_BitPix;
     m_Pixels              = cloneFrom.m_Pixels;
     m_DisplayBitmapPixels = cloneFrom.m_DisplayBitmapPixels;
     m_MaxSignalValue      = cloneFrom.m_MaxSignalValue;
     try
     {
         m_Bitmap = cloneFrom.DisplayBitmap != null ? (Bitmap)cloneFrom.DisplayBitmap.Clone() : null;
     }
     catch (Exception ex)
     {
         Trace.WriteLine(ex);
         Trace.Assert(false);
     }
 }
Example #20
        public FrameByteBrightnessContrast(int brigtness, int contrast, bool preProcessFrames, int bitPix)
            : base(preProcessFrames, bitPix)
        {
            m_Brigtness = brigtness;
            m_Contrast = (sbyte)contrast;

            uint maxPixelValue = bitPix.GetMaxValueForBitPix();

            Trace.Assert(maxPixelValue <= int.MaxValue, "32bit images not supported.");

            using (Pixelmap image = new Pixelmap((int)maxPixelValue + 1, 1, bitPix, new uint[maxPixelValue + 1], null, null))
            {
                // Push an identity ramp through the filters once and cache the mapping
                for (int i = 0; i <= maxPixelValue; i++)
                    image[i, 0] = (uint)i;

                BitmapFilter.Brightness(image, m_Brigtness);
                BitmapFilter.Contrast(image, m_Contrast);

                for (int i = 0; i <= maxPixelValue; i++)
                    m_MappedBytes[i] = image[i, 0];
            }
        }
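
Example #20 computes its value mapping once by pushing an identity ramp (pixel i holds value i) through the filters and recording what comes out; Example #17 then applies the cached table per pixel. The same precompute-a-LUT pattern in a self-contained form (the linear brightness/contrast formula below is an assumption, not BitmapFilter's exact curve):

        // Build a value-mapping table once; filtering a frame then becomes
        // a single lookup per pixel: pixel = mapped[pixel]
        int maxValue = 255;
        int brightness = 20;
        double contrast = 1.1;

        var mapped = new uint[maxValue + 1];
        for (int i = 0; i <= maxValue; i++)
        {
            double v = (i - 128) * contrast + 128 + brightness;  // assumed linear model
            mapped[i] = (uint)Math.Max(0, Math.Min(maxValue, (int)Math.Round(v)));
        }
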
Example #21
        public SingleBitmapFileFrameStream(Bitmap bitmap)
        {
            try
            {
                m_Pixelmap = Pixelmap.ConstructFromBitmap(bitmap, TangraConfig.ColourChannel.Red);
            }
            catch (ArgumentException ex)
            {
                // If there is something wrong with the Bitmap, then create a blank (black) frame
                Trace.WriteLine(ex);
                m_Pixelmap = new Pixelmap(
                    bitmap.Width,
                    bitmap.Height,
                    8,
                    new uint[bitmap.Width*bitmap.Height],
                    new Bitmap(bitmap.Width, bitmap.Height, PixelFormat.Format24bppRgb),
                    new byte[3 * bitmap.Width * bitmap.Height]);
            }

            m_FirstFrame = 0;
            m_LastFrame = 0;
            m_NumFrames = 1;
        }
Example #22
        internal ucCalibrationPanel(AstrometryController astrometryController, VideoController videoController, IPlateCalibrationTool configTool)
        {
            InitializeComponent();

            m_AstrometryController = astrometryController;
            m_VideoController = videoController;
            m_CalibrationTool = configTool;
            m_CalibrationTool.AreaChanged += m_CalibrationTool_AreaChanged;

            pnlDebugFits.Visible = false;

            m_AstrometryController.RegisterCalibrationRunner(this);

            rbIdentify3Stars.Checked = true;
            UpdateEnabledStateOfScrollControls();

            m_InitialPixelmap = m_VideoController.GetCurrentAstroImage(false).Pixelmap;
            m_VideoController.ApplyDisplayModeAdjustments(m_InitialPixelmap.DisplayBitmap, false, m_InitialPixelmap);

            m_CalibrationTool.ActivateOsdAreaSizing();

            rbInclusion.Checked = TangraConfig.Settings.PlateSolve.SelectedScopeRecorderConfig.IsInclusionArea;
        }
Example #23
        public Pixelmap Rotate(double angleDegrees)
        {
            int newWidth  = Width;
            int newHeight = Height;

            TangraModelCore.GetRotatedFrameDimentions(Width, Height, angleDegrees, ref newWidth, ref newHeight);

            uint[] pixels             = new uint[newWidth * newHeight];
            byte[] displayBitmapBytes = new byte[newWidth * newHeight];
            byte[] rawBitmapBytes     = new byte[GetBitmapBIRGBPixelArraySize(24, Width, Height) + 40 + 14 + 1]; // pixel data + BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14)

            TangraModelCore.RotateFrame(Width, Height, angleDegrees, m_Pixels, newWidth, newHeight, pixels, rawBitmapBytes, displayBitmapBytes, (short)m_BitPix, m_MaxSignalValue.HasValue ? m_MaxSignalValue.Value : 0);

            using (var memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                try
                {
                    displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                }
                catch (Exception ex)
                {
                    Trace.WriteLine(ex.GetFullStackTrace());
                    displayBitmap = new Bitmap(newWidth, newHeight);
                }

                var rv = new Pixelmap(newWidth, newHeight, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                if (m_MaxSignalValue.HasValue)
                {
                    rv.SetMaxSignalValue(m_MaxSignalValue.Value);
                }
                rv.FrameState = FrameState;

                return(rv);
            }
        }
Example #24
        private void ProcessCurrentImage()
        {
            if (m_CurrentIndex >= 0 && m_CurrentIndex < m_InputFiles.Count)
            {
                string fileName = m_InputFiles[m_CurrentIndex];
                lblDisplayedFile.Text = fileName;
                m_CurrentImage = (Bitmap)Bitmap.FromFile(fileName);
                m_Pixelmap = Pixelmap.ConstructFromBitmap(m_CurrentImage, TangraConfig.ColourChannel.Red);

                using (Graphics g = Graphics.FromImage(m_CurrentImage))
                {
                    m_Processor.Process(m_Pixelmap.Pixels, m_Pixelmap.Width, m_Pixelmap.Height, g, m_CurrentIndex, m_CurrentIndex % 2 == 0);
                    g.Flush();
                }

                picField.Image = m_CurrentImage;
                picField.Update();

                lblBlockWidth.Text = m_Processor.BlockWidth.ToString();
                lblBlockHeight.Text = m_Processor.BlockHeight.ToString();
                lblBlockXOffs.Text = m_Processor.BlockOffsetX.ToString();

                if (m_Processor.BlockOffsetYOdd != m_Processor.BlockOffsetYEven)
                    lblBlockYOffs.Text = string.Format("o:{0} e:{1}", m_Processor.BlockOffsetYOdd, m_Processor.BlockOffsetYEven);
                else
                    lblBlockYOffs.Text = m_Processor.BlockOffsetYOdd.ToString();

                PlotDigitPatterns();

                if (string.IsNullOrEmpty(m_Processor.CurrentOcredString))
                    lblOcredText.Text = "";
                else
                    lblOcredText.Text = m_Processor.CurrentOcredString;
            }
        }
Example #25
 private void DisplayCurrentFrameInternal(MovementType movementType, Pixelmap currentPixelmap, string frameFileName)
 {
     if (m_VideoStream != null)
     {
         if (m_CurrentFrameIndex >= m_VideoStream.FirstFrame &&
             m_CurrentFrameIndex <= m_VideoStream.LastFrame)
         {
             m_FrameRenderer.RenderFrame(m_CurrentFrameIndex, currentPixelmap, movementType, m_CurrentFrameIndex == m_VideoStream.LastFrame, 0, m_CurrentFrameIndex, frameFileName);
             m_LastDisplayedFrameIndex = m_CurrentFrameIndex;
         }
     }
 }
Example #26
        public Pixelmap GetPixelmap(int index)
        {
            if (index < FirstFrame) index = FirstFrame;
            if (index > LastFrame) index = LastFrame;

            uint[] pixels;
            uint[] originalPixels;
            Bitmap videoFrame;
            byte[] bitmapBytes;
            TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);

            var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
            rv.UnprocessedPixels = originalPixels;
            return rv;
        }
Example #27
        public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            if (startFrameNo < FirstFrame) startFrameNo = FirstFrame;
            if (startFrameNo > LastFrame) startFrameNo = LastFrame;

            uint[] pixels;
            uint[] originalPixels;
            Bitmap videoFrame;
            byte[] bitmapBytes;

            TangraVideo.GetIntegratedFrame(startFrameNo, framesToIntegrate, isSlidingIntegration, isMedianAveraging, out pixels, out originalPixels, out videoFrame, out bitmapBytes);

            var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
            rv.UnprocessedPixels = originalPixels;
            return rv;
        }
Example #28
        public AstroImage(Pixelmap pixelmap, Rectangle processArea, bool useUnprocessedPixels = false)
        {
            if (useUnprocessedPixels)
            {
                m_Pixelmap = new Pixelmap(pixelmap.Width, pixelmap.Height, pixelmap.BitPixCamera, new uint[pixelmap.Pixels.Length], null, null);
                m_Pixelmap.SetMaxSignalValue(pixelmap.MaxSignalValue);
                for (int i = 0; i < pixelmap.Pixels.Length; i++)
                {
                    m_Pixelmap.Pixels[i] = pixelmap.UnprocessedPixels[i];
                }
            }
            else
                m_Pixelmap = pixelmap;

            m_Rect = processArea;

            m_Width = m_Pixelmap.Width;
            m_Height = m_Pixelmap.Height;
            m_BitPix = m_Pixelmap.BitPixCamera;

            if (m_Rect.Width > m_Width) m_Rect.Width = m_Width;
            if (m_Rect.Height > m_Height) m_Rect.Height = m_Height;
        }
Example #29
        private void GenerateFrame(Pixelmap pixmap, List<IStar> stars, ModelConfig modelConfig)
        {
            var mea = new MeasurementsHelper(
                pixmap.BitPixCamera,
                TangraConfig.BackgroundMethod.BackgroundMedian,
                TangraConfig.Settings.Photometry.SubPixelSquareSize,
                TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(pixmap.BitPixCamera, pixmap.MaxSignalValue));

            float apertureSize = APERTURE;
            float annulusInnerRadius = (GAP + APERTURE) / APERTURE;
            int annulusMinPixels = (int)(Math.PI * (Math.Pow(ANNULUS + GAP + APERTURE, 2) - Math.Pow(GAP + APERTURE, 2)));

            mea.SetCoreProperties(annulusInnerRadius, annulusMinPixels, CorePhotometrySettings.Default.RejectionBackgroundPixelsStdDev, 2 /* TODO: This must be configurable */);

            var measurements = new Dictionary<IStar, double>();

            foreach (IStar star in stars)
            {
                double x, y;

                GetOnPlateCoordinates(star.RADeg, star.DEDeg, modelConfig, out x, out y);

                if (x < 0 || x > modelConfig.FrameWidth || y < 0 || y > modelConfig.FrameHeight)
                    continue;

                float starMag = GetStarMag(star, modelConfig.PhotometricFilter);
                float iMax = ModelStarAmplitude(star, starMag, modelConfig, pixmap.BitPixCamera, pixmap.MaxSignalValue);

                if (!float.IsNaN(iMax))
                {
                    VideoModelUtils.GenerateStar(pixmap, (float)x, (float)y, (float)modelConfig.FWHM, iMax, 0 /*Use Gaussian */);

                    if (modelConfig.CheckMagnitudes)
                    {
                        var image = new AstroImage(pixmap);
                        uint[,] data = image.GetMeasurableAreaPixels((int)x, (int)y, 17);
                        uint[,] backgroundPixels = image.GetMeasurableAreaPixels((int)x, (int)y, 35);

                        PSFFit fit = new PSFFit((int)x, (int)y);
                        fit.Fit(data);

                        var result = mea.MeasureObject(new ImagePixel(x, y), data, backgroundPixels, pixmap.BitPixCamera,
                            TangraConfig.PreProcessingFilter.NoFilter,
                            TangraConfig.PhotometryReductionMethod.AperturePhotometry, TangraConfig.PsfQuadrature.NumericalInAperture,
                            TangraConfig.PsfFittingMethod.DirectNonLinearFit,
                            apertureSize, modelConfig.FWHM, (float)modelConfig.FWHM,
                            new FakeIMeasuredObject(fit),
                            null, null,
                            false);

                        if (result == NotMeasuredReasons.TrackedSuccessfully && !mea.HasSaturatedPixels)
                        {
                            // Add value for fitting
                            measurements.Add(star, mea.TotalReading - mea.TotalBackground);
                        }
                    }
                }
            }

            if (modelConfig.CheckMagnitudes)
                CalculateGagnitudeFit(measurements, modelConfig.BVSlope);
        }
Example #30
        public Pixelmap GetPixelmap(int index)
        {
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];

            uint[] flatPixelsCopy = new uint[m_FlatPixels.Length];
            Array.Copy(m_FlatPixels, flatPixelsCopy, m_FlatPixels.Length);

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(flatPixelsCopy, m_Width, m_Height, m_Bpp, 0 /* No normal value for FITS files */, m_Exposure);

            TangraCore.GetBitmapPixels(m_Width, m_Height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)m_Bpp, 0);

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, m_Width, m_Height);

            Pixelmap rv = new Pixelmap(m_Width, m_Height, m_Bpp, flatPixelsCopy, displayBitmap, displayBitmapBytes);

            return rv;
        }
Example #31
        public Pixelmap GetPixelmap(int index)
        {
            if (index < FirstFrame) index = FirstFrame;
            if (index > LastFrame) index = LastFrame;

            uint[] pixels;
            uint[] originalPixels;
            Bitmap videoFrame;
            byte[] bitmapBytes;

            if (Mode == ReInterlaceMode.SwapFields)
            {
                TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);

                byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];

                TangraCore.SwapVideoFields(pixels, originalPixels, Width, Height, bitmapPixels, bitmapBytes);

                videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);

                var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
                rv.UnprocessedPixels = originalPixels;
                return rv;
            }
            else if (Mode == ReInterlaceMode.ShiftOneField || Mode == ReInterlaceMode.SwapAndShiftOneField)
            {
                uint[] pixels2;
                uint[] originalPixels2;
                Bitmap videoFrame2;
                byte[] bitmapBytes2;

                if (m_LastPrevFrameId == index)
                {
                    pixels = m_LastPrevFramePixels;
                    originalPixels = m_LastPrevFrameOriginalPixels;
                    bitmapBytes = m_LastPrevFrameBitmapBytes;
                }
                else
                    TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);

                TangraVideo.GetFrame(index + 1, out pixels2, out originalPixels2, out videoFrame2, out bitmapBytes2);

                m_LastPrevFrameId = index + 1;
                m_LastPrevFramePixels = pixels2;
                m_LastPrevFrameOriginalPixels = originalPixels2;
                m_LastPrevFrameBitmapBytes = bitmapBytes2;

                byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];

                TangraCore.ShiftVideoFields(pixels, originalPixels, pixels2, originalPixels2, Width, Height, m_ShiftMode, bitmapPixels, bitmapBytes);

                videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);

                var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
                rv.UnprocessedPixels = originalPixels;
                return rv;
            }
            else if (Mode == ReInterlaceMode.None)
            {
                return m_BaseStream.GetPixelmap(index);
            }
            else
                throw new NotSupportedException();
        }
Example #32
        public static void GenerateStar(Pixelmap pixelmap, float x0, float y0, float fwhm, float iMax, int psfModel)
        {
            double r0 = fwhm / (2 * Math.Sqrt(Math.Log(2)));
            int maxPsfModelDist = (int)(6 * fwhm);
            uint maxSignalValue = pixelmap.MaxSignalValue;
            double fiveThirds = 5.0 / 3.0;
            for (int y = 0; y < pixelmap.Height; ++y)
            {
                for (int x = 0; x < pixelmap.Width; ++x)
                {
                    if (Math.Abs(x - x0) < maxPsfModelDist && Math.Abs(y - y0) < maxPsfModelDist)
                    {
                        int counter = 0;
                        double sum = 0;
                        for (double dx = -0.5; dx < 0.5; dx += 0.1)
                        {
                            for (double dy = -0.5; dy < 0.5; dy += 0.1)
                            {
                                double modelVal = 0;
                                if (psfModel == 0)
                                    modelVal = iMax * Math.Exp(-((x + dx - x0) * (x + dx - x0) + (y + dy - y0) * (y + dy - y0)) / (r0 * r0));
                                else if (psfModel == 1)
                                    // Math.Abs guards Math.Pow against negative bases, which would return NaN
                                    modelVal = iMax * Math.Exp(-(Math.Pow(Math.Abs(x + dx - x0), fiveThirds) + Math.Pow(Math.Abs(y + dy - y0), fiveThirds)) / (r0 * r0));

                                double thisVal = Math.Min(maxSignalValue, Math.Max(0, modelVal));
                                sum += thisVal;
                                counter++;
                            }
                        }

                        long val = (long)Math.Round(sum / counter);
                        val += (long)pixelmap[x, y];

                        pixelmap[x, y] = (uint)Math.Min(maxSignalValue, Math.Max(0, val));
                    }
                }
            }
        }
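
GenerateStar integrates the PSF model over a 10x10 sub-pixel grid and averages, which reduces aliasing for stars with a small FWHM. The Gaussian branch of the model on its own, using the same parametrisation (the fwhm and iMax values are illustrative):

        // r0 is chosen so the profile falls to iMax / 2 at a distance of fwhm / 2
        double fwhm = 3.5, iMax = 1000;
        double r0 = fwhm / (2 * Math.Sqrt(Math.Log(2)));

        double Psf(double x, double y, double x0, double y0) =>
            iMax * Math.Exp(-((x - x0) * (x - x0) + (y - y0) * (y - y0)) / (r0 * r0));

        Console.WriteLine(Psf(0, 0, 0, 0));          // 1000 at the centre
        Console.WriteLine(Psf(fwhm / 2, 0, 0, 0));   // 500: half maximum at fwhm / 2
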
Example #33
 public static bool LowPassFilter(Pixelmap b)
 {
     return(Convolution.Conv3x3(b, LOW_PASS_FILTER_MATRIX));
 }
Example #34
        private void GenerateAAVVideo(ModelConfig modelConfig, List<IStar> stars)
        {
            AavFileCreator.CloseFile();
            AavFileCreator.StartNewFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, modelConfig.Integration);

            m_MagnitudeToPeakDict = null;

            try
            {
                //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
                //AddOnScreenText(bmp, modelConfig, "The simulated video stars from the next frame");
                //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);
                DateTime zeroFrameDT = DateTime.UtcNow;

                uint maxSignalValue = (uint)(255 * modelConfig.Integration);

                Random rndGen = new Random((int)DateTime.Now.Ticks);
                m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
                for (int x = 0; x < modelConfig.FrameWidth; x++)
                    for (int y = 0; y < modelConfig.FrameHeight; y++)
                    {
                        if (modelConfig.DarkFrameMean > 0)
                        {
                            double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0; // ~1% chance of a simulated hot pixel
                            double darkPixel = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                            double bgPixel = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                            m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                        }
                        else
                            m_SimulatedDarkFrame[x, y] = 0;
                    }

                for (int i = 0; i <= modelConfig.TotalFrames; i++)
                {
                    using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
                    {
                        pixmap.SetMaxSignalValue(maxSignalValue);

                        VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                        GenerateFrame(pixmap, stars, modelConfig);

                        DateTime startDT = zeroFrameDT.AddMilliseconds(40 * modelConfig.Integration * i);
                        if (Math.Abs(modelConfig.LinearityCoefficient - 1) > 0.0001)
                        {
                            uint maxVal = pixmap.MaxSignalValue;
                            double gammaCoeff = maxVal / Math.Pow((double)maxVal, modelConfig.LinearityCoefficient);
                            for (int x = 0; x < pixmap.Width; x++)
                            {
                                for (int y = 0; y < pixmap.Height; y++)
                                {
                                    uint nonLinVal = (uint)Math.Round(gammaCoeff * Math.Pow(pixmap[x, y], modelConfig.LinearityCoefficient));
                                    pixmap[x, y] = Math.Min(maxVal, Math.Max(0, nonLinVal));
                                }
                            }
                        }
                        AavFileCreator.AddVideoFrame(startDT, startDT.AddMilliseconds(40 * modelConfig.Integration), pixmap);
                    }

                    InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
                }
            }
            finally
            {
                AavFileCreator.CloseFile();
            }
        }
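
The linearity loop above is normalised so that full scale maps to itself: with gammaCoeff = maxVal / maxVal^k, the transform gammaCoeff * v^k fixes both 0 and maxVal while bending everything in between. Checking the endpoints (the values are illustrative):

        double maxVal = 4080, k = 0.75;
        double coeff = maxVal / Math.Pow(maxVal, k);

        Console.WriteLine(coeff * Math.Pow(0, k));           // 0
        Console.WriteLine(coeff * Math.Pow(maxVal, k));      // 4080: full scale is preserved
        Console.WriteLine(coeff * Math.Pow(maxVal / 2, k));  // ~2426: mid-tones are lifted for k < 1
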
Example #35
        private void GenerateAVIVideo(ModelConfig modelConfig, List<IStar> stars)
        {
            TangraVideo.CloseAviFile();
            TangraVideo.StartNewAviFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, 8, 25, false);

            m_MagnitudeToPeakDict = null;

            try
            {
                //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
                //AddOnScreenText(bmp, modelConfig, "The simulated video stars from the next frame");
                //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);

                uint maxSignalValue = (uint)(255 * modelConfig.Integration);

                Random rndGen = new Random((int)DateTime.Now.Ticks);
                m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
                for (int x = 0; x < modelConfig.FrameWidth; x++)
                    for (int y = 0; y < modelConfig.FrameHeight; y++)
                    {
                        if (modelConfig.DarkFrameMean > 0)
                        {
                            double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0; // ~1% chance of a simulated hot pixel
                            double darkPixel = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                            double bgPixel = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                            m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                        }
                        else
                            m_SimulatedDarkFrame[x, y] = 0;
                    }

                for (int i = 0; i <= modelConfig.TotalFrames; i++)
                {
                    using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
                    {
                        pixmap.SetMaxSignalValue(maxSignalValue);

                        VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                        GenerateFrame(pixmap, stars, modelConfig);

                        TangraVideo.AddAviVideoFrame(pixmap, modelConfig.LinearityCoefficient, (int)pixmap.MaxSignalValue);
                    }

                    InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
                }
            }
            finally
            {
                TangraVideo.CloseAviFile();
            }
        }
Example #36
        public static uint GetMedian(Pixelmap image)
        {
            List<uint> allPixels = new List<uint>(image.Width * image.Height);

            for (int y = 0; y < image.Height; ++y)
            {
                for (int x = 0; x < image.Width; ++x)
                {
                    allPixels.Add(image[x, y]);
                }
            }

            allPixels.Sort();

            return allPixels[allPixels.Count / 2];
        }
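
Sorting a full copy of the frame is O(n log n) per call; element n/2 of the sorted list is the upper median when the count is even. A quick check of that selection rule:

        // For an even count this returns the upper of the two middle values
        var values = new List<uint> { 4, 1, 3, 2 };
        values.Sort();
        uint median = values[values.Count / 2];  // 3, not 2.5
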
Example #37
        private void InitStarAmplitudeModelling(ModelConfig modelConfig, float accuracy, int bitPix, uint maxSignalValue)
        {
            if (m_MagnitudeToPeakDict != null)
                return;

            m_MagnitudeToPeakDict = new Dictionary<double, int>();
            m_MagnitudeToPeakMags = new List<double>();
            m_MagnitudeToPeakPeaks = new List<int>();

            var mea = new MeasurementsHelper(
                bitPix,
                TangraConfig.BackgroundMethod.BackgroundMedian,
                TangraConfig.Settings.Photometry.SubPixelSquareSize,
                TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(bitPix, maxSignalValue));

            float apertureSize = APERTURE;
            float annulusInnerRadius = (GAP + APERTURE) / APERTURE;
            int annulusMinPixels = (int)(Math.PI * (Math.Pow(ANNULUS + GAP + APERTURE, 2) - Math.Pow(GAP + APERTURE, 2)));

            mea.SetCoreProperties(annulusInnerRadius, annulusMinPixels, CorePhotometrySettings.Default.RejectionBackgroundPixelsStdDev, 2 /* TODO: This must be configurable */);

            int peak = (int)(maxSignalValue - (modelConfig.NoiseMean + modelConfig.DarkFrameMean) * modelConfig.Integration);
            int TOTAL_STEPS = 100;
            double step = Math.Log10(peak) / TOTAL_STEPS;
            double zeroMag = double.NaN;
            for (int ii = 0; ii < TOTAL_STEPS; ii++)
            {
                int amplitude = (int)Math.Round(Math.Pow(10, Math.Log10(peak) - ii * step));
                Pixelmap pixmap = new Pixelmap(64, 64, bitPix, new uint[64 * 64], null, null);
                VideoModelUtils.GenerateStar(pixmap, 32, 32, (float)modelConfig.FWHM, amplitude, 0 /* Gaussian */);
                PSFFit fit = new PSFFit(32, 32);
                AstroImage img = new AstroImage(pixmap);
                uint[,] data = img.GetMeasurableAreaPixels(32, 32, 17);
                uint[,] backgroundPixels = img.GetMeasurableAreaPixels(32, 32, 35);

                fit.Fit(data);

                var result = mea.MeasureObject(new ImagePixel(32, 32), data, backgroundPixels, pixmap.BitPixCamera,
                    TangraConfig.PreProcessingFilter.NoFilter,
                    TangraConfig.PhotometryReductionMethod.AperturePhotometry, TangraConfig.PsfQuadrature.NumericalInAperture,
                    TangraConfig.PsfFittingMethod.DirectNonLinearFit,
                    apertureSize, modelConfig.FWHM, (float)modelConfig.FWHM,
                    new FakeIMeasuredObject(fit),
                    null, null,
                    false);

                if (result == NotMeasuredReasons.TrackedSuccessfully && !mea.HasSaturatedPixels)
                {
                    // Add value for fitting
                    double measurement = mea.TotalReading - mea.TotalBackground;
                    if (double.IsNaN(zeroMag))
                        zeroMag = modelConfig.BrighestUnsaturatedStarMag + 2.5 * Math.Log10(measurement);
                    double magnitude = -2.5 * Math.Log10(measurement) + zeroMag;

                    m_MagnitudeToPeakDict[magnitude] = amplitude;
                    m_MagnitudeToPeakMags.Add(magnitude);
                    m_MagnitudeToPeakPeaks.Add(amplitude);
                }
            }
        }
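
The magnitude calibration anchors its zero point to the first successfully measured star: zeroMag = mRef + 2.5 * log10(fluxRef), after which any flux converts via m = -2.5 * log10(flux) + zeroMag. A quick check that a quarter of the reference flux comes out about 1.5 magnitudes fainter (the values are illustrative):

        double refMag = 9.0;
        double refFlux = 40000;
        double zeroMag = refMag + 2.5 * Math.Log10(refFlux);

        double flux = refFlux / 4;
        double mag = -2.5 * Math.Log10(flux) + zeroMag;
        Console.WriteLine(mag - refMag);  // ~1.505 = 2.5 * log10(4)
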
Example #38
 public AstroImage(Pixelmap pixelmap, bool useUnprocessedPixels = false)
     : this(pixelmap, new Rectangle(0, 0, pixelmap.Width, pixelmap.Height), useUnprocessedPixels)
 {
 }