Example #1
        public Pixelmap GetPixelmap(int index)
        {
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, m_Width, m_Height) + 40 + 14 + 1];

            uint[] flatPixelsCopy = new uint[m_FlatPixels.Length];
            Array.Copy(m_FlatPixels, flatPixelsCopy, m_FlatPixels.Length);

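            // Pre-process a copy of the flat pixel array; the original m_FlatPixels is preserved and exposed below as UnprocessedPixels.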
            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(m_FlatPixels, flatPixelsCopy, m_Width, m_Height, m_Bpp, 0 /* No normal value for FITS files */, m_Exposure);

            TangraCore.GetBitmapPixels(m_Width, m_Height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)m_Bpp, 0);

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, m_Width, m_Height);

            Pixelmap rv = new Pixelmap(m_Width, m_Height, m_Bpp, flatPixelsCopy, displayBitmap, displayBitmapBytes);

            rv.UnprocessedPixels = m_FlatPixels;

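            // Carry any FITS header cards over as additional frame state properties.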
            if (m_Cards != null && m_Cards.Count > 0)
            {
                rv.FrameState.AdditionalProperties = new SafeDictionary <string, object>();
                foreach (string key in m_Cards.Keys)
                {
                    rv.FrameState.AdditionalProperties.Add(key, m_Cards[key]);
                }
            }

            return(rv);
        }
Example #2
        public AstroImage(Pixelmap pixelmap, Rectangle processArea, bool useUnprocessedPixels = false)
        {
            if (useUnprocessedPixels)
            {
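                // Copy the pixels into a fresh Pixelmap (preferring the unprocessed pixels when available) so the caller's Pixelmap is left untouched.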
                m_Pixelmap = new Pixelmap(pixelmap.Width, pixelmap.Height, pixelmap.BitPixCamera, new uint[pixelmap.Pixels.Length], null, null);
                m_Pixelmap.SetMaxSignalValue(pixelmap.MaxSignalValue);
                var pixelsToCopy = pixelmap.UnprocessedPixels ?? pixelmap.Pixels;
                for (int i = 0; i < pixelmap.Pixels.Length; i++)
                {
                    m_Pixelmap.Pixels[i] = pixelsToCopy[i];
                }
            }
            else
            {
                m_Pixelmap = pixelmap;
            }

            m_Rect = processArea;

            m_Width  = m_Pixelmap.Width;
            m_Height = m_Pixelmap.Height;
            m_BitPix = m_Pixelmap.BitPixCamera;

            if (m_Rect.Width > m_Width)
            {
                m_Rect.Width = m_Width;
            }
            if (m_Rect.Height > m_Height)
            {
                m_Rect.Height = m_Height;
            }
        }
Example #3
        internal static Pixelmap BuildFitsPixelmap(int width, int height, uint[] pixelsFlat, int bitPix, bool hasUtcTimeStamps, double? exposure, DateTime? timestamp, BasicHDU fitsImage, Dictionary <string, string> cards)
        {
            byte[] displayBitmapBytes = new byte[width * height];
            byte[] rawBitmapBytes     = new byte[(width * height * 3) + 40 + 14 + 1];

            TangraCore.PreProcessors.EnsurePixelRange(pixelsFlat, width, height, bitPix, 0);

            uint[] flatPixelsCopy = new uint[pixelsFlat.Length];
            Array.Copy(pixelsFlat, flatPixelsCopy, pixelsFlat.Length);

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(pixelsFlat, flatPixelsCopy, width, height, bitPix, 0 /* No normal value for FITS files */, exposure.HasValue ? (float)exposure.Value : 0);

            TangraCore.GetBitmapPixels(width, height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)bitPix, 0);

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, width, height);

            Pixelmap rv = new Pixelmap(width, height, bitPix, flatPixelsCopy, displayBitmap, displayBitmapBytes);

            rv.UnprocessedPixels = pixelsFlat;
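            // Only set exposure timing when the FITS file provides UTC timestamps.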
            if (hasUtcTimeStamps)
            {
                rv.FrameState.CentralExposureTime    = timestamp.HasValue ? timestamp.Value : DateTime.MinValue;
                rv.FrameState.ExposureInMilliseconds = exposure.HasValue ? (float)(exposure.Value * 1000.0) : 0;
            }

            rv.FrameState.Tag = fitsImage;
            rv.FrameState.AdditionalProperties = new SafeDictionary <string, object>();
            foreach (string key in cards.Keys)
            {
                rv.FrameState.AdditionalProperties.Add(key, cards[key]);
            }

            return(rv);
        }
Example #4
        private void BufferNonIntegratedFrame(int nextFrameIdToBuffer)
        {
            if (m_IsRunning && !m_StopRequestReceived)
            {
                int directionAwareFrameId = GetDirectionAwareFrameIndex(nextFrameIdToBuffer);

                Pixelmap bmp = m_VideoStream.GetPixelmap(directionAwareFrameId);

                if (bmp != null)
                {
                    lock (m_FrameBitmapLock)
                    {
                        var bufferedFrame = new BufferedFrame()
                        {
                            FrameNo = nextFrameIdToBuffer,
                            FirstFrameInIntegrationPeriod = nextFrameIdToBuffer,
                            Image         = bmp,
                            FrameFileName = m_VideoStream.SupportsFrameFileNames ? m_VideoStream.GetFrameFileName(directionAwareFrameId) : null
                        };

                        m_FramesBufferQueue.Enqueue(bufferedFrame);
                    }
                }
            }
        }
Example #5
        public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            if (startFrameNo < 0 || startFrameNo >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + startFrameNo);
            }

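            // Clamp the number of frames to integrate so the interval does not run past the last available frame.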
            int actualFramesToIntegrate = Math.Min(startFrameNo + framesToIntegrate, m_FirstFrame + m_CountFrames - 1) - startFrameNo;

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

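            // The native integration call fills both the processed and unprocessed pixel buffers; serialise access to it.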
            lock (m_SyncLock)
            {
                TangraCore.ADVGetIntegratedFrame(startFrameNo, actualFramesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap = (Bitmap)Bitmap.FromStream(memStr);

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.UnprocessedPixels = unprocessedPixels;
                return(rv);
            }
        }
Example #6
        public FrameByteBrightnessContrast(int brigtness, int contrast, bool preProcessFrames, int bitPix)
            : base(preProcessFrames, bitPix)
        {
            m_Brigtness = brigtness;
            m_Contrast  = (sbyte)contrast;

            uint maxPixelValue = bitPix.GetMaxValueForBitPix();

            Trace.Assert(maxPixelValue <= int.MaxValue, "32bit images not supported.");

            Pixelmap image = new Pixelmap((int)maxPixelValue + 1, 1, bitPix, new uint[maxPixelValue + 1], null, null);
            {
                for (int i = 0; i <= maxPixelValue; i++)
                {
                    image[i, 0] = (uint)i;
                }

                BitmapFilter.Brightness(image, m_Brigtness);
                BitmapFilter.Contrast(image, m_Contrast);

                for (int i = 0; i <= maxPixelValue; i++)
                {
                    m_MappedBytes[i] = image[i, 0];
                }
            }
        }
Example #7
        private void PlotDigitPatterns(IotaVtiOrcManaged ocrEngine)
        {
            List <uint[]> patterns = ocrEngine.GetLearntDigitPatterns();

            if (patterns.Count > 12)
            {
                Bitmap bmpZero = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[0], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picZero.Image = bmpZero;
                picZero.Update();

                Bitmap bmpOne = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[1], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picOne.Image = bmpOne;
                picOne.Update();

                Bitmap bmpTwo = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[2], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picTwo.Image = bmpTwo;
                picTwo.Update();

                Bitmap bmpThree = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[3], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picThree.Image = bmpThree;
                picThree.Update();

                Bitmap bmpFour = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[4], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picFour.Image = bmpFour;
                picFour.Update();

                Bitmap bmpFive = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[5], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picFive.Image = bmpFive;
                picFive.Update();

                Bitmap bmpSix = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[6], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picSix.Image = bmpSix;
                picSix.Update();

                Bitmap bmpSeven = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[7], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picSeven.Image = bmpSeven;
                picSeven.Update();

                Bitmap bmpEight = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[8], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picEight.Image = bmpEight;
                picEight.Update();

                Bitmap bmpNine = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[9], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                picNine.Image = bmpNine;
                picNine.Update();

                Bitmap bmp83 = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[10], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                pic83.Image = bmp83;
                pic83.Update();

                Bitmap bmp86 = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[11], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                pic86.Image = bmp86;
                pic86.Update();

                Bitmap bmp89 = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[12], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
                pic89.Image = bmp89;
                pic89.Update();
            }
        }
Example #8
        private void picField_MouseDown(object sender, MouseEventArgs e)
        {
            uint[] blockPixels = m_Processor.GetBlockAt(e.X, e.Y, m_CurrentIndex % 2 == 1);
            Bitmap bmpBlock    = Pixelmap.ConstructBitmapFromBitmapPixels(blockPixels, m_Processor.BlockWidth, m_Processor.BlockHeight);

            picBlock.Image = bmpBlock;
            picBlock.Update();
        }
Example #9
            public static void PreProcessingAddRemoveHotPixels(uint[,] model, ImagePixel[] pixels, uint imageMedian, uint maxPixelValue)
            {
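                // Flatten the 7x7 model and the pixel coordinates before calling the flat-array overload.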
                uint[] xPos      = pixels.Select(x => (uint)x.X).ToArray();
                uint[] yPos      = pixels.Select(x => (uint)x.Y).ToArray();
                uint[] flatModel = Pixelmap.ConvertFromXYToFlatArray(model, 7, 7);

                PreProcessingAddRemoveHotPixels(flatModel, (uint)xPos.Length, xPos, yPos, imageMedian, maxPixelValue);
            }
Example #10
        public Pixelmap GetPixelmap(int index)
        {
            if (index < FirstFrame || index > LastFrame)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            uint[] pixels             = new uint[Width * Height];
            uint[] unprocessedPixels  = new uint[Width * Height];
            byte[] displayBitmapBytes = new byte[Width * Height];
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, Width, Height) + 40 + 14 + 1];

            var frameInfo = new SerNativeFrameInfo();

            TangraCore.SERGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, (ushort)BitPix, ref frameInfo);

            m_CurrentFrameInfo = new SerFrameInfo(frameInfo);

            using (var memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                try
                {
                    displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                }
                catch (Exception ex)
                {
                    Trace.WriteLine(ex.GetFullStackTrace());
                    displayBitmap = new Bitmap(Width, Height);
                }

                var rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.UnprocessedPixels = unprocessedPixels;
                rv.FrameState        = new FrameStateData()
                {
                    SystemTime = m_CurrentFrameInfo.TimeStampUtc
                };

                if (UseTimeStamp != SerUseTimeStamp.None)
                {
                    if (UseTimeStamp == SerUseTimeStamp.FireCaptureLog)
                    {
                        DateTime dt;
                        if (m_FireCaptureTimeStamps.TryGetValue(1 + index, out dt))
                        {
                            rv.FrameState.CentralExposureTime = dt;
                        }
                    }
                    else if (UseTimeStamp == SerUseTimeStamp.SerEmbeddedUtcTime)
                    {
                        rv.FrameState.CentralExposureTime = m_CurrentFrameInfo.TimeStampUtc;
                    }
                }

                return(rv);
            }
        }
Example #11
        private Pixelmap RotatePixelMap(Pixelmap org)
        {
            if (org == null)
            {
                return(null);
            }

            return(org.Rotate(m_Angle));
        }
Example #12
 public bool AddAviVideoFrame(Pixelmap pixmap, double addedGamma, int? adv16NormalisationValue)
 {
     if (!TangraVideo.AddAviVideoFrame(pixmap, addedGamma, adv16NormalisationValue))
     {
         MessageBox.Show("There was an error calling AddAviVideoFrame:\r\n\r\n" + TangraVideo.GetLastAviErrorMessage(), "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
         return(false);
     }
     return(true);
 }
Example #13
        private void DisplayCurrentFrameNoIntegrate(MovementType movementType)
        {
            if (m_VideoStream != null)
            {
                Pixelmap currentBitmap = m_VideoStream.GetPixelmap(CurrentDirectionAwareFrameIndex);
                string   frameFileName = m_VideoStream.SupportsFrameFileNames ? m_VideoStream.GetFrameFileName(CurrentDirectionAwareFrameIndex) : null;

                DisplayCurrentFrameInternal(movementType, currentBitmap, frameFileName);
            }
        }
Example #14
 public void OnPreProcess(Pixelmap newFrame)
 {
     if (m_PreProcessors.Count > 0)
     {
         foreach (IFramePreProcessor preProcessor in m_PreProcessors)
         {
             preProcessor.OnPreProcess(newFrame);
         }
     }
 }
Example #15
        private void GenerateAVIVideo(ModelConfig modelConfig, List <IStar> stars)
        {
            TangraVideo.CloseAviFile();
            TangraVideo.StartNewAviFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, 8, 25, false);

            m_MagnitudeToPeakDict = null;

            try
            {
                //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
                //AddOnScreenText(bmp, modelConfig, "The simulated video stars from the next frame");
                //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);

                uint maxSignalValue = (uint)(255 * modelConfig.Integration);

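                // Pre-compute a simulated dark frame; roughly 1 in 100 pixels gets a saturated "hot pixel" peak added to the dark mean.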
                Random rndGen = new Random((int)DateTime.Now.Ticks);
                m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
                for (int x = 0; x < modelConfig.FrameWidth; x++)
                {
                    for (int y = 0; y < modelConfig.FrameHeight; y++)
                    {
                        if (modelConfig.DarkFrameMean > 0)
                        {
                            double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0;
                            double darkPixel  = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                            double bgPixel    = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                            m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                        }
                        else
                        {
                            m_SimulatedDarkFrame[x, y] = 0;
                        }
                    }
                }

                for (int i = 0; i <= modelConfig.TotalFrames; i++)
                {
                    using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
                    {
                        pixmap.SetMaxSignalValue(maxSignalValue);

                        VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                        GenerateFrame(pixmap, stars, modelConfig);

                        TangraVideo.AddAviVideoFrame(pixmap, modelConfig.LinearityCoefficient, (int)pixmap.MaxSignalValue);
                    }

                    InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
                }
            }
            finally
            {
                TangraVideo.CloseAviFile();
            }
        }
Example #16
        public void RunNoiseChunkRemovalTestCases(string noiseTestSource, string noiseTestResults)
        {
            string[] testCases = Directory.GetFiles(noiseTestSource, "*.bmp");
            bool     hasErrors = false;

            bool[]   options     = new bool[] { false, true };
            string[] optionNames = new string[] { "Managed", "Native" };
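            // Run every test case twice: once with the managed and once with the native implementation of the denoiser.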
            for (int j = 0; j < options.Length; j++)
            {
                foreach (string file in testCases)
                {
                    string expectedResult = Path.GetFullPath(noiseTestResults + "\\" + Path.GetFileName(file));

                    lvlTestCaseDescription.Text = string.Format("Test case NoiseChunksRemoval\\{0} ({1})", Path.GetFileName(file), optionNames[j]);
                    lvlTestCaseDescription.Update();

                    Pixelmap pix = Pixelmap.ConstructFromBitmap((Bitmap)Bitmap.FromFile(file), TangraConfig.ColourChannel.Red);

                    uint[] pixels = pix.Pixels;

                    LargeChunkDenoiser.Process(options[j], pixels, pix.Width, pix.Height);

                    Pixelmap pixExpected = Pixelmap.ConstructFromBitmap((Bitmap)Bitmap.FromFile(expectedResult), TangraConfig.ColourChannel.Red);

                    for (int i = 0; i < pix.Pixels.Length; i++)
                    {
                        if (pix.Pixels[i] != pixExpected.Pixels[i])
                        {
                            lbErrors.Items.Add(string.Format("NoiseChunk Removal Failed for {0} ({1})", Path.GetFileName(file), optionNames[j]));
                            //Bitmap bmp = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, pix.Width, pix.Height);
                            //bmp.Save(Path.ChangeExtension(expectedResult, ".errbmp"));
                            hasErrors = true;
                            break;
                        }
                    }
                }
            }

            pbar.Value++;

            if (!hasErrors)
            {
                pbarSuccess.Value++;
            }
            else
            {
                pbarError.Value++;
            }

            lblError.Text      = string.Format("Errored {0}/{1}", pbarError.Value, pbar.Value);
            lblSuccessful.Text = string.Format("Successful {0}/{1}", pbarSuccess.Value, pbar.Value);

            Application.DoEvents();
        }
Example #17
 private void DisplayCurrentFrameInternal(MovementType movementType, Pixelmap currentPixelmap, string frameFileName)
 {
     if (m_VideoStream != null)
     {
         if (m_CurrentFrameIndex >= m_VideoStream.FirstFrame &&
             m_CurrentFrameIndex <= m_VideoStream.LastFrame)
         {
             m_FrameRenderer.RenderFrame(m_CurrentFrameIndex, currentPixelmap, movementType, m_CurrentFrameIndex == m_VideoStream.LastFrame, 0, m_CurrentFrameIndex, frameFileName);
             m_LastDisplayedFrameIndex = m_CurrentFrameIndex;
         }
     }
 }
Example #18
        public void PlateSolve(PlateSolveTesterConfig config)
        {
            var pixelMap = new Pixelmap(config.Width, config.Height, config.BitPix, config.Pixels, null, null);
            var image    = new AstroImage(pixelMap);

            StarMap starMap = new StarMap(
                TangraConfig.Settings.Astrometry.PyramidRemoveNonStellarObject,
                TangraConfig.Settings.Astrometry.MinReferenceStarFWHM,
                TangraConfig.Settings.Astrometry.MaxReferenceStarFWHM,
                TangraConfig.Settings.Astrometry.MaximumPSFElongation,
                TangraConfig.Settings.Astrometry.LimitReferenceStarDetection);

            Rectangle excludeRect = new Rectangle(config.OSDRectToExclude.X, config.OSDRectToExclude.Y, config.OSDRectToExclude.Width, config.OSDRectToExclude.Height);
            Rectangle includeRect = new Rectangle(config.RectToInclude.X, config.RectToInclude.Y, config.RectToInclude.Width, config.RectToInclude.Height);

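            // Detect reference stars on the image, excluding the OSD rectangle (or restricting detection to the inclusion rectangle when LimitByInclusion is set).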
            starMap.FindBestMap(
                StarMapInternalConfig.Default,
                image,
                excludeRect,
                includeRect,
                config.LimitByInclusion);

            var facade         = new StarCatalogueFacade(TangraConfig.Settings.StarCatalogue);
            var catalogueStars = facade.GetStarsInRegion(
                config.RADeg,
                config.DEDeg,
                (config.ErrFoVs + 1.0) * config.PlateConfig.GetMaxFOVInArcSec() / 3600.0,
                config.LimitMagn,
                config.Epoch);

            var distBasedMatcher = new DistanceBasedAstrometrySolver(
                new MockedOperationNotifier(),
                config.PlateConfig,
                TangraConfig.Settings.Astrometry,
                catalogueStars,
                config.RADeg,
                config.DEDeg,
                config.DetermineAutoLimitMagnitude);

            distBasedMatcher.SetMinMaxMagOfStarsForAstrometry(config.PyramidMinMag, config.LimitMagn);
            distBasedMatcher.SetMinMaxMagOfStarsForPyramidAlignment(config.PyramidMinMag, config.PyramidMaxMag);

            distBasedMatcher.InitNewMatch(starMap, PyramidMatchType.PlateSolve, null);


            distBasedMatcher.InitNewFrame(starMap);

            distBasedMatcher.SetManuallyIdentifiedHints(new Dictionary <PSFFit, IStar>());

            LeastSquareFittedAstrometry astrometricFit;
            PerformMatchResult          result = distBasedMatcher.PerformMatch(out astrometricFit);
        }
Example #19
 public static void Update(Pixelmap currFrame)
 {
     if (s_FullPreviewForm != null)
     {
         lock (s_SyncRoot)
         {
             if (s_FullPreviewForm != null)
             {
                 s_FullPreviewForm.pictureBox.Image = GetPreviewImage(currFrame);
                 s_FullPreviewForm.Refresh();
             }
         }
     }
 }
Example #20
        private void btnLoad_Click(object sender, EventArgs e)
        {
            if (File.Exists(tbxFileLocation.Text))
            {
                m_Frame = (Bitmap)Bitmap.FromFile(tbxFileLocation.Text);
                //m_FieldsFrame = BitmapFilter.ToVideoFields(m_Frame);

                m_Pixelmap = Pixelmap.ConstructFromBitmap(m_Frame, TangraConfig.ColourChannel.GrayScale);
                m_Image    = new AstroImage(m_Pixelmap);

                m_InitialPixels = m_Image.GetOcrPixels();

                pictureBox1.Load(tbxFileLocation.Text);
            }
        }
Example #21
        public static void GenerateNoise(Pixelmap pixelmap, int[,] simulatedBackground, int mean, int stdDev)
        {
            uint maxSignalValue = pixelmap.MaxSignalValue;

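            // Add random noise with the given mean and standard deviation on top of the simulated background, clamped to [0, MaxSignalValue].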
            for (int y = 0; y < pixelmap.Height; ++y)
            {
                for (int x = 0; x < pixelmap.Width; ++x)
                {
                    double bgPixel = Math.Min(maxSignalValue, Math.Max(0, simulatedBackground[x, y] + Math.Abs(Random(mean, stdDev))));
                    uint   val     = (uint)Math.Round(bgPixel);

                    pixelmap[x, y] = val;
                }
            }
        }
Example #22
 public void OnPreProcess(Pixelmap newFrame)
 {
     if (m_PreProcessFrames &&
         newFrame != null)
     {
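          // Apply the configured pre-processing filter (low-pass or low-pass difference) to the frame in place.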
         if (m_Filter == TangraConfig.PreProcessingFilter.LowPassFilter)
         {
             BitmapFilter.LowPassFilter(newFrame);
         }
         else if (m_Filter == TangraConfig.PreProcessingFilter.LowPassDifferenceFilter)
         {
             BitmapFilter.LowPassDifference(newFrame);
         }
     }
 }
Example #23
        public void OnPreProcess(Pixelmap newFrame)
        {
            Trace.Assert(newFrame.BitPixCamera <= 16);

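            // Remap every pixel value through the pre-computed m_MappedBytes lookup table.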
            if (m_PreProcessFrames &&
                newFrame != null)
            {
                for (int y = 0; y < newFrame.Height; ++y)
                {
                    for (int x = 0; x < newFrame.Width; ++x)
                    {
                        newFrame[x, y] = m_MappedBytes[newFrame[x, y]];
                    }
                }
            }
        }
Example #24
        private static Bitmap GetPreviewImage(Pixelmap currFrame)
        {
            CurrFrame = new AstroImage(currFrame);
            Bitmap image = currFrame.CreateDisplayBitmapDoNotDispose();

            if (OnDrawOverlays != null)
            {
                using (Graphics g = Graphics.FromImage(image))
                {
                    OnDrawOverlays.Invoke(g);
                    g.Save();
                }
            }

            return(image);
        }
Example #25
        public static uint GetMedian(Pixelmap image)
        {
            List <uint> allPixels = new List <uint>();

            for (int y = 0; y < image.Height; ++y)
            {
                for (int x = 0; x < image.Width; ++x)
                {
                    allPixels.Add(image[x, y]);
                }
            }

            allPixels.Sort();

            return(allPixels[allPixels.Count / 2]);
        }
Example #26
        public SingleBitmapFileFrameStream(LCFile lightCurveFile)
        {
            m_lcFile = lightCurveFile;

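            // Older LC files (format version < 4) that store the averaged frame as 32bpp ARGB bytes need the dedicated constructor; all other files use the regular averaged-frame constructor.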
            if (lightCurveFile.LcFileFormatVersion < 4 && m_lcFile.Footer.AveragedFrameBytes.Length == 4 * m_lcFile.Footer.AveragedFrameWidth * m_lcFile.Footer.AveragedFrameHeight)
            {
                m_Pixelmap = Pixelmap.ConstructForLCFile32bppArgbAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
            }
            else
            {
                m_Pixelmap = Pixelmap.ConstructForLCFileAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
            }

            m_FirstFrame = (int)m_lcFile.Header.MinFrame;
            m_LastFrame  = (int)m_lcFile.Header.MaxFrame;
            m_NumFrames  = (int)m_lcFile.Header.MeasuredFrames;
        }
Example #27
        private void button1_Click(object sender, EventArgs e)
        {
            string error = null;

            uint[] dataOut         = new uint[m_InitialPixels.Length];
            uint[] dataDebugNoLChD = new uint[m_InitialPixels.Length];
            //GpsBoxSpriteOcr.PrepareOsdArea(m_InitialPixels, dataOut, dataDebugNoLChD, m_Image.Width, m_Image.Height);
            var rv  = GpsBoxSpriteOcr.PreProcessImageOSDForOCR(m_InitialPixels, m_Image.Width, m_Image.Height, 15, ref error);
            var bmp = Pixelmap.ConstructBitmapFrom8BitPixelmap(new Pixelmap(m_Image.Width, m_Image.Height, 8, rv, null, null));

            using (Graphics g = Graphics.FromImage(pictureBox1.Image))
            {
                g.DrawImage(bmp, new Point(0, 0));
                g.Save();
            }
            pictureBox1.Invalidate();
        }
Example #28
        public Pixelmap CreatePixelmap(uint[] pixels)
        {
            byte[] displayBitmapBytes = new byte[m_AdvFile.Width * m_AdvFile.Height];
            for (int y = 0; y < m_AdvFile.Height; y++)
            {
                for (int x = 0; x < m_AdvFile.Width; x++)
                {
                    int index = x + y * m_AdvFile.Width;

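                    // Convert the raw pixel value to an 8-bit display byte, depending on the file's bit depth and maximum pixel value.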
                    if (m_AdvFile.MaxPixelValue == 8)
                    {
                        displayBitmapBytes[index] = (byte)((pixels[index] & 0xFF));
                    }
                    else if (m_AdvFile.DataBpp == 12)
                    {
                        displayBitmapBytes[index] = (byte)((pixels[index] >> 4));
                    }
                    else if (m_AdvFile.DataBpp == 14)
                    {
                        displayBitmapBytes[index] = (byte)((pixels[index] >> 6));
                    }
                    else if (m_AdvFile.DataBpp == 16)
                    {
                        if (m_AdvFile.MaxPixelValue > 0)
                        {
                            displayBitmapBytes[index] = (byte)((255.0 * pixels[index] / m_AdvFile.MaxPixelValue));
                        }
                        else
                        {
                            displayBitmapBytes[index] = (byte)((pixels[index] >> 8));
                        }
                    }
                    else
                    {
                        displayBitmapBytes[index] = (byte)((pixels[index] >> (m_AdvFile.DataBpp - 8)));
                    }
                }
            }

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, (int)m_AdvFile.Width, (int)m_AdvFile.Height);

            Pixelmap rv = new Pixelmap((int)m_AdvFile.Width, (int)m_AdvFile.Height, m_AdvFile.DataBpp, pixels, displayBitmap, displayBitmapBytes);

            return(rv);
        }
Example #29
        public bool ArePixelmapsTheSame(Pixelmap bmp1, Pixelmap bmp2)
        {
            if (bmp1 == null && bmp2 == null)
            {
                return(true);
            }
            if (bmp1 == null || bmp2 == null)
            {
                return(false);
            }

            int x = m_Randomizer.Next(bmp1.Width - 1);
            int y = m_Randomizer.Next(bmp1.Height - 1);

            if (bmp1[x, y] != bmp2[x, y])
            {
                return(false);
            }

            x = m_Randomizer.Next(bmp1.Width - 1);
            y = m_Randomizer.Next(bmp1.Height - 1);

            if (bmp1[x, y] != bmp2[x, y])
            {
                return(false);
            }

            // Check all pixels
            int width  = bmp1.Width;
            int height = bmp1.Height;


            for (y = 0; y < height; ++y)
            {
                for (x = 0; x < width; ++x)
                {
                    if (bmp1[x, y] != bmp2[x, y])
                    {
                        return(false);
                    }
                }
            }

            return(true);
        }
Example #30
        public Pixelmap CreatePixelmap(AdvImageData imageData)
        {
            uint[] pixels             = new uint[Width * Height];
            byte[] displayBitmapBytes = new byte[Width * Height];
            for (int y = 0; y < Height; y++)
            {
                for (int x = 0; x < Width; x++)
                {
                    pixels[x + y * Width] = imageData.ImageData[x, y];
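                    // Derive the 8-bit display byte from the camera value; for 16-bit data use the ADV16 normalisation value when one is available.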
                    if (BitsPerPixel == 8)
                    {
                        displayBitmapBytes[x + y * Width] = (byte)((imageData.ImageData[x, y] & 0xFF));
                    }
                    else if (BitsPerPixel == 12)
                    {
                        displayBitmapBytes[x + y * Width] = (byte)((imageData.ImageData[x, y] >> 4));
                    }
                    else if (BitsPerPixel == 14)
                    {
                        displayBitmapBytes[x + y * Width] = (byte)((imageData.ImageData[x, y] >> 6));
                    }
                    else if (BitsPerPixel == 16)
                    {
                        if (Adv16NormalisationValue.HasValue)
                        {
                            displayBitmapBytes[x + y * Width] = (byte)((255.0 * imageData.ImageData[x, y] / Adv16NormalisationValue.Value));
                        }
                        else
                        {
                            displayBitmapBytes[x + y * Width] = (byte)((imageData.ImageData[x, y] >> 8));
                        }
                    }
                    else
                    {
                        displayBitmapBytes[x + y * Width] = (byte)((imageData.ImageData[x, y] >> (BitsPerPixel - 8)));
                    }
                }
            }

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, (int)Width, (int)Height);

            Pixelmap rv = new Pixelmap((int)Width, (int)Height, BitsPerPixel, pixels, displayBitmap, displayBitmapBytes);

            return(rv);
        }