public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            if (startFrameNo < 0 || startFrameNo >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + startFrameNo);
            }

            int actualFramesToIntegrate = Math.Min(startFrameNo + framesToIntegrate, m_FirstFrame + m_CountFrames - 1) - startFrameNo;

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
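            // rawBitmapBytes holds a complete 24bpp BMP image: 3 bytes per pixel plus
            // BITMAPFILEHEADER (14 bytes), BITMAPINFOHEADER (40 bytes) and one spare byte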
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

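            // A single native call fills the pixel buffers, the display bitmap bytes and the frame info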
            lock (m_SyncLock)
            {
                TangraCore.ADVGetIntegratedFrame(startFrameNo, actualFramesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap = (Bitmap)Image.FromStream(memStr);

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.UnprocessedPixels = unprocessedPixels;
                return rv;
            }
        }
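
A minimal usage sketch (hypothetical caller: stream stands for an instance of the class exposing this method, and the frame numbers are placeholders):

        // Integrate 8 frames starting at frame 100, using median averaging rather than the mean
        Pixelmap integrated = stream.GetIntegratedFrame(100, 8, isSlidingIntegration: false, isMedianAveraging: true);
        uint firstRawPixel = integrated.UnprocessedPixels[0];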
Example #2
        public AstroImage(Pixelmap pixelmap, Rectangle processArea, bool useUnprocessedPixels = false)
        {
            if (useUnprocessedPixels)
            {
                m_Pixelmap = new Pixelmap(pixelmap.Width, pixelmap.Height, pixelmap.BitPixCamera, new uint[pixelmap.Pixels.Length], null, null);
                m_Pixelmap.SetMaxSignalValue(pixelmap.MaxSignalValue);
                var pixelsToCopy = pixelmap.UnprocessedPixels ?? pixelmap.Pixels;
                for (int i = 0; i < pixelmap.Pixels.Length; i++)
                {
                    m_Pixelmap.Pixels[i] = pixelsToCopy[i];
                }
            }
            else
            {
                m_Pixelmap = pixelmap;
            }

            m_Rect = processArea;

            m_Width  = m_Pixelmap.Width;
            m_Height = m_Pixelmap.Height;
            m_BitPix = m_Pixelmap.BitPixCamera;

            if (m_Rect.Width > m_Width)
            {
                m_Rect.Width = m_Width;
            }
            if (m_Rect.Height > m_Height)
            {
                m_Rect.Height = m_Height;
            }
        }
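
A minimal usage sketch (assumes pixelmap came from one of the GetPixelmap overloads shown here; the process area is a placeholder):

        // Restrict processing to a 100x100 region and work on the raw (pre-processing) pixel values
        var image = new AstroImage(pixelmap, new Rectangle(0, 0, 100, 100), useUnprocessedPixels: true);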
Example #3
        private void GenerateAVIVideo(ModelConfig modelConfig, List <IStar> stars)
        {
            TangraVideo.CloseAviFile();
            TangraVideo.StartNewAviFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, 8, 25, false);

            m_MagnitudeToPeakDict = null;

            try
            {
                //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
                //AddOnScreenText(bmp, modelConfig, "The simulated video starts from the next frame");
                //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);

                uint maxSignalValue = (uint)(255 * modelConfig.Integration);

                Random rndGen = new Random((int)DateTime.Now.Ticks);
                m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
                for (int x = 0; x < modelConfig.FrameWidth; x++)
                {
                    for (int y = 0; y < modelConfig.FrameHeight; y++)
                    {
                        if (modelConfig.DarkFrameMean > 0)
                        {
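                            // ~1% chance per pixel of simulating a hot pixel (full-scale 255 peak)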
                            double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0;
                            double darkPixel  = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                            double bgPixel    = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                            m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                        }
                        else
                        {
                            m_SimulatedDarkFrame[x, y] = 0;
                        }
                    }
                }

                for (int i = 0; i <= modelConfig.TotalFrames; i++)
                {
                    using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
                    {
                        pixmap.SetMaxSignalValue(maxSignalValue);

                        VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                        GenerateFrame(pixmap, stars, modelConfig);

                        TangraVideo.AddAviVideoFrame(pixmap, modelConfig.LinearityCoefficient, (int)pixmap.MaxSignalValue);
                    }

                    InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
                }
            }
            finally
            {
                TangraVideo.CloseAviFile();
            }
        }
Example #4
        public Pixelmap GetPixelmap(int index)
        {
            if (index < 0 || index >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

            byte[] gpsFix      = new byte[256 * 16];
            byte[] userCommand = new byte[256 * 16];
            byte[] systemError = new byte[256 * 16];

            lock (m_SyncLock)
            {
                TangraCore.ADVGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo, gpsFix, userCommand, systemError);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);
            m_CurrentFrameInfo.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommand);
            m_CurrentFrameInfo.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemError);
            m_CurrentFrameInfo.GPSFixString      = AdvFrameInfo.GetStringFromBytes(gpsFix);

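            // Split-field OSD correction applies only to integrated AAV frames, and only when both
            // OSD scan lines are configured (their product is non-zero only when both are set)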
            if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
            {
                TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
            }

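            // NTP-timestamped frame with no recorded exposure: derive the exposure from the gap
            // to the next frame's NTP end timestamp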
            if (frameInfo.HasNtpTimeStamp && m_CurrentFrameInfo.Exposure10thMs == 0 &&
                index + 1 < m_FirstFrame + m_CountFrames)
            {
                lock (m_SyncLock)
                {
                    TangraCore.ADVGetFrameStatusChannel(index + 1, frameInfo, gpsFix, userCommand, systemError);
                }
                if (frameInfo.HasNtpTimeStamp)
                {
                    m_CurrentFrameInfo.Exposure10thMs = (int)Math.Round(new TimeSpan(frameInfo.EndExposureNtpTimeStamp.Ticks - m_CurrentFrameInfo.EndExposureNtpTimeStamp.Ticks).TotalMilliseconds * 10);
                }
            }

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames == 0)
                {
                    // This is a VTI split reference frame. Should it be marked visually as such?
                    displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, m_Width, m_Height);
                    for (int i = 0; i < pixels.Length; i++)
                    {
                        displayBitmapBytes[i] = (byte)pixels[i];
                    }
                }
                else
                {
                    try
                    {
                        displayBitmap = (Bitmap)Image.FromStream(memStr);
                    }
                    catch (Exception ex)
                    {
                        Trace.WriteLine(ex.GetFullStackTrace());
                        displayBitmap = new Bitmap(m_Width, m_Height);
                    }
                }

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.FrameState        = GetCurrentFrameState(index);
                rv.UnprocessedPixels = unprocessedPixels;
                return rv;
            }
        }
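
A minimal usage sketch (hypothetical caller; stream and the frame index are placeholders):

        Pixelmap frame = stream.GetPixelmap(42);
        var state = frame.FrameState;                    // per-frame status populated above
        uint firstRawPixel = frame.UnprocessedPixels[0]; // raw values before pre-processing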
Example #5
        private void GenerateAAVVideo(ModelConfig modelConfig, List <IStar> stars)
        {
            AavFileCreator.CloseFile();
            AavFileCreator.StartNewFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, modelConfig.Integration);

            m_MagnitudeToPeakDict = null;

            try
            {
                //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
                //AddOnScreenText(bmp, modelConfig, "The simulated video starts from the next frame");
                //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);
                DateTime zeroFrameDT = DateTime.UtcNow;

                uint maxSignalValue = (uint)(255 * modelConfig.Integration);

                Random rndGen = new Random((int)DateTime.Now.Ticks);
                m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
                for (int x = 0; x < modelConfig.FrameWidth; x++)
                {
                    for (int y = 0; y < modelConfig.FrameHeight; y++)
                    {
                        if (modelConfig.DarkFrameMean > 0)
                        {
                            double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0;
                            double darkPixel  = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                            double bgPixel    = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                            m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                        }
                        else
                        {
                            m_SimulatedDarkFrame[x, y] = 0;
                        }
                    }
                }

                for (int i = 0; i <= modelConfig.TotalFrames; i++)
                {
                    using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
                    {
                        pixmap.SetMaxSignalValue(maxSignalValue);

                        VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                        GenerateFrame(pixmap, stars, modelConfig);

                        DateTime startDT = zeroFrameDT.AddMilliseconds(40 * modelConfig.Integration * i);
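                        // Apply a non-linear (gamma-like) response curve when the linearity coefficient differs from 1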
                        if (Math.Abs(modelConfig.LinearityCoefficient - 1) > 0.0001)
                        {
                            uint   maxVal     = pixmap.MaxSignalValue;
                            double gammaCoeff = maxVal / Math.Pow((double)maxVal, modelConfig.LinearityCoefficient);
                            for (int x = 0; x < pixmap.Width; x++)
                            {
                                for (int y = 0; y < pixmap.Height; y++)
                                {
                                    uint nonLinVal = (uint)Math.Round(gammaCoeff * Math.Pow(pixmap[x, y], modelConfig.LinearityCoefficient));
                                    pixmap[x, y] = Math.Min(maxVal, Math.Max(0, nonLinVal));
                                }
                            }
                        }
                        AavFileCreator.AddVideoFrame(startDT, startDT.AddMilliseconds(40 * modelConfig.Integration), pixmap);
                    }

                    InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
                }
            }
            finally
            {
                AavFileCreator.CloseFile();
            }
        }
Example #6
        public Pixelmap GetPixelmap(int index, int streamId)
        {
            if (m_AdvFile.MainSteamInfo.FrameCount == 0)
            {
                return null;
            }

            uint[] pixels;
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];

            Adv.AdvFrameInfo advFrameInfo;
            lock (m_SyncLock)
            {
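                // streamId 0 reads from the main image stream, 1 from the calibration stream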
                if (streamId == 0)
                {
                    pixels = m_AdvFile.GetMainFramePixels((uint)index, out advFrameInfo);
                }
                else if (streamId == 1)
                {
                    pixels = m_AdvFile.GetCalibrationFramePixels((uint)index, out advFrameInfo);
                }
                else
                {
                    throw new ArgumentOutOfRangeException("streamId");
                }

                if (unprocessedPixels.Length != pixels.Length)
                {
                    throw new ApplicationException("ADV Buffer Error");
                }

                Array.Copy(pixels, unprocessedPixels, pixels.Length);
            }

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(unprocessedPixels, pixels, m_Width, m_Height, m_BitPix, m_MaxPixelValue, (float)(advFrameInfo.UtcExposureMilliseconds / 1000.0));

            TangraCore.GetBitmapPixels(Width, Height, pixels, rawBitmapBytes, displayBitmapBytes, true, (ushort)BitPix, m_MaxPixelValue);

            Bitmap displayBitmap = null;

            if (m_AAVVersion != null && m_IntegratedAAVFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
            {
                TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
                using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
                {
                    try
                    {
                        displayBitmap = (Bitmap)Image.FromStream(memStr);
                    }
                    catch (Exception ex)
                    {
                        Trace.WriteLine(ex.GetFullStackTrace());
                        displayBitmap = new Bitmap(m_Width, m_Height);
                    }
                }
            }
            else
            {
                displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, Width, Height);
            }

            Pixelmap rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);

            rv.SetMaxSignalValue(m_MaxPixelValue);
            rv.FrameState        = GetCurrentFrameState(advFrameInfo);
            rv.UnprocessedPixels = unprocessedPixels;
            return rv;
        }
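
A minimal usage sketch (hypothetical caller; advStream is a placeholder for an instance of the containing class):

        // Frame 0 from the main image stream; calibration frames use streamId 1
        Pixelmap mainFrame = advStream.GetPixelmap(0, 0);
        if (mainFrame != null)
        {
            uint maxSignal = mainFrame.MaxSignalValue;   // normalization value set via SetMaxSignalValue
        }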