Beispiel #1
0
        /// <summary>
        /// Builds a Pixelmap from a flat FITS pixel array, applying Tangra pre-processing
        /// and constructing the 8-bit display bitmap.
        /// </summary>
        /// <param name="width">Frame width in pixels.</param>
        /// <param name="height">Frame height in pixels.</param>
        /// <param name="pixelsFlat">Raw pixel values, row-major, length width * height. Kept unmodified and exposed as UnprocessedPixels.</param>
        /// <param name="bitPix">FITS BITPIX (bits per pixel) of the source data.</param>
        /// <param name="hasUtcTimeStamps">True when the FITS file carries usable UTC time stamps.</param>
        /// <param name="exposure">Exposure in seconds, if known.</param>
        /// <param name="timestamp">Central exposure time stamp, if known.</param>
        /// <param name="fitsImage">The source HDU; stored in FrameState.Tag for later reference.</param>
        /// <param name="cards">FITS header cards, copied into FrameState.AdditionalProperties.</param>
        /// <returns>The constructed Pixelmap.</returns>
        internal static Pixelmap BuildFitsPixelmap(int width, int height, uint[] pixelsFlat, int bitPix, bool hasUtcTimeStamps, double?exposure, DateTime?timestamp, BasicHDU fitsImage, Dictionary <string, string> cards)
        {
            byte[] displayBitmapBytes = new byte[width * height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, width, height) + 40 + 14 + 1];

            TangraCore.PreProcessors.EnsurePixelRange(pixelsFlat, width, height, bitPix, 0);

            // Pre-processing writes into a copy so the original flat pixels stay untouched.
            uint[] flatPixelsCopy = new uint[pixelsFlat.Length];
            Array.Copy(pixelsFlat, flatPixelsCopy, pixelsFlat.Length);

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(pixelsFlat, flatPixelsCopy, width, height, bitPix, 0 /* No normal value for FITS files */, exposure.HasValue ? (float)exposure.Value : 0);

            TangraCore.GetBitmapPixels(width, height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)bitPix, 0);

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, width, height);

            Pixelmap rv = new Pixelmap(width, height, bitPix, flatPixelsCopy, displayBitmap, displayBitmapBytes);

            rv.UnprocessedPixels = pixelsFlat;
            if (hasUtcTimeStamps)
            {
                rv.FrameState.CentralExposureTime    = timestamp ?? DateTime.MinValue;
                rv.FrameState.ExposureInMilliseconds = exposure.HasValue ? (float)(exposure.Value * 1000.0) : 0;
            }

            rv.FrameState.Tag = fitsImage;
            rv.FrameState.AdditionalProperties = new SafeDictionary <string, object>();
            // Iterate key/value pairs directly instead of Keys + indexer (avoids a second lookup per card).
            foreach (var card in cards)
            {
                rv.FrameState.AdditionalProperties.Add(card.Key, card.Value);
            }

            return rv;
        }
Beispiel #2
0
        /// <summary>
        /// Integrates a run of ADV frames starting at <paramref name="startFrameNo"/> and
        /// returns the result as a single Pixelmap.
        /// </summary>
        /// <param name="startFrameNo">First frame of the integration interval.</param>
        /// <param name="framesToIntegrate">Requested number of frames; clipped so the interval stays inside the file.</param>
        /// <param name="isSlidingIntegration">Passed through to the native integration routine.</param>
        /// <param name="isMedianAveraging">When true the native routine uses median rather than mean averaging.</param>
        /// <exception cref="ApplicationException">Thrown when startFrameNo is outside the file's frame range.</exception>
        public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            int frameLimit = m_FirstFrame + m_CountFrames;
            if (startFrameNo < 0 || startFrameNo >= frameLimit)
            {
                throw new ApplicationException("Invalid frame position: " + startFrameNo);
            }

            // Clip the requested interval so it never runs past the last available frame.
            int actualFramesToIntegrate = Math.Min(startFrameNo + framesToIntegrate, frameLimit - 1) - startFrameNo;

            var pixels             = new uint[m_Width * m_Height];
            var unprocessedPixels  = new uint[m_Width * m_Height];
            var displayBitmapBytes = new byte[m_Width * m_Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            var rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, Width, Height) + 40 + 14 + 1];
            var frameInfo          = new AdvFrameInfoNative();

            lock (m_SyncLock)
            {
                TangraCore.ADVGetIntegratedFrame(startFrameNo, actualFramesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);

            using (var bitmapStream = new MemoryStream(rawBitmapBytes))
            {
                var displayBitmap = (Bitmap)Bitmap.FromStream(bitmapStream);

                var result = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                result.SetMaxSignalValue(m_Aav16NormVal);
                result.UnprocessedPixels = unprocessedPixels;
                return result;
            }
        }
Beispiel #3
0
        /// <summary>
        /// Returns the FITS image as a Pixelmap with pre-processing applied; any FITS header
        /// cards are copied into FrameState.AdditionalProperties.
        /// </summary>
        /// <param name="index">Frame index; unused here — this source exposes a single image. TODO confirm against callers.</param>
        /// <returns>The constructed Pixelmap.</returns>
        public Pixelmap GetPixelmap(int index)
        {
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, m_Width, m_Height) + 40 + 14 + 1];

            // Pre-processing writes into a copy so m_FlatPixels stays unprocessed.
            uint[] flatPixelsCopy = new uint[m_FlatPixels.Length];
            Array.Copy(m_FlatPixels, flatPixelsCopy, m_FlatPixels.Length);

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(m_FlatPixels, flatPixelsCopy, m_Width, m_Height, m_Bpp, 0 /* No normal value for FITS files */, m_Exposure);

            TangraCore.GetBitmapPixels(m_Width, m_Height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)m_Bpp, 0);

            Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, m_Width, m_Height);

            Pixelmap rv = new Pixelmap(m_Width, m_Height, m_Bpp, flatPixelsCopy, displayBitmap, displayBitmapBytes);

            rv.UnprocessedPixels = m_FlatPixels;

            if (m_Cards != null && m_Cards.Count > 0)
            {
                rv.FrameState.AdditionalProperties = new SafeDictionary <string, object>();
                // Iterate key/value pairs directly instead of Keys + indexer (avoids a second lookup per card).
                foreach (var card in m_Cards)
                {
                    rv.FrameState.AdditionalProperties.Add(card.Key, card.Value);
                }
            }

            return rv;
        }
Beispiel #4
0
        /// <summary>
        /// Reads a single SER frame and returns it as a Pixelmap, resolving the central
        /// exposure time from the configured time stamp source.
        /// </summary>
        /// <param name="index">Frame number; must lie within [FirstFrame, LastFrame].</param>
        /// <exception cref="ApplicationException">Thrown when index is outside the file's frame range.</exception>
        public Pixelmap GetPixelmap(int index)
        {
            if (index < FirstFrame || index > LastFrame)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            var pixels             = new uint[Width * Height];
            var unprocessedPixels  = new uint[Width * Height];
            var displayBitmapBytes = new byte[Width * Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            var rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, Width, Height) + 40 + 14 + 1];
            var frameInfo          = new SerNativeFrameInfo();

            TangraCore.SERGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, (ushort)BitPix, ref frameInfo);

            m_CurrentFrameInfo = new SerFrameInfo(frameInfo);

            using (var bitmapStream = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;
                try
                {
                    displayBitmap = (Bitmap)Bitmap.FromStream(bitmapStream);
                }
                catch (Exception ex)
                {
                    // Fall back to a blank bitmap rather than failing the whole frame read.
                    Trace.WriteLine(ex.GetFullStackTrace());
                    displayBitmap = new Bitmap(Width, Height);
                }

                var rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.UnprocessedPixels = unprocessedPixels;
                rv.FrameState        = new FrameStateData()
                {
                    SystemTime = m_CurrentFrameInfo.TimeStampUtc
                };

                switch (UseTimeStamp)
                {
                    case SerUseTimeStamp.FireCaptureLog:
                        // FireCapture log entries are looked up offset by one from the frame index.
                        DateTime loggedTime;
                        if (m_FireCaptureTimeStamps.TryGetValue(1 + index, out loggedTime))
                        {
                            rv.FrameState.CentralExposureTime = loggedTime;
                        }
                        break;

                    case SerUseTimeStamp.SerEmbeddedUtcTime:
                        rv.FrameState.CentralExposureTime = m_CurrentFrameInfo.TimeStampUtc;
                        break;
                }

                return rv;
            }
        }
Beispiel #5
0
        /// <summary>
        /// Integrates a run of SER frames starting at <paramref name="startFrameNo"/> and returns
        /// the result as a Pixelmap, resolving the central exposure time from the configured
        /// time stamp source.
        /// </summary>
        /// <param name="startFrameNo">First frame of the integration interval; must lie within [FirstFrame, LastFrame].</param>
        /// <param name="framesToIntegrate">Requested number of frames; clipped so the interval stays inside the file.</param>
        /// <param name="isSlidingIntegration">Passed through to the native integration routine.</param>
        /// <param name="isMedianAveraging">When true the native routine uses median rather than mean averaging.</param>
        /// <returns>The integrated frame.</returns>
        /// <exception cref="ApplicationException">Thrown when startFrameNo is outside the file's frame range.</exception>
        public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            if (startFrameNo < FirstFrame || startFrameNo > LastFrame)
            {
                throw new ApplicationException("Invalid frame position: " + startFrameNo);
            }

            // Clip the requested interval to the end of the file, integrating at least one frame.
            int actualFramesToIntegrate = Math.Max(1, Math.Min(startFrameNo + framesToIntegrate, LastFrame - 1) - startFrameNo);

            uint[] pixels             = new uint[Width * Height];
            uint[] unprocessedPixels  = new uint[Width * Height];
            byte[] displayBitmapBytes = new byte[Width * Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, Width, Height) + 40 + 14 + 1];
            var    frameInfo          = new SerNativeFrameInfo();

            TangraCore.SERGetIntegratedFrame(startFrameNo, actualFramesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, (ushort)BitPix, ref frameInfo);

            m_CurrentFrameInfo = new SerFrameInfo(frameInfo);

            using (var memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                try
                {
                    displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                }
                catch (Exception ex)
                {
                    // Fall back to a blank bitmap rather than failing the whole integration.
                    Trace.WriteLine(ex.GetFullStackTrace());
                    displayBitmap = new Bitmap(Width, Height);
                }

                var rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.UnprocessedPixels = unprocessedPixels;
                rv.FrameState        = new FrameStateData()
                {
                    SystemTime = m_CurrentFrameInfo.TimeStampUtc
                };

                if (UseTimeStamp != SerUseTimeStamp.None)
                {
                    if (UseTimeStamp == SerUseTimeStamp.FireCaptureLog)
                    {
                        DateTime dt, dt2;
                        if (actualFramesToIntegrate % 2 == 0 && actualFramesToIntegrate > 1)
                        {
                            // Even frame count: the interval midpoint falls between the two central
                            // frames, so take the halfway point of their two log entries.
                            // NOTE(review): log keys are offset by one from the frame numbers used
                            // elsewhere here — presumably the FireCapture log is 1-based; confirm.
                            if (
                                m_FireCaptureTimeStamps.TryGetValue(startFrameNo + actualFramesToIntegrate / 2, out dt) &&
                                m_FireCaptureTimeStamps.TryGetValue(1 + startFrameNo + actualFramesToIntegrate / 2, out dt2))
                            {
                                rv.FrameState.CentralExposureTime = dt.AddTicks((dt2 - dt).Ticks / 2);
                            }
                        }
                        else
                        {
                            // Odd frame count: use the log entry of the single middle frame.
                            if (m_FireCaptureTimeStamps.TryGetValue(1 + startFrameNo + actualFramesToIntegrate / 2, out dt))
                            {
                                rv.FrameState.CentralExposureTime = dt;
                            }
                        }
                    }
                    else if (UseTimeStamp == SerUseTimeStamp.SerEmbeddedUtcTime)
                    {
                        rv.FrameState.CentralExposureTime = m_CurrentFrameInfo.TimeStampUtc;
                    }
                }

                return(rv);
            }
        }
        /// <summary>
        /// Reads a frame from the ADV file's main (streamId == 0) or calibration (streamId == 1)
        /// stream, applies pre-processing and returns it as a Pixelmap.
        /// </summary>
        /// <param name="index">Frame number within the selected stream.</param>
        /// <param name="streamId">0 for the main stream, 1 for the calibration stream.</param>
        /// <returns>The frame as a Pixelmap, or null when the main stream has no frames.</returns>
        /// <exception cref="ArgumentOutOfRangeException">Thrown for any streamId other than 0 or 1.</exception>
        public Pixelmap GetPixelmap(int index, int streamId)
        {
            if (m_AdvFile.MainSteamInfo.FrameCount == 0)
            {
                return(null);
            }

            uint[] pixels;
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, m_Width, m_Height) + 40 + 14 + 1];

            Adv.AdvFrameInfo advFrameInfo;
            lock (m_SyncLock)
            {
                if (streamId == 0)
                {
                    pixels = m_AdvFile.GetMainFramePixels((uint)index, out advFrameInfo);
                }
                else if (streamId == 1)
                {
                    pixels = m_AdvFile.GetCalibrationFramePixels((uint)index, out advFrameInfo);
                }
                else
                {
                    throw new ArgumentOutOfRangeException("streamId");
                }

                // The file is expected to deliver exactly m_Width * m_Height pixels per frame.
                if (unprocessedPixels.Length != pixels.Length)
                {
                    throw new ApplicationException("ADV Buffer Error");
                }

                // Preserve the raw values before pre-processing mutates 'pixels' below.
                Array.Copy(pixels, unprocessedPixels, pixels.Length);
            }

            TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(unprocessedPixels, pixels, m_Width, m_Height, m_BitPix, m_MaxPixelValue, (float)(advFrameInfo.UtcExposureMilliseconds / 1000.0));

            TangraCore.GetBitmapPixels(Width, Height, pixels, rawBitmapBytes, displayBitmapBytes, true, (ushort)BitPix, m_MaxPixelValue);

            Bitmap displayBitmap = null;

            // m_OsdFirstLine * m_OsdLastLine != 0 holds exactly when both OSD line numbers are set.
            if (m_AAVVersion != null && m_IntegratedAAVFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
            {
                // Split-fields OSD display for integrated AAV frames: rework the raw bitmap
                // in place, then decode it as an image.
                TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
                using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
                {
                    try
                    {
                        displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                    }
                    catch (Exception ex)
                    {
                        // Fall back to a blank bitmap rather than failing the whole frame read.
                        Trace.WriteLine(Adv.Extensions.GetFullStackTrace(ex));
                        displayBitmap = new Bitmap(m_Width, m_Height);
                    }
                }
            }
            else
            {
                displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, Width, Height);
            }

            Pixelmap rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);

            rv.SetMaxSignalValue(m_MaxPixelValue);
            rv.FrameState        = GetCurrentFrameState(advFrameInfo);
            rv.UnprocessedPixels = unprocessedPixels;
            return(rv);
        }
Beispiel #7
0
        /// <summary>
        /// Reads a single ADV/AAV frame together with its status channel and returns it as a
        /// Pixelmap, updating m_CurrentFrameInfo as a side effect.
        /// </summary>
        /// <param name="index">Frame number; must be below m_FirstFrame + m_CountFrames.</param>
        /// <returns>The frame as a Pixelmap.</returns>
        /// <exception cref="ApplicationException">Thrown when index is at or past the end of the file.</exception>
        public Pixelmap GetPixelmap(int index)
        {
            if (index >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            // 24bpp BI_RGB pixel data + 40-byte BITMAPINFOHEADER + 14-byte BITMAPFILEHEADER + 1 spare byte
            byte[] rawBitmapBytes     = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, m_Width, m_Height) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

            // Buffers for the status-channel string tags (sized for up to 256 16-byte entries).
            byte[] gpsFix      = new byte[256 * 16];
            byte[] userCommand = new byte[256 * 16];
            byte[] systemError = new byte[256 * 16];

            lock (m_SyncLock)
            {
                TangraCore.ADVGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo, gpsFix, userCommand, systemError);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);
            m_CurrentFrameInfo.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommand);
            m_CurrentFrameInfo.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemError);
            m_CurrentFrameInfo.GPSFixString      = AdvFrameInfo.GetStringFromBytes(gpsFix);

            // Split-fields OSD display for integrated AAV frames; the product check holds
            // exactly when both OSD line numbers are non-zero.
            if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
            {
                TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
            }

            // NTP-stamped frame with no recorded exposure: derive the exposure from the gap
            // between this frame's and the next frame's end-of-exposure NTP time stamps.
            if (frameInfo.HasNtpTimeStamp && m_CurrentFrameInfo.Exposure10thMs == 0 &&
                index + 1 < m_FirstFrame + m_CountFrames)
            {
                lock (m_SyncLock)
                {
                    // Re-reads frameInfo (and the status buffers) for frame index + 1.
                    TangraCore.ADVGetFrameStatusChannel(index + 1, frameInfo, gpsFix, userCommand, systemError);
                }
                if (frameInfo.HasNtpTimeStamp)
                {
                    m_CurrentFrameInfo.Exposure10thMs = (int)Math.Round(new TimeSpan(frameInfo.EndExposureNtpTimeStamp.Ticks - m_CurrentFrameInfo.EndExposureNtpTimeStamp.Ticks).TotalMilliseconds * 10);
                }
            }

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames == 0)
                {
                    // This is a VTI Split reference frame. Put some mark on it to mark it as such??
                    displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, m_Width, m_Height);
                    // Mirror the (truncated) pixel values into the 8-bit display buffer.
                    for (int i = 0; i < pixels.Length; i++)
                    {
                        displayBitmapBytes[i] = (byte)pixels[i];
                    }
                }
                else
                {
                    try
                    {
                        displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                    }
                    catch (Exception ex)
                    {
                        // Fall back to a blank bitmap rather than failing the whole frame read.
                        Trace.WriteLine(ex.GetFullStackTrace());
                        displayBitmap = new Bitmap(m_Width, m_Height);
                    }
                }

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.FrameState        = GetCurrentFrameState(index);
                rv.UnprocessedPixels = unprocessedPixels;
                return(rv);
            }
        }