// --- コード例 #1 (Code example #1) ---
        /// <summary>
        /// Loads the pixel data of a 16-bit FITS file via FITSHelper, reporting the frame
        /// dimensions taken from the image HDU axes. Returns the success flag of the loader.
        /// </summary>
        private bool Load16BitFitsFileImpl(string fileName, IFITSTimeStampReader timeStampReader,
                                           out uint[,] pixels, out int width, out int height, out uint medianValue, out Type pixelDataType, out bool hasNegativePixels, out short minValue, out uint maxValue,
                                           FITSHelper.CheckOpenedFitsFileCallback callback, int?negPixelsBZero)
        {
            // Exposure is produced by the loader but not surfaced by this overload.
            float exposureSecs;

            height = m_ImageHDU.Axes[HeightIndex];
            width  = m_ImageHDU.Axes[WidthIndex];

            bool loadedOk = FITSHelper.LoadFitsDataInternal(
                m_ImageHDU,
                GetFirstFramePixelArray(), fileName, timeStampReader,
                out pixels, out medianValue, out pixelDataType, out exposureSecs, out hasNegativePixels, out minValue, out maxValue, callback,
                (Array rawData, int rows, int cols, double bzeroHeader, out uint[,] loadedPixels, out uint medianPx, out Type loadedType, out bool negPix, out short minPx, out uint maxPx) =>
                {
                    // NOTE(review): re-reads the HDU axes rather than using the rows/cols passed
                    // in by LoadFitsDataInternal — preserved as-is; confirm they always agree.
                    // An explicit negPixelsBZero overrides the BZERO value from the header.
                    loadedPixels = FITSHelper.Load16BitImageData(rawData, m_ImageHDU.Axes[HeightIndex], m_ImageHDU.Axes[WidthIndex], negPixelsBZero ?? (int)bzeroHeader, out medianPx, out loadedType, out negPix, out minPx, out maxPx);
                });

            return loadedOk;
        }
// --- コード例 #2 (Code example #2) ---
        /// <summary>
        /// Parses the first frame of the FITS file: collects all header cards, reads BZERO,
        /// and loads the flattened pixel data. The frame is loaded twice — once with the
        /// plain loader (to establish MinPixelValue/BZero) and once with the
        /// negative-pixel-aware loader, whose results are the ones returned to the caller.
        /// </summary>
        /// <param name="pixelsFlat">Flattened pixel values of the first frame.</param>
        /// <param name="minPixelValue">Smallest pixel value found in the frame.</param>
        /// <param name="maxPixelValue">Largest pixel value found in the frame.</param>
        /// <param name="bpp">Bits per pixel of the image data.</param>
        /// <param name="bzero">BZERO value read from the FITS header.</param>
        /// <param name="hasNegativePixels">True when the raw data contains negative values.</param>
        private void ParseFirstFrame(out uint[] pixelsFlat, out short minPixelValue, out uint maxPixelValue, out int bpp, out int bzero, out bool hasNegativePixels)
        {
            int      width;
            int      height;
            DateTime?timestamp;
            double?  exposure;
            int      bz    = 0;
            var      cards = new Dictionary <string, string>();

            FITSHelper.Load16BitFitsFile(null, Load16BitFitsFile, null,
                                         (hdu) =>
            {
                var cursor = hdu.Header.GetCursor();
                bz         = FITSHelper.GetBZero(hdu);
                while (cursor.MoveNext())
                {
                    HeaderCard card = hdu.Header.FindCard((string)cursor.Key);
                    // Skip blank keys and the END marker; they carry no card data.
                    if (card != null && !string.IsNullOrWhiteSpace(card.Key) && card.Key != "END")
                    {
                        // Single lookup via TryGetValue (was ContainsKey + indexer, a double lookup).
                        // Repeated keys (e.g. COMMENT/HISTORY) are concatenated with CRLF.
                        string existingValue;
                        if (cards.TryGetValue(card.Key, out existingValue))
                        {
                            cards[card.Key] = existingValue + "\r\n" + card.Value;
                        }
                        else
                        {
                            cards.Add(card.Key, card.Value);
                        }
                    }
                }
            }, out pixelsFlat, out width, out height, out bpp, out timestamp, out exposure, out minPixelValue, out maxPixelValue, out hasNegativePixels);

            bzero         = bz;
            MinPixelValue = minPixelValue;
            BZero         = bz;

            // NOTE(review): this second pass deliberately overwrites the out values from the
            // first load with the negative-pixel-aware results; MinPixelValue/BZero above keep
            // the first pass's values — confirm this asymmetry is intended.
            FITSHelper.Load16BitFitsFile(null, Load16BitFitsFileWithNegativePixels, null, null, out pixelsFlat, out width, out height, out bpp, out timestamp, out exposure, out minPixelValue, out maxPixelValue, out hasNegativePixels);
        }
// --- コード例 #3 (Code example #3) ---
        /// <summary>
        /// Prompts the user for a 16-bit FITS file and loads it as a dark, flat or bias
        /// calibration frame. Validates frame size, pixel type, gamma correction and camera
        /// response against the currently loaded video, asking the user to confirm mismatches.
        /// </summary>
        /// <param name="title">Title for the file-open dialog.</param>
        /// <param name="pixels">Receives the loaded pixel data as floats.</param>
        /// <param name="medianValue">Receives the median pixel value of the frame.</param>
        /// <param name="exposureSeconds">Receives the exposure time read from the file.</param>
        /// <param name="imagesCombined">Receives the SNAPSHOT header value (number of stacked images), defaulting to 1.</param>
        /// <returns>True when a file was selected, validated (or mismatches confirmed) and loaded.</returns>
        private bool LoadDarkFlatOrBiasFrameInternal(string title, ref float[,] pixels, ref float medianValue, ref float exposureSeconds, ref int imagesCombined)
        {
            string filter = "FITS Image 16 bit (*.fit;*.fits)|*.fit;*.fits";

            string fileName;

            if (m_VideoController.ShowOpenFileDialog(title, filter, out fileName) == DialogResult.OK &&
                File.Exists(fileName))
            {
                Type pixelDataType;
                // Default of 1 is used when the SNAPSHOT card is absent or unparsable.
                int  snapshot = 1;
                bool hasNegativePixels;

                // The delegate below is a pre-load validation callback: returning false
                // aborts the load.
                bool loaded = FITSHelper.LoadFloatingPointFitsFile(
                    fileName,
                    null,
                    out pixels,
                    out medianValue,
                    out pixelDataType,
                    out exposureSeconds,
                    out hasNegativePixels,
                    delegate(BasicHDU imageHDU)
                {
                    // Reject anything that is not a 2D image matching the current video's
                    // frame size. NOTE(review): Axes[0] is compared to height and Axes[1]
                    // to width here, while code example #1 uses HeightIndex/WidthIndex
                    // constants — confirm these agree. Also Count() is LINQ over what is
                    // presumably an array; Length would avoid the extension call.
                    if (
                        imageHDU.Axes.Count() != 2 ||
                        imageHDU.Axes[0] != TangraContext.Current.FrameHeight ||
                        imageHDU.Axes[1] != TangraContext.Current.FrameWidth)
                    {
                        m_VideoController.ShowMessageBox(
                            "Selected image has a different frame size from the currently loaded video.", "Tangra",
                            MessageBoxButtons.OK, MessageBoxIcon.Error);

                        return(false);
                    }

                    // Detect floating-point data by inspecting the first row of the data
                    // array: either a float[] row directly, or a nested array whose first
                    // element is a float.
                    bool isFloatingPointImage = false;
                    Array dataArray           = (Array)imageHDU.Data.DataArray;
                    object entry = dataArray.GetValue(0);
                    if (entry is float[])
                    {
                        isFloatingPointImage = true;
                    }
                    else if (entry is Array)
                    {
                        isFloatingPointImage = ((Array)entry).GetValue(0) is float;
                    }

                    // SNAPSHOT holds the number of combined (stacked) images; snapshot keeps
                    // its default of 1 when the card is missing or doesn't parse.
                    HeaderCard imagesCombinedCard = imageHDU.Header.FindCard("SNAPSHOT");
                    if (imagesCombinedCard != null)
                    {
                        int.TryParse(imagesCombinedCard.Value, out snapshot);
                    }


                    // Integer images other than 16-bit may not match the video's data type;
                    // let the user decide whether to continue.
                    if (!isFloatingPointImage && imageHDU.BitPix != 16)
                    {
                        if (m_VideoController.ShowMessageBox(
                                "Selected image data type may not be compatible with the currently loaded video. Do you wish to continue?", "Tangra",
                                MessageBoxButtons.YesNo, MessageBoxIcon.Error) == DialogResult.No)
                        {
                            return(false);
                        }
                    }

                    // TANGAMMA is a Tangra-written card recording the encoding gamma the
                    // image was corrected with. Both gammas are normalized to a fixed
                    // "0.0000" invariant-culture string so they can be compared textually.
                    float usedEncodingGamma    = float.NaN;
                    string usedGammaString     = null;
                    HeaderCard tangraGammaCard = imageHDU.Header.FindCard("TANGAMMA");
                    if (tangraGammaCard != null &&
                        float.TryParse(tangraGammaCard.Value, NumberStyles.Number, CultureInfo.InvariantCulture, out usedEncodingGamma))
                    {
                        usedGammaString = usedEncodingGamma.ToString("0.0000", CultureInfo.InvariantCulture);
                    }

                    string gammaUsageError = null;
                    string currGammaString = TangraConfig.Settings.Generic.ReverseGammaCorrection
                                                        ? TangraConfig.Settings.Photometry.EncodingGamma.ToString("0.0000", CultureInfo.InvariantCulture)
                                                        : null;
                    // Three mismatch cases: image not corrected but video is; image
                    // corrected but video isn't; both corrected but with different gammas.
                    // Branch order matters: the first branch consumes the used==null case
                    // before the general inequality check in the third.
                    if (TangraConfig.Settings.Generic.ReverseGammaCorrection && currGammaString != null && usedGammaString == null)
                    {
                        gammaUsageError = string.Format("Selected image hasn't been Gamma corrected while the current video uses a gamma of {0}.", currGammaString);
                    }
                    else if (!TangraConfig.Settings.Generic.ReverseGammaCorrection && usedGammaString != null && currGammaString == null)
                    {
                        gammaUsageError = string.Format("Selected image has been corrected for Gamma of {0} while the current video doesn't use gamma correction.", usedGammaString);
                    }
                    else if (TangraConfig.Settings.Generic.ReverseGammaCorrection && !string.Equals(currGammaString, usedGammaString))
                    {
                        gammaUsageError = string.Format("Selected image has been corrected for Gamma of {0} while the current video uses a gamma of {1}.", usedGammaString, currGammaString);
                    }

                    // A gamma mismatch is a warning, not a hard failure — the user may
                    // choose to continue.
                    if (gammaUsageError != null)
                    {
                        if (m_VideoController.ShowMessageBox(gammaUsageError + " Do you wish to continue?", "Tangra", MessageBoxButtons.YesNo, MessageBoxIcon.Error) == DialogResult.No)
                        {
                            return(false);
                        }
                    }

                    // TANCMRSP records the camera response correction (as an int cast of
                    // the KnownCameraResponse enum) the image was produced with. Same
                    // three-way mismatch logic as the gamma check above.
                    // NOTE(review): the user-facing messages below spell "reponse" —
                    // typo in the runtime strings, left untouched here.
                    TangraConfig.KnownCameraResponse usedCameraResponse = TangraConfig.KnownCameraResponse.Undefined;
                    string usedCameraResponseString  = null;
                    int usedCameraResponseInt        = 0;
                    HeaderCard tangraCamResponseCard = imageHDU.Header.FindCard("TANCMRSP");
                    if (tangraCamResponseCard != null &&
                        int.TryParse(tangraCamResponseCard.Value, NumberStyles.Number, CultureInfo.InvariantCulture, out usedCameraResponseInt))
                    {
                        usedCameraResponse       = (TangraConfig.KnownCameraResponse)usedCameraResponseInt;
                        usedCameraResponseString = usedCameraResponse.ToString();
                    }

                    string cameraResponseUsageError = null;
                    string currCameraResponseString = TangraConfig.Settings.Generic.ReverseCameraResponse
                                                        ? TangraConfig.Settings.Photometry.KnownCameraResponse.ToString()
                                                        : null;
                    if (TangraConfig.Settings.Generic.ReverseCameraResponse && currCameraResponseString != null && usedCameraResponseString == null)
                    {
                        cameraResponseUsageError = string.Format("Selected image hasn't been corrected for camera reponse while the current video uses a camera response correction for {0}.", currCameraResponseString);
                    }
                    else if (!TangraConfig.Settings.Generic.ReverseCameraResponse && usedCameraResponseString != null && currCameraResponseString == null)
                    {
                        cameraResponseUsageError = string.Format("Selected image has been corrected for camera response of {0} while the current video doesn't use camera response correction.", usedCameraResponseString);
                    }
                    else if (TangraConfig.Settings.Generic.ReverseCameraResponse && !string.Equals(currCameraResponseString, usedCameraResponseString))
                    {
                        cameraResponseUsageError = string.Format("Selected image has been corrected for camera reponse of {0} while the current video uses a camera response correction for {1}.", usedCameraResponseString, currCameraResponseString);
                    }

                    // Like the gamma mismatch, a camera-response mismatch only needs
                    // user confirmation.
                    if (cameraResponseUsageError != null)
                    {
                        if (m_VideoController.ShowMessageBox(cameraResponseUsageError + " Do you wish to continue?", "Tangra", MessageBoxButtons.YesNo, MessageBoxIcon.Error) == DialogResult.No)
                        {
                            return(false);
                        }
                    }

                    return(true);
                });

                // Propagate the SNAPSHOT value captured inside the validation delegate.
                imagesCombined = snapshot;
                return(loaded);
            }

            // User cancelled the dialog or the chosen file does not exist.
            return(false);
        }