/// <summary>
/// Reads the status channel of the given frame and converts it to a
/// <see cref="FrameStateData"/> for the rest of the pipeline.
/// </summary>
public FrameStateData GetFrameStatusChannel(int index)
        {
            // BUGFIX: also reject negative positions (matching the guard in
            // GetIntegratedFrame) instead of passing them to the native reader.
            if (index < 0 || index >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            var frameInfo = new AdvFrameInfoNative();

            // Native buffers for the variable-length status strings.
            byte[] gpsFix      = new byte[256 * 16];
            byte[] userCommand = new byte[256 * 16];
            byte[] systemError = new byte[256 * 16];

            // All TangraCore readers in this class are serialized on the same lock.
            lock (m_SyncLock)
            {
                TangraCore.ADVGetFrameStatusChannel(index, frameInfo, gpsFix, userCommand, systemError);
            }

            var frameStatusChannel = new AdvFrameInfo(frameInfo);

            frameStatusChannel.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommand);
            frameStatusChannel.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemError);
            frameStatusChannel.GPSFixString      = AdvFrameInfo.GetStringFromBytes(gpsFix);

            return AdvFrameInfoToFrameStateData(frameStatusChannel, index);
        }
        public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
        {
            if (startFrameNo < 0 || startFrameNo >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + startFrameNo);
            }

            int actualFramesToIntegrate = Math.Min(startFrameNo + framesToIntegrate, m_FirstFrame + m_CountFrames - 1) - startFrameNo;

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

            lock (m_SyncLock)
            {
                TangraCore.ADVGetIntegratedFrame(startFrameNo, actualFramesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap = (Bitmap)Bitmap.FromStream(memStr);

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.UnprocessedPixels = unprocessedPixels;
                return(rv);
            }
        }
예제 #3
0
        /// <summary>
        /// Builds one CSV data row for the given frame; the matching header line is
        /// returned through <paramref name="headerRow"/>.
        /// </summary>
        private string StatusDataToCsvRow(AdvFrameInfo frameInfo, int frameNo, out string headerRow)
        {
            const string TimestampFormat = "dd-MMM-yyyy HH:mm:ss.fff";

            var row = new StringBuilder();
            var columns = new StringBuilder("FrameNo,StartTimestamp,EndTimestamp");

            // Fixed leading columns: frame number, start- and end-of-exposure timestamps.
            row.AppendFormat("\"{0}\"", frameNo);
            row.AppendFormat(",\"{0}\"", frameInfo.UtcStartExposureTimeStamp.ToString(TimestampFormat));
            row.AppendFormat(",\"{0}\"", frameInfo.UtcStartExposureTimeStamp.AddMilliseconds(frameInfo.UtcExposureMilliseconds).ToString(TimestampFormat));

            // One extra column per status tag defined in the file; missing tags become empty cells.
            foreach (var definition in m_AdvFile.StatusTagDefinitions)
            {
                object rawValue;
                string cell = frameInfo.Status.TryGetValue(definition.Item1, out rawValue)
                    ? Convert.ToString(rawValue)
                    : string.Empty;

                // Escape embedded double quotes per the CSV convention.
                row.AppendFormat(",\"{0}\"", cell.Replace("\"", "\"\""));
                columns.AppendFormat(",{0}", definition.Item1);
            }

            headerRow = columns.ToString();
            return row.ToString();
        }
        /// <summary>
        /// Reads a named file-level tag from the ADV file and returns it as a string.
        /// </summary>
        public string GetFileTag(string tagName)
        {
            // Native buffer for the tag value (presumably 256 two-byte characters — TODO confirm).
            byte[] buffer = new byte[256 * 2];

            // NOTE(review): unlike the frame readers, this native call is not guarded by
            // a sync lock — confirm whether ADVGetFileTag is safe to call concurrently.
            TangraCore.ADVGetFileTag(tagName, buffer);

            return AdvFrameInfo.GetStringFromBytes(buffer);
        }
        /// <summary>
        /// Attaches extra NTP/Windows mid-exposure debug timestamps to the frame state's
        /// additional-properties bag.
        /// </summary>
        private void AddExtraNtpDebugTimes(ref FrameStateData stateData, AdvFrameInfo frameInfo)
        {
            // Lazily create the property bag the first time extra data is attached.
            if (stateData.AdditionalProperties == null)
            {
                stateData.AdditionalProperties = new SafeDictionary <string, object>();
            }

            // Mid-exposure times are derived by stepping back half an exposure from
            // the respective end-of-exposure timestamps.
            double halfExposureBackMs = -0.5 * stateData.ExposureInMilliseconds;
            stateData.AdditionalProperties.Add("MidTimeNTPRaw", stateData.EndFrameNtpTime.AddMilliseconds(halfExposureBackMs));
            stateData.AdditionalProperties.Add("MidTimeNTPFitted", stateData.CentralExposureTime);
            stateData.AdditionalProperties.Add("MidTimeWindowsRaw", frameInfo.EndExposureSecondaryTimeStamp.AddMilliseconds(halfExposureBackMs));
        }
예제 #6
0
        /// <summary>
        /// Timer tick fired after scrolling settles: loads and displays the frame the
        /// scrollbar currently points at, from the selected (main/calibration) stream.
        /// </summary>
        private void timerScrolling_Tick(object sender, EventArgs e)
        {
            // One-shot behaviour: disable the timer so a scroll burst loads only one frame.
            timerScrolling.Enabled = false;

            // sbFrames.Value appears to be 1-based while frame ids are 0-based — hence the -1.
            int selectedFrameId = sbFrames.Value - 1;

            AdvFrameInfo loadedFrameInfo = null;
            uint[] loadedPixels;

            if (rbMainStream.Checked)
            {
                loadedPixels = m_AdvFile.GetMainFramePixels((uint)selectedFrameId, out loadedFrameInfo);
            }
            else
            {
                loadedPixels = m_AdvFile.GetCalibrationFramePixels((uint)selectedFrameId, out loadedFrameInfo);
            }

            DisplayFrame(selectedFrameId, loadedPixels, loadedFrameInfo);
        }
        /// <summary>
        /// Reads the status channel of the given frame and returns it, with the
        /// user-command / system-error / GPS-fix strings decoded.
        /// </summary>
        public AdvFrameInfo GetStatusChannel(int index)
        {
            const int StatusBufferSize = 256 * 16;

            var nativeInfo = new AdvFrameInfoNative();
            byte[] gpsFixBytes = new byte[StatusBufferSize];
            byte[] userCommandBytes = new byte[StatusBufferSize];
            byte[] systemErrorBytes = new byte[StatusBufferSize];

            // Serialize the native read with the other TangraCore accessors.
            lock (m_SyncLock)
            {
                TangraCore.ADVGetFrameStatusChannel(index, nativeInfo, gpsFixBytes, userCommandBytes, systemErrorBytes);
            }

            var info = new AdvFrameInfo(nativeInfo);
            info.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommandBytes);
            info.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemErrorBytes);
            info.GPSFixString = AdvFrameInfo.GetStringFromBytes(gpsFixBytes);
            return info;
        }
        /// <summary>
        /// Maps a raw ADV status record (<see cref="AdvFrameInfo"/>) onto the generic
        /// <see cref="FrameStateData"/> used by the rest of the pipeline. Returns an
        /// empty FrameStateData when <paramref name="frameInfo"/> is null.
        /// </summary>
        private FrameStateData AdvFrameInfoToFrameStateData(AdvFrameInfo frameInfo, int frameIndex)
        {
            if (frameInfo != null)
            {
                var rv = new FrameStateData();
                rv.VideoCameraFrameId     = frameInfo.VideoCameraFrameId;
                rv.CentralExposureTime    = frameInfo.MiddleExposureTimeStamp;
                rv.SystemTime             = frameInfo.SystemTime;
                rv.EndFrameNtpTime        = frameInfo.EndExposureNtpTimeStamp;
                rv.NtpTimeStampError      = frameInfo.NtpTimeStampError;
                // Exposure is stored in tenths of a millisecond.
                rv.ExposureInMilliseconds = frameInfo.Exposure10thMs / 10.0f;

                rv.NumberIntegratedFrames = (int)frameInfo.IntegratedFrames;

                int almanacStatus = frameInfo.GPSAlmanacStatus;
                int almanacOffset = frameInfo.GetSignedAlamancOffset();

                // NOTE(review): this condition is self-contradictory (!X && X can never
                // be true), so the almanac-offset correction below is dead code. The
                // comment inside suggests the second operand should test the *previous*
                // frame's almanac status — confirm against the original sources.
                if (!frameInfo.AlmanacStatusIsGood && frameInfo.AlmanacStatusIsGood)
                {
                    // When the current almanac is not good, but last frame is, then apply the known almanac offset automatically
                    almanacOffset          = frameInfo.GPSAlmanacOffset;
                    rv.CentralExposureTime = rv.CentralExposureTime.AddSeconds(frameInfo.GPSAlmanacOffset);
                    almanacStatus          = 2;            // Certain
                }


                rv.Gain        = frameInfo.Gain;
                rv.Gamma       = frameInfo.Gamma;
                rv.Temperature = frameInfo.Temperature;
                rv.Offset      = frameInfo.Offset;

                rv.NumberSatellites = frameInfo.GPSTrackedSattelites;

                rv.AlmanacStatus = AdvStatusValuesHelper.TranslateGpsAlmanacStatus(almanacStatus);

                rv.AlmanacOffset = AdvStatusValuesHelper.TranslateGpsAlmanacOffset(almanacStatus, almanacOffset, almanacStatus > 0);

                // The "#" numeric format renders a zero fix status as an empty string.
                rv.GPSFixStatus = frameInfo.GPSFixStatus.ToString("#");

                // Collect all non-null status strings into a single CRLF-separated message.
                rv.Messages = string.Empty;
                if (frameInfo.SystemErrorString != null)
                {
                    rv.Messages = string.Concat(rv.Messages, frameInfo.SystemErrorString, "\r\n");
                }
                if (frameInfo.UserCommandString != null)
                {
                    rv.Messages = string.Concat(rv.Messages, frameInfo.UserCommandString, "\r\n");
                }
                if (frameInfo.GPSFixString != null)
                {
                    rv.Messages = string.Concat(rv.Messages, frameInfo.GPSFixString, "\r\n");
                }

                // Optionally override the central exposure time with a value derived from NTP.
                if (m_UseNtpTimeAsCentralExposureTime)
                {
                    rv.CentralExposureTime = ComputeCentralExposureTimeFromNtpTime(frameIndex, frameInfo.EndExposureNtpTimeStamp);
                }

                // When a fixed frame rate is known it takes precedence over the per-frame exposure.
                if (m_FrameRate > 0)
                {
                    rv.ExposureInMilliseconds = (float)(1000 / m_FrameRate);
                }

                if (m_UsesNtpTimestamps && !OcrDataAvailable && m_UseNtpTimeAsCentralExposureTime)
                {
                    AddExtraNtpDebugTimes(ref rv, frameInfo);
                }

                return(rv);
            }
            else
            {
                return(new FrameStateData());
            }
        }
        /// <summary>
        /// Reads frame <paramref name="index"/> from the stream and returns it as a
        /// <see cref="Pixelmap"/> with display bitmap and frame state attached. Also
        /// updates m_CurrentFrameInfo as a side effect.
        /// </summary>
        public Pixelmap GetPixelmap(int index)
        {
            if (index >= m_FirstFrame + m_CountFrames)
            {
                throw new ApplicationException("Invalid frame position: " + index);
            }

            uint[] pixels             = new uint[m_Width * m_Height];
            uint[] unprocessedPixels  = new uint[m_Width * m_Height];
            byte[] displayBitmapBytes = new byte[m_Width * m_Height];
            // Raw bitmap buffer; the extra 40 + 14 bytes presumably hold the BMP
            // info/file headers for the 24bpp pixel data — TODO confirm.
            byte[] rawBitmapBytes     = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
            var    frameInfo          = new AdvFrameInfoNative();

            // Native buffers for the variable-length status strings.
            byte[] gpsFix      = new byte[256 * 16];
            byte[] userCommand = new byte[256 * 16];
            byte[] systemError = new byte[256 * 16];

            // Serialize the native read with the other TangraCore accessors.
            lock (m_SyncLock)
            {
                TangraCore.ADVGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo, gpsFix, userCommand, systemError);
            }

            m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);
            m_CurrentFrameInfo.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommand);
            m_CurrentFrameInfo.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemError);
            m_CurrentFrameInfo.GPSFixString      = AdvFrameInfo.GetStringFromBytes(gpsFix);

            // For AAV integrated frames, optionally split the VTI OSD fields in the raw bitmap.
            // The product test only verifies both OSD lines are non-zero.
            if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
            {
                TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
            }

            // If the frame has an NTP timestamp but no exposure duration, derive the
            // exposure from the gap to the *next* frame's NTP end-of-exposure timestamp.
            if (frameInfo.HasNtpTimeStamp && m_CurrentFrameInfo.Exposure10thMs == 0 &&
                index + 1 < m_FirstFrame + m_CountFrames)
            {
                lock (m_SyncLock)
                {
                    TangraCore.ADVGetFrameStatusChannel(index + 1, frameInfo, gpsFix, userCommand, systemError);
                }
                if (frameInfo.HasNtpTimeStamp)
                {
                    m_CurrentFrameInfo.Exposure10thMs = (int)Math.Round(new TimeSpan(frameInfo.EndExposureNtpTimeStamp.Ticks - m_CurrentFrameInfo.EndExposureNtpTimeStamp.Ticks).TotalMilliseconds * 10);
                }
            }

            using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
            {
                Bitmap displayBitmap;

                if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames == 0)
                {
                    // This is a VTI Split reference frame. Put some mark on it to mark it as such??
                    displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, m_Width, m_Height);
                    for (int i = 0; i < pixels.Length; i++)
                    {
                        displayBitmapBytes[i] = (byte)pixels[i];
                    }
                }
                else
                {
                    try
                    {
                        displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
                    }
                    catch (Exception ex)
                    {
                        // Fall back to a blank bitmap if the raw bytes do not decode.
                        Trace.WriteLine(ex.GetFullStackTrace());
                        displayBitmap = new Bitmap(m_Width, m_Height);
                    }
                }

                var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
                rv.SetMaxSignalValue(m_Aav16NormVal);
                rv.FrameState        = GetCurrentFrameState(index);
                rv.UnprocessedPixels = unprocessedPixels;
                return(rv);
            }
        }
예제 #10
0
        /// <summary>
        /// Exports frames [firstFrame..lastFrame] of the main or calibration stream to
        /// an AVI file, resampling onto a fixed msPerFrame cadence (source frames may be
        /// duplicated or dropped). Returns true on success; false when the export is
        /// aborted by the user or a frame fails to be written.
        /// </summary>
        internal bool SaveAsAviFile(string fileName, int firstFrame, int lastFrame, AdvToAviConverter converter, bool tryCodec, bool isCalibrationStream, double msPerFrame, double addedGamma, OnProgressDelegate progressCallback)
        {
            IAviSaver saver = AdvToAviConverterFactory.CreateConverter(converter);

            saver.CloseAviFile();
            if (!saver.StartNewAviFile(fileName, (int)m_AdvFile.Width, (int)m_AdvFile.Height, 8, 25, tryCodec))
            {
                progressCallback(100, 0);
                return false;
            }
            try
            {
                int           aviFrameNo               = 0;
                AdvIndexEntry firstFrameIdx            = isCalibrationStream ? m_AdvFile.CalibrationIndex[firstFrame] : m_AdvFile.MainIndex[firstFrame];
                double        ticksToMSFactor          = 1000.0 / (isCalibrationStream ? m_AdvFile.CalibrationSteamInfo.ClockFrequency : m_AdvFile.MainSteamInfo.ClockFrequency);
                double        startingTimeMilliseconds = firstFrameIdx.ElapsedTicks * ticksToMSFactor;

                // BUGFIX: the last-frame timing check previously always used MainIndex,
                // even when exporting the calibration stream.
                AdvIndexEntry lastFrameIdx = isCalibrationStream ? m_AdvFile.CalibrationIndex[lastFrame] : m_AdvFile.MainIndex[lastFrame];
                if (lastFrameIdx.ElapsedTicks == 0)
                {
                    // Without elapsed-tick data resampling cannot be done at all.
                    InvokeMessageBox(
                        "There is insufficient timing information in this file to convert it to AVI. This could be caused by an old file format or trying to make an AVI from a single frame.",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Error);

                    return false;
                }

                if (InvokeMessageBox(
                        "Please note that the AVI export is doing resampling of the original video which will typically cause frames to duplicated and/or dropped.\r\n\r\nThis export function is meant to be used for video streaming (i.e. sharing the video for viewing on the Internet) and should not be used to convert the video to another format for measuring in another software. If you want to measure the video in another software either measure it directly as ADV/AAV file (if supported) or export it to a FITS file sequence from the main file menu and measure the FITS images.\r\n\r\nDo you wish to continue?",
                        MessageBoxButtons.YesNo,
                        MessageBoxIcon.Warning) != DialogResult.Yes)
                {
                    return false;
                }

                progressCallback(5, 0);

                for (int i = firstFrame; i <= lastFrame; i++)
                {
                    AdvIndexEntry frame = isCalibrationStream ? m_AdvFile.CalibrationIndex[i] : m_AdvFile.MainIndex[i];

                    AdvFrameInfo frameInfo = null;
                    uint[]       pixels    = null;

                    if (isCalibrationStream)
                    {
                        pixels = m_AdvFile.GetCalibrationFramePixels((uint)i, out frameInfo);
                    }
                    else
                    {
                        pixels = m_AdvFile.GetMainFramePixels((uint)i, out frameInfo);
                    }

                    using (Pixelmap pixmap = CreatePixelmap(pixels))
                    {
                        // Target AVI frame number for this source frame, from its elapsed time.
                        int lastRepeatedAviFrameNo = 0;

                        if (frame.ElapsedTicks != 0)
                        {
                            lastRepeatedAviFrameNo = (int)Math.Round((frame.ElapsedTicks * ticksToMSFactor - startingTimeMilliseconds) / msPerFrame);
                        }

                        // Repeat (or, when already ahead, skip) the source frame until the
                        // AVI timeline catches up with the source timeline.
                        while (aviFrameNo < lastRepeatedAviFrameNo)
                        {
                            if (!saver.AddAviVideoFrame(pixmap, addedGamma, m_AdvFile.MaxPixelValue))
                            {
                                progressCallback(100, 0);
                                return false;
                            }
                            aviFrameNo++;
                        }
                    }

                    int percDone = (int)Math.Min(90, 90 * (i - firstFrame) * 1.0 / (lastFrame - firstFrame + 1));
                    progressCallback(5 + percDone, 0);
                }
            }
            finally
            {
                saver.CloseAviFile();
                progressCallback(100, 0);
            }

            // BUGFIX: the method used to return false even after a fully successful export.
            return true;
        }
예제 #11
0
        /// <summary>
        /// Renders the frame pixels (or a dark placeholder when there are none) into the
        /// preview box and refreshes every frame-status label and the status-tag list.
        /// </summary>
        private void DisplayFrame(int frameId, uint[] pixels, AdvFrameInfo frameInfo)
        {
            const string TimestampFormat = "dd-MMM-yyyy HH:mm:ss.fff";

            Bitmap displayBitmap;
            if (pixels == null)
            {
                // No pixel data: show a solid dark placeholder instead.
                displayBitmap = new Bitmap(picSmallImage.Width, picSmallImage.Height);
                using (Graphics g = Graphics.FromImage(displayBitmap))
                {
                    g.Clear(SystemColors.ControlDarkDark);
                    g.Save();
                }
            }
            else
            {
                displayBitmap = CreatePixelmap(pixels).DisplayBitmap;
            }

            picSmallImage.Image = displayBitmap;

            // Reset all status fields before (re-)populating them.
            lvFrameStatusData.Items.Clear();
            Control[] statusLabels =
            {
                lblFrameStart, lblFrameExposure, lblFrameEnd, lblFrameSystemTime,
                lblFrameId, lblFrameLayout, lblDataBlockSize, lblFrameGain,
                lblFrameGamma, lblFrameOffset, lblFrameNumSatellites,
                lblFrameAlmanacStatus, lblFrameAlmanacOffset
            };
            foreach (Control statusLabel in statusLabels)
            {
                statusLabel.Text = "";
            }

            if (frameInfo == null)
            {
                return;
            }

            if (frameInfo.HasUtcTimeStamp)
            {
                lblFrameStart.Text    = frameInfo.UtcStartExposureTimeStamp.ToString(TimestampFormat);
                lblFrameExposure.Text = frameInfo.UtcExposureMilliseconds.ToString("0.0") + " ms";
                lblFrameEnd.Text      = frameInfo.UtcStartExposureTimeStamp.AddMilliseconds(frameInfo.UtcExposureMilliseconds).ToString(TimestampFormat);
            }
            else
            {
                lblFrameStart.Text    = "N/A";
                lblFrameExposure.Text = "N/A";
                lblFrameEnd.Text      = "N/A";
            }

            lblFrameSystemTime.Text = frameInfo.SystemTimestamp.ToString(TimestampFormat);
            lblFrameId.Text         = frameId.ToString();
            lblFrameLayout.Text     = frameInfo.ImageLayoutId.ToString();
            lblDataBlockSize.Text   = frameInfo.RawDataBlockSize.ToString() + " bytes";

            lblFrameGain.Text   = frameInfo.Gain.ToString("0.00");
            lblFrameGamma.Text  = frameInfo.Gamma.ToString("0.000");
            lblFrameOffset.Text = frameInfo.Offset.ToString("0.00");

            lblFrameNumSatellites.Text = frameInfo.GPSTrackedSattelites.ToString();
            lblFrameAlmanacStatus.Text = frameInfo.GPSAlmanacStatus.ToString();
            lblFrameAlmanacOffset.Text = frameInfo.GPSAlmanacOffset.ToString();

            // Generic status tags go into the list view as name/value pairs.
            foreach (string key in frameInfo.Status.Keys)
            {
                var item = lvFrameStatusData.Items.Add(key);
                item.SubItems.Add(Convert.ToString(frameInfo.Status[key]));
            }
        }
예제 #12
0
        /// <summary>
        /// Moves the current frame position roughly <paramref name="secondsBackward"/>
        /// seconds back in time and re-displays the frame. Uses embedded timestamps
        /// when available, otherwise falls back to the frame rate (or a single frame).
        /// </summary>
        public void StepBackward(int secondsBackward)
        {
            if (m_VideoStream != null)
            {
                if (m_IsRunning)
                {
                    // No single frame movement or refresh when the video is 'playing'
                    return;
                }

                AstroDigitalVideoStream advStream = m_VideoStream as AstroDigitalVideoStream;
                if (advStream != null)
                {
                    AdvFrameInfo currStatusChannel = advStream.GetStatusChannel(CurrentDirectionAwareFrameIndex);
                    if (currStatusChannel.HasTimeStamp)
                    {
                        int targetFrame = m_CurrentFrameIndex - 1;

                        // Walk backwards until a timestamped frame at least secondsBackward
                        // earlier than the current one is found (or the stream start is hit).
                        while (targetFrame >= m_VideoStream.FirstFrame)
                        {
                            AdvFrameInfo statusChannel = advStream.GetStatusChannel(targetFrame);
                            if (statusChannel.HasTimeStamp)
                            {
                                TimeSpan ts =
                                    new TimeSpan(currStatusChannel.MiddleExposureTimeStamp.Ticks - statusChannel.MiddleExposureTimeStamp.Ticks);
                                if (ts.TotalSeconds >= secondsBackward)
                                {
                                    m_CurrentFrameIndex = targetFrame;
                                    break;
                                }
                            }
                            else if (targetFrame == m_VideoStream.FirstFrame)
                            {
                                // We have reached the beginning of the video
                                m_CurrentFrameIndex = targetFrame;
                                break;
                            }
                            targetFrame--;
                        }
                    }
                    else if (advStream.FrameRate > 0)
                    {
                        // No timestamp on the current frame: estimate the jump from the frame rate.
                        m_CurrentFrameIndex -= (int)Math.Round(secondsBackward * advStream.FrameRate);
                    }
                    else
                    {
                        // No timing information at all: step back a single frame.
                        m_CurrentFrameIndex--;
                    }
                }
                else
                {
                    // Non-ADV streams always jump using the stream's frame rate.
                    m_CurrentFrameIndex -= (int)Math.Round(secondsBackward * m_VideoStream.FrameRate);
                }

                // Clamp to the first frame of the stream.
                if (m_CurrentFrameIndex < m_VideoStream.FirstFrame)
                {
                    m_CurrentFrameIndex = m_VideoStream.FirstFrame;
                }

                DisplayCurrentFrame(MovementType.Jump);
            }
        }
예제 #13
0
        /// <summary>
        /// Reads the pixels of calibration frame <paramref name="frameNo"/> and fills in
        /// <paramref name="frameInfo"/>, including all defined status-tag values.
        /// Throws <see cref="AdvLibException"/> when the frame number is out of range.
        /// </summary>
        public uint[] GetCalibrationFramePixels(uint frameNo, out AdvFrameInfo frameInfo)
        {
            // Guard clause (frameNo is unsigned, so only the upper bound needs checking).
            if (frameNo >= CalibrationSteamInfo.FrameCount)
            {
                // BUGFIX: corrected the "bwtween" typo in the user-facing error message.
                throw new AdvLibException(string.Format("Calibration frame number must be between 0 and {0}", CalibrationSteamInfo.FrameCount - 1));
            }

            uint[] pixels;
            int    errorCode = AdvLib.GetFramePixels(1, (int)frameNo, Width, Height, out frameInfo, out pixels);
            AdvError.Check(errorCode);

            // Read every defined status tag for the just-loaded frame into frameInfo.Status.
            // Item1 is the tag name (used as the key); Item2 is presumably the tag id
            // passed to the native getters; Item3 is the tag's data type — TODO confirm.
            foreach (var entry in StatusTagDefinitions)
            {
                byte?  val8;
                short? val16;
                int?   val32;
                long?  val64;
                float? valf;
                string vals;

                switch (entry.Item3)
                {
                case Adv2TagType.Int8:
                    // NOTE(review): the Int8 tag type is read via GetStatusTagUInt8 — confirm
                    // this signed/unsigned pairing is intentional.
                    if (AdvLib.GetStatusTagUInt8(entry.Item2, out val8) == AdvError.S_OK && val8.HasValue)
                    {
                        frameInfo.Status.Add(entry.Item1, val8.Value);
                    }
                    break;

                case Adv2TagType.Int16:
                    if (AdvLib.GetStatusTagInt16(entry.Item2, out val16) == AdvError.S_OK && val16.HasValue)
                    {
                        frameInfo.Status.Add(entry.Item1, val16.Value);
                    }
                    break;

                case Adv2TagType.Int32:
                    if (AdvLib.GetStatusTagInt32(entry.Item2, out val32) == AdvError.S_OK && val32.HasValue)
                    {
                        frameInfo.Status.Add(entry.Item1, val32.Value);
                    }
                    break;

                case Adv2TagType.Long64:
                    if (AdvLib.GetStatusTagInt64(entry.Item2, out val64) == AdvError.S_OK && val64.HasValue)
                    {
                        frameInfo.Status.Add(entry.Item1, val64.Value);
                    }
                    break;

                case Adv2TagType.Real:
                    if (AdvLib.GetStatusTagFloat(entry.Item2, out valf) == AdvError.S_OK && valf.HasValue)
                    {
                        frameInfo.Status.Add(entry.Item1, valf.Value);
                    }
                    break;

                case Adv2TagType.UTF8String:
                    if (AdvLib.GetStatusTagUTF8String(entry.Item2, out vals) == AdvError.S_OK)
                    {
                        frameInfo.Status.Add(entry.Item1, vals);
                    }
                    break;
                }
            }
            return pixels;
        }
예제 #14
0
        /// <summary>
        /// Builds one CSV data row for the given frame and, via
        /// <paramref name="headerRow"/>, the matching column-header line.
        /// </summary>
        private string StatusDataToCsvRow(AdvFrameInfo frameInfo, int frameNo, out string headerRow)
        {
            var dataRow = new StringBuilder();
            var headerBuilder = new StringBuilder();

            var exposureStart = frameInfo.UtcStartExposureTimeStamp;
            var exposureEnd = exposureStart.AddMilliseconds(frameInfo.UtcExposureMilliseconds);

            // Fixed leading columns: frame number, start- and end-of-exposure timestamps.
            dataRow.AppendFormat("\"{0}\"", frameNo);
            dataRow.AppendFormat(",\"{0}\"", exposureStart.ToString("dd-MMM-yyyy HH:mm:ss.fff"));
            dataRow.AppendFormat(",\"{0}\"", exposureEnd.ToString("dd-MMM-yyyy HH:mm:ss.fff"));

            headerBuilder.Append("FrameNo,StartTimestamp,EndTimestamp");

            // One quoted cell (and one header column) per status tag definition;
            // tags missing from this frame become empty cells.
            foreach (var definition in m_AdvFile.StatusTagDefinitions)
            {
                object rawValue;
                string cellText = string.Empty;
                if (frameInfo.Status.TryGetValue(definition.Item1, out rawValue))
                {
                    cellText = Convert.ToString(rawValue);
                }

                // Escape embedded double quotes per the CSV convention.
                dataRow.AppendFormat(",\"{0}\"", cellText.Replace("\"", "\"\""));
                headerBuilder.AppendFormat(",{0}", definition.Item1);
            }

            headerRow = headerBuilder.ToString();
            return dataRow.ToString();
        }
예제 #15
0
        /// <summary>
        /// Renders the frame pixels (or a dark placeholder when there are none) into the
        /// preview box and refreshes every frame-status label and the status-tag list.
        /// </summary>
        private void DisplayFrame(int frameId, uint[] pixels, AdvFrameInfo frameInfo)
        {
            Bitmap displayBitmap;
            if (pixels != null)
            {
                var pixelMap = CreatePixelmap(pixels);
                displayBitmap = pixelMap.DisplayBitmap;
            }
            else
            {
                // No pixel data: show a solid dark placeholder instead.
                displayBitmap = new Bitmap(picSmallImage.Width, picSmallImage.Height);
                using (Graphics g = Graphics.FromImage(displayBitmap))
                {
                    g.Clear(SystemColors.ControlDarkDark);
                    g.Save();
                }
            }

            picSmallImage.Image = displayBitmap;

            // Reset all status fields before (re-)populating them below.
            lvFrameStatusData.Items.Clear();
            lblFrameStart.Text = "";
            lblFrameExposure.Text = "";
            lblFrameEnd.Text = "";
            lblFrameSystemTime.Text = "";
            lblFrameId.Text = "";
            lblFrameLayout.Text = "";
            lblDataBlockSize.Text = "";
            lblFrameGain.Text = "";
            lblFrameGamma.Text = "";
            lblFrameOffset.Text = "";
            lblFrameNumSatellites.Text = "";
            lblFrameAlmanacStatus.Text = "";
            lblFrameAlmanacOffset.Text = "";

            if (frameInfo != null)
            {
                // UTC exposure times are only shown when the frame carries a UTC timestamp.
                if (frameInfo.HasUtcTimeStamp)
                {
                    lblFrameStart.Text = frameInfo.UtcStartExposureTimeStamp.ToString("dd-MMM-yyyy HH:mm:ss.fff");
                    lblFrameExposure.Text = frameInfo.UtcExposureMilliseconds.ToString("0.0") + " ms";
                    lblFrameEnd.Text = frameInfo.UtcStartExposureTimeStamp.AddMilliseconds(frameInfo.UtcExposureMilliseconds).ToString("dd-MMM-yyyy HH:mm:ss.fff");
                }
                else
                {
                    lblFrameStart.Text = "N/A";
                    lblFrameExposure.Text = "N/A";
                    lblFrameEnd.Text = "N/A";
                }

                lblFrameSystemTime.Text = frameInfo.SystemTimestamp.ToString("dd-MMM-yyyy HH:mm:ss.fff");
                lblFrameId.Text = frameId.ToString();
                lblFrameLayout.Text = frameInfo.ImageLayoutId.ToString();
                lblDataBlockSize.Text = frameInfo.RawDataBlockSize.ToString() + " bytes";

                lblFrameGain.Text = frameInfo.Gain.ToString("0.00");
                lblFrameGamma.Text = frameInfo.Gamma.ToString("0.000");
                lblFrameOffset.Text = frameInfo.Offset.ToString("0.00");

                lblFrameNumSatellites.Text = frameInfo.GPSTrackedSattelites.ToString();
                lblFrameAlmanacStatus.Text = frameInfo.GPSAlmanacStatus.ToString();
                lblFrameAlmanacOffset.Text = frameInfo.GPSAlmanacOffset.ToString();

                // Generic status tags go into the list view as name/value pairs.
                foreach (string key in frameInfo.Status.Keys)
                {
                    var item = lvFrameStatusData.Items.Add(key);
                    item.SubItems.Add(Convert.ToString(frameInfo.Status[key]));
                }
            }
        }