Example #1
        public void SampleAdvFileRecording2()
        {
            fileName = @"C:\hello-world.adv";

            const uint MILLI_TO_NANO = 1000000;
            const int  WIDTH         = 800;
            const int  HEIGHT        = 600;

            var recorder = new AdvRecorder();

            recorder.ImageConfig.SetImageParameters(WIDTH, HEIGHT, 16, 0);

            recorder.FileMetaData.RecorderSoftwareName    = "MyVideoRecorder";
            recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
            recorder.FileMetaData.CameraModel             = "TestCamera";
            recorder.FileMetaData.CameraSensorInfo        = "TestSensor";

            recorder.StartRecordingNewFile(fileName, 1 * MILLI_TO_NANO /* 1ms */);

            for (int i = 0; i < 10; i++)
            {
                ushort[] pixels = new ushort[WIDTH * HEIGHT];

                recorder.AddVideoFrame(
                    pixels, false, null,
                    AdvTimeStamp.FromDateTime(DateTime.Now),
                    AdvTimeStamp.FromDateTime(DateTime.Now.AddSeconds(0.5 * i)),
                    null, AdvImageData.PixelDepth16Bit);
            }

            recorder.FinishRecording();
        }
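A file produced this way can be read back with the AdvFile2 reader used in the later examples. The sketch below relies only on the AdvFile2 members that appear in Examples #4, #6 and #7 (MainSteamInfo.FrameCount, GetMainFramePixels and Close); the method and variable names are illustrative.

        public void SampleAdvFileReading()
        {
            var advFile = new AdvFile2(@"C:\hello-world.adv");

            for (uint i = 0; i < advFile.MainSteamInfo.FrameCount; i++)
            {
                AdvFrameInfo frameInfo;
                uint[] pixels = advFile.GetMainFramePixels(i, out frameInfo);

                // Process the frame pixels and the per-frame status data here
            }

            advFile.Close();
        }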
Example #2
        public void SampleAdvFileRecording5()
        {
            fileName = @"C:\hello-world-5.adv";

            const uint MILLI_TO_NANO = 1000000;
            const int  WIDTH         = 800;
            const int  HEIGHT        = 600;

            var rec = new AdvRecorder();

            rec.ImageConfig.SetImageParameters(WIDTH, HEIGHT, 16, 0);

            rec.FileMetaData.RecorderSoftwareName    = "MyVideoRecorder";
            rec.FileMetaData.RecorderSoftwareVersion = "x.y.z";
            rec.FileMetaData.CameraModel             = "TestCamera";
            rec.FileMetaData.CameraSensorInfo        = "TestSensor";

            rec.FileMetaData.GainResponseMode = ResponseMode.Linear;
            rec.FileMetaData.Telescope        = "14\" LX-200 ACF (Tangra Observatory)";
            rec.FileMetaData.Observer         = "Hristo Pavlov";
            rec.FileMetaData.ObjectName       = "Chariklo";
            rec.FileMetaData.Comment          = "Full moon only 20 deg away from the target.";

            rec.FileMetaData.AddUserTag("Timing Hardware", "IOTA-VTI v3");

            rec.StatusSectionConfig.AddDefineTag("ErrorFlag", Adv2TagType.Int8);
            rec.StatusSectionConfig.AddDefineTag("Temperature", Adv2TagType.Real);

            rec.StartRecordingNewFile(fileName, 1 * MILLI_TO_NANO /* 1ms */);

            // TODO: Get the actual timestamps and exposure
            DateTime startTime       = DateTime.UtcNow;
            double   exposureSeconds = 0.5;

            for (int i = 0; i < 10; i++)
            {
                ushort[] pixels = new ushort[WIDTH * HEIGHT];

                var statusEntry = new AdvRecorder.AdvStatusEntry()
                {
                    // Set the actual values. The order and types of the values must match the tags defined above.
                    AdditionalStatusTags = new object[] { (byte)1, 15.3f }
                };

                rec.AddVideoFrame(
                    pixels, false, null,
                    AdvTimeStamp.FromDateTime(startTime.AddSeconds(exposureSeconds * i)),
                    AdvTimeStamp.FromDateTime(startTime.AddSeconds(exposureSeconds * (i + 1))),
                    statusEntry, AdvImageData.PixelDepth16Bit);
            }

            rec.FinishRecording();
        }
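The comment in the loop above stresses that the order and types of the AdditionalStatusTags values must match the AddDefineTag declarations. A slightly more defensive variant of the same code is sketched below; it assumes, as Example #8 does with the older API, that the ids returned by AddDefineTag are zero-based indices into the AdditionalStatusTags array.

            // Defined once, before StartRecordingNewFile()
            int errorFlagTagId   = rec.StatusSectionConfig.AddDefineTag("ErrorFlag", Adv2TagType.Int8);
            int temperatureTagId = rec.StatusSectionConfig.AddDefineTag("Temperature", Adv2TagType.Real);

            // Built for every frame, inside the recording loop
            var statusEntry = new AdvRecorder.AdvStatusEntry();
            statusEntry.AdditionalStatusTags = new object[2];
            statusEntry.AdditionalStatusTags[errorFlagTagId]   = (byte)1; // Int8 -> byte
            statusEntry.AdditionalStatusTags[temperatureTagId] = 15.3f;   // Real -> float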
Example #3
        private void VideoGrabberWorker(object state)
        {
            int w = 0;
            int h = 0;
            int bpp = 0;
            int channels = 0;

            byte[] imageBytes = new byte[m_ImageSize];
            m_CurrentBytes = new byte[m_ImageSize];
            for (int i = 0; i < m_ImageSize; i++)
            {
                imageBytes[i] = 0x66;
            }

            m_FrameNo = 0;
            int rv;
            var stopwatch = new Stopwatch();

            m_GPSLongitude = double.NaN;
            m_GPSLatitude = double.NaN;
            m_CCDTemp = double.NaN;
            if (m_UseCooling) stopwatch.Start();

            while (m_Connected)
            {
                try
                {
                    rv = QHYPInvoke.GetQHYCCDLiveFrame(m_Handle, ref w, ref h, ref bpp, ref channels, imageBytes);
                    if (rv >= 0)
                    {
                        m_FrameNo++;

                        var header = new ImageHeader(m_FrameNo, imageBytes, bpp);

                        if (m_CameraState == VideoCameraState.videoCameraRecording)
                        {
                            lock (m_SyncLock)
                            {
                                if (m_Recorder == null)
                                {
                                    m_Recorder = new AdvRecorder();

                                    Version occuRecVersion = Assembly.GetExecutingAssembly().GetName().Version;
                                    bool isBeta = Assembly.GetExecutingAssembly().GetCustomAttributes(typeof(BetaReleaseAttribute), false).Length == 1;

                                    // Start Recording
                                    m_Recorder.FileMetaData.RecorderSoftwareName = "OccuRec";
                                    m_Recorder.FileMetaData.RecorderSoftwareVersion = string.Format("{0}.{1}{2}", occuRecVersion.Major, occuRecVersion.Minor, isBeta ? " BETA" : "");

                                    m_Recorder.FileMetaData.CameraModel = m_CameraModel;
                                    m_Recorder.FileMetaData.CameraSensorInfo = m_CameraModel;

                                    if (!double.IsNaN(m_GPSLongitude) && !double.IsNaN(m_GPSLatitude))
                                        m_Recorder.LocationData.SetLocation(m_GPSLongitude, m_GPSLatitude, double.NaN, "ALTITUDE-UNAVAILABLE");

                                    m_Recorder.StatusSectionConfig.RecordGain = true;
                                    m_Recorder.StatusSectionConfig.RecordGamma = true;
                                    m_Recorder.StatusSectionConfig.RecordSystemTime = true;
                                    m_Recorder.StatusSectionConfig.RecordFixStatus = true;
                                    m_Recorder.StatusSectionConfig.RecordVideoCameraFrameId = true;

                                    m_TimeStampFlagTagId = m_Recorder.StatusSectionConfig.AddDefineTag("TimeStampFlag", Adv2TagType.Int8);
                                    m_EstimatedTimeStampErrorMillisecondsTagId = m_Recorder.StatusSectionConfig.AddDefineTag("EstimatedTimeStampErrorMilliseconds", Adv2TagType.Real);
                                    if (m_UseCooling)
                                        m_CCDTempTagId = m_Recorder.StatusSectionConfig.AddDefineTag("CCDTemperature", Adv2TagType.Real);

                                    m_Recorder.ImageConfig.SetImageParameters((ushort)m_Width, (ushort)m_Height, (byte)m_Bpp, null);

                                    m_Recorder.StartRecordingNewFile(m_PreferredFileName, 1000000, true);

                                    m_PrevVoxFreqIsGood = false;
                                }

                                var statusChannel = new AdvRecorder.AdvStatusEntry()
                                {
                                    Gain = m_CurrentGain,
                                    Gamma = (float)m_CurrentGamma,
                                    VideoCameraFrameId = header.SeqNumber,
                                    SystemTime = AdvTimeStamp.FromDateTime(DateTime.Now)
                                };

                                // The value for the Real-typed "EstimatedTimeStampErrorMilliseconds" tag must be a float
                                // (an int here is the likely cause of the InvalidCastException captured in the TODO below)
                                if (m_UseCooling)
                                    statusChannel.AdditionalStatusTags = new object[] { (byte)0, 0.0f, (float)m_CCDTemp };
                                else
                                    statusChannel.AdditionalStatusTags = new object[] { (byte)0, 0.0f };

                                if (header.GpsTimeAvailable)
                                {
                                    if (header.Latitude != 0 && header.Longitude != 0)
                                        statusChannel.FixStatus = FixStatus.GFix;
                                    else
                                        statusChannel.FixStatus = FixStatus.PFix;
                                }
                                else
                                    statusChannel.FixStatus = FixStatus.NoFix;

                                if (header.MaxClock == 10000500 && m_PrevVoxFreqIsGood)
                                {
                                    // NOTE: Add Potentially Bad GPS Timestamp Flag
                                    statusChannel.AdditionalStatusTags[0] = (byte)1;
                                }
                                else
                                {
                                    // NOTE: Add GPS Timestamp Bias Estimate
                                    double biasMSPerSec = (header.MaxClock - 10000000) * 0.5 / 1000.0;
                                    double biasMS = biasMSPerSec * (header.EndTime - header.StartTime).TotalSeconds;
                                    statusChannel.AdditionalStatusTags[1] = (float)biasMS;
                                }

                                m_PrevVoxFreqIsGood = header.MaxClock != 10000500;
                                /* TODO
                                [2548] System.InvalidCastException : Specified cast is not valid.
                                [2548]    at Adv.AdvRecorder.BeginVideoFrame(AdvStream advStream, Nullable`1 startClockTicks, Nullable`1 endClockTicks, AdvTimeStamp startUtcTimeStamp, AdvTimeStamp endUtcTimeStamp, AdvStatusEntry metadata)
                                [2548]    at Adv.AdvRecorder.AddFrame(AdvStream advStream, Byte[] pixels, Boolean compressIfPossible, Nullable`1 preferredCompression, Nullable`1 startClockTicks, Nullable`1 endClockTicks, AdvTimeStamp startUtcTimeStamp, AdvTimeStamp endUtcTimeStamp, AdvStatusEntry metadata, AdvImageData imageData)
                                [2548]    at Adv.AdvRecorder.AddVideoFrame(Byte[] pixels, Boolean compressIfPossible, Nullable`1 preferredCompression, AdvTimeStamp startUtcTimeStamp, AdvTimeStamp endUtcTimeStamp, AdvStatusEntry metadata, AdvImageData imageData)
                                [2548]    at OccuRec.Drivers.QHYVideo.Video.VideoGrabberWorker(Object state) in f:\WORK\OccuRec\OccuRec\Drivers\QHYVideo\Video.cs:line 714
                                [2548] --------------------------------------------------------------------------------------------------
                                */
                                m_Recorder.AddVideoFrame(
                                    imageBytes,
                                    true, PreferredCompression.Lagarith16,
                                    AdvTimeStamp.FromDateTime(header.StartTime),
                                    AdvTimeStamp.FromDateTime(header.EndTime),
                                    statusChannel,
                                    bpp == 8 ? AdvImageData.PixelDepth8Bit : AdvImageData.PixelDepth16Bit);
                            }
                        }

                        lock (m_SyncLock)
                        {
                            Array.Copy(imageBytes, 0, m_CurrentBytes, 0, m_ImageSize);
                        }

                        if (m_ChangeGaintTo.HasValue)
                        {
                            rv = QHYPInvoke.SetQHYCCDParam(m_Handle, CONTROL_ID.CONTROL_GAIN, m_ChangeGaintTo.Value);
                            if (QHYPInvoke.SUCCEEDED(rv))
                            {
                                m_CurrentGain = m_ChangeGaintTo.Value;
                                m_ChangeGaintTo = null;
                            }
                        }

                        if (m_ChangeGammaTo.HasValue)
                        {
                            double gamma = m_SupportedGammaValues[m_ChangeGammaTo.Value];
                            rv = QHYPInvoke.SetQHYCCDParam(m_Handle, CONTROL_ID.CONTROL_GAMMA, gamma);
                            if (QHYPInvoke.SUCCEEDED(rv))
                            {
                                m_CurrentGamma = gamma;
                                m_CurrentGammaIndex = m_ChangeGammaTo.Value;
                                m_ChangeGammaTo = null;
                            }
                        }

                        if (m_ChangeExposureIndexTo.HasValue)
                        {
                            rv = QHYPInvoke.SetQHYCCDParam(m_Handle, CONTROL_ID.CONTROL_EXPOSURE, m_SupportedExposureMilliseconds[m_ChangeExposureIndexTo.Value] * 1000);
                            if (QHYPInvoke.SUCCEEDED(rv))
                            {
                                m_CurrentExposureIndex = m_ChangeExposureIndexTo.Value;
                                m_ChangeExposureIndexTo = null;
                            }
                        }

                        var val = m_VOXFreqFitter.GetNextValue((uint)header.MaxClock);
                        if (val.HasValue)
                            QHYPInvoke.SetQHYCCDGPSVCOXFreq(m_Handle, val.Value);
                    }

                    if (m_UseCooling && stopwatch.ElapsedMilliseconds > 1000)
                    {
                        m_CCDTemp = QHYPInvoke.GetQHYCCDParam(m_Handle, CONTROL_ID.CONTROL_CURTEMP);

                        rv = QHYPInvoke.ControlQHYCCDTemp(m_Handle, -50);
                        if (!QHYPInvoke.SUCCEEDED(rv))
                        {
                            Trace.WriteLine("QHYPInvoke.ControlQHYCCDTemp() Failed!");
                        }
                        stopwatch.Reset();
                        stopwatch.Start();

                    }

                    Thread.Sleep(0);
                }
                catch (Exception ex)
                {
                    Trace.WriteLine(Extensions.GetFullStackTrace(ex));
                }
            }
        }
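The worker above creates the recorder and adds frames, but the excerpt never finalizes the file. A hypothetical stop path, using only the FinishRecording call shown in the other examples, might look like the sketch below; the method name and the place it would be called from are assumptions.

        private void StopAdvRecording()
        {
            lock (m_SyncLock)
            {
                if (m_Recorder != null)
                {
                    // Flush and close the ADV file created by the grabber worker
                    m_Recorder.FinishRecording();
                    m_Recorder = null;
                }
            }
        }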
Example #4
        private static void FixOccuRecEveryFrameNoLockedSavedInPeriod(string fileLocation)
        {
            string fileLocationOut = Path.GetFullPath(Path.GetDirectoryName(fileLocation) + @"\" + Path.GetFileName(fileLocation) + "-fixed.aav");

            int FIRST_FRAME_IN_PERIOD = 0;
            int ACTUAL_INTEGRATION    = 2;

            var aaFile = new AdvFile2(fileLocation);

            int FIRST_OSD_LINE = int.Parse(aaFile.UserMetadataTags["OSD-FIRST-LINE"]);
            int LAST_OSD_LINE  = int.Parse(aaFile.UserMetadataTags["OSD-LAST-LINE"]);
            int FRAME_WIDTH    = int.Parse(aaFile.SystemMetadataTags["WIDTH"]);
            int FRAME_HEIGHT   = int.Parse(aaFile.SystemMetadataTags["HEIGHT"]);
            int FRAME_PIXELS   = FRAME_WIDTH * FRAME_HEIGHT;

            var             frame     = new ushort[FRAME_PIXELS];
            List <ushort[]> calFrames = new List <ushort[]>();

            for (uint i = 0; i < aaFile.CalibrationSteamInfo.FrameCount; i++)
            {
                var calPix = aaFile.GetCalibrationFramePixels(i);
                for (int j = 0; j < FRAME_PIXELS; j++)
                {
                    frame[j] = (ushort)calPix[j];
                }
                calFrames.Add(frame.ToArray());
            }

            for (int j = 0; j < FRAME_PIXELS; j++)
            {
                frame[j] = 0;
            }

            int    totalIntegratedFrames = 0;
            string tmpBuffFileName       = Path.GetTempFileName();

            using (var tmpFile = File.Create(tmpBuffFileName))
                using (var bw = new BinaryWriter(tmpFile))
                {
                    uint[] pixels            = null;
                    int    originalRawFrames = 0;
                    for (uint i = 0; i < aaFile.MainSteamInfo.FrameCount; i++)
                    {
                        AdvFrameInfo frameInfo;
                        pixels = aaFile.GetMainFramePixels(i, out frameInfo);

                        bool isFirstFrameInInterval = (i - (FIRST_FRAME_IN_PERIOD + ACTUAL_INTEGRATION)) % ACTUAL_INTEGRATION == 0;
                        bool isLastFrameInInterval  = (i - (FIRST_FRAME_IN_PERIOD + ACTUAL_INTEGRATION - 1)) % ACTUAL_INTEGRATION == 0;
                        bool isFirstFrameOfOccuRecPresumedIntegrationInterval = Convert.ToInt64(frameInfo.Status["StartFrame"]) == Convert.ToInt64(frameInfo.Status["EndFrame"]);

                        for (int j = 0; j < FRAME_PIXELS; j++)
                        {
                            int lineNo = (j / FRAME_WIDTH);

                            if (lineNo < FIRST_OSD_LINE || lineNo > (LAST_OSD_LINE + 1))
                            {
                                // Sum up all pixels which are NOT in the OSD area
                                frame[j] += (ushort)pixels[j];
                            }
                            else
                            {
                                // This is the OSD timestamp area. Copy only the even or odd lines, depending on the field.
                                if (isFirstFrameInInterval)
                                {
                                    if (lineNo % 2 == 0)
                                    {
                                        // The first timestamp will only be correct if this is also the first frame of the 'presumed' integration period
                                        if (isFirstFrameOfOccuRecPresumedIntegrationInterval)
                                        {
                                            frame[j] = (ushort)(ACTUAL_INTEGRATION * pixels[j]);
                                        }
                                        else
                                        {
                                            frame[j] = 0;
                                        }
                                    }
                                }
                                else if (isLastFrameInInterval)
                                {
                                    // The timestamp of the last field is always correct so always copy it
                                    if (lineNo % 2 == 1)
                                    {
                                        frame[j] = (ushort)pixels[j];
                                    }
                                }
                            }
                        }

                        if (isLastFrameInInterval)
                        {
                            for (int j = 0; j < frame.Length; j++)
                            {
                                bw.Write(frame[j]);
                            }

                            totalIntegratedFrames++;
                            for (int j = 0; j < FRAME_PIXELS; j++)
                            {
                                frame[j] = 0;
                            }
                        }

                        originalRawFrames += (short)frameInfo.Status["IntegratedFrames"];
                    }
                }


            var recorder = new AdvRecorder();

            recorder.ImageConfig.SetImageParameters(
                (ushort)aaFile.Width,
                (ushort)aaFile.Height,
                16,
                255 * ACTUAL_INTEGRATION);

            recorder.FileMetaData.RecorderSoftwareName    = "Tangra";
            recorder.FileMetaData.RecorderSoftwareVersion = VersionHelper.AssemblyVersion;
            recorder.FileMetaData.CameraModel             = "Unknown";
            recorder.FileMetaData.CameraSensorInfo        = "Unknown";

            var frameRate = 25.00;

            recorder.FileMetaData.NativeFrameRate    = frameRate;
            recorder.FileMetaData.EffectiveFrameRate = 25.0 / ACTUAL_INTEGRATION;

            var nativeStandards = string.Empty;

            if (Math.Abs(frameRate - 25.0) < 0.1)
            {
                nativeStandards = "PAL";
            }
            else if (Math.Abs(frameRate - 29.97) < 0.1)
            {
                nativeStandards = "NTSC";
            }
            recorder.FileMetaData.AddUserTag("NATIVE-VIDEO-STANDARD", nativeStandards);
            recorder.FileMetaData.AddUserTag("FRAME-COMBINING", "Binning");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("AAV-VERSION", "2");
            recorder.FileMetaData.AddUserTag("AAV16-NORMVAL", (255 * ACTUAL_INTEGRATION).ToString());

            recorder.FileMetaData.AddCalibrationStreamTag("TYPE", "VTI-OSD-CALIBRATION");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());

            recorder.StatusSectionConfig.RecordSystemErrors = true;
            recorder.StatusSectionConfig.AddDefineTag("FRAME-TYPE", Adv2TagType.UTF8String);
            recorder.StatusSectionConfig.AddDefineTag("FRAMES-IN-INTERVAL", Adv2TagType.Int8);
            recorder.StatusSectionConfig.AddDefineTag("NOISE-SIGNATURES", Adv2TagType.UTF8String);
            recorder.StatusSectionConfig.AddDefineTag("ORIGINAL-FRAME-ID", Adv2TagType.Int32);
            recorder.StatusSectionConfig.AddDefineTag("IntegratedFrames", Adv2TagType.Int8);

            recorder.StartRecordingNewFile(fileLocationOut, 0, true);

            int calFrameId = 0;

            foreach (var calFrame in calFrames)
            {
                recorder.AddCalibrationFrame(calFrame, true,
                                             PreferredCompression.Lagarith16,
                                             new AdvRecorder.AdvStatusEntry()
                {
                    AdditionalStatusTags = new[] { "VTI-OSD-CALIBRATION", (object)(byte)0, string.Empty, (object)calFrameId, (object)(byte)0 }
                },
                                             Adv.AdvImageData.PixelDepth16Bit);
                calFrameId++;
            }

            using (var tmpFile = File.OpenRead(tmpBuffFileName))
                using (var rdr = new BinaryReader(tmpFile))
                {
                    for (int i = 0; i < totalIntegratedFrames; i++)
                    {
                        for (int j = 0; j < frame.Length; j++)
                        {
                            frame[j] = rdr.ReadUInt16();
                        }

                        recorder.AddVideoFrame(frame, true,
                                               PreferredCompression.Lagarith16,
                                               new AdvRecorder.AdvStatusEntry()
                        {
                            SystemErrors         = "",
                            AdditionalStatusTags = new[] { "DATA", (object)(byte)8, string.Empty, (object)(int)0, (object)(byte)8 }
                        },
                                               Adv.AdvImageData.PixelDepth16Bit);
                    }
                }

            File.Delete(tmpBuffFileName);

            recorder.FinishRecording();
        }
Example #5
        internal void StartConversion(string fileName, int topVtiOsdRow, int bottomVtiOsdRow, int firstIntegratedFrameId, int integrationInterval, string cameraModel, string sensorInfo, bool swapTimestampFields)
        {
            m_VideoController.ClearAAVConversionErrors();

            m_Width = m_VideoController.FramePlayer.Video.Width;
            m_Height = m_VideoController.FramePlayer.Video.Height;

            m_EndFieldParity = swapTimestampFields ? 1 : 0;

            m_TestRect = new Rectangle((m_Width / 2) - 16, (m_Height / 2) - 16, 32, 32);

            m_CurrAavFramePixels = new ushort[m_Width * m_Height];

            m_FileName = fileName;
            m_MaxPixelValue = 0xFF * integrationInterval;

            m_FirstVtiOsdLine = topVtiOsdRow;
            m_LastVtiOsdLine = bottomVtiOsdRow;

            m_IntegrationPeriod = integrationInterval;
            m_FirstIntegrationPeriodStartFrameId = firstIntegratedFrameId;
            m_NextExpectedIntegrationPeriodStartFrameId = firstIntegratedFrameId + integrationInterval;
            m_FramesSoFar = 0;

            m_Recorder = new AdvRecorder();
            m_Recorder.ImageConfig.SetImageParameters(
                (ushort)m_Width,
                (ushort)m_Height,
                16,
                m_MaxPixelValue);

            m_Recorder.FileMetaData.RecorderSoftwareName = "Tangra";
            m_Recorder.FileMetaData.RecorderSoftwareVersion = VersionHelper.AssemblyFileVersion;
            m_Recorder.FileMetaData.CameraModel = !string.IsNullOrWhiteSpace(cameraModel) ? cameraModel : "Unknown";
            m_Recorder.FileMetaData.CameraSensorInfo = !string.IsNullOrWhiteSpace(sensorInfo) ? sensorInfo : "Unknown";

            var frameRate = 1000.0 / m_VideoController.FramePlayer.Video.MillisecondsPerFrame;
            m_Recorder.FileMetaData.NativeFrameRate = frameRate;
            m_Recorder.FileMetaData.EffectiveFrameRate = 1000.0 / (integrationInterval * m_VideoController.FramePlayer.Video.MillisecondsPerFrame);

            var nativeStandards = string.Empty;
            if (Math.Abs(frameRate - 25.0) < 0.1)
                nativeStandards = "PAL";
            else if (Math.Abs(frameRate - 29.97) < 0.1)
                nativeStandards = "NTSC";
            m_Recorder.FileMetaData.AddUserTag("NATIVE-VIDEO-STANDARD", nativeStandards);
            m_Recorder.FileMetaData.AddUserTag("FRAME-COMBINING", "Binning");
            m_Recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", topVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", bottomVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddUserTag("AAV-VERSION", "2");
            m_Recorder.FileMetaData.AddUserTag("AAV16-NORMVAL", m_MaxPixelValue.ToString());

            m_Recorder.FileMetaData.AddCalibrationStreamTag("TYPE", "VTI-OSD-CALIBRATION");
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-FIRST-LINE", topVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-LAST-LINE", bottomVtiOsdRow.ToString());

            m_Recorder.StatusSectionConfig.RecordSystemErrors = true;
            m_Recorder.StatusSectionConfig.AddDefineTag("FRAME-TYPE", Adv2TagType.UTF8String);
            m_Recorder.StatusSectionConfig.AddDefineTag("FRAMES-IN-INTERVAL", Adv2TagType.Int8);
            m_Recorder.StatusSectionConfig.AddDefineTag("NOISE-SIGNATURES", Adv2TagType.UTF8String);
            m_Recorder.StatusSectionConfig.AddDefineTag("ORIGINAL-FRAME-ID", Adv2TagType.Int32);

            m_Recorder.StartRecordingNewFile(m_FileName, 0, true);

            int currFrame = m_VideoController.CurrentFrameIndex;
            try
            {
                m_VideoController.SetPictureBoxCursor(Cursors.WaitCursor);
                m_VideoController.NotifyFileProgress(-1, 16);

                Pixelmap frame;
                ushort[] pixels;

                for (int i = currFrame; i < Math.Min(currFrame + 16, m_VideoController.VideoLastFrame); i++)
                {
                    frame = m_VideoController.GetFrame(i);

                    pixels = frame.Pixels.Select(x => (ushort)(integrationInterval * x)).ToArray();
                    m_Recorder.AddCalibrationFrame(pixels, true,
                        PreferredCompression.Lagarith16,
                        new AdvRecorder.AdvStatusEntry() { AdditionalStatusTags = new[] { "VTI-OSD-CALIBRATION", (object)(byte)0, string.Empty, (object)i } },
                        Adv.AdvImageData.PixelDepth16Bit);

                    m_VideoController.NotifyFileProgress(i - currFrame, 16);
                }

                frame = m_VideoController.GetFrame(m_FirstIntegrationPeriodStartFrameId);

                pixels = frame.Pixels.Select(x => (ushort)(integrationInterval * x)).ToArray();
                m_Recorder.AddCalibrationFrame(pixels, true,
                    PreferredCompression.Lagarith16,
                    new AdvRecorder.AdvStatusEntry() { AdditionalStatusTags = new[] { "FIELD-CALIBRATION", (object)(byte)0, string.Empty, (object)m_FirstIntegrationPeriodStartFrameId } },
                    Adv.AdvImageData.PixelDepth16Bit);
            }
            finally
            {
                m_VideoController.NotifyFileProgress(-1, 0);
                m_VideoController.SetPictureBoxCursor(Cursors.Default);
            }
        }
Example #6
        private static void FixOccuRecEveryFrameSavedInPeriod(string fileLocation)
        {
            string fileLocationOut = Path.GetFullPath(Path.GetDirectoryName(fileLocation) + @"\" + Path.GetFileName(fileLocation) + "-fixed.aav");

            int         FIRST_FRAME_IN_PERIOD = 2;
            List <uint> BAD_FRAMES            = new List <uint>();

            var aaFile = new AdvFile2(fileLocation);

            int FIRST_OSD_LINE = int.Parse(aaFile.SystemMetadataTags["OSD-FIRST-LINE"]);
            int LAST_OSD_LINE  = int.Parse(aaFile.SystemMetadataTags["OSD-LAST-LINE"]);
            int INTEGRATION    = (int)Math.Round(double.Parse(aaFile.SystemMetadataTags["NATIVE-FRAME-RATE"]) / double.Parse(aaFile.SystemMetadataTags["EFFECTIVE-FRAME-RATE"]));
            int FRAME_WIDTH    = int.Parse(aaFile.SystemMetadataTags["WIDTH"]);
            int FRAME_HEIGHT   = int.Parse(aaFile.SystemMetadataTags["HEIGHT"]);
            int FRAME_PIXELS   = FRAME_WIDTH * FRAME_HEIGHT;

            var             frame     = new ushort[FRAME_PIXELS];
            List <ushort[]> calFrames = new List <ushort[]>();

            for (uint i = 0; i < aaFile.CalibrationSteamInfo.FrameCount; i++)
            {
                var calPix = aaFile.GetCalibrationFramePixels(i);
                for (int j = 0; j < FRAME_PIXELS; j++)
                {
                    frame[j] = (ushort)calPix[j];
                }
                calFrames.Add(frame.ToArray());
            }

            for (int j = 0; j < FRAME_PIXELS; j++)
            {
                frame[j] = 0;
            }

            int    totalIntegratedFrames = 0;
            string tmpBuffFileName       = Path.GetTempFileName();

            using (var tmpFile = File.Create(tmpBuffFileName))
                using (var bw = new BinaryWriter(tmpFile))
                {
                    uint[] pixels = null;
                    for (uint i = 1; i < aaFile.MainSteamInfo.FrameCount; i++)
                    {
                        if (!BAD_FRAMES.Contains(i))
                        {
                            pixels = aaFile.GetMainFramePixels(i);
                        }

                        for (int j = 0; j < FRAME_PIXELS; j++)
                        {
                            if (j < FIRST_OSD_LINE * FRAME_WIDTH)
                            {
                                // Lines above the OSD area: integrate (sum) the pixels
                                frame[j] += (ushort)pixels[j];
                            }
                            else
                            {
                                // OSD area and below: copy the pixels from the current frame
                                frame[j] = (ushort)pixels[j];
                            }
                        }
                        if ((i - (FIRST_FRAME_IN_PERIOD + INTEGRATION - 1)) % INTEGRATION == 0)
                        {
                            for (int j = 0; j < frame.Length; j++)
                            {
                                bw.Write(frame[j]);
                            }

                            totalIntegratedFrames++;
                            for (int j = 0; j < FRAME_PIXELS; j++)
                            {
                                frame[j] = 0;
                            }
                        }
                    }
                }


            var recorder = new AdvRecorder();

            recorder.ImageConfig.SetImageParameters(
                (ushort)aaFile.Width,
                (ushort)aaFile.Height,
                16,
                aaFile.MaxPixelValue);

            recorder.FileMetaData.RecorderSoftwareName    = "Tangra";
            recorder.FileMetaData.RecorderSoftwareVersion = VersionHelper.AssemblyVersion;
            recorder.FileMetaData.CameraModel             = "Unknown";
            recorder.FileMetaData.CameraSensorInfo        = "Unknown";

            var frameRate = 25.00;

            recorder.FileMetaData.NativeFrameRate    = frameRate;
            recorder.FileMetaData.EffectiveFrameRate = 25.0 / INTEGRATION;

            var nativeStandards = string.Empty;

            if (Math.Abs(frameRate - 25.0) < 0.1)
            {
                nativeStandards = "PAL";
            }
            else if (Math.Abs(frameRate - 29.97) < 0.1)
            {
                nativeStandards = "NTSC";
            }
            recorder.FileMetaData.AddUserTag("NATIVE-VIDEO-STANDARD", nativeStandards);
            recorder.FileMetaData.AddUserTag("FRAME-COMBINING", "Binning");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("AAV-VERSION", "2");
            recorder.FileMetaData.AddUserTag("AAV16-NORMVAL", (256 * INTEGRATION).ToString());

            recorder.FileMetaData.AddCalibrationStreamTag("TYPE", "VTI-OSD-CALIBRATION");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());

            recorder.StatusSectionConfig.RecordSystemErrors = true;
            recorder.StatusSectionConfig.AddDefineTag("FRAME-TYPE", Adv2TagType.UTF8String);
            recorder.StatusSectionConfig.AddDefineTag("FRAMES-IN-INTERVAL", Adv2TagType.Int8);
            recorder.StatusSectionConfig.AddDefineTag("NOISE-SIGNATURES", Adv2TagType.UTF8String);
            recorder.StatusSectionConfig.AddDefineTag("ORIGINAL-FRAME-ID", Adv2TagType.Int32);
            recorder.StatusSectionConfig.AddDefineTag("IntegratedFrames", Adv2TagType.Int8);

            recorder.StartRecordingNewFile(fileLocationOut, 0, true);

            int calFrameId = 0;

            foreach (var calFrame in calFrames)
            {
                recorder.AddCalibrationFrame(calFrame, true,
                                             PreferredCompression.Lagarith16,
                                             new AdvRecorder.AdvStatusEntry()
                {
                    AdditionalStatusTags = new[] { "VTI-OSD-CALIBRATION", (object)(byte)0, string.Empty, (object)calFrameId, (object)(byte)0 }
                },
                                             Adv.AdvImageData.PixelDepth16Bit);
                calFrameId++;
            }

            using (var tmpFile = File.OpenRead(tmpBuffFileName))
                using (var rdr = new BinaryReader(tmpFile))
                {
                    for (int i = 0; i < totalIntegratedFrames; i++)
                    {
                        for (int j = 0; j < frame.Length; j++)
                        {
                            frame[j] = rdr.ReadUInt16();
                        }

                        recorder.AddVideoFrame(frame, true,
                                               PreferredCompression.Lagarith16,
                                               new AdvRecorder.AdvStatusEntry()
                        {
                            SystemErrors         = "",
                            AdditionalStatusTags = new[] { "DATA", (object)(byte)8, string.Empty, (object)(int)0, (object)(byte)8 }
                        },
                                               Adv.AdvImageData.PixelDepth16Bit);
                    }
                }

            File.Delete(tmpBuffFileName);

            recorder.FinishRecording();
        }
Example #7
        private static void MergeOverlappingAAVFiles(string file1, string file2)
        {
            string fileLocationOut = Path.GetFullPath(Path.GetDirectoryName(file1) + @"\" + Path.GetFileName(file1) + "-fixed.aav");

            int FIRST_FILE_FRAME_NO_SEC_FILE_ZERO = 110;

            var aaFile1 = new AdvFile2(file1);


            int FIRST_OSD_LINE = int.Parse(aaFile1.SystemMetadataTags["OSD-FIRST-LINE"]);
            int LAST_OSD_LINE  = int.Parse(aaFile1.SystemMetadataTags["OSD-LAST-LINE"]);
            int INTEGRATION    = (int)Math.Round(double.Parse(aaFile1.SystemMetadataTags["NATIVE-FRAME-RATE"]) / double.Parse(aaFile1.SystemMetadataTags["EFFECTIVE-FRAME-RATE"]));
            int FRAME_WIDTH    = int.Parse(aaFile1.SystemMetadataTags["WIDTH"]);
            int FRAME_HEIGHT   = int.Parse(aaFile1.SystemMetadataTags["HEIGHT"]);
            int FRAME_PIXELS   = FRAME_WIDTH * FRAME_HEIGHT;

            var frame = new ushort[FRAME_PIXELS];

            List <ushort[]> calFrames = new List <ushort[]>();

            // TODO: Frames of large AAV files should be saved in a temporary file to avoid running out of memory
            List <ushort[]> mainFrames = new List <ushort[]>();
            List <Tuple <object, object, object> > mainFramesInfo = new List <Tuple <object, object, object> >();

            for (uint i = 0; i < aaFile1.CalibrationSteamInfo.FrameCount; i++)
            {
                var calPix = aaFile1.GetCalibrationFramePixels(i);
                for (int j = 0; j < FRAME_PIXELS; j++)
                {
                    frame[j] = (ushort)calPix[j];
                }
                calFrames.Add(frame.ToArray());
            }

            for (uint i = 0; i < FIRST_FILE_FRAME_NO_SEC_FILE_ZERO; i++)
            {
                AdvFrameInfo frameInfo;
                var          framePix = aaFile1.GetMainFramePixels(i, out frameInfo);
                for (int j = 0; j < FRAME_PIXELS; j++)
                {
                    frame[j] = (ushort)framePix[j];
                }

                mainFrames.Add(frame.ToArray());
                mainFramesInfo.Add(Tuple.Create(frameInfo.Status["IntegratedFrames"], frameInfo.Status["StartFrame"], frameInfo.Status["EndFrame"]));
            }
            aaFile1.Close();

            var aaFile2 = new AdvFile2(file2);

            for (uint i = 0; i < aaFile2.MainSteamInfo.FrameCount; i++)
            {
                AdvFrameInfo frameInfo;
                var          framePix = aaFile2.GetMainFramePixels(i, out frameInfo);
                for (int j = 0; j < FRAME_PIXELS; j++)
                {
                    frame[j] = (ushort)framePix[j];
                }

                mainFrames.Add(frame.ToArray());
                mainFramesInfo.Add(Tuple.Create(frameInfo.Status["IntegratedFrames"], frameInfo.Status["StartFrame"], frameInfo.Status["EndFrame"]));
            }
            aaFile2.Close();

            var recorder = new AdvRecorder();

            recorder.ImageConfig.SetImageParameters(
                (ushort)aaFile1.Width,
                (ushort)aaFile1.Height,
                (byte)aaFile1.DataBpp,
                aaFile1.MaxPixelValue);

            recorder.FileMetaData.RecorderSoftwareName    = "Tangra";
            recorder.FileMetaData.RecorderSoftwareVersion = VersionHelper.AssemblyVersion;
            recorder.FileMetaData.CameraModel             = "Unknown";
            recorder.FileMetaData.CameraSensorInfo        = "Unknown";

            var frameRate = 25.00;

            recorder.FileMetaData.NativeFrameRate    = frameRate;
            recorder.FileMetaData.EffectiveFrameRate = 25.0 / INTEGRATION;

            var nativeStandards = string.Empty;

            if (Math.Abs(frameRate - 25.0) < 0.1)
            {
                nativeStandards = "PAL";
            }
            else if (Math.Abs(frameRate - 29.97) < 0.1)
            {
                nativeStandards = "NTSC";
            }
            recorder.FileMetaData.AddUserTag("NATIVE-VIDEO-STANDARD", nativeStandards);
            recorder.FileMetaData.AddUserTag("FRAME-COMBINING", "Binning");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("AAV-VERSION", "2");
            recorder.FileMetaData.AddUserTag("AAV16-NORMVAL", (256 * INTEGRATION).ToString());

            recorder.FileMetaData.AddCalibrationStreamTag("TYPE", "VTI-OSD-CALIBRATION");
            recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", FIRST_OSD_LINE.ToString());
            recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", LAST_OSD_LINE.ToString());

            recorder.StatusSectionConfig.RecordSystemErrors = true;
            recorder.StatusSectionConfig.AddDefineTag("FRAME-TYPE", Adv2TagType.UTF8String);
            recorder.StatusSectionConfig.AddDefineTag("ORIGINAL-FRAME-ID", Adv2TagType.Int32);
            recorder.StatusSectionConfig.AddDefineTag("IntegratedFrames", Adv2TagType.Int16);
            recorder.StatusSectionConfig.AddDefineTag("StartFrame", Adv2TagType.Long64);
            recorder.StatusSectionConfig.AddDefineTag("EndFrame", Adv2TagType.Long64);

            recorder.StartRecordingNewFile(fileLocationOut, 0, true);

            int calFrameId = 0;

            foreach (var calFrame in calFrames)
            {
                recorder.AddCalibrationFrame(calFrame, true,
                                             PreferredCompression.Lagarith16,
                                             new AdvRecorder.AdvStatusEntry()
                {
                    AdditionalStatusTags = new[] { "VTI-OSD-CALIBRATION", (object)calFrameId, (object)(short)0, (object)(long)0, (object)(long)0 }
                },
                                             Adv.AdvImageData.PixelDepth16Bit);
                calFrameId++;
            }

            for (int frameNo = 0; frameNo < mainFrames.Count; frameNo++)
            {
                Tuple <object, object, object> frameInfo = mainFramesInfo[frameNo];
                recorder.AddVideoFrame(mainFrames[frameNo], true,
                                       PreferredCompression.Lagarith16,
                                       new AdvRecorder.AdvStatusEntry()
                {
                    SystemErrors         = "",
                    AdditionalStatusTags = new[] { "DATA", (object)(int)frameNo, frameInfo.Item1, frameInfo.Item2, frameInfo.Item3 }
                },
                                       Adv.AdvImageData.PixelDepth16Bit);
            }

            recorder.FinishRecording();
        }
Example #8
        private void button1_Click(object sender, EventArgs e)
        {
            AdvRecorder recorder = new AdvRecorder();

            // First set the values of the standard file metadata
            recorder.FileMetaData.RecorderName    = "Genika";
            recorder.FileMetaData.RecorderVersion = "x.y.z";
            recorder.FileMetaData.RecorderTimerFirmwareVersion = "a.b.c";

            recorder.FileMetaData.CameraModel             = "Flea3 FL3-FW-03S3M";
            recorder.FileMetaData.CameraSerialNumber      = "10210906";
            recorder.FileMetaData.CameraVendorNumber      = "Point Grey Research";
            recorder.FileMetaData.CameraSensorInfo        = "Sony ICX414AL (1/2\" 648x488 CCD)";
            recorder.FileMetaData.CameraSensorResolution  = "648x488";
            recorder.FileMetaData.CameraFirmwareVersion   = "1.22.3.0";
            recorder.FileMetaData.CameraFirmwareBuildTime = "Mon Dec 28 20:15:45 2009";
            recorder.FileMetaData.CameraDriverVersion     = "2.2.1.6";

            // Then define additional metadata, if required
            recorder.FileMetaData.AddUserTag("TELESCOPE-NAME", "Large Telescope");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FL", "8300");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FD", "6.5");
            recorder.FileMetaData.AddUserTag("CAMERA-DIGITAL-SAMPLIG", "xxx");
            recorder.FileMetaData.AddUserTag("CAMERA-HDR-RESPONSE", "yyy");
            recorder.FileMetaData.AddUserTag("CAMERA-OPTICAL-RESOLUTION", "zzz");

            if (cbxLocationData.Checked)
            {
                recorder.LocationData.LongitudeWgs84 = "150*38'27.7\"";
                recorder.LocationData.LatitudeWgs84  = "-33*39'49.3\"";
                recorder.LocationData.AltitudeMsl    = "284.4M";
                recorder.LocationData.MslWgs84Offset = "22.4M";
                recorder.LocationData.GpsHdop        = "0.7";
            }

            // Define the image size and bit depth
            byte dynaBits = 16;

            if (rbPixel16.Checked)
            {
                dynaBits = 16;
            }
            else if (rbPixel12.Checked)
            {
                dynaBits = 12;
            }
            else if (rbPixel8.Checked)
            {
                dynaBits = 8;
            }

            byte cameraDepth = 16;

            if (rbCamera16.Checked)
            {
                cameraDepth = 16;
            }
            else if (rbCamera12.Checked)
            {
                cameraDepth = 12;
            }
            else if (rbCamera8.Checked)
            {
                cameraDepth = 8;
            }

            recorder.ImageConfig.SetImageParameters(640, 480, cameraDepth, dynaBits);

            // By default no status section values are recorded. The user must enable the ones they need
            // and can also define additional status parameters to be recorded with each video frame.
            recorder.StatusSectionConfig.RecordGain  = true;
            recorder.StatusSectionConfig.RecordGamma = true;
            int customTagIdCustomGain = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-GAIN", AdvTagType.UInt32);
            int customTagIdMessages   = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-MESSAGES", AdvTagType.List16OfAnsiString255);

            string fileName = Path.GetFullPath(AppDomain.CurrentDomain.BaseDirectory + @"\Filename.adv");

            recorder.StartRecordingNewFile(fileName);

            AdvStatusEntry status = new AdvStatusEntry();

            status.AdditionalStatusTags = new object[2];

            int  imagesCount    = GetTotalImages();
            bool useCompression = cbxCompress.Checked;

            for (int i = 0; i < imagesCount; i++)
            {
                // NOTE: Mocking up some test data
                uint     exposure  = GetCurrentImageExposure(i);
                DateTime timestamp = GetCurrentImageTimeStamp(i);
                status.Gain  = GetCurrentImageGain(i);
                status.Gamma = GetCurrentImageGamma(i);
                status.AdditionalStatusTags[customTagIdMessages]   = GetCurrentExampleMassages(i);
                status.AdditionalStatusTags[customTagIdCustomGain] = GetCurrentExampleCustomGain(i);

                if (rb16BitUShort.Checked)
                {
                    ushort[] imagePixels = GetCurrentImageBytesIn16(i, dynaBits);

                    recorder.AddVideoFrame(
                        imagePixels,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,

                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb16BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth16Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb8BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth8Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
            }

            recorder.StopRecording();

            MessageBox.Show(string.Format("'{0}' has been created.", fileName));
        }
Example #9
        public void GenerateaAdv_V2(AdvGenerationConfig config, string fileName)
        {
            var recorder = new AdvRecorder();

            // First set the values of the standard file metadata
            recorder.FileMetaData.RecorderSoftwareName    = "AdvLibRecorder";
            recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
            recorder.FileMetaData.RecorderHardwareName    = "a.b.c";

            recorder.FileMetaData.CameraModel      = "Flea3 FL3-FW-03S3M";
            recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";

            recorder.FileMetaData.ObjectName = "Generated File Object";
            recorder.FileMetaData.Telescope  = "Generated File Telescope";
            recorder.FileMetaData.Observer   = "Generated File Observer";

            if (config.SaveLocationData)
            {
                recorder.LocationData.SetLocation(
                    150 + 38 / 60.0 + 27.7 / 3600.0,
                    -1 * (33 + 39 / 60.0 + 49.3 / 3600.0),
                    284.4);
            }

            recorder.ImageConfig.SetImageParameters(640, 480, config.DynaBits, config.NormalPixelValue);

            // By default no status section values are recorded. The user must enable the ones they need
            // and can also define additional status parameters to be recorded with each video frame.
            recorder.StatusSectionConfig.RecordGain         = true;
            recorder.StatusSectionConfig.RecordGamma        = true;
            recorder.StatusSectionConfig.RecordSystemErrors = true;

            if (config.MainStreamCustomClock != null)
            {
                recorder.DefineCustomClock(AdvRecorder.AdvStream.MainStream, config.MainStreamCustomClock.ClockFrequency, config.MainStreamCustomClock.TicksTimingAccuracy, config.MainStreamCustomClock.ClockTicksCallback);
            }

            if (config.CalibrationStreamCustomClock != null)
            {
                recorder.DefineCustomClock(AdvRecorder.AdvStream.CalibrationStream, config.CalibrationStreamCustomClock.ClockFrequency, config.CalibrationStreamCustomClock.TicksTimingAccuracy, config.CalibrationStreamCustomClock.ClockTicksCallback);
            }

            if (config.BayerPattern != null)
            {
                recorder.ImageConfig.SetBayerPattern(config.BayerPattern.Value);
            }

            foreach (string key in config.MainStreamMetadata.Keys)
            {
                recorder.FileMetaData.AddMainStreamTag(key, config.MainStreamMetadata[key]);
            }

            foreach (string key in config.CalibrationStreamMetadata.Keys)
            {
                recorder.FileMetaData.AddCalibrationStreamTag(key, config.CalibrationStreamMetadata[key]);
            }

            foreach (string key in config.UserMetadata.Keys)
            {
                recorder.FileMetaData.AddUserTag(key, config.UserMetadata[key]);
            }

            if (config.SaveCustomStatusSectionTags)
            {
                recorder.StatusSectionConfig.AddDefineTag("CustomInt8", Adv2TagType.Int8);
                recorder.StatusSectionConfig.AddDefineTag("CustomInt16", Adv2TagType.Int16);
                recorder.StatusSectionConfig.AddDefineTag("CustomInt32", Adv2TagType.Int32);
                recorder.StatusSectionConfig.AddDefineTag("CustomLong64", Adv2TagType.Long64);
                recorder.StatusSectionConfig.AddDefineTag("CustomReal", Adv2TagType.Real);
                recorder.StatusSectionConfig.AddDefineTag("CustomString", Adv2TagType.UTF8String);
            }

            recorder.StartRecordingNewFile(fileName, config.UtcTimestampAccuracyInNanoseconds);

            AdvRecorder.AdvStatusEntry status = new AdvRecorder.AdvStatusEntry();
            status.AdditionalStatusTags = new object[2];

            for (int i = 0; i < config.NumberOfFrames; i++)
            {
                // NOTE: Get the test data
                uint     exposure       = config.ExposureCallback != null ? config.ExposureCallback(i) : 0;
                DateTime startTimestamp = config.TimeStampCallback != null ? config.TimeStampCallback(i) : DateTime.Now;

                var utcStart = AdvTimeStamp.FromDateTime(startTimestamp);
                var utcEnd   = utcStart.AddNanoseconds(exposure);

                status.Gain         = config.GainCallback != null ? config.GainCallback(i) : 0;
                status.Gamma        = config.GammaCallback != null ? config.GammaCallback(i) : 0;
                status.SystemErrors = config.SystemErrorsCallback != null ? config.SystemErrorsCallback(i) : null;

                if (config.SaveCustomStatusSectionTags)
                {
                    status.AdditionalStatusTags = new object[]
                    {
                        (byte)12, (short)-123, (int)192847, -1 * (long)(0x6E9104B012CD110F), 91.291823f, "Значение 1"
                    };
                }
                else
                {
                    status.AdditionalStatusTags = null;
                }


                if (config.SourceFormat == AdvSourceDataFormat.Format16BitUShort)
                {
                    ushort[] imagePixels = imageGenerator.GetCurrentImageBytesInt16(i, config.DynaBits);

                    recorder.AddVideoFrame(
                        imagePixels,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        config.Compression != CompressionType.Uncompressed,

                        config.Compression == CompressionType.Lagarith16 ? PreferredCompression.Lagarith16 : PreferredCompression.QuickLZ,

                        utcStart,
                        utcEnd,
                        status,
                        AdvImageData.PixelDepth16Bit);
                }
                else if (config.SourceFormat == AdvSourceDataFormat.Format16BitLittleEndianByte)
                {
                    byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        config.Compression != CompressionType.Uncompressed,

                        config.Compression == CompressionType.Lagarith16 ? PreferredCompression.Lagarith16 : PreferredCompression.QuickLZ,

                        utcStart,
                        utcEnd,
                        status,

                        AdvImageData.PixelDepth16Bit);
                }
                else if (config.SourceFormat == AdvSourceDataFormat.Format12BitPackedByte)
                {
                    byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        config.Compression != CompressionType.Uncompressed,

                        config.Compression == CompressionType.Lagarith16 ? PreferredCompression.Lagarith16 : PreferredCompression.QuickLZ,

                        utcStart,
                        utcEnd,
                        status,

                        AdvImageData.PixelData12Bit);
                }
                else if (config.SourceFormat == AdvSourceDataFormat.Format8BitByte)
                {
                    byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        config.Compression != CompressionType.Uncompressed,

                        config.Compression == CompressionType.Lagarith16 ? PreferredCompression.Lagarith16 : PreferredCompression.QuickLZ,

                        utcStart,
                        utcEnd,
                        status,

                        AdvImageData.PixelDepth8Bit);
                }
                else if (config.SourceFormat == AdvSourceDataFormat.Format24BitColour)
                {
                    throw new NotImplementedException();
                }
            }

            recorder.FinishRecording();
        }
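The four branches above differ only in the pixel buffer type and the AdvImageData value passed to AddVideoFrame; the compression decision is repeated verbatim in each one. A minimal sketch (not part of the original source) of a helper that centralises that decision, assuming the example's CompressionType enum and that AddVideoFrame's third argument is a nullable PreferredCompression, as the calls above suggest:

        private static bool ResolveCompression(CompressionType compression, out PreferredCompression? preferred)
        {
            // NOTE: As the comments above warn, compression is slower and may not keep up at high frame rates.
            if (compression == CompressionType.Uncompressed)
            {
                preferred = null;
                return false;
            }

            preferred = compression == CompressionType.Lagarith16
                ? PreferredCompression.Lagarith16
                : PreferredCompression.QuickLZ;

            return true;
        }

Each branch could then pass the returned flag and the resolved preferred compression straight to AddVideoFrame instead of repeating the two conditional expressions.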
Example #10
        private void button1_Click(object sender, EventArgs e)
        {
            AdvRecorder recorder = new AdvRecorder();

            // First set the values of the standard file metadata
            recorder.FileMetaData.RecorderName = "Genika";
            recorder.FileMetaData.RecorderVersion = "x.y.z";
            recorder.FileMetaData.RecorderTimerFirmwareVersion = "a.b.c";

            recorder.FileMetaData.CameraModel = "Flea3 FL3-FW-03S3M";
            recorder.FileMetaData.CameraSerialNumber = "10210906";
            recorder.FileMetaData.CameraVendorNumber = "Point Grey Research";
            recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";
            recorder.FileMetaData.CameraSensorResolution = "648x488";
            recorder.FileMetaData.CameraFirmwareVersion = "1.22.3.0";
            recorder.FileMetaData.CameraFirmwareBuildTime = "Mon Dec 28 20:15:45 2009";
            recorder.FileMetaData.CameraDriverVersion = "2.2.1.6";

            // Then define additional metadata, if required
            recorder.FileMetaData.AddUserTag("TELESCOPE-NAME", "Large Telescope");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FL", "8300");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FD", "6.5");
            recorder.FileMetaData.AddUserTag("CAMERA-DIGITAL-SAMPLIG", "xxx");
            recorder.FileMetaData.AddUserTag("CAMERA-HDR-RESPONSE", "yyy");
            recorder.FileMetaData.AddUserTag("CAMERA-OPTICAL-RESOLUTION", "zzz");

            if (cbxLocationData.Checked)
            {
                recorder.LocationData.LongitudeWgs84 = "150*38'27.7\"";
                recorder.LocationData.LatitudeWgs84 = "-33*39'49.3\"";
                recorder.LocationData.AltitudeMsl = "284.4M";
                recorder.LocationData.MslWgs84Offset = "22.4M";
                recorder.LocationData.GpsHdop = "0.7";
            }

            // Define the image size and bit depth
            byte dynaBits = 16;
            if (rbPixel16.Checked) dynaBits = 16;
            else if (rbPixel12.Checked) dynaBits = 12;
            else if (rbPixel8.Checked) dynaBits = 8;

            byte cameraDepth = 16;
            if (rbCamera16.Checked) cameraDepth = 16;
            else if (rbCamera12.Checked) cameraDepth = 12;
            else if (rbCamera8.Checked) cameraDepth = 8;

            recorder.ImageConfig.SetImageParameters(640, 480, cameraDepth, dynaBits);

            // By default no status section values will be recorded. The user must enable the ones they need recorded and
            // can also define additional status parameters to be recorded with each video frame
            recorder.StatusSectionConfig.RecordGain = true;
            recorder.StatusSectionConfig.RecordGamma = true;
            int customTagIdCustomGain = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-GAIN", AdvTagType.UInt32);
            int customTagIdMessages = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-MESSAGES", AdvTagType.List16OfAnsiString255);
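            // The indices returned by AddDefineTag are the positions at which the corresponding values
            // must be placed in AdvStatusEntry.AdditionalStatusTags for every frame (see the loop below)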

            string fileName = Path.GetFullPath(AppDomain.CurrentDomain.BaseDirectory + @"\Filename.adv");
            recorder.StartRecordingNewFile(fileName);

            AdvStatusEntry status = new AdvStatusEntry();
            status.AdditionalStatusTags = new object[2];

            int imagesCount = GetTotalImages();
            bool useCompression = cbxCompress.Checked;

            for (int i = 0; i < imagesCount; i++)
            {
                // NOTE: Mocking up some test data
                uint exposure = GetCurrentImageExposure(i);
                DateTime timestamp = GetCurrentImageTimeStamp(i);
                status.Gain = GetCurrentImageGain(i);
                status.Gamma = GetCurrentImageGamma(i);
                status.AdditionalStatusTags[customTagIdMessages] = GetCurrentExampleMassages(i);
                status.AdditionalStatusTags[customTagIdCustomGain] = GetCurrentExampleCustomGain(i);

                if (rb16BitUShort.Checked)
                {
                    ushort[] imagePixels = GetCurrentImageBytesIn16(i, dynaBits);

                    recorder.AddVideoFrame(
                        imagePixels,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,

                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb16BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth16Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb8BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth8Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
            }

            recorder.StopRecording();

            MessageBox.Show(string.Format("'{0}' has been created.", fileName));
        }
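The snippet above mocks its input through helpers whose implementations are not shown. Purely hypothetical stand-ins for a few of them (not from the original source), included only so the example reads as self-contained; real code would query the camera driver and timing hardware instead:

        private readonly DateTime acquisitionStart = DateTime.UtcNow;

        private DateTime GetCurrentImageTimeStamp(int frameNo)
        {
            // Assumed: evenly spaced frames 40 ms apart, timed from an arbitrary start point
            return acquisitionStart.AddMilliseconds(40 * frameNo);
        }

        private uint GetCurrentImageExposure(int frameNo)
        {
            // Assumed constant value; the unit is whatever this AddVideoFrame overload expects
            return 400;
        }

        private float GetCurrentImageGain(int frameNo)
        {
            return 12.0f; // assumed fixed gain
        }

        private float GetCurrentImageGamma(int frameNo)
        {
            return 1.0f; // assumed fixed gamma
        }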
        internal void StartConversion(
            string fileName, int topVtiOsdRow, int bottomVtiOsdRow, int leftVtiOsdCol, int rightVtiOsdCol,
            int firstIntegratedFrameId, int integrationInterval, string cameraModel, string sensorInfo,
            bool swapTimestampFields, bool dontValidateIntegrationIntervals)
        {
            m_VideoController.ClearAAVConversionErrors();

            m_Width  = m_VideoController.FramePlayer.Video.Width;
            m_Height = m_VideoController.FramePlayer.Video.Height;

            m_EndFieldParity = swapTimestampFields ? 1 : 0;

            m_TestRect = new Rectangle((m_Width / 2) - 16, (m_Height / 2) - 16, 32, 32);

            m_CurrAavFramePixels = new ushort[m_Width * m_Height];

            m_FileName      = fileName;
            m_MaxPixelValue = 0xFF * integrationInterval;

            m_FirstVtiOsdLine = topVtiOsdRow;
            m_LastVtiOsdLine  = bottomVtiOsdRow;
            m_LeftVtiOsdCol   = leftVtiOsdCol;
            m_RightVtiOsdCol  = rightVtiOsdCol;

            m_IntegrationPeriod = integrationInterval;
            m_FirstIntegrationPeriodStartFrameId        = firstIntegratedFrameId;
            m_DontValidateIntegrationIntervals          = dontValidateIntegrationIntervals;
            m_NextExpectedIntegrationPeriodStartFrameId = firstIntegratedFrameId + integrationInterval;
            m_FramesSoFar = 0;

            m_Recorder = new AdvRecorder();
            m_Recorder.ImageConfig.SetImageParameters(
                (ushort)m_Width,
                (ushort)m_Height,
                16,
                m_MaxPixelValue);

            m_Recorder.FileMetaData.RecorderSoftwareName    = "Tangra";
            m_Recorder.FileMetaData.RecorderSoftwareVersion = VersionHelper.AssemblyFileVersion;
            m_Recorder.FileMetaData.CameraModel             = !string.IsNullOrWhiteSpace(cameraModel) ? cameraModel : "Unknown";
            m_Recorder.FileMetaData.CameraSensorInfo        = !string.IsNullOrWhiteSpace(sensorInfo) ? sensorInfo : "Unknown";

            var frameRate = 1000.0 / m_VideoController.FramePlayer.Video.MillisecondsPerFrame;

            m_Recorder.FileMetaData.NativeFrameRate    = frameRate;
            m_Recorder.FileMetaData.EffectiveFrameRate = 1000.0 / (integrationInterval * m_VideoController.FramePlayer.Video.MillisecondsPerFrame);

            var nativeStandards = string.Empty;

            if (Math.Abs(frameRate - 25.0) < 0.1)
            {
                nativeStandards = "PAL";
            }
            else if (Math.Abs(frameRate - 29.97) < 0.1)
            {
                nativeStandards = "NTSC";
            }
            m_Recorder.FileMetaData.AddUserTag("NATIVE-VIDEO-STANDARD", nativeStandards);
            m_Recorder.FileMetaData.AddUserTag("FRAME-COMBINING", "Binning");
            m_Recorder.FileMetaData.AddUserTag("OSD-FIRST-LINE", topVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddUserTag("OSD-LAST-LINE", bottomVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddUserTag("OSD-LEFT-COLUMN", leftVtiOsdCol.ToString());
            m_Recorder.FileMetaData.AddUserTag("OSD-RIGHT-COLUMN", rightVtiOsdCol.ToString());
            m_Recorder.FileMetaData.AddUserTag("AAV-VERSION", "2");
            m_Recorder.FileMetaData.AddUserTag("AAV16-NORMVAL", m_MaxPixelValue.ToString());
            m_Recorder.FileMetaData.AddUserTag("INTEGRATION-DETECTION", m_DontValidateIntegrationIntervals ? "Manual" : "Automatic");

            m_Recorder.FileMetaData.AddCalibrationStreamTag("TYPE", "VTI-OSD-CALIBRATION");
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-FIRST-LINE", topVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-LAST-LINE", bottomVtiOsdRow.ToString());
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-LEFT-COLUMN", leftVtiOsdCol.ToString());
            m_Recorder.FileMetaData.AddCalibrationStreamTag("OSD-RIGHT-COLUMN", rightVtiOsdCol.ToString());

            m_Recorder.StatusSectionConfig.RecordSystemErrors = true;
            m_Recorder.StatusSectionConfig.AddDefineTag("FRAME-TYPE", Adv2TagType.UTF8String);
            m_Recorder.StatusSectionConfig.AddDefineTag("FRAMES-IN-INTERVAL", Adv2TagType.Int8);
            m_Recorder.StatusSectionConfig.AddDefineTag("NOISE-SIGNATURES", Adv2TagType.UTF8String);
            m_Recorder.StatusSectionConfig.AddDefineTag("ORIGINAL-FRAME-ID", Adv2TagType.Int32);

            m_Recorder.StartRecordingNewFile(m_FileName, 0, true);

            int currFrame = m_VideoController.CurrentFrameIndex;

            try
            {
                m_VideoController.SetPictureBoxCursor(Cursors.WaitCursor);
                m_VideoController.NotifyFileProgress(-1, 16);

                Pixelmap frame;
                ushort[] pixels;

                for (int i = currFrame; i < Math.Min(currFrame + 16, m_VideoController.VideoLastFrame); i++)
                {
                    frame = m_VideoController.GetFrame(i);

                    pixels = frame.Pixels.Select(x => (ushort)(integrationInterval * x)).ToArray();
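                    // NOTE: Scaling by the integration interval (above) keeps these calibration frames on the
                    // same AAV16-NORMVAL scale as the integrated frames. The AdditionalStatusTags values below
                    // must follow the order of the AddDefineTag calls made earlier: FRAME-TYPE (string),
                    // FRAMES-IN-INTERVAL (byte), NOISE-SIGNATURES (string), ORIGINAL-FRAME-ID (int).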
                    m_Recorder.AddCalibrationFrame(pixels, true,
                                                   PreferredCompression.Lagarith16,
                                                   new AdvRecorder.AdvStatusEntry()
                    {
                        AdditionalStatusTags = new[] { "VTI-OSD-CALIBRATION", (object)(byte)0, string.Empty, (object)i }
                    },
                                                   Adv.AdvImageData.PixelDepth16Bit);

                    m_VideoController.NotifyFileProgress(i - currFrame, 16);
                }

                frame = m_VideoController.GetFrame(m_FirstIntegrationPeriodStartFrameId);

                pixels = frame.Pixels.Select(x => (ushort)(integrationInterval * x)).ToArray();
                m_Recorder.AddCalibrationFrame(pixels, true,
                                               PreferredCompression.Lagarith16,
                                               new AdvRecorder.AdvStatusEntry()
                {
                    AdditionalStatusTags = new[] { "FIELD-CALIBRATION", (object)(byte)0, string.Empty, (object)m_FirstIntegrationPeriodStartFrameId }
                },
                                               Adv.AdvImageData.PixelDepth16Bit);
            }
            finally
            {
                m_VideoController.NotifyFileProgress(-1, 0);
                m_VideoController.SetPictureBoxCursor(Cursors.Default);
            }
        }
Example #12
        public void TestStatusTagsAreSavedAndReadCorrectly()
        {
            string fileName = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));

            if (File.Exists(fileName))
            {
                File.Delete(fileName);
            }

            try
            {
                // Generate
                var recorder = new AdvRecorder();
                recorder.ImageConfig.SetImageParameters(640, 480, 16, 0);

                recorder.FileMetaData.RecorderSoftwareName    = "AdvLibTestRecorder";
                recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
                recorder.FileMetaData.RecorderHardwareName    = "a.b.c";
                recorder.FileMetaData.CameraModel             = "TestCamera";
                recorder.FileMetaData.CameraSensorInfo        = "TestSensor";


                recorder.StatusSectionConfig.RecordGain                 = true;
                recorder.StatusSectionConfig.RecordGamma                = true;
                recorder.StatusSectionConfig.RecordShutter              = true;
                recorder.StatusSectionConfig.RecordCameraOffset         = true;
                recorder.StatusSectionConfig.RecordSystemTime           = true;
                recorder.StatusSectionConfig.RecordTrackedSatellites    = true;
                recorder.StatusSectionConfig.RecordAlmanacStatus        = true;
                recorder.StatusSectionConfig.RecordAlmanacOffset        = true;
                recorder.StatusSectionConfig.RecordFixStatus            = true;
                recorder.StatusSectionConfig.RecordSystemErrors         = true;
                recorder.StatusSectionConfig.RecordVideoCameraFrameId   = true;
                recorder.StatusSectionConfig.RecordHardwareTimerFrameId = true;

                recorder.StatusSectionConfig.AddDefineTag("CustomInt8", Adv2TagType.Int8);
                recorder.StatusSectionConfig.AddDefineTag("CustomInt16", Adv2TagType.Int16);
                recorder.StatusSectionConfig.AddDefineTag("CustomInt32", Adv2TagType.Int32);
                recorder.StatusSectionConfig.AddDefineTag("CustomLong64", Adv2TagType.Long64);
                recorder.StatusSectionConfig.AddDefineTag("CustomReal", Adv2TagType.Real);
                recorder.StatusSectionConfig.AddDefineTag("CustomString", Adv2TagType.UTF8String);

                recorder.StartRecordingNewFile(fileName, 0);

                var systemTimeStamp = DateTime.Now.AddMilliseconds(123);

                var status = new AdvRecorder.AdvStatusEntry()
                {
                    AlmanacStatus     = AlmanacStatus.Good,
                    AlmanacOffset     = 14,
                    TrackedSatellites = 8,
                    CameraOffset      = 8.23f,
                    FixStatus         = FixStatus.PFix,
                    Gain                 = 32.82f,
                    Gamma                = 0.35f,
                    Shutter              = 2.502f,
                    SystemTime           = AdvTimeStamp.FromDateTime(systemTimeStamp),
                    VideoCameraFrameId   = 19289232,
                    HardwareTimerFrameId = 9102
                };

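                // The order and types below must match the AddDefineTag calls above:
                // CustomInt8, CustomInt16, CustomInt32, CustomLong64, CustomReal, CustomString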
                status.AdditionalStatusTags = new object[]
                {
                    (byte)12, (short)-123, (int)192847, -1 * (long)(0x6E9104B012CD110F), 91.291823f, "Значение 1"
                };

                var      imageGenerator = new ImageGenerator();
                ushort[] imagePixels    = imageGenerator.GetCurrentImageBytesInt16(0, 16);

                recorder.AddVideoFrame(
                    imagePixels, false, null,
                    AdvTimeStamp.FromDateTime(DateTime.Now),
                    AdvTimeStamp.FromDateTime(DateTime.Now.AddSeconds(2.56)),
                    status, AdvImageData.PixelDepth16Bit);

                recorder.FinishRecording();

                // Verify
                using (var loadedFile = new AdvFile2(fileName))
                {
                    AdvFrameInfo frameInfo;
                    loadedFile.GetMainFramePixels(0, out frameInfo);

                    Assert.AreEqual(status.Gain, frameInfo.Gain, 0.000001);
                    Assert.AreEqual(status.Gamma, frameInfo.Gamma, 0.000001);
                    Assert.AreEqual(status.Shutter, frameInfo.Shutter, 0.000001);
                    Assert.AreEqual(status.CameraOffset, frameInfo.Offset, 0.000001);
                    Assert.AreEqual(status.FixStatus, (FixStatus)frameInfo.GPSFixStatus);
                    Assert.AreEqual(status.AlmanacStatus, (AlmanacStatus)frameInfo.GPSAlmanacStatus);
                    Assert.AreEqual(status.TrackedSatellites, frameInfo.GPSTrackedSattelites);
                    Assert.AreEqual(status.AlmanacOffset, frameInfo.GPSAlmanacOffset);
                    Assert.AreEqual(status.VideoCameraFrameId, frameInfo.VideoCameraFrameId);
                    Assert.AreEqual(status.HardwareTimerFrameId, frameInfo.HardwareTimerFrameId);
                    Assert.AreEqual(systemTimeStamp.Ticks, frameInfo.SystemTimestamp.Ticks);

                    Assert.AreEqual(status.AdditionalStatusTags[0], frameInfo.Status["CustomInt8"]);
                    Assert.AreEqual(status.AdditionalStatusTags[1], frameInfo.Status["CustomInt16"]);
                    Assert.AreEqual(status.AdditionalStatusTags[2], frameInfo.Status["CustomInt32"]);
                    Assert.AreEqual(status.AdditionalStatusTags[3], frameInfo.Status["CustomLong64"]);
                    Assert.AreEqual(status.AdditionalStatusTags[4], frameInfo.Status["CustomReal"]);
                    Assert.AreEqual(status.AdditionalStatusTags[5], frameInfo.Status["CustomString"]);
                }
            }
            finally
            {
                try
                {
                    if (File.Exists(fileName))
                    {
                        File.Delete(fileName);
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex);
                    Trace.WriteLine(ex);
                }
            }
        }