Example #1
0
            /// <summary>
            /// Adds a new video frame from an ushort array.
            /// </summary>
            /// <param name="pixels">The pixels to be saved. The row-major array is of size 2 * Width * Height. This only works in little-endian 16-bit mode.</param>
            /// <param name="compress">True if the frame is to be compressed. Compression is CPU and I/O intensive and may not keep up at high frame rates. Use wisely.</param>
            /// <param name="timeStamp">The high accuracy timestamp for the middle of the frame. Set to zero if the timestamp is not accurate to 1 ms; a lower accuracy timestamp can go in the SystemTime status value.</param>
            /// <param name="exposureIn10thMilliseconds">The frame duration in whole 0.1 ms as determined by the high accuracy timestamping, or zero when unavailable. The Shutter status value should come from the camera settings rather than the timestamps.</param>
            /// <param name="metadata">The status metadata to be saved with the video frame.</param>
            public void AddVideoFrame(ushort[] pixels, bool compress, AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds,
                                      AdvStatusEntry metadata)
            {
                BeginVideoFrame(timeStamp, exposureIn10thMilliseconds, metadata);

                // Pick the layout that matches the requested compression mode.
                byte layoutId;
                if (compress)
                {
                    layoutId = CFG_ADV_LAYOUT_3_COMPRESSED;
                }
                else
                {
                    layoutId = CFG_ADV_LAYOUT_1_UNCOMPRESSED;
                }

                // 16-bit pixel data, then close the frame.
                AdvLib.Obsolete.AdvVer1.FrameAddImage(layoutId, pixels, 16);
                AdvLib.Obsolete.AdvVer1.EndFrame();
            }
Example #2
0
        /// <summary>
        /// Demonstrates a complete ADV recording session: sets the standard file,
        /// camera and location metadata, configures the image geometry and status
        /// section, then records every test frame in the selected pixel format
        /// before closing the file.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            AdvRecorder recorder = new AdvRecorder();

            // First set the values of the standard file metadata
            recorder.FileMetaData.RecorderName    = "Genika";
            recorder.FileMetaData.RecorderVersion = "x.y.z";
            recorder.FileMetaData.RecorderTimerFirmwareVersion = "a.b.c";

            recorder.FileMetaData.CameraModel             = "Flea3 FL3-FW-03S3M";
            recorder.FileMetaData.CameraSerialNumber      = "10210906";
            recorder.FileMetaData.CameraVendorNumber      = "Point Grey Research";
            recorder.FileMetaData.CameraSensorInfo        = "Sony ICX414AL (1/2\" 648x488 CCD)";
            recorder.FileMetaData.CameraSensorResolution  = "648x488";
            recorder.FileMetaData.CameraFirmwareVersion   = "1.22.3.0";
            recorder.FileMetaData.CameraFirmwareBuildTime = "Mon Dec 28 20:15:45 2009";
            recorder.FileMetaData.CameraDriverVersion     = "2.2.1.6";

            // Then define additional metadata, if required
            recorder.FileMetaData.AddUserTag("TELESCOPE-NAME", "Large Telescope");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FL", "8300");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FD", "6.5");
            // Fixed tag-name typo: "SAMPLIG" -> "SAMPLING"
            recorder.FileMetaData.AddUserTag("CAMERA-DIGITAL-SAMPLING", "xxx");
            recorder.FileMetaData.AddUserTag("CAMERA-HDR-RESPONSE", "yyy");
            recorder.FileMetaData.AddUserTag("CAMERA-OPTICAL-RESOLUTION", "zzz");

            if (cbxLocationData.Checked)
            {
                recorder.LocationData.LongitudeWgs84 = "150*38'27.7\"";
                recorder.LocationData.LatitudeWgs84  = "-33*39'49.3\"";
                recorder.LocationData.AltitudeMsl    = "284.4M";
                recorder.LocationData.MslWgs84Offset = "22.4M";
                recorder.LocationData.GpsHdop        = "0.7";
            }

            // Define the image size and bit depth
            byte dynaBits = 16;

            if (rbPixel16.Checked)
            {
                dynaBits = 16;
            }
            else if (rbPixel12.Checked)
            {
                dynaBits = 12;
            }
            else if (rbPixel8.Checked)
            {
                dynaBits = 8;
            }

            byte cameraDepth = 16;

            if (rbCamera16.Checked)
            {
                cameraDepth = 16;
            }
            else if (rbCamera12.Checked)
            {
                cameraDepth = 12;
            }
            else if (rbCamera8.Checked)
            {
                cameraDepth = 8;
            }

            recorder.ImageConfig.SetImageParameters(640, 480, cameraDepth, dynaBits);

            // By default no status section values will be recorded. The user must enable the ones they need recorded and
            // can also define additional status parameters to be recorded with each video frame
            recorder.StatusSectionConfig.RecordGain  = true;
            recorder.StatusSectionConfig.RecordGamma = true;
            int customTagIdCustomGain = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-GAIN", AdvTagType.UInt32);
            int customTagIdMessages   = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-MESSAGES", AdvTagType.List16OfAnsiString255);

            // Path.Combine is platform-safe and avoids hard-coding the '\' separator
            string fileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Filename.adv");

            recorder.StartRecordingNewFile(fileName);

            AdvStatusEntry status = new AdvStatusEntry();

            // One slot per custom tag defined above (indexed by the returned tag ids)
            status.AdditionalStatusTags = new object[2];

            int  imagesCount    = GetTotalImages();
            bool useCompression = cbxCompress.Checked;

            for (int i = 0; i < imagesCount; i++)
            {
                // NOTE: Mocking up some test data
                uint     exposure  = GetCurrentImageExposure(i);
                DateTime timestamp = GetCurrentImageTimeStamp(i);
                status.Gain  = GetCurrentImageGain(i);
                status.Gamma = GetCurrentImageGamma(i);
                status.AdditionalStatusTags[customTagIdMessages]   = GetCurrentExampleMassages(i);
                status.AdditionalStatusTags[customTagIdCustomGain] = GetCurrentExampleCustomGain(i);

                if (rb16BitUShort.Checked)
                {
                    ushort[] imagePixels = GetCurrentImageBytesIn16(i, dynaBits);

                    recorder.AddVideoFrame(
                        imagePixels,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,

                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb16BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth16Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb8BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth8Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
            }

            recorder.StopRecording();

            MessageBox.Show(string.Format("'{0}' has been created.", fileName));
        }
Example #3
0
        /// <summary>
        /// Demonstrates a complete ADV recording session: sets the standard file,
        /// camera and location metadata, configures the image geometry and status
        /// section, then records every test frame in the selected pixel format
        /// before closing the file.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            AdvRecorder recorder = new AdvRecorder();

            // First set the values of the standard file metadata
            recorder.FileMetaData.RecorderName = "Genika";
            recorder.FileMetaData.RecorderVersion = "x.y.z";
            recorder.FileMetaData.RecorderTimerFirmwareVersion = "a.b.c";

            recorder.FileMetaData.CameraModel = "Flea3 FL3-FW-03S3M";
            recorder.FileMetaData.CameraSerialNumber = "10210906";
            recorder.FileMetaData.CameraVendorNumber = "Point Grey Research";
            recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";
            recorder.FileMetaData.CameraSensorResolution = "648x488";
            recorder.FileMetaData.CameraFirmwareVersion = "1.22.3.0";
            recorder.FileMetaData.CameraFirmwareBuildTime = "Mon Dec 28 20:15:45 2009";
            recorder.FileMetaData.CameraDriverVersion = "2.2.1.6";

            // Then define additional metadata, if required
            recorder.FileMetaData.AddUserTag("TELESCOPE-NAME", "Large Telescope");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FL", "8300");
            recorder.FileMetaData.AddUserTag("TELESCOPE-FD", "6.5");
            // Fixed tag-name typo: "SAMPLIG" -> "SAMPLING"
            recorder.FileMetaData.AddUserTag("CAMERA-DIGITAL-SAMPLING", "xxx");
            recorder.FileMetaData.AddUserTag("CAMERA-HDR-RESPONSE", "yyy");
            recorder.FileMetaData.AddUserTag("CAMERA-OPTICAL-RESOLUTION", "zzz");

            if (cbxLocationData.Checked)
            {
                recorder.LocationData.LongitudeWgs84 = "150*38'27.7\"";
                recorder.LocationData.LatitudeWgs84 = "-33*39'49.3\"";
                recorder.LocationData.AltitudeMsl = "284.4M";
                recorder.LocationData.MslWgs84Offset = "22.4M";
                recorder.LocationData.GpsHdop = "0.7";
            }

            // Define the image size and bit depth
            byte dynaBits = 16;
            if (rbPixel16.Checked) dynaBits = 16;
            else if (rbPixel12.Checked) dynaBits = 12;
            else if (rbPixel8.Checked) dynaBits = 8;

            byte cameraDepth = 16;
            if (rbCamera16.Checked) cameraDepth = 16;
            else if (rbCamera12.Checked) cameraDepth = 12;
            else if (rbCamera8.Checked) cameraDepth = 8;

            recorder.ImageConfig.SetImageParameters(640, 480, cameraDepth, dynaBits);

            // By default no status section values will be recorded. The user must enable the ones they need recorded and
            // can also define additional status parameters to be recorded with each video frame
            recorder.StatusSectionConfig.RecordGain = true;
            recorder.StatusSectionConfig.RecordGamma = true;
            int customTagIdCustomGain = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-GAIN", AdvTagType.UInt32);
            int customTagIdMessages = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-MESSAGES", AdvTagType.List16OfAnsiString255);

            // Path.Combine is platform-safe and avoids hard-coding the '\' separator
            string fileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Filename.adv");
            recorder.StartRecordingNewFile(fileName);

            AdvStatusEntry status = new AdvStatusEntry();
            // One slot per custom tag defined above (indexed by the returned tag ids)
            status.AdditionalStatusTags = new object[2];

            int imagesCount = GetTotalImages();
            bool useCompression = cbxCompress.Checked;

            for (int i = 0; i < imagesCount; i++)
            {
                // NOTE: Mocking up some test data
                uint exposure = GetCurrentImageExposure(i);
                DateTime timestamp = GetCurrentImageTimeStamp(i);
                status.Gain = GetCurrentImageGain(i);
                status.Gamma = GetCurrentImageGamma(i);
                status.AdditionalStatusTags[customTagIdMessages] = GetCurrentExampleMassages(i);
                status.AdditionalStatusTags[customTagIdCustomGain] = GetCurrentExampleCustomGain(i);

                if (rb16BitUShort.Checked)
                {
                    ushort[] imagePixels = GetCurrentImageBytesIn16(i, dynaBits);

                    recorder.AddVideoFrame(
                        imagePixels,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,

                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb16BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth16Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
                else if (rb8BitByte.Checked)
                {
                    byte[] imageBytes = GetCurrentImageBytes(i, dynaBits);

                    recorder.AddVideoFrame(
                        imageBytes,

                        // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                        // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                        useCompression,
                        AdvImageData.PixelDepth8Bit,
                        AdvTimeStamp.FromDateTime(timestamp),
                        exposure,
                        status);
                }
            }

            recorder.StopRecording();

            MessageBox.Show(string.Format("'{0}' has been created.", fileName));
        }
Example #4
0
            /// <summary>
            /// Starts a new video frame in the ADV file and writes every enabled
            /// status section value, followed by any user-defined status tags.
            /// Must be paired with an EndFrame() call by the caller.
            /// </summary>
            /// <param name="timeStamp">High accuracy timestamp for the middle of the frame; zero when unavailable.</param>
            /// <param name="exposureIn10thMilliseconds">Frame duration in 0.1 ms units; zero when no high accuracy timestamping exists.</param>
            /// <param name="metadata">The status values to record with this frame.</param>
            private void BeginVideoFrame(AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds, AdvStatusEntry metadata)
            {
                ulong elapsedTimeMilliseconds = 0;                 // since the first recorded frame was taken

                if (m_NumberRecordedFrames > 0 && m_FirstRecordedFrameTimestamp != 0)
                {
                    // NOTE(review): unsigned subtraction — if timeStamp ever precedes the first
                    // recorded frame's timestamp this underflows; confirm timestamps are monotonic.
                    elapsedTimeMilliseconds = timeStamp.MillisecondsAfterAdvZeroEpoch - m_FirstRecordedFrameTimestamp;
                }
                else if (m_NumberRecordedFrames == 0)
                {
                    // Remember the very first frame's timestamp as the elapsed-time origin.
                    m_FirstRecordedFrameTimestamp = timeStamp.MillisecondsAfterAdvZeroEpoch;
                }

                // NOTE(review): the (uint) cast truncates elapsed times above ~49.7 days — presumably
                // acceptable for a single recording session, but worth confirming.
                bool frameStartedOk = AdvLib.Obsolete.AdvVer1.BeginFrame((long)timeStamp.MillisecondsAfterAdvZeroEpoch,
                                                                         elapsedTimeMilliseconds > 0 ? (uint)elapsedTimeMilliseconds : 0,
                                                                         exposureIn10thMilliseconds);

                if (!frameStartedOk)
                {
                    // If we can't add the first frame, this may be a file creation issue; otherwise increase the dropped frames counter
                    if (m_NumberRecordedFrames > 0)
                    {
                        m_NumberDroppedFrames++;
                    }
                    return;
                }

                // Each status value is only written when explicitly enabled in the status section config.
                if (StatusSectionConfig.RecordSystemTime)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(m_TAGID_SystemTime,
                                                                metadata.SystemTime.MillisecondsAfterAdvZeroEpoch > 0
                                                                                                                   ? (ulong)metadata.SystemTime.MillisecondsAfterAdvZeroEpoch
                                                                                                                   : 0);
                }

                if (StatusSectionConfig.RecordGPSTrackedSatellites)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_TrackedGPSSatellites, metadata.GPSTrackedSatellites);
                }
                if (StatusSectionConfig.RecordGPSAlmanacStatus)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSAlmanacStatus, (byte)metadata.GPSAlmanacStatus);
                }
                if (StatusSectionConfig.RecordGPSAlmanacOffset)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSAlmanacOffset, metadata.GPSAlmanacOffset);
                }
                if (StatusSectionConfig.RecordGPSFixStatus)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSFixStatus, (byte)metadata.GPSFixStatus);
                }
                if (StatusSectionConfig.RecordGain)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Gain, metadata.Gain);
                }
                if (StatusSectionConfig.RecordGamma)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Gamma, metadata.Gamma);
                }
                if (StatusSectionConfig.RecordShutter)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Shutter, metadata.Shutter);
                }
                if (StatusSectionConfig.RecordCameraOffset)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Offset, metadata.CameraOffset);
                }
                if (StatusSectionConfig.RecordVideoCameraFrameId)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(m_TAGID_VideoCameraFrameId, metadata.VideoCameraFrameId);
                }

                // User commands: at most 16 entries, each truncated to the 255-char ANSI string limit.
                if (StatusSectionConfig.RecordUserCommands && metadata.UserCommands != null)
                {
                    for (int i = 0; i < Math.Min(16, metadata.UserCommands.Count()); i++)
                    {
                        if (metadata.UserCommands[i] != null)
                        {
                            if (metadata.UserCommands[i].Length > 255)
                            {
                                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_UserCommand, metadata.UserCommands[i].Substring(0, 255));
                            }
                            else
                            {
                                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_UserCommand, metadata.UserCommands[i]);
                            }
                        }
                    }
                }

                // System errors: same 16-entry / 255-char limits as user commands.
                if (StatusSectionConfig.RecordSystemErrors && metadata.SystemErrors != null)
                {
                    for (int i = 0; i < Math.Min(16, metadata.SystemErrors.Count()); i++)
                    {
                        if (metadata.SystemErrors[i] != null)
                        {
                            if (metadata.SystemErrors[i].Length > 255)
                            {
                                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_SystemError, metadata.SystemErrors[i].Substring(0, 255));
                            }
                            else
                            {
                                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_SystemError, metadata.SystemErrors[i]);
                            }
                        }
                    }
                }

                // User-defined tags: metadata.AdditionalStatusTags is indexed in the same order the
                // tags were defined (AddDefineTag return values), so the running index tracks the
                // iteration over AdditionalStatusTags.Keys.
                // NOTE(review): this relies on the dictionary enumerating keys in definition order —
                // confirm AdditionalStatusTags preserves insertion order.
                int additionalStatusTagId = -1;

                foreach (string tagName in StatusSectionConfig.AdditionalStatusTags.Keys)
                {
                    uint tagId = m_AdditionalStatusSectionTagIds[tagName];
                    additionalStatusTagId++;
                    object statusTagValue = metadata.AdditionalStatusTags[additionalStatusTagId];

                    // Dispatch on the declared tag type; the stored object must unbox to the exact
                    // declared type or the cast below throws InvalidCastException.
                    switch (StatusSectionConfig.AdditionalStatusTags[tagName])
                    {
                    case AdvTagType.UInt8:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(tagId, (byte)statusTagValue);
                        break;

                    case AdvTagType.UInt16:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTag16(tagId, (ushort)statusTagValue);
                        break;

                    case AdvTagType.UInt32:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTag32(tagId, (uint)statusTagValue);
                        break;

                    case AdvTagType.ULong64:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(tagId, (ulong)statusTagValue);
                        break;

                    case AdvTagType.Real:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(tagId, (float)statusTagValue);
                        break;

                    case AdvTagType.AnsiString255:
                        AdvLib.Obsolete.AdvVer1.FrameAddStatusTag(tagId, (string)statusTagValue);
                        break;

                    case AdvTagType.List16OfAnsiString255:
                        // String lists follow the same 16-entry / 255-char limits as above.
                        string[] lines = (string[])statusTagValue;
                        for (int i = 0; i < Math.Min(16, lines.Count()); i++)
                        {
                            if (lines[i] != null)
                            {
                                if (lines[i].Length > 255)
                                {
                                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(tagId, lines[i].Substring(0, 255));
                                }
                                else
                                {
                                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(tagId, lines[i]);
                                }
                            }
                        }
                        break;
                    }
                }
            }
Example #5
0
            /// <summary>
            /// Adds a new video frame from a byte array.
            /// </summary>
            /// <param name="pixels">The pixels to be saved. The row-major array is of size Width * Height in 8-bit mode and 2 * Width * Height in little-endian 16-bit mode.</param>
            /// <param name="compress">True if the frame is to be compressed. Compression is CPU and I/O intensive and may not keep up at high frame rates. Use wisely.</param>
            /// <param name="imageData">The format of the pixels - 8 bit or 16 bit.</param>
            /// <param name="timeStamp">The high accuracy timestamp for the middle of the frame. Set to zero if the timestamp is not accurate to 1 ms; a lower accuracy timestamp can go in the SystemTime status value.</param>
            /// <param name="exposureIn10thMilliseconds">The frame duration in whole 0.1 ms as determined by the high accuracy timestamping, or zero when unavailable. The Shutter status value should come from the camera settings rather than the timestamps.</param>
            /// <param name="metadata">The status metadata to be saved with the video frame.</param>
            public void AddVideoFrame(byte[] pixels, bool compress, AdvImageData imageData, AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds, AdvStatusEntry metadata)
            {
                BeginVideoFrame(timeStamp, exposureIn10thMilliseconds, metadata);

                // Each pixel format maps to its own pair of compressed/uncompressed layouts.
                switch (imageData)
                {
                    case AdvImageData.PixelDepth16Bit:
                        AdvLib.Obsolete.AdvVer1.FrameAddImageBytes(
                            compress ? CFG_ADV_LAYOUT_3_COMPRESSED : CFG_ADV_LAYOUT_1_UNCOMPRESSED,
                            pixels,
                            16);
                        break;

                    case AdvImageData.PixelDepth8Bit:
                        AdvLib.Obsolete.AdvVer1.FrameAddImageBytes(
                            compress ? CFG_ADV_LAYOUT_5_COMPRESSED : CFG_ADV_LAYOUT_4_UNCOMPRESSED,
                            pixels,
                            8);
                        break;
                }

                AdvLib.Obsolete.AdvVer1.EndFrame();
            }