/// <summary>
/// Demonstrates recording a minimal 16-bit ADV file containing 10 blank frames.
/// </summary>
public void SampleAdvFileRecording2()
{
    fileName = @"C:\hello-world.adv";

    const uint MILLI_TO_NANO = 1000000;
    const int WIDTH = 800;
    const int HEIGHT = 600;

    var recorder = new AdvRecorder();
    recorder.ImageConfig.SetImageParameters(WIDTH, HEIGHT, 16, 0);

    recorder.FileMetaData.RecorderSoftwareName = "MyVideoRecorder";
    recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
    recorder.FileMetaData.CameraModel = "TestCamera";
    recorder.FileMetaData.CameraSensorInfo = "TestSensor";

    recorder.StartRecordingNewFile(fileName, 1 * MILLI_TO_NANO /* 1ms */);

    // BUGFIX: previously the start timestamp was re-sampled with DateTime.Now on
    // every iteration while the end timestamp was Now + 0.5 * i, so frame 0's
    // exposure ended at (or before) the moment it started. Capture the start of
    // the first exposure once and derive consistent, contiguous [start, end)
    // intervals of 0.5 s per frame (same scheme as SampleAdvFileRecording5).
    DateTime startTime = DateTime.UtcNow;
    const double EXPOSURE_SEC = 0.5;

    for (int i = 0; i < 10; i++)
    {
        ushort[] pixels = new ushort[WIDTH * HEIGHT];

        recorder.AddVideoFrame(
            pixels,
            false,
            null,
            AdvTimeStamp.FromDateTime(startTime.AddSeconds(EXPOSURE_SEC * i)),
            AdvTimeStamp.FromDateTime(startTime.AddSeconds(EXPOSURE_SEC * (i + 1))),
            null,
            AdvImageData.PixelDepth16Bit);
    }

    recorder.FinishRecording();
}
/// <summary>
/// Adds a new video frame from an ushort array.
/// </summary>
/// <param name="pixels">The pixels to be saved. The row-major array is of size Width * Height — one ushort element per 16-bit pixel. (The "2 * Width * Height" size applies only to the byte-array overload.)</param>
/// <param name="compress">True if the frame is to be compressed. Please note that compression is CPU and I/O intensive and may not work at high frame rates. Use wisely.</param>
/// <param name="timeStamp">The high accuracy timestamp for the middle of the frame. If the timestamp is not with an accuracy of 1ms then set it as zero. A lower accuracy timestamp can be specified in the SystemTime status value.</param>
/// <param name="exposureIn10thMilliseconds">The duration of the frame in whole 0.1 ms as determined by the high accuracy timestamping. If high accuracy timestamp is not available then set this to zero. Note that the Shutter status value should be derived from the camera settings rather than from the timestamps.</param>
/// <param name="metadata">The status metadata to be saved with the video frame.</param>
public void AddVideoFrame(ushort[] pixels, bool compress, AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds, AdvStatusEntry metadata)
{
    BeginVideoFrame(timeStamp, exposureIn10thMilliseconds, metadata);

    // Layout 3 is the compressed 16-bit layout, layout 1 the uncompressed one.
    byte layoutIdForCurrentFramerate = compress ? CFG_ADV_LAYOUT_3_COMPRESSED : CFG_ADV_LAYOUT_1_UNCOMPRESSED;
    AdvLib.Obsolete.AdvVer1.FrameAddImage(layoutIdForCurrentFramerate, pixels, 16);

    AdvLib.Obsolete.AdvVer1.EndFrame();
}
/// <summary>
/// Demonstrates recording a 16-bit ADV file with file metadata, user tags and
/// two custom status-section tags recorded with every frame.
/// </summary>
public void SampleAdvFileRecording5()
{
    fileName = @"C:\hello-world-5.adv";

    const uint NANOS_PER_MILLI = 1000000;
    const int FRAME_WIDTH = 800;
    const int FRAME_HEIGHT = 600;

    var recorder = new AdvRecorder();
    recorder.ImageConfig.SetImageParameters(FRAME_WIDTH, FRAME_HEIGHT, 16, 0);

    recorder.FileMetaData.RecorderSoftwareName = "MyVideoRecorder";
    recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
    recorder.FileMetaData.CameraModel = "TestCamera";
    recorder.FileMetaData.CameraSensorInfo = "TestSensor";
    recorder.FileMetaData.GainResponseMode = ResponseMode.Linear;
    recorder.FileMetaData.Telescope = "14\" LX-200 ACF (Tangra Observatory)";
    recorder.FileMetaData.Observer = "Hristo Pavlov";
    recorder.FileMetaData.ObjectName = "Chariklo";
    recorder.FileMetaData.Comment = "Full moon only 20 deg away from the target.";
    recorder.FileMetaData.AddUserTag("Timing Hardware", "IOTA-VTI v3");

    recorder.StatusSectionConfig.AddDefineTag("ErrorFlag", Adv2TagType.Int8);
    recorder.StatusSectionConfig.AddDefineTag("Temperature", Adv2TagType.Real);

    recorder.StartRecordingNewFile(fileName, 1 * NANOS_PER_MILLI /* 1ms */);

    // TODO: Get the real actual timestamps and exposure
    DateTime firstExposureUtc = DateTime.UtcNow;
    double frameDurationSec = 0.5;

    for (int frameNo = 0; frameNo < 10; frameNo++)
    {
        var framePixels = new ushort[FRAME_WIDTH * FRAME_HEIGHT];

        // The order and types of the values must match the AddDefineTag calls above.
        var frameStatus = new AdvRecorder.AdvStatusEntry()
        {
            AdditionalStatusTags = new object[] { (byte)1, 15.3f }
        };

        recorder.AddVideoFrame(
            framePixels,
            false,
            null,
            AdvTimeStamp.FromDateTime(firstExposureUtc.AddSeconds(frameDurationSec * frameNo)),
            AdvTimeStamp.FromDateTime(firstExposureUtc.AddSeconds(frameDurationSec * (frameNo + 1))),
            frameStatus,
            AdvImageData.PixelDepth16Bit);
    }

    recorder.FinishRecording();
}
/// <summary>
/// Demonstrates recording an ADV version 2 file using the low-level AdvLib API
/// directly (file tags, image section/layout, status section, frames).
/// </summary>
public void SampleAdvFileRecording()
{
    fileName = @"C:\hello-world.adv";

    const uint MILLI_TO_NANO = 1000000;
    const byte IMAGE_LAYOUT_ID = 1;
    const int WIDTH = 800;
    const int HEIGHT = 600;

    AdvError.ShowMessageBoxErrorMessage = true;

    // Create the file and write the mandatory and sample file-level tags.
    AdvError.Check(Adv.AdvLib.NewFile(fileName));
    AdvError.Check(Adv.AdvLib.AddOrUpdateFileTag("FSTF-TYPE", "ADV"));
    AdvError.Check(Adv.AdvLib.AddOrUpdateFileTag("ADV-VERSION", "2"));
    AdvError.Check(Adv.AdvLib.AddOrUpdateFileTag("OBJNAME", "Sample Generated Object"));

    // Define one 16-bit uncompressed full-image layout plus a status section.
    AdvError.Check(Adv.AdvLib.DefineImageSection(WIDTH, HEIGHT, 16));
    AdvError.Check(Adv.AdvLib.DefineImageLayout(IMAGE_LAYOUT_ID, "FULL-IMAGE-RAW", "UNCOMPRESSED", 16));
    AdvError.Check(Adv.AdvLib.DefineStatusSection(1 * MILLI_TO_NANO /* 1ms */));

    // TODO: Get the real actual timestamps and exposure
    DateTime firstExposureUtc = DateTime.UtcNow;
    uint exposureMs = 500;

    for (int frameNo = 0; frameNo < 10; frameNo++)
    {
        // TODO: Get the real actual timestamps and exposure
        DateTime frameStartUtc = firstExposureUtc.AddMilliseconds(exposureMs * frameNo);

        AdvError.Check(Adv.AdvLib.BeginFrame(
            0,
            AdvTimeStamp.FromDateTime(frameStartUtc).NanosecondsAfterAdvZeroEpoch,
            MILLI_TO_NANO * exposureMs));

        var framePixels = new ushort[WIDTH * HEIGHT];
        AdvError.Check(Adv.AdvLib.FrameAddImage(IMAGE_LAYOUT_ID, framePixels, 16));
        AdvError.Check(Adv.AdvLib.EndFrame());
    }

    AdvError.Check(Adv.AdvLib.EndFile());
}
/// <summary>
/// Records a sample ADV version 1 file next to the application binary, driven by
/// the form's current UI state (pixel-depth radio buttons, location and
/// compression checkboxes), then hands the finished file to ActionFileOperation().
/// </summary>
private void SaveAdvVer1Sample()
{
    string fileName = Path.GetFullPath(AppDomain.CurrentDomain.BaseDirectory + Path.DirectorySeparatorChar + @"Filename.adv");

    if (File.Exists(fileName))
    {
        // Ask before overwriting an existing output file.
        if (MessageBox.Show(string.Format("Output file exists:\r\n\r\n{0}\r\n\r\nOverwrite?", fileName), "Confirmation", MessageBoxButtons.YesNo) != DialogResult.Yes)
        {
            return;
        }

        File.Delete(fileName);
    }

    var recorder = new Obsolete.AdvVer1.AdvRecorder();

    // First set the values of the standard file metadata
    recorder.FileMetaData.RecorderName = "Genika";
    recorder.FileMetaData.RecorderVersion = "x.y.z";
    recorder.FileMetaData.RecorderTimerFirmwareVersion = "a.b.c";
    recorder.FileMetaData.CameraModel = "Flea3 FL3-FW-03S3M";
    recorder.FileMetaData.CameraSerialNumber = "10210906";
    recorder.FileMetaData.CameraVendorNumber = "Point Grey Research";
    recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";
    recorder.FileMetaData.CameraSensorResolution = "648x488";
    recorder.FileMetaData.CameraFirmwareVersion = "1.22.3.0";
    recorder.FileMetaData.CameraFirmwareBuildTime = "Mon Dec 28 20:15:45 2009";
    recorder.FileMetaData.CameraDriverVersion = "2.2.1.6";

    // Then define additional metadata, if required
    recorder.FileMetaData.AddUserTag("TELESCOPE-NAME", "Large Telescope");
    recorder.FileMetaData.AddUserTag("TELESCOPE-FL", "8300");
    recorder.FileMetaData.AddUserTag("TELESCOPE-FD", "6.5");
    // NOTE(review): "SAMPLIG" looks like a typo for "SAMPLING". The tag name is written
    // to the output file, so confirm no reader depends on it before renaming.
    recorder.FileMetaData.AddUserTag("CAMERA-DIGITAL-SAMPLIG", "xxx");
    recorder.FileMetaData.AddUserTag("CAMERA-HDR-RESPONSE", "yyy");
    recorder.FileMetaData.AddUserTag("CAMERA-OPTICAL-RESOLUTION", "zzz");

    if (cbxLocationData.Checked)
    {
        recorder.LocationData.LongitudeWgs84 = "150*38'27.7\"";
        recorder.LocationData.LatitudeWgs84 = "-33*39'49.3\"";
        recorder.LocationData.AltitudeMsl = "284.4M";
        recorder.LocationData.MslWgs84Offset = "22.4M";
        recorder.LocationData.GpsHdop = "0.7";
    }

    // Define the image size and bit depth
    byte dynaBits = 16;
    if (rbPixel16.Checked) { dynaBits = 16; }
    else if (rbPixel12.Checked) { dynaBits = 12; }
    else if (rbPixel8.Checked) { dynaBits = 8; }

    recorder.ImageConfig.SetImageParameters(640, 480, dynaBits);

    // By default no status section values will be recorded. The user must enable the ones they need recorded and
    // can also define additional status parameters to be recorded with each video frame
    recorder.StatusSectionConfig.RecordGain = true;
    recorder.StatusSectionConfig.RecordGamma = true;
    int customTagIdCustomGain = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-GAIN", AdvTagType.UInt32);
    int customTagIdMessages = recorder.StatusSectionConfig.AddDefineTag("EXAMPLE-MESSAGES", AdvTagType.List16OfAnsiString255);

    recorder.StartRecordingNewFile(fileName);

    Obsolete.AdvStatusEntry status = new Obsolete.AdvStatusEntry();
    // One slot per custom tag defined above; indexed by the returned tag ids.
    status.AdditionalStatusTags = new object[2];

    int imagesCount = GetTotalImages();
    bool useCompression = cbxCompress.Checked;

    for (int i = 0; i < imagesCount; i++)
    {
        // NOTE: Moking up some test data
        uint exposure = GetCurrentImageExposure(i);
        DateTime timestamp = GetCurrentImageTimeStamp(i);
        status.Gain = GetCurrentImageGain(i);
        status.Gamma = GetCurrentImageGamma(i);
        status.AdditionalStatusTags[customTagIdMessages] = "Test Message";
        status.AdditionalStatusTags[customTagIdCustomGain] = 36.0f;

        // The selected radio button decides which AddVideoFrame overload is used
        // (ushort[] 16-bit, byte[] 16-bit little-endian, or byte[] 8-bit).
        if (rb16BitUShort.Checked)
        {
            ushort[] imagePixels = imageGenerator.GetCurrentImageBytesInt16(i, dynaBits);

            recorder.AddVideoFrame(
                imagePixels,
                // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                useCompression,
                AdvTimeStamp.FromDateTime(timestamp),
                exposure,
                status);
        }
        else if (rb16BitByte.Checked)
        {
            byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, dynaBits);

            recorder.AddVideoFrame(
                imageBytes,
                // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                useCompression,
                AdvImageData.PixelDepth16Bit,
                AdvTimeStamp.FromDateTime(timestamp),
                exposure,
                status);
        }
        else if (rb8BitByte.Checked)
        {
            byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, dynaBits);

            recorder.AddVideoFrame(
                imageBytes,
                // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
                // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
                useCompression,
                AdvImageData.PixelDepth8Bit,
                AdvTimeStamp.FromDateTime(timestamp),
                exposure,
                status);
        }
    }

    recorder.FinishRecording();
    ActionFileOperation(fileName);
}
/// <summary>
/// Generates an ADV version 2 file according to <paramref name="config"/>:
/// file metadata, optional location data, optional custom clocks, stream/user
/// metadata tags, optional custom status tags and the requested number of frames.
/// </summary>
/// <param name="config">Describes frame count, source pixel format, compression, clocks, callbacks and metadata to record.</param>
/// <param name="fileName">Full path of the ADV file to create.</param>
public void GenerateaAdv_V2(AdvGenerationConfig config, string fileName)
{
    var recorder = new AdvRecorder();

    // First set the values of the standard file metadata
    recorder.FileMetaData.RecorderSoftwareName = "AdvLibRecorder";
    recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
    recorder.FileMetaData.RecorderHardwareName = "a.b.c";
    recorder.FileMetaData.CameraModel = "Flea3 FL3-FW-03S3M";
    recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";
    recorder.FileMetaData.ObjectName = "Generated File Object";
    recorder.FileMetaData.Telescope = "Generated File Telescope";
    recorder.FileMetaData.Observer = "Generated File Observer";

    if (config.SaveLocationData)
    {
        recorder.LocationData.SetLocation(
            150 + 38 / 60.0 + 27.7 / 3600.0,
            -1 * (33 + 39 / 60.0 + 49.3 / 3600.0),
            284.4);
    }

    recorder.ImageConfig.SetImageParameters(640, 480, config.DynaBits, config.NormalPixelValue);

    // By default no status section values will be recorded. The user must enable the ones they need recorded and
    // can also define additional status parameters to be recorded with each video frame
    recorder.StatusSectionConfig.RecordGain = true;
    recorder.StatusSectionConfig.RecordGamma = true;
    recorder.StatusSectionConfig.RecordSystemErrors = true;

    if (config.MainStreamCustomClock != null)
    {
        recorder.DefineCustomClock(
            AdvRecorder.AdvStream.MainStream,
            config.MainStreamCustomClock.ClockFrequency,
            config.MainStreamCustomClock.TicksTimingAccuracy,
            config.MainStreamCustomClock.ClockTicksCallback);
    }

    if (config.CalibrationStreamCustomClock != null)
    {
        recorder.DefineCustomClock(
            AdvRecorder.AdvStream.CalibrationStream,
            config.CalibrationStreamCustomClock.ClockFrequency,
            config.CalibrationStreamCustomClock.TicksTimingAccuracy,
            config.CalibrationStreamCustomClock.ClockTicksCallback);
    }

    if (config.BayerPattern != null)
    {
        recorder.ImageConfig.SetBayerPattern(config.BayerPattern.Value);
    }

    foreach (string key in config.MainStreamMetadata.Keys)
    {
        recorder.FileMetaData.AddMainStreamTag(key, config.MainStreamMetadata[key]);
    }

    foreach (string key in config.CalibrationStreamMetadata.Keys)
    {
        recorder.FileMetaData.AddCalibrationStreamTag(key, config.CalibrationStreamMetadata[key]);
    }

    foreach (string key in config.UserMetadata.Keys)
    {
        recorder.FileMetaData.AddUserTag(key, config.UserMetadata[key]);
    }

    if (config.SaveCustomStatusSectionTags)
    {
        // One custom tag of each supported type; frame values below must match this order.
        recorder.StatusSectionConfig.AddDefineTag("CustomInt8", Adv2TagType.Int8);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt16", Adv2TagType.Int16);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt32", Adv2TagType.Int32);
        recorder.StatusSectionConfig.AddDefineTag("CustomLong64", Adv2TagType.Long64);
        recorder.StatusSectionConfig.AddDefineTag("CustomReal", Adv2TagType.Real);
        recorder.StatusSectionConfig.AddDefineTag("CustomString", Adv2TagType.UTF8String);
    }

    recorder.StartRecordingNewFile(fileName, config.UtcTimestampAccuracyInNanoseconds);

    // FIX: removed the dead "status.AdditionalStatusTags = new object[2]" pre-allocation -
    // the property is unconditionally reassigned inside the loop below.
    AdvRecorder.AdvStatusEntry status = new AdvRecorder.AdvStatusEntry();

    // FIX: the compression flag and preferred codec depend only on config, so they are
    // computed once here instead of being duplicated in every source-format branch.
    bool compressFrames = config.Compression != CompressionType.Uncompressed;
    // NOTE: Use with caution! Using compression is slower and may not work at high frame rates
    // i.e. it may take longer to compress the data than for the next image to arrive on the buffer
    PreferredCompression preferredCompression = config.Compression == CompressionType.Lagarith16
        ? PreferredCompression.Lagarith16
        : PreferredCompression.QuickLZ;

    for (int i = 0; i < config.NumberOfFrames; i++)
    {
        // NOTE: Get the test data
        uint exposure = config.ExposureCallback != null ? config.ExposureCallback(i) : 0;
        DateTime startTimestamp = config.TimeStampCallback != null ? config.TimeStampCallback(i) : DateTime.Now;
        var utcStart = AdvTimeStamp.FromDateTime(startTimestamp);
        var utcEnd = utcStart.AddNanoseconds(exposure);

        status.Gain = config.GainCallback != null ? config.GainCallback(i) : 0;
        status.Gamma = config.GammaCallback != null ? config.GammaCallback(i) : 0;
        status.SystemErrors = config.SystemErrorsCallback != null ? config.SystemErrorsCallback(i) : null;

        // Values must be in the same order and of the same types as the AddDefineTag calls above.
        status.AdditionalStatusTags = config.SaveCustomStatusSectionTags
            ? new object[] { (byte)12, (short)-123, (int)192847, -1 * (long)(0x6E9104B012CD110F), 91.291823f, "Значение 1" }
            : null;

        if (config.SourceFormat == AdvSourceDataFormat.Format16BitUShort)
        {
            ushort[] imagePixels = imageGenerator.GetCurrentImageBytesInt16(i, config.DynaBits);
            recorder.AddVideoFrame(imagePixels, compressFrames, preferredCompression, utcStart, utcEnd, status, AdvImageData.PixelDepth16Bit);
        }
        else if (config.SourceFormat == AdvSourceDataFormat.Format24BitColour)
        {
            throw new NotImplementedException();
        }
        else
        {
            // Byte-based source formats map onto an AdvImageData pixel format;
            // any other (unknown) format is silently skipped, as before.
            AdvImageData? imageData = null;
            if (config.SourceFormat == AdvSourceDataFormat.Format16BitLittleEndianByte) imageData = AdvImageData.PixelDepth16Bit;
            else if (config.SourceFormat == AdvSourceDataFormat.Format12BitPackedByte) imageData = AdvImageData.PixelData12Bit;
            else if (config.SourceFormat == AdvSourceDataFormat.Format8BitByte) imageData = AdvImageData.PixelDepth8Bit;

            if (imageData != null)
            {
                byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);
                recorder.AddVideoFrame(imageBytes, compressFrames, preferredCompression, utcStart, utcEnd, status, imageData.Value);
            }
        }
    }

    recorder.FinishRecording();
}
/// <summary>
/// Round-trip test: records a single-frame ADV file with every standard status
/// value enabled plus six custom status tags (one of each supported type), then
/// reads the file back with AdvFile2 and verifies each value survived intact.
/// </summary>
public void TestStatusTagsAreSavedAndReadCorrectly()
{
    // Record into a unique temp file so parallel test runs don't collide.
    string fileName = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
    if (File.Exists(fileName)) { File.Delete(fileName); }

    try
    {
        // Generate
        var recorder = new AdvRecorder();
        recorder.ImageConfig.SetImageParameters(640, 480, 16, 0);
        recorder.FileMetaData.RecorderSoftwareName = "AdvLibTestRecorder";
        recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
        recorder.FileMetaData.RecorderHardwareName = "a.b.c";
        recorder.FileMetaData.CameraModel = "TestCamera";
        recorder.FileMetaData.CameraSensorInfo = "TestSensor";

        // Enable recording of every standard status value.
        recorder.StatusSectionConfig.RecordGain = true;
        recorder.StatusSectionConfig.RecordGamma = true;
        recorder.StatusSectionConfig.RecordShutter = true;
        recorder.StatusSectionConfig.RecordCameraOffset = true;
        recorder.StatusSectionConfig.RecordSystemTime = true;
        recorder.StatusSectionConfig.RecordTrackedSatellites = true;
        recorder.StatusSectionConfig.RecordAlmanacStatus = true;
        recorder.StatusSectionConfig.RecordAlmanacOffset = true;
        recorder.StatusSectionConfig.RecordFixStatus = true;
        recorder.StatusSectionConfig.RecordSystemErrors = true;
        recorder.StatusSectionConfig.RecordVideoCameraFrameId = true;
        recorder.StatusSectionConfig.RecordHardwareTimerFrameId = true;

        // One custom tag of each supported type; the values below must match this order.
        recorder.StatusSectionConfig.AddDefineTag("CustomInt8", Adv2TagType.Int8);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt16", Adv2TagType.Int16);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt32", Adv2TagType.Int32);
        recorder.StatusSectionConfig.AddDefineTag("CustomLong64", Adv2TagType.Long64);
        recorder.StatusSectionConfig.AddDefineTag("CustomReal", Adv2TagType.Real);
        recorder.StatusSectionConfig.AddDefineTag("CustomString", Adv2TagType.UTF8String);

        recorder.StartRecordingNewFile(fileName, 0);

        var systemTimeStamp = DateTime.Now.AddMilliseconds(123);
        var status = new AdvRecorder.AdvStatusEntry()
        {
            AlmanacStatus = AlmanacStatus.Good,
            AlmanacOffset = 14,
            TrackedSatellites = 8,
            CameraOffset = 8.23f,
            FixStatus = FixStatus.PFix,
            Gain = 32.82f,
            Gamma = 0.35f,
            Shutter = 2.502f,
            SystemTime = AdvTimeStamp.FromDateTime(systemTimeStamp),
            VideoCameraFrameId = 19289232,
            HardwareTimerFrameId = 9102
        };
        status.AdditionalStatusTags = new object[] { (byte)12, (short)-123, (int)192847, -1 * (long)(0x6E9104B012CD110F), 91.291823f, "Значение 1" };

        var imageGenerator = new ImageGenerator();
        ushort[] imagePixels = imageGenerator.GetCurrentImageBytesInt16(0, 16);

        recorder.AddVideoFrame(
            imagePixels,
            false,
            null,
            AdvTimeStamp.FromDateTime(DateTime.Now),
            AdvTimeStamp.FromDateTime(DateTime.Now.AddSeconds(2.56)),
            status,
            AdvImageData.PixelDepth16Bit);

        recorder.FinishRecording();

        // Verify: read frame 0 back and compare every status value with what was written.
        using (var loadedFile = new AdvFile2(fileName))
        {
            AdvFrameInfo frameInfo;
            loadedFile.GetMainFramePixels(0, out frameInfo);

            Assert.AreEqual(status.Gain, frameInfo.Gain, 0.000001);
            Assert.AreEqual(status.Gamma, frameInfo.Gamma, 0.000001);
            Assert.AreEqual(status.Shutter, frameInfo.Shutter, 0.000001);
            Assert.AreEqual(status.CameraOffset, frameInfo.Offset, 0.000001);
            Assert.AreEqual(status.FixStatus, (FixStatus)frameInfo.GPSFixStatus);
            Assert.AreEqual(status.AlmanacStatus, (AlmanacStatus)frameInfo.GPSAlmanacStatus);
            Assert.AreEqual(status.TrackedSatellites, frameInfo.GPSTrackedSattelites);
            Assert.AreEqual(status.AlmanacOffset, frameInfo.GPSAlmanacOffset);
            Assert.AreEqual(status.VideoCameraFrameId, frameInfo.VideoCameraFrameId);
            Assert.AreEqual(status.HardwareTimerFrameId, frameInfo.HardwareTimerFrameId);
            Assert.AreEqual(systemTimeStamp.Ticks, frameInfo.SystemTimestamp.Ticks);
            Assert.AreEqual(status.AdditionalStatusTags[0], frameInfo.Status["CustomInt8"]);
            Assert.AreEqual(status.AdditionalStatusTags[1], frameInfo.Status["CustomInt16"]);
            Assert.AreEqual(status.AdditionalStatusTags[2], frameInfo.Status["CustomInt32"]);
            Assert.AreEqual(status.AdditionalStatusTags[3], frameInfo.Status["CustomLong64"]);
            Assert.AreEqual(status.AdditionalStatusTags[4], frameInfo.Status["CustomReal"]);
            Assert.AreEqual(status.AdditionalStatusTags[5], frameInfo.Status["CustomString"]);
        }
    }
    finally
    {
        // Best-effort cleanup of the temp file; failures are only logged.
        try
        {
            if (File.Exists(fileName)) { File.Delete(fileName); }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            Trace.WriteLine(ex);
        }
    }
}
/// <summary>
/// Verifies that AdvTimeStamp epoch conversions round-trip exactly, then records
/// a two-frame ADV file with a custom main-stream clock and checks that the UTC
/// timestamps, exposures and tick stamps read back match what was written.
/// </summary>
public void TestTimestampsAreSavedAndReadCorrectly()
{
    DateTime frameTimeStamp = new DateTime(2016, 6, 24, 20, 42, 15).AddMilliseconds(1234);
    long tickStamp = frameTimeStamp.Ticks;
    float exposureMS = 16.7f;
    DateTime frameTimeStamp2 = frameTimeStamp.AddMilliseconds(17);
    long tickStamp2 = frameTimeStamp2.Ticks;
    var utcTimeStamps = new DateTime[] { frameTimeStamp, frameTimeStamp2 };
    var tickStamps = new long[] { tickStamp, tickStamp2 };

    // Round-trip the first timestamp through the ADV zero epoch, via both the
    // millisecond and the nanosecond representations.
    AdvTimeStamp ts = AdvTimeStamp.FromDateTime(frameTimeStamp);
    var tdBackFromMS = new DateTime((long)AdvTimeStamp.ADV_EPOCH_ZERO_TICKS).AddMilliseconds(ts.MillisecondsAfterAdvZeroEpoch);
    Assert.AreEqual(frameTimeStamp.Ticks, tdBackFromMS.Ticks);
    Assert.AreEqual(ts.MillisecondsAfterAdvZeroEpoch, ts.NanosecondsAfterAdvZeroEpoch / 1000000);
    var tdBackFromNS = new DateTime((long)AdvTimeStamp.ADV_EPOCH_ZERO_TICKS).AddMilliseconds(ts.NanosecondsAfterAdvZeroEpoch / 1000000.0);
    Assert.AreEqual(frameTimeStamp.Ticks, tdBackFromNS.Ticks);

    // Informational only: print the largest UTC timestamp the ulong nanosecond counter can hold.
    var maxTimeStamp = new DateTime((long)AdvTimeStamp.ADV_EPOCH_ZERO_TICKS).AddMilliseconds(ulong.MaxValue / 1000000.0);
    Console.WriteLine(string.Format("Max ADV UTC Timestamp: {0}", maxTimeStamp.ToString("yyyy-MMM-dd HH:mm:ss")));

    // NOTE(review): 204496936234000000 appears to be the precomputed nanoseconds-after-epoch
    // value of frameTimeStamp - confirm, and regenerate if the epoch constant ever changes.
    Assert.AreEqual(frameTimeStamp.Ticks, new DateTime((long)AdvTimeStamp.ADV_EPOCH_ZERO_TICKS).AddMilliseconds(204496936234000000 / 1000000.0).Ticks);

    var fileGen = new AdvGenerator();
    int tickId = -1;
    var cfg = new AdvGenerationConfig()
    {
        DynaBits = 16,
        SourceFormat = AdvSourceDataFormat.Format16BitUShort,
        NumberOfFrames = 2,
        Compression = CompressionType.Uncompressed,
        NormalPixelValue = null,
        // The main-stream clock returns the two prepared tick stamps, one per frame.
        MainStreamCustomClock = new CustomClockConfig()
        {
            ClockFrequency = 10000000,
            ClockTicksCallback = () => { tickId++; return(tickStamps[tickId]); },
            TicksTimingAccuracy = 1
        },
        CalibrationStreamCustomClock = new CustomClockConfig()
        {
            ClockFrequency = 10000000,
            ClockTicksCallback = () => 0,
            TicksTimingAccuracy = 1
        },
        TimeStampCallback = new GetCurrentImageTimeStampCallback((frameId) => utcTimeStamps[frameId]),
        // Exposure callback value is in nanoseconds (ms * 1e6).
        ExposureCallback = id => (uint)(exposureMS * 1000000.0)
    };

    // Record into a unique temp file so parallel test runs don't collide.
    string fileName = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
    if (File.Exists(fileName)) { File.Delete(fileName); }

    try
    {
        // Generate
        fileGen.GenerateaAdv_V2(cfg, fileName);

        // Verify
        using (var loadedFile = new AdvFile2(fileName))
        {
            AdvFrameInfo frameInfo;

            // Frame 0: starts at frameTimeStamp; tick stamps span 0 -> first clock reading.
            loadedFile.GetMainFramePixels(0, out frameInfo);
            Assert.IsNotNull(frameInfo);
            Assert.IsTrue(frameInfo.HasUtcTimeStamp);
            Assert.AreEqual(frameTimeStamp.Ticks, frameInfo.UtcStartExposureTimeStamp.Ticks);
            Assert.AreEqual(exposureMS, frameInfo.UtcExposureMilliseconds, 0.00001);
            Assert.AreEqual(0, frameInfo.TickStampStartTicks);
            Assert.AreEqual(tickStamp, frameInfo.TickStampEndTicks);

            // Frame 1: starts at frameTimeStamp2; tick stamps span the two clock readings.
            loadedFile.GetMainFramePixels(1, out frameInfo);
            Assert.IsNotNull(frameInfo);
            Assert.IsTrue(frameInfo.HasUtcTimeStamp);
            Assert.AreEqual(frameTimeStamp2.Ticks, frameInfo.UtcStartExposureTimeStamp.Ticks);
            Assert.AreEqual(exposureMS, frameInfo.UtcExposureMilliseconds, 0.00001);
            Assert.AreEqual(tickStamp, frameInfo.TickStampStartTicks);
            Assert.AreEqual(tickStamp2, frameInfo.TickStampEndTicks);
        }
    }
    finally
    {
        // Best-effort cleanup of the temp file; failures are only logged.
        try
        {
            if (File.Exists(fileName)) { File.Delete(fileName); }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            Trace.WriteLine(ex);
        }
    }
}
/// <summary>
/// Starts a new ADV version 1 frame and writes all enabled status-section values
/// for it. Called by the AddVideoFrame() overloads before the image data is added.
/// </summary>
/// <param name="timeStamp">High accuracy timestamp of the frame (milliseconds after the ADV zero epoch).</param>
/// <param name="exposureIn10thMilliseconds">Frame duration in whole 0.1 ms units; zero when no high accuracy timing is available.</param>
/// <param name="metadata">The status values to record with the frame.</param>
private void BeginVideoFrame(AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds, AdvStatusEntry metadata)
{
    ulong elapsedTimeMilliseconds = 0; // since the first recorded frame was taken

    if (m_NumberRecordedFrames > 0 && m_FirstRecordedFrameTimestamp != 0)
    {
        // NOTE(review): unsigned subtraction - if this frame's timestamp were ever earlier
        // than the first frame's, the result wraps to a huge value; confirm callers
        // guarantee monotonically increasing timestamps.
        elapsedTimeMilliseconds = timeStamp.MillisecondsAfterAdvZeroEpoch - m_FirstRecordedFrameTimestamp;
    }
    else if (m_NumberRecordedFrames == 0)
    {
        // Remember the very first frame's timestamp as the base for elapsed time.
        m_FirstRecordedFrameTimestamp = timeStamp.MillisecondsAfterAdvZeroEpoch;
    }

    bool frameStartedOk = AdvLib.Obsolete.AdvVer1.BeginFrame(
        (long)timeStamp.MillisecondsAfterAdvZeroEpoch,
        elapsedTimeMilliseconds > 0 ? (uint)elapsedTimeMilliseconds : 0,
        exposureIn10thMilliseconds);

    if (!frameStartedOk)
    {
        // If we can't add the first frame, this may be a file creation issue; otherwise increase the dropped frames counter
        if (m_NumberRecordedFrames > 0)
        {
            m_NumberDroppedFrames++;
        }
        return;
    }

    // Write each standard status value only when its recording has been enabled.
    if (StatusSectionConfig.RecordSystemTime)
    {
        AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(
            m_TAGID_SystemTime,
            metadata.SystemTime.MillisecondsAfterAdvZeroEpoch > 0 ? (ulong)metadata.SystemTime.MillisecondsAfterAdvZeroEpoch : 0);
    }

    if (StatusSectionConfig.RecordGPSTrackedSatellites) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_TrackedGPSSatellites, metadata.GPSTrackedSatellites); }
    if (StatusSectionConfig.RecordGPSAlmanacStatus) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSAlmanacStatus, (byte)metadata.GPSAlmanacStatus); }
    if (StatusSectionConfig.RecordGPSAlmanacOffset) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSAlmanacOffset, metadata.GPSAlmanacOffset); }
    if (StatusSectionConfig.RecordGPSFixStatus) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(m_TAGID_GPSFixStatus, (byte)metadata.GPSFixStatus); }
    if (StatusSectionConfig.RecordGain) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Gain, metadata.Gain); }
    if (StatusSectionConfig.RecordGamma) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Gamma, metadata.Gamma); }
    if (StatusSectionConfig.RecordShutter) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Shutter, metadata.Shutter); }
    if (StatusSectionConfig.RecordCameraOffset) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(m_TAGID_Offset, metadata.CameraOffset); }
    if (StatusSectionConfig.RecordVideoCameraFrameId) { AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(m_TAGID_VideoCameraFrameId, metadata.VideoCameraFrameId); }

    // User commands: at most 16 entries per frame, each truncated to 255 characters.
    if (StatusSectionConfig.RecordUserCommands && metadata.UserCommands != null)
    {
        for (int i = 0; i < Math.Min(16, metadata.UserCommands.Count()); i++)
        {
            if (metadata.UserCommands[i] != null)
            {
                if (metadata.UserCommands[i].Length > 255)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_UserCommand, metadata.UserCommands[i].Substring(0, 255));
                }
                else
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_UserCommand, metadata.UserCommands[i]);
                }
            }
        }
    }

    // System errors: same 16-entry / 255-character limits as user commands.
    if (StatusSectionConfig.RecordSystemErrors && metadata.SystemErrors != null)
    {
        for (int i = 0; i < Math.Min(16, metadata.SystemErrors.Count()); i++)
        {
            if (metadata.SystemErrors[i] != null)
            {
                if (metadata.SystemErrors[i].Length > 255)
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_SystemError, metadata.SystemErrors[i].Substring(0, 255));
                }
                else
                {
                    AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(m_TAGID_SystemError, metadata.SystemErrors[i]);
                }
            }
        }
    }

    // User-defined additional status tags. The values in metadata.AdditionalStatusTags
    // must be in the same order (and of the declared types) as the tags were defined
    // in StatusSectionConfig - the cast below will throw otherwise.
    int additionalStatusTagId = -1;
    foreach (string tagName in StatusSectionConfig.AdditionalStatusTags.Keys)
    {
        uint tagId = m_AdditionalStatusSectionTagIds[tagName];
        additionalStatusTagId++;
        object statusTagValue = metadata.AdditionalStatusTags[additionalStatusTagId];

        switch (StatusSectionConfig.AdditionalStatusTags[tagName])
        {
            case AdvTagType.UInt8:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagUInt8(tagId, (byte)statusTagValue);
                break;

            case AdvTagType.UInt16:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTag16(tagId, (ushort)statusTagValue);
                break;

            case AdvTagType.UInt32:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTag32(tagId, (uint)statusTagValue);
                break;

            case AdvTagType.ULong64:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTag64(tagId, (ulong)statusTagValue);
                break;

            case AdvTagType.Real:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTagReal(tagId, (float)statusTagValue);
                break;

            case AdvTagType.AnsiString255:
                AdvLib.Obsolete.AdvVer1.FrameAddStatusTag(tagId, (string)statusTagValue);
                break;

            case AdvTagType.List16OfAnsiString255:
                // Lists carry at most 16 strings of up to 255 characters each.
                string[] lines = (string[])statusTagValue;
                for (int i = 0; i < Math.Min(16, lines.Count()); i++)
                {
                    if (lines[i] != null)
                    {
                        if (lines[i].Length > 255)
                        {
                            AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(tagId, lines[i].Substring(0, 255));
                        }
                        else
                        {
                            AdvLib.Obsolete.AdvVer1.FrameAddStatusTagMessage(tagId, lines[i]);
                        }
                    }
                }
                break;
        }
    }
}
/// <summary>
/// Adds a new video frame from a byte array.
/// </summary>
/// <param name="pixels">The pixels to be saved. The row-major array is of size Width * Height in 8-bit mode and 2 * Width * Height in little-endian 16-bit mode.</param>
/// <param name="compress">True if the frame is to be compressed. Please note that compression is CPU and I/O intensive and may not work at high frame rates. Use wisely.</param>
/// <param name="imageData">The format of the pixels - 8 bit or 16 bit.</param>
/// <param name="timeStamp">The high accuracy timestamp for the middle of the frame. If the timestamp is not with an accuracy of 1ms then set it as zero. A lower accuracy timestamp can be specified in the SystemTime status value.</param>
/// <param name="exposureIn10thMilliseconds">The duration of the frame in whole 0.1 ms as determined by the high accuracy timestamping. If high accuracy timestamp is not available then set this to zero. Note that the Shutter status value should be derived from the camera settings rather than from the timestamps.</param>
/// <param name="metadata">The status metadata to be saved with the video frame.</param>
public void AddVideoFrame(byte[] pixels, bool compress, AdvImageData imageData, AdvTimeStamp timeStamp, uint exposureIn10thMilliseconds, AdvStatusEntry metadata)
{
    BeginVideoFrame(timeStamp, exposureIn10thMilliseconds, metadata);

    // Pick the image layout matching the pixel format and compression choice;
    // any other AdvImageData value writes a frame with no image data (as before).
    switch (imageData)
    {
        case AdvImageData.PixelDepth16Bit:
            AdvLib.Obsolete.AdvVer1.FrameAddImageBytes(
                compress ? CFG_ADV_LAYOUT_3_COMPRESSED : CFG_ADV_LAYOUT_1_UNCOMPRESSED,
                pixels,
                16);
            break;

        case AdvImageData.PixelDepth8Bit:
            AdvLib.Obsolete.AdvVer1.FrameAddImageBytes(
                compress ? CFG_ADV_LAYOUT_5_COMPRESSED : CFG_ADV_LAYOUT_4_UNCOMPRESSED,
                pixels,
                8);
            break;
    }

    AdvLib.Obsolete.AdvVer1.EndFrame();
}