/// <summary>
/// Sample: records a 10-frame, 800x600, 16-bit ADV file using a custom 98.304 MHz
/// main-stream clock and 1 ms UTC timestamp accuracy.
/// Writes to the <c>fileName</c> field (presumably a member of the enclosing sample
/// class — confirm) and uses the <c>GetCustomClockTickCount</c> callback defined elsewhere.
/// </summary>
public void SampleAdvFileRecording3()
{
    fileName = @"C:\hello-world.adv";

    const uint MILLI_TO_NANO = 1000000;
    const int WIDTH = 800;
    const int HEIGHT = 600;

    var recorder = new AdvRecorder();
    recorder.ImageConfig.SetImageParameters(WIDTH, HEIGHT, 16, 0);

    // Standard file metadata.
    recorder.FileMetaData.RecorderSoftwareName = "MyVideoRecorder";
    recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
    recorder.FileMetaData.CameraModel = "TestCamera";
    recorder.FileMetaData.CameraSensorInfo = "TestSensor";

    recorder.DefineCustomClock(
        AdvRecorder.AdvStream.MainStream,
        98304000 /* 98.304MHz */,
        1, /* 1 tick */
        GetCustomClockTickCount);

    recorder.StartRecordingNewFile(fileName, 1 * MILLI_TO_NANO /* 1ms */);

    for (int i = 0; i < 10; i++)
    {
        ushort[] pixels = new ushort[WIDTH * HEIGHT];

        // Capture the wall clock once per frame: the original evaluated DateTime.Now
        // twice (once per timestamp), so the start and end timestamps were derived from
        // two slightly different instants. Deriving both from one reading keeps the
        // frame's start/end pair internally consistent.
        DateTime frameStart = DateTime.Now;

        recorder.AddVideoFrame(
            pixels,
            false, // no compression
            null,  // no preferred compression method
            AdvTimeStamp.FromDateTime(frameStart),
            AdvTimeStamp.FromDateTime(frameStart.AddSeconds(0.5 * i)),
            null,  // no status entry for this frame
            AdvImageData.PixelDepth16Bit);
    }

    recorder.FinishRecording();
}
/// <summary>
/// Generates an ADV version 2 file according to <paramref name="config"/>: sets the
/// standard file metadata, optional location data, optional custom clocks and Bayer
/// pattern, stream/user metadata tags and status-section layout, then records
/// <c>config.NumberOfFrames</c> frames whose pixel data comes from the
/// <c>imageGenerator</c> member (defined elsewhere in this class).
/// </summary>
/// <param name="config">Describes what to include in the generated file and supplies
/// the per-frame callbacks (exposure, timestamp, gain, gamma, system errors).</param>
/// <param name="fileName">Full path of the ADV file to create.</param>
public void GenerateaAdv_V2(AdvGenerationConfig config, string fileName)
{
    var recorder = new AdvRecorder();

    // First set the values of the standard file metadata
    recorder.FileMetaData.RecorderSoftwareName = "AdvLibRecorder";
    recorder.FileMetaData.RecorderSoftwareVersion = "x.y.z";
    recorder.FileMetaData.RecorderHardwareName = "a.b.c";
    recorder.FileMetaData.CameraModel = "Flea3 FL3-FW-03S3M";
    recorder.FileMetaData.CameraSensorInfo = "Sony ICX414AL (1/2\" 648x488 CCD)";
    recorder.FileMetaData.ObjectName = "Generated File Object";
    recorder.FileMetaData.Telescope = "Generated File Telescope";
    recorder.FileMetaData.Observer = "Generated File Observer";

    if (config.SaveLocationData)
    {
        // Fixed test coordinates (degrees/minutes/seconds folded into decimal degrees)
        // and altitude in metres.
        recorder.LocationData.SetLocation(
            150 + 38 / 60.0 + 27.7 / 3600.0,
            -1 * (33 + 39 / 60.0 + 49.3 / 3600.0),
            284.4);
    }

    recorder.ImageConfig.SetImageParameters(640, 480, config.DynaBits, config.NormalPixelValue);

    // By default no status section values will be recorded. The user must enable the
    // ones they need recorded and can also define additional status parameters to be
    // recorded with each video frame
    recorder.StatusSectionConfig.RecordGain = true;
    recorder.StatusSectionConfig.RecordGamma = true;
    recorder.StatusSectionConfig.RecordSystemErrors = true;

    if (config.MainStreamCustomClock != null)
    {
        recorder.DefineCustomClock(
            AdvRecorder.AdvStream.MainStream,
            config.MainStreamCustomClock.ClockFrequency,
            config.MainStreamCustomClock.TicksTimingAccuracy,
            config.MainStreamCustomClock.ClockTicksCallback);
    }

    if (config.CalibrationStreamCustomClock != null)
    {
        recorder.DefineCustomClock(
            AdvRecorder.AdvStream.CalibrationStream,
            config.CalibrationStreamCustomClock.ClockFrequency,
            config.CalibrationStreamCustomClock.TicksTimingAccuracy,
            config.CalibrationStreamCustomClock.ClockTicksCallback);
    }

    if (config.BayerPattern != null)
    {
        recorder.ImageConfig.SetBayerPattern(config.BayerPattern.Value);
    }

    foreach (string key in config.MainStreamMetadata.Keys)
    {
        recorder.FileMetaData.AddMainStreamTag(key, config.MainStreamMetadata[key]);
    }

    foreach (string key in config.CalibrationStreamMetadata.Keys)
    {
        recorder.FileMetaData.AddCalibrationStreamTag(key, config.CalibrationStreamMetadata[key]);
    }

    foreach (string key in config.UserMetadata.Keys)
    {
        recorder.FileMetaData.AddUserTag(key, config.UserMetadata[key]);
    }

    if (config.SaveCustomStatusSectionTags)
    {
        recorder.StatusSectionConfig.AddDefineTag("CustomInt8", Adv2TagType.Int8);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt16", Adv2TagType.Int16);
        recorder.StatusSectionConfig.AddDefineTag("CustomInt32", Adv2TagType.Int32);
        recorder.StatusSectionConfig.AddDefineTag("CustomLong64", Adv2TagType.Long64);
        recorder.StatusSectionConfig.AddDefineTag("CustomReal", Adv2TagType.Real);
        recorder.StatusSectionConfig.AddDefineTag("CustomString", Adv2TagType.UTF8String);
    }

    recorder.StartRecordingNewFile(fileName, config.UtcTimestampAccuracyInNanoseconds);

    // NOTE: Use compression with caution! Using compression is slower and may not work
    // at high frame rates, i.e. it may take longer to compress the data than for the
    // next image to arrive on the buffer.
    // Both values are the same for every frame, so compute them once outside the loop
    // instead of re-evaluating the ternaries in every format branch on every iteration.
    bool useCompression = config.Compression != CompressionType.Uncompressed;
    PreferredCompression preferredCompression =
        config.Compression == CompressionType.Lagarith16
            ? PreferredCompression.Lagarith16
            : PreferredCompression.QuickLZ;

    // The original also pre-assigned AdditionalStatusTags = new object[2] here; that
    // array was unconditionally overwritten inside the loop before first use, so the
    // dead store has been removed.
    AdvRecorder.AdvStatusEntry status = new AdvRecorder.AdvStatusEntry();

    for (int i = 0; i < config.NumberOfFrames; i++)
    {
        // NOTE: Get the test data
        uint exposure = config.ExposureCallback != null ? config.ExposureCallback(i) : 0;
        DateTime startTimestamp = config.TimeStampCallback != null ? config.TimeStampCallback(i) : DateTime.Now;
        var utcStart = AdvTimeStamp.FromDateTime(startTimestamp);
        var utcEnd = utcStart.AddNanoseconds(exposure);

        status.Gain = config.GainCallback != null ? config.GainCallback(i) : 0;
        status.Gamma = config.GammaCallback != null ? config.GammaCallback(i) : 0;
        status.SystemErrors = config.SystemErrorsCallback != null ? config.SystemErrorsCallback(i) : null;

        // One sample value per custom tag defined above, in the same order.
        status.AdditionalStatusTags = config.SaveCustomStatusSectionTags
            ? new object[]
              {
                  (byte)12,
                  (short)-123,
                  (int)192847,
                  -1 * (long)(0x6E9104B012CD110F),
                  91.291823f,
                  "Значение 1"
              }
            : null;

        switch (config.SourceFormat)
        {
            case AdvSourceDataFormat.Format16BitUShort:
            {
                ushort[] imagePixels = imageGenerator.GetCurrentImageBytesInt16(i, config.DynaBits);
                recorder.AddVideoFrame(
                    imagePixels,
                    useCompression,
                    preferredCompression,
                    utcStart,
                    utcEnd,
                    status,
                    AdvImageData.PixelDepth16Bit);
                break;
            }
            case AdvSourceDataFormat.Format16BitLittleEndianByte:
            {
                byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);
                recorder.AddVideoFrame(
                    imageBytes,
                    useCompression,
                    preferredCompression,
                    utcStart,
                    utcEnd,
                    status,
                    AdvImageData.PixelDepth16Bit);
                break;
            }
            case AdvSourceDataFormat.Format12BitPackedByte:
            {
                byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);
                recorder.AddVideoFrame(
                    imageBytes,
                    useCompression,
                    preferredCompression,
                    utcStart,
                    utcEnd,
                    status,
                    AdvImageData.PixelData12Bit);
                break;
            }
            case AdvSourceDataFormat.Format8BitByte:
            {
                byte[] imageBytes = imageGenerator.GetCurrentImageBytes(i, config.DynaBits);
                recorder.AddVideoFrame(
                    imageBytes,
                    useCompression,
                    preferredCompression,
                    utcStart,
                    utcEnd,
                    status,
                    AdvImageData.PixelDepth8Bit);
                break;
            }
            case AdvSourceDataFormat.Format24BitColour:
            {
                throw new NotImplementedException();
            }
            default:
            {
                // Matches the original else-if chain: any other format records nothing
                // for this frame.
                break;
            }
        }
    }

    recorder.FinishRecording();
}