/// <summary>
/// Creates a Kafka producer that sends UTF-8 string keys and raw byte-array values
/// (values are Avro-serialized explicitly via <c>_avroSerializer</c> before sending).
/// </summary>
/// <param name="bootstrapServers">Kafka bootstrap server list, e.g. "localhost:9092".</param>
/// <param name="recordConfig">Stored schema metadata (subject/version/id/schema string).</param>
/// <param name="topic">Target topic for all sends.</param>
/// <param name="errorHandler">Callback invoked with the reason string on producer errors.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="errorHandler"/> is null.</exception>
public KafkaProducer(string bootstrapServers, RecordConfig recordConfig, string topic,
    Action<string> errorHandler)
{
    // Fail fast with the specific exception type rather than the base Exception.
    if (errorHandler == null)
    {
        throw new ArgumentNullException(nameof(errorHandler));
    }

    // Offline schema registry built from the record's stored schema metadata;
    // replaces the remote CachedSchemaRegistryClient previously used here.
    var schemaRegistry = new SchemaRegistryClient(
        new Schema(recordConfig.Subject, recordConfig.Version, recordConfig.Id, recordConfig.SchemaString));

    _avroSerializer = new AvroSerializer<GenericRecord>(schemaRegistry);

    _producer = new ProducerBuilder<string, byte[]>(
            new ProducerConfig { BootstrapServers = bootstrapServers })
        .SetKeySerializer(Serializers.Utf8)
        .SetValueSerializer(Serializers.ByteArray)
        // Wire the error callback (previously validated but never used) —
        // consistent with the KafkaConsumer constructor.
        .SetErrorHandler((_, e) => errorHandler(e.Reason))
        .Build();

    _topic = topic;
}
// Builds a dictionary of only the settings whose local values differ from the
// supplied RecordConfig; keys are the field names prefixed with RecordConfig.PrefixName.
internal Dictionary <string, object> GetUpdateDicts(RecordConfig recordConfig)
{
    var updates = new Dictionary <string, object>();

    // Record a prefixed entry only when the local value differs from the stored one.
    void AddIfChanged(string fieldName, object localValue, object storedValue)
    {
        if (!object.Equals(localValue, storedValue))
        {
            updates.Add($"{RecordConfig.PrefixName}{fieldName}", localValue);
        }
    }

    AddIfChanged("appName", AppName, recordConfig.AppName);
    AddIfChanged("fileBufSize", FileBufSize, recordConfig.FileBufSize);
    AddIfChanged("filePath", FilePath, recordConfig.FilePath);
    AddIfChanged("fileSecond", FileSecond, recordConfig.FileSecond);
    AddIfChanged("sampleMS", SampleMS, recordConfig.SampleMS);
    AddIfChanged("fastStart", FastStart, recordConfig.FastStart);
    AddIfChanged("fileRepeat", FileRepeat, recordConfig.FileRepeat);

    return updates;
}
public int imuQueueLength; ///< Length of the imu queue

// Copies the locally configured recording options into the given RecordConfig.
// The struct is passed by ref so the caller (and, presumably, the native interop
// layer it is forwarded to — confirm) observes the updated values.
internal void SetRecordConfig(ref RecordConfig recordConfig)
{
    // Per-stream enable flags.
    recordConfig.enableDepthRecording = enableDepthRecording;
    recordConfig.enableLeanDepthRecording = enableLeanDepthRecording;
    recordConfig.enableColorRecording = enableColorRecording;
    recordConfig.enableMonochromeRecording = enableMonochromeRecording;
    recordConfig.enableImuRecording = enableImuRecording;
    // Per-stream recording frame rates.
    recordConfig.recordColorFps = recordColorFps;
    recordConfig.recordDepthFps = recordDepthFps;
    recordConfig.recordMonochromeFps = recordMonochromeFps;
    // Per-stream buffering queue lengths.
    recordConfig.imuQueueLength = imuQueueLength;
    recordConfig.colorQueueLength = colorQueueLength;
    recordConfig.monochromeQueueLength = monochromeQueueLength;
    recordConfig.depthQueueLength = depthQueueLength;
}
/// <summary>
/// Creates a Kafka consumer pinned to an explicit topic/partition/offset. Keys are
/// UTF-8 strings; values are raw byte arrays deserialized with <c>_avroDeserializer</c>.
/// </summary>
/// <param name="bootstrapServers">Kafka bootstrap server list.</param>
/// <param name="recordConfig">Stored schema metadata (subject/version/id/schema string).</param>
/// <param name="topic">Topic to consume from.</param>
/// <param name="groupId">Consumer group id (offset management still explicit via Assign).</param>
/// <param name="partition">Partition to read.</param>
/// <param name="offset">Starting offset within the partition.</param>
/// <param name="consumeResultHandler">Invoked with (key, record, timestamp) per message.</param>
/// <param name="errorHandler">Invoked with the reason string on consumer errors.</param>
/// <exception cref="ArgumentNullException">Thrown when either handler is null.</exception>
public KafkaConsumer(string bootstrapServers, RecordConfig recordConfig, string topic, string groupId,
    int partition, int offset,
    Action<string, dynamic, DateTime> consumeResultHandler, Action<string> errorHandler)
{
    // Fail fast with specific exception types rather than the base Exception.
    if (consumeResultHandler == null)
    {
        throw new ArgumentNullException(nameof(consumeResultHandler));
    }
    if (errorHandler == null)
    {
        throw new ArgumentNullException(nameof(errorHandler));
    }

    _consumeResultHandler = consumeResultHandler;
    _cts = new CancellationTokenSource();

    // Offline schema registry built from the record's stored schema metadata;
    // replaces the remote CachedSchemaRegistryClient previously used here.
    var schemaRegistry = new SchemaRegistryClient(
        new Schema(recordConfig.Subject, recordConfig.Version, recordConfig.Id, recordConfig.SchemaString));

    _avroDeserializer = new AvroDeserializer<GenericRecord>(schemaRegistry);

    _consumer = new ConsumerBuilder<string, byte[]>(
            new ConsumerConfig
            {
                BootstrapServers = bootstrapServers,
                GroupId = groupId,
                AutoOffsetReset = AutoOffsetReset.Earliest
            })
        .SetKeyDeserializer(Deserializers.Utf8)
        .SetValueDeserializer(Deserializers.ByteArray)
        .SetErrorHandler((_, e) => errorHandler(e.Reason))
        .Build();

    // Pin the consumer to an explicit partition/offset instead of relying on
    // group-rebalancing subscription.
    _consumer.Assign(new List<TopicPartitionOffset> { new TopicPartitionOffset(topic, partition, offset) });

    _topic = topic;
}
// Use this for initialization.
// Builds the record configuration and path, starts native recording, then reads
// back the actual recording folder chosen by the native layer.
void Start()
{
    RecordConfig recordConfig = new RecordConfig();
    string folderPath = "";
    string folderName = "";
    recordParameters.SetRecordConfig(ref recordConfig);

#if UNITY_EDITOR
    recordParameters.SetRecordPath(ref folderPath, ref folderName);
#else
    recordParameters.SetRecordPath(ref folderPath, ref folderName);
    // On device, override with the app's data path.
    folderPath = Application.dataPath;
    Debug.LogError(folderPath);
    Debug.LogError(folderName);
#endif

    // BUG FIX: string.Replace returns a new string (strings are immutable);
    // the original discarded the result, so backslashes were never escaped.
    folderPath = folderPath.Replace("\\", "\\\\");

    EnableRecording(folderPath, folderName, ref recordConfig);

    // Ask the native layer where it actually recorded to.
    int bufferSize = 512;
    StringBuilder buffer = new StringBuilder(bufferSize);
    GetRecordingfolder(buffer, ref bufferSize);
    Debug.Log(buffer.ToString());
    finalRecordingPath = buffer.ToString();
}
// Initializes every configuration section to a safe, non-null default so that
// consumers never have to null-check the aggregated settings.
public AggregatedConfig()
{
    // Environment and server identity defaults.
    RunEnvironment = RunEnv.ExternalEnv;
    UserCenterAddress = string.Empty;
    RtServerAddress = string.Empty;
    ServerVersionInfo = string.Empty;
    DeviceNo = string.Empty;
    DeviceKey = string.Empty;

    // Login defaults.
    AccountAutoLogin = new LoginConfig();

    // Capture devices.
    MainCamera = new VideoConfig { Type = "主摄像头" };
    SecondaryCamera = new VideoConfig { Type = "辅摄像头" };
    AudioConfig = new AudioConfig();

    // Streaming targets (both enabled by default).
    LocalLiveConfig = new LiveConfig { Description = "本地推流", IsEnabled = true };
    RemoteLiveConfig = new LiveConfig { Description = "服务器推流", IsEnabled = true };

    // Recording.
    RecordConfig = new RecordConfig { Description = "录制" };
}
// Native interop: starts a recording session in the underlying plugin.
// recordConfig_ is passed by ref so the native side receives the full struct.
// NOTE(review): the required [DllImport] attribute is presumably declared on the
// preceding line (outside this view) — confirm the target library name there.
internal static extern void EnableRecording(string folderPath_, string folderName_, ref RecordConfig recordConfig_);
// Demo entry point: starts a pinned Kafka consumer that prints every record, then
// produces a batch of 10 sequentially-numbered GenericRecords every 5 seconds
// until a key is pressed.
static void Main(string[] args)
{
    #region Config

    // NOTE(review): the original declaration here was mangled (a redacted
    // "http://<user>:<pass>@..." registry URL fused with the local schema path).
    // RecordConfig is constructed from the local schema file below — confirm
    // whether a remote schema-registry URL should be used instead.
    var schemaRegistryUrl = @"..\..\..\wwwroot\schema.json";
    var bootstrapServers = "localhost:9092";
    var topic = "quick-start"; // "stream-topic";
    var groupId = "consumer-group"; // "simple-consumer";
    int partition = 0;
    int offset = 0;
    var recordConfig = new RecordConfig(schemaRegistryUrl);

    #endregion // Config

    #region Kafka Consumer

    // Prints each consumed record's key and all Avro fields to the console.
    var kafkaConsumer = new KafkaConsumer(
        bootstrapServers, recordConfig, topic, groupId, partition, offset,
        (key, value, dt) =>
        {
            Console.WriteLine($"Consumed Object:\nkey = {key}");
            foreach (var field in value.Schema.Fields)
            {
                Console.WriteLine($" {field.Name} = {value[field.Name]}");
            }
        },
        e => Console.WriteLine(e))
        .StartConsuming();

    #endregion // Kafka Consumer

    #region Create Kafka Producer

    var kafkaProducer = new KafkaProducer(
        bootstrapServers, recordConfig, topic,
        e => Console.WriteLine(e));

    #endregion // Create Kafka Producer

    var count = 0;

    // Every 5 seconds, build and send a batch of 10 keyed GenericRecords.
    var timer = new Timer(_ =>
    {
        var lstTuple = new List<Tuple<string, GenericRecord>>();
        for (var i = 0; i < 10; i++)
        {
            count++;

            #region Create GenericRecord Object
            var gr = new GenericRecord(recordConfig.RecordSchema);
            gr.Add("SEQUENCE", count);
            gr.Add("ID", count);
            gr.Add("CategoryID", count);
            gr.Add("YouTubeCategoryTypeID", count);
            gr.Add("CreationTime", DateTime.Now.Ticks);
            lstTuple.Add(new Tuple<string, GenericRecord>($"{count}", gr));
            #endregion // Create GenericRecord Object
        }
        kafkaProducer.Send(lstTuple.ToArray());
    }, null, 0, 5000);

    Console.WriteLine("Press any key to quit...");
    Console.ReadKey();

    timer.Dispose();
    kafkaProducer.Dispose();
    kafkaConsumer.Dispose();
}
/// <summary>
/// Drives the full record-generation pipeline: creates the Record row, prepares
/// input and handshake files for the external calculation process, waits for it
/// to finish, then bulk-loads and post-processes its results into MySQL.
/// Progress is published through the shared Stage dictionary keyed by stageId.
/// </summary>
/// <param name="recordParams">Record name, expected file list, and string-keyed config values.</param>
/// <param name="stageId">Key under which pipeline progress is published in Stage.</param>
public void InitialRecordData(RecordParams recordParams, Guid stageId)
{
    try
    {
        Stage.Add(stageId, RecordProcessStage.OnCreatingRecord);

        // Persist the new record; times are parsed from the string config map.
        var recordRepo = Repo<PowerRepository<Record>>();
        var newRecord = new Record
        {
            RecordName = recordParams.RecordName,
            RecordStartDateTime = DateTime.Parse(recordParams.RecordConfigs["StartDateTime"]),
            RecordDuration = TimeSpan.Parse(recordParams.RecordConfigs["RecordDuration"]),
            RecordEndDateTime = DateTime.Parse(recordParams.RecordConfigs["EndDateTime"])
        };
        recordRepo.AddOrUpdateDoCommit(newRecord);
        newRecord.ModelState = ModelState.Changed;

        // Load the "RecordInitial" system configuration as name -> value.
        var configRepo = Repo<PowerRepository<SystemConfig>>();
        var configs = configRepo.GetModelList(obj => obj.ConfigType == "RecordInitial")
            .ToDictionary(obj => obj.ConfigName, item => item.ConfigValue);

        // Handshake payload consumed by the external process. (The identifier keeps
        // the original spelling "initialzation", matching the "InitialzationFile"
        // config key used below.)
        var initialzation = $"{newRecord.Id}\r\n{recordParams.RecordConfigs["Period"]}\r\n{recordParams.RecordConfigs["Frequency"]}\r\n{recordParams.RecordConfigs["LineType"]}";

        // Delete any file in the data directory that is not part of this run.
        foreach (var file in Directory.GetFiles(configs["dataDirectory"]))
        {
            if (!recordParams.FileList.Contains(Path.GetFileNameWithoutExtension(file)))
            {
                File.Delete(file);
            }
        }

        // After cleanup the directory must contain exactly the expected files.
        if (Directory.GetFiles(configs["dataDirectory"]).Length != recordParams.FileList.Count)
        {
            Stage[stageId] = RecordProcessStage.MissingFile;
            return;
        }

        File.WriteAllText(configs["InitialzationFile"], initialzation);

        // Launch the external calculation process if it is not already running.
        if (!Globals.IsProcessRunning(configs["MainProcessName"]))
        {
            Process.Start(configs["MainProcessPath"]);
        }

        // File-based handshake: FinishFile = "0" (not done), StartFile = "1" (go).
        File.WriteAllText(configs["FinishFile"], "0");
        File.WriteAllText(configs["StartFile"], "1");
        Stage[stageId] = RecordProcessStage.OnCaclating;

        // Poll until the external process flips FinishFile's first character to "1".
        // NOTE(review): no timeout — this blocks forever if the process dies.
        while (File.ReadAllText(configs["FinishFile"]).Substring(0, 1) != "1")
        {
            Thread.Sleep(1000);
        }

        // Persist the calculation configuration tied to the new record.
        var config = new RecordConfig
        {
            LineType = (LineType)byte.Parse(recordParams.RecordConfigs["LineType"]),
            CalcPrecision = ushort.Parse(recordParams.RecordConfigs["Period"]),
            Frequency = ushort.Parse(recordParams.RecordConfigs["SampleRate"]),
            RecordId = newRecord.Id
        };
        var ctx = new PowerDbContext();
        ctx.RecordConfigs.Add(config);
        ctx.SaveChanges();

        using (var connection = new MySqlConnection(configs["MySqlConnString"]))
        {
            Stage[stageId] = RecordProcessStage.OnAfterCaclating;
            connection.Open();
            using (var transction = connection.BeginTransaction())
            {
                try
                {
                    // Bulk-load the result files produced by the external process.
                    using (var loadCmd = connection.CreateCommand())
                    {
                        loadCmd.CommandType = CommandType.Text;
                        loadCmd.CommandText = $"LOAD DATA LOCAL INFILE '{configs["ActiveFilePath"]}' INTO TABLE activevalues";
                        loadCmd.ExecuteNonQuery();
                        loadCmd.CommandText = $"LOAD DATA LOCAL INFILE '{configs["HarmonicFilePath"]}' INTO TABLE harmonics";
                        loadCmd.ExecuteNonQuery();
                    }

                    // Run the post-processing stored procedures for this record.
                    using (var cmd = connection.CreateCommand())
                    {
                        cmd.CommandType = CommandType.StoredProcedure;
                        cmd.CommandText = "transferPowers";
                        cmd.CommandTimeout = int.MaxValue; // procedures can run very long
                        cmd.Parameters.Add(new MySqlParameter()
                        {
                            DbType = DbType.Int64,
                            Direction = ParameterDirection.Input,
                            ParameterName = "relativeRecordId",
                            Value = newRecord.Id
                        });
                        cmd.ExecuteNonQuery();
                        cmd.CommandText = "calcvoltageseconds";
                        cmd.ExecuteNonQuery();
                        cmd.CommandText = "calcvoltagethreeseconds";
                        cmd.ExecuteNonQuery();
                    }
                    transction.Commit();
                }
                catch (Exception ex)
                {
                    // Roll back and clear the staging files so a retry starts clean.
                    transction.Rollback();
                    File.WriteAllText(configs["ActiveFilePath"], string.Empty);
                    File.WriteAllText(configs["HarmonicFilePath"], string.Empty);
                    LogService.Instance.Error("数据库操作执行失败。", ex);
                    Stage[stageId] = RecordProcessStage.Failed;
                    return;
                }
            }

            // Success: clear staging files and reset the start flag for the next run.
            File.WriteAllText(configs["ActiveFilePath"], string.Empty);
            File.WriteAllText(configs["HarmonicFilePath"], string.Empty);
            File.WriteAllText(configs["StartFile"], "0");
        }

        Stage[stageId] = RecordProcessStage.ProcessCompleted;
        newRecord.Finalized = true;
        recordRepo.AddOrUpdateDoCommit(newRecord);
    }
    catch (Exception ex)
    {
        LogService.Instance.Error("生成记录失败。", ex);
    }
}