private void Initialize()
{
    lock (Sync)
    {
        // Resolve the target database from the configured connection string.
        var url = MongoUrl.Create(_connectionString);
        var client = new MongoClient(url);
        var db = client.GetServer().GetDatabase(url.DatabaseName);

        // First use: create the capped collection so space is pre-allocated and re-used.
        if (!db.CollectionExists(_collectionName))
        {
            var collectionOptions = CollectionOptions
                .SetCapped(true)
                .SetAutoIndexId(true)
                .SetMaxSize(_maxSize);

            // int.MaxValue is treated as "no document limit"; only cap the count when configured.
            if (_maxDocuments != int.MaxValue)
            {
                collectionOptions.SetMaxDocuments(_maxDocuments);
            }

            db.CreateCollection(_collectionName, collectionOptions);
        }

        _collection = db.GetCollection(_collectionName);
        // Skip element-name validation on insert (allows field names MongoDB would normally reject).
        _mongoInsertOptions = new MongoInsertOptions { CheckElementNames = false };
    }
}
public void TestSetCappedTrue()
{
    // Building with capped=true must serialize the flag into the options document.
    var builder = CollectionOptions.SetCapped(true);

    var expectedJson = "{ 'capped' : true }".Replace("'", "\"");
    Assert.AreEqual(expectedJson, builder.ToJson());
}
private static void Log(MongoDatabase database)
{
    // Ensures the capped "site.log" collection and its CreateDate index exist.
    //
    // FIX: the original chained SetMaxSize(5000)/SetMaxDocuments(10000) and then
    // immediately called SetMaxDocuments(5000)/SetMaxSize(10000) on the same
    // builder. The builder mutates in place, so the values that actually took
    // effect were maxSize=10000 and maxDocuments=5000 — build once with those.
    var createOptions = CollectionOptions
        .SetCapped(true)
        .SetMaxSize(10000)
        .SetMaxDocuments(5000);

    if (!database.CollectionExists("site.log"))
    {
        database.CreateCollection("site.log", createOptions);
    }

    var logCollection = database.GetCollection("site.log");

    // Create the CreateDate index only when it is not already present.
    var logIndexes = logCollection.GetIndexes();
    if (logIndexes.All(c => c.Name != "CreateDate"))
    {
        var keys = IndexKeys.Ascending("CreateDate");
        var options = IndexOptions.SetName("CreateDate");
        logCollection.CreateIndex(keys, options);
    }
}
public void TestSetCappedFalse()
{
    var options = CollectionOptions.SetCapped(false);

    // FIX: SetCapped always appends the "capped" element to the builder's
    // document, even when the value is false, so the serialized form is
    // { "capped" : false } rather than an empty document (matches the
    // companion TestSetCappedTrue/TestSetCappedFalse tests in this file).
    var expected = "{ 'capped' : false }".Replace("'", "\"");
    Assert.AreEqual(expected, options.ToJson());
}
public MongoQueue(MongoQueConfig config)
{
    // our queue name will be the same as the message class
    _database = MongoDatabase.Create(config.ConnectionString);
    if (!_database.CollectionExists(_queueName))
    {
        try
        {
            Log.InfoFormat("Creating queue '{0}' size {1}", _queueName, config.QueueSize);
            var options = CollectionOptions
                .SetCapped(true)               // use a capped collection so space is pre-allocated and re-used
                .SetAutoIndexId(true)
                .SetMaxSize(config.QueueSize); // limit the size of the collection and pre-allocate the space to this number of bytes
            _database.CreateCollection(_queueName, options);
            var col = _database.GetCollection(_queueName);
            col.EnsureIndex(new[] { "Dequeued" });
            // FIX: was "Equeued", which indexed a non-existent field.
            // NOTE(review): confirm the MongoMessage<T> field is actually named "Enqueued".
            col.EnsureIndex(new[] { "Enqueued" });
        }
        catch
        {
            // assume that any exceptions are because the collection already exists ...
        }
    }

    // get the queue collection for our messages
    _queue = _database.GetCollection <MongoMessage <T> >(_queueName);
}
public void TestSetCappedFalse()
{
    // capped=false is still serialized explicitly into the options document.
    var builder = CollectionOptions.SetCapped(false);

    var expectedJson = "{ 'capped' : false }".Replace("'", "\"");
    Assert.Equal(expectedJson, builder.ToJson());
}
protected override void CreateCollection(MongoDatabase database)
{
    // Capped collection limited to 5 MB; MongoDB recycles the oldest entries.
    const int maxSizeBytes = 5 * 1024 * 1024;
    var builder = CollectionOptions
        .SetCapped(true)
        .SetMaxSize(maxSizeBytes);
    database.CreateCollection(GetCollectionName(), builder);
}
private object EnsureCollection()
{
    MongoDatabase database = this.GetDatabase();

    // Only create the capped collection on first use.
    if (!database.CollectionExists(this.collectionName))
    {
        var cappedOptions = CollectionOptions
            .SetCapped(true)
            .SetMaxSize(5000)
            .SetMaxDocuments(100);
        database.CreateCollection(this.collectionName, cappedOptions);
    }

    // Returned token only signals that the collection is ready.
    return new object();
}
public MongoQueue(string connectionString, long queueSize)
{
    // The queue name matches the message class name.
    var client = new MongoClient(connectionString);
    server = client.GetServer();
    server.Connect();

    var databaseName = MongoUrl.Create(connectionString).DatabaseName;
    _database = server.GetDatabase(databaseName);

    if (!_database.CollectionExists(_queueName))
    {
        try
        {
            Log.InfoFormat("Creating queue '{0}' size {1}", _queueName, queueSize);

            // Capped collection: space is pre-allocated and re-used; the default
            // _id index is skipped (SetAutoIndexId(false)).
            var options = CollectionOptions
                .SetCapped(true)
                .SetAutoIndexId(false)
                .SetMaxSize(queueSize);
            _database.CreateCollection(_queueName, options);
        }
        catch
        {
            // assume that any exceptions are because the collection already exists ...
        }
    }

    // get the queue collection for our messages
    _queue = _database.GetCollection <MongoMessage <T> >(_queueName);

    // check if we already have a 'last read' position to start from
    _position = _database.GetCollection("_queueIndex");
    var last = _position.FindOneById(_queueName);
    if (last != null)
    {
        _lastId = last["last"].AsObjectId;
    }
    _positionQuery = Query.EQ("_id", _queueName);
}
// Starts collection for one configured component: ensures each of its items has a
// capped collection (100 MB / 1,000,000 docs) in the database named after the
// component key, then schedules a timer that polls the component's current status
// every value.CollectSpan and forwards non-empty results to HandleValue.
// NOTE(review): the while/Thread.Sleep(1000) loop retries creation until
// CollectionExists reports the collection — presumably to ride out visibility
// lag after CreateCollection; confirm this is intentional.
private static void StartItem(KeyValuePair <string, ComponentConfiguration> item) { var value = item.Value; var key = item.Key; if (value != null && !string.IsNullOrWhiteSpace(value.Url)) { var db = server.GetDatabase(key); foreach (var citem in value.ComponentItems) { while (!db.CollectionExists(citem.Key)) { var options = CollectionOptions .SetCapped(true) .SetAutoIndexId(true) .SetMaxSize(1024 * 1024 * 100) .SetMaxDocuments(1000000); db.CreateCollection(citem.Key, options); Thread.Sleep(1000); } } var timer = new Timer(state => { try { var a = state as Tuple <string, ComponentConfiguration>; var config = a.Item2; if (config != null) { var data = config.GetCurrentStatus(); if (data != null && data.Count > 0) { HandleValue(a.Item1, config, data); } } } catch (Exception ex) { ex.Handle("收集器" + key); } }, new Tuple <string, ComponentConfiguration>(key, value), TimeSpan.Zero, value.CollectSpan); LocalLoggingService.Info(string.Format("收集器成功初始化:{0}", key)); timers.Add(timer); } }
private void CreateCollection()
{
    // Nothing to do without a database, or when the collection already exists.
    if (_database == null || _database.CollectionExists(_settings.CollectionName))
    {
        return;
    }

    var cappedOptions = CollectionOptions
        .SetCapped(true)
        .SetMaxSize(_settings.MaxSize)
        .SetMaxDocuments(_settings.MaxDocuments)
        .SetAutoIndexId(true);
    _database.CreateCollection(_settings.CollectionName, cappedOptions);

    // TailableCursor requires at least one item, so seed with a no-op job.
    var collection = _database.GetCollection(_settings.CollectionName);
    collection.Insert(new QueueJob(new Noop(), new AppContext()));
}
public virtual void CreateCappedCollection(long maxSize, long maxDocNum, bool dropExistsCollection = true)
{
    YmtSystemAssert.AssertArgumentRange(maxSize, 0L, long.MaxValue, "maxSize 超过范围");
    YmtSystemAssert.AssertArgumentRange(maxDocNum, 0L, long.MaxValue, "maxDocNum 超过范围");

    // FIX: check existence once and reuse the result — the original called
    // Exists() a second time inside the condition, a redundant server round-trip
    // (and a race window). This also matches the dbName-based overload's style.
    var exist = this.context.GetCollection <TEntity>().Exists();
    if (dropExistsCollection && exist)
    {
        this.context.GetCollection <TEntity>().Drop();
    }
    else if (exist)
    {
        // Existing collection and caller asked not to drop it.
        throw new Exception <MongodbRepositoryException>("collectionName exists");
    }

    var options = CollectionOptions.SetCapped(true).SetMaxSize(maxSize).SetMaxDocuments(maxDocNum);
    this.context.GetCollection <TEntity>().Database.CreateCollection(context.GetMapCfg <TEntity>().ToCollection, options);
}
public virtual void CreateCappedCollection(string dbName, string collectionName, long maxSize, long maxDocNum, bool dropExistsCollection = true)
{
    YmtSystemAssert.AssertArgumentRange(maxSize, 0L, long.MaxValue, "maxSize 超过范围");
    YmtSystemAssert.AssertArgumentRange(maxDocNum, 0L, long.MaxValue, "maxDocNum 超过范围");

    var db = this.context.Database(dbName);
    var alreadyExists = db.CollectionExists(collectionName);

    // Either replace an existing collection or refuse to clobber it.
    if (alreadyExists)
    {
        if (!dropExistsCollection)
        {
            throw new Exception <MongodbRepositoryException>("collectionName exists");
        }
        db.DropCollection(collectionName);
    }

    var cappedOptions = CollectionOptions.SetCapped(true).SetMaxSize(maxSize).SetMaxDocuments(maxDocNum);
    db.CreateCollection(collectionName, cappedOptions);
}
private MongoCollection GetCollection()
{
    // Collections are cached per (connection name, connection string, collection name).
    string cacheKey = string.Format("k|{0}|{1}|{2}",
        ConnectionName ?? string.Empty,
        ConnectionString ?? string.Empty,
        CollectionName ?? string.Empty);

    return _collectionCache.GetOrAdd(cacheKey, _ =>
    {
        var mongoUrl = new MongoUrl(ConnectionString);
        var server = new MongoClient(mongoUrl).GetServer();

        // An explicit DatabaseName overrides the connection string; fall back to "NLog".
        var database = server.GetDatabase(DatabaseName ?? mongoUrl.DatabaseName ?? "NLog");
        var name = CollectionName ?? "Log";

        // Create a capped collection only when a size is configured and the collection is new.
        if (CappedCollectionSize.HasValue && !database.CollectionExists(name))
        {
            var options = CollectionOptions
                .SetCapped(true)
                .SetMaxSize(CappedCollectionSize.Value);
            if (CappedCollectionMaxItems.HasValue)
            {
                options.SetMaxDocuments(CappedCollectionMaxItems.Value);
            }
            database.CreateCollection(name, options);
        }

        return database.GetCollection(name);
    });
}
/// <summary>
/// Adds a sink that writes log events as documents to a capped MongoDB collection.
/// </summary>
/// <param name="loggerConfiguration">The logger configuration.</param>
/// <param name="databaseUrl">The URL of a created MongoDB collection that log events will be written to.</param>
/// <param name="restrictedToMinimumLevel">The minimum log event level required in order to write an event to the sink.</param>
/// <param name="cappedMaxSizeMb">Max total size in megabytes of the created capped collection. (Default: 50mb)</param>
/// <param name="cappedMaxDocuments">Max number of documents of the created capped collection.</param>
/// <param name="collectionName">Name of the collection. Default is "log".</param>
/// <param name="batchPostingLimit">The maximum number of events to post in a single batch.</param>
/// <param name="period">The time to wait between checking for event batches.</param>
/// <param name="formatProvider">Supplies culture-specific formatting information, or null.</param>
/// <returns>Logger configuration, allowing configuration to continue.</returns>
/// <exception cref="ArgumentNullException">A required parameter is null.</exception>
public static LoggerConfiguration MongoDBCapped(
    this LoggerSinkConfiguration loggerConfiguration,
    string databaseUrl,
    LogEventLevel restrictedToMinimumLevel = LevelAlias.Minimum,
    long cappedMaxSizeMb = 50,
    long?cappedMaxDocuments = null,
    string collectionName = null,
    int batchPostingLimit = MongoDBSink.DefaultBatchPostingLimit,
    TimeSpan?period = null,
    IFormatProvider formatProvider = null)
{
    if (loggerConfiguration == null)
    {
        throw new ArgumentNullException("loggerConfiguration");
    }
    if (databaseUrl == null)
    {
        throw new ArgumentNullException("databaseUrl");
    }

    // Size is configured in megabytes; the driver expects bytes.
    var optionsBuilder = CollectionOptions.SetCapped(true).SetMaxSize(cappedMaxSizeMb * 1024 * 1024);
    if (cappedMaxDocuments.HasValue)
    {
        optionsBuilder = optionsBuilder.SetMaxDocuments(cappedMaxDocuments.Value);
    }

    var sink = new MongoDBSink(
        databaseUrl,
        batchPostingLimit,
        period ?? MongoDBSink.DefaultPeriod,
        formatProvider,
        collectionName ?? MongoDBSink.DefaultCollectionName,
        optionsBuilder);
    return loggerConfiguration.Sink(sink, restrictedToMinimumLevel);
}
public MongoStringPatternProcessorRepository(IOptimizerConfig optimizerConfig, IStringPatternProcessorFactory stringPatternProcessorFactory)
{
    this.stringPatternProcessorFactory = stringPatternProcessorFactory;

    // One database per optimizer instance: "<prefix>-<instanceId>".
    var server = new MongoClient(optimizerConfig.MongoConnectionString).GetServer();
    var db = server.GetDatabase(string.Format("{0}-{1}", optimizerConfig.MongoDatabasePrefix, optimizerConfig.InstanceId));

    // The collection is named after the result type and capped at 524288000 bytes (500 MB).
    var collectionName = typeof(StringPatternProcessorResult).Name;
    if (!db.CollectionExists(collectionName))
    {
        var collectionOptions = CollectionOptions.SetCapped(true).SetMaxSize(524288000);
        db.CreateCollection(collectionName, collectionOptions);
    }
    this.stringPatternProcessorCollection = db.GetCollection <StringPatternProcessorResult>(collectionName);

    // Unique compound index so each (GenerationNumber, PopulationRank) pair exists at most once.
    var keys = IndexKeys <StringPatternProcessorResult> .Ascending(x => x.GenerationNumber, x => x.PopulationRank);
    var options = IndexOptions.SetName("GenerationNumber And PopulationRank").SetUnique(true).SetBackground(true);
    this.stringPatternProcessorCollection.CreateIndex(keys, options);
}
// The initial query on an empty collection is a special case, so track whether reading has started.
private bool _startedReading = false;

public MongoQueue(string connectionString, string databaseName, string queueName, long queueSize)
{
    var database = new MongoClient(connectionString).GetServer().GetDatabase(databaseName);

    if (!database.CollectionExists(queueName))
    {
        try
        {
            Console.WriteLine("Creating queue '{0}' size {1}", queueName, queueSize);

            // Capped collection without the default _id index.
            var options = CollectionOptions
                .SetCapped(true)
                .SetAutoIndexId(false)
                .SetMaxSize(queueSize);
            database.CreateCollection(queueName, options);
        }
        catch
        {
            // assume that any exceptions are because the collection already exists ...
        }
    }

    _queue = database.GetCollection <MongoMessage <T> >(queueName);

    // Restore the persisted 'last read' position, if any.
    _position = database.GetCollection("_queueIndex");
    var last = _position.FindOneById(queueName);
    if (last != null)
    {
        _lastId = last["last"].AsObjectId;
    }
    _positionQuery = Query.EQ("_id", queueName);
}
// Ensures the log collection exists as a capped collection (size CappedSizeInMb,
// defaulting to 5 * 1024 MB when unparsable — i.e. 5 GB), then maintains indexes:
//  - a descending Timestamp index; when ExpireAfter is set and the index already
//    exists with a different TTL, the TTL is changed in place via the collMod
//    command ("expireAfterSeconds") instead of rebuilding the index;
//  - a compound ascending index on Level/Thread/Loggername.
// NOTE(review): the expected TTL index name is "<Timestamp>_-1", the driver's
// default name for a single-field descending index — confirm it matches.
public void SetupCollection() { var uri = new MongoUrl(ConnectionString); var client = new MongoClient(uri); MongoDatabase db = client.GetServer().GetDatabase(uri.DatabaseName); Int64 cappedSize; if (!Int64.TryParse(CappedSizeInMb, out cappedSize)) { cappedSize = 5 * 1024L; } if (!db.CollectionExists(CollectionName)) { CollectionOptionsBuilder options = CollectionOptions .SetCapped(true) .SetMaxSize(1024L * 1024L * cappedSize); //5 gb. db.CreateCollection(CollectionName, options); } LogCollection = db.GetCollection(CollectionName); var builder = new IndexOptionsBuilder(); const string ttlIndex = FieldNames.Timestamp + "_-1"; var index = LogCollection.GetIndexes().SingleOrDefault(x => x.Name == ttlIndex); if (index != null) { if (ExpireAfter != null) { if (index.TimeToLive != ExpireAfter.ToTimeSpan()) { var d = new CommandDocument() { { "collMod", CollectionName }, { "index", new BsonDocument { { "keyPattern", new BsonDocument { { FieldNames.Timestamp, -1 } } }, { "expireAfterSeconds", (int)(ExpireAfter.ToTimeSpan().TotalSeconds) } } } }; db.RunCommand(d); } } } else { if (ExpireAfter != null) { builder.SetTimeToLive(ExpireAfter.ToTimeSpan()); } LogCollection.CreateIndex(IndexKeys.Descending(FieldNames.Timestamp), builder); } LogCollection.CreateIndex(IndexKeys .Ascending(FieldNames.Level, FieldNames.Thread, FieldNames.Loggername) ); }
public void SeedMongoData()
{
    // Options kept as originally written (note: with SetCapped(false) the
    // size/document limits are inert for a non-capped collection).
    var options = CollectionOptions.SetCapped(false).SetMaxSize(5000).SetMaxDocuments(100);

    // FIX: the original guard was inverted — it attempted CreateCollection only
    // when the collection ALREADY existed (always throwing MongoException), so a
    // fresh database was never seeded. Seed only when the collection is missing.
    if (!_db.CollectionExists(_document))
    {
        try
        {
            _db.CreateCollection(_document, options);
            var _col = _db.GetCollection(_document);
            _col.Insert <Zones>(new Zones
            {
                name = new Name { First = "John", Last = "Deen" },
                Birth = new DateTime(1880, 1, 10),
                Death = new DateTime(1956, 5, 17),
                Contribs = new string[] { "Insight Out", "Rosemary Garden", "I Am Your Friend" },
                Awards = new List <Awardee>
                {
                    new Awardee { Award = "IFA", By = "Doordarshan", Year = 1921 },
                    new Awardee { Award = "STAR CINI", By = "Star", Year = 1923 },
                    new Awardee { Award = "ZEE PROVISONAL", By = "Zee", Year = 1930 }
                }
            });
            _col.Insert <Zones>(new Zones
            {
                Id = "Ind_1",
                name = new Name { First = "Nidhi", Last = "Bansal" },
                Birth = new DateTime(1820, 3, 11),
                Death = new DateTime(1943, 2, 21),
                Contribs = new string[] { "Pani Ki Pehali Boond", "Doopahar Ki Dhoop", "Kanha Ka Bansuri" },
                Awards = new List <Awardee>
                {
                    new Awardee { Award = "OSCAR", By = "Hollywood", Year = 1922 },
                    new Awardee { Award = "TECHNICAL CARTOONIST", By = "Star", Year = 1926 },
                    new Awardee { Award = "COREOGRAPHY", By = "Zee", Year = 1942 }
                }
            });
            _col.Insert <Zones>(new Zones
            {
                Id = "Hld_1",
                name = new Name { First = "Kristen", Last = "Lopher" },
                Birth = new DateTime(1870, 7, 9),
                Death = new DateTime(1938, 7, 17),
                Contribs = new string[] { "Rendeer In My Pond", "Rosemary Garden", "Whos is there?" },
                Awards = new List <Awardee>
                {
                    new Awardee { Award = "HOLLESTER", By = "Hgtv", Year = 1918 },
                    new Awardee { Award = "STAR CINI", By = "Star", Year = 1923 },
                    new Awardee { Award = "OSCAR", By = "Hollywood", Year = 1936 }
                }
            });
            _col.Insert <Zones>(new Zones
            {
                Id = "Ind_2",
                name = new Name { First = "Rammohan", Last = "Basu" },
                Birth = new DateTime(1923, 4, 30),
                Death = new DateTime(1980, 3, 11),
                Contribs = new string[] { "Pani Ki Pehali Boond", "My Lawn", "Please, Forget Me" },
                Awards = new List <Awardee>
                {
                    new Awardee { Award = "Fimfare", By = "Bollywood", Year = 1952 },
                    new Awardee { Award = "Bharat Ratna", By = "Govt Of India", Year = 1971 }
                }
            });
        }
        catch (MongoException)
        {
            log.Info("Collection already exists.");
        }
        catch (Exception e)
        {
            log.Info("Error - ");
            log.Error(e);
        }
        finally
        {
            log.Info("Seeding to MongoDb is completed");
        }
    }
}
public void createCollection(String databaseName)
{
    // Creating a capped collection in the already-connected database.
    // NOTE(review): the argument is passed to CreateCollection as the
    // *collection* name despite being called "databaseName" — confirm intent
    // at the call sites.
    CollectionOptionsBuilder builder = CollectionOptions.SetCapped(true);
    dataBase.CreateCollection(databaseName, builder);
}
// Aggregation scheduler: for every database whose name starts with
// "<prefix>__<name>" and every user collection in it, ensures one capped
// aggregate collection per configured span ("<collection>___<spanKey>",
// 10 MB / 100,000 docs, no _id index), then starts a timer per
// (db, collection, span) that folds raw documents' "V" values into a single
// Min/Max/Avg/Sum document per time bucket, with _id = the bucket's end time
// (stored as UTC). The resume point is the _id of the newest aggregated
// document, falling back to the oldest raw document when the aggregate
// collection is empty.
// NOTE(review): the while/Thread.Sleep(200) loop retries creation until
// CollectionExists reports the collection — presumably to absorb visibility
// lag after CreateCollection; confirm.
private void StartItem(ItemConfigurationEntity item, string name, string type) { var dbNames = server.GetDatabaseNames().Where(_ => _.StartsWith(item.Prefix + "__" + name)).ToList(); foreach (var dbName in dbNames) { var dbb = server.GetDatabase(dbName); var colNames = dbb.GetCollectionNames().Where(n => !n.Contains("system.") && !n.Contains("$") && !n.Contains("___")).ToList(); foreach (var colName in colNames) { foreach (var agg in item.AggregateSpans) { var aggcollectionname = string.Format("{0}___{1}", colName, agg.Key); try { while (!dbb.CollectionExists(aggcollectionname)) { var options = CollectionOptions .SetCapped(true) .SetAutoIndexId(false) .SetMaxSize(1024 * 1024 * 10) .SetMaxDocuments(100000); dbb.CreateCollection(aggcollectionname, options); Thread.Sleep(200); } } catch (Exception ex) { ex.Handle("聚合器创建表失败:" + aggcollectionname); continue; } var key = string.Format("{0}.{1}.{2}", dbName, colName, agg.Key); if (!timers.ContainsKey(key)) { var timer = new Timer(state => { var a = state as Tuple <string, string, string, TimeSpan>; if (a != null) { try { var db = server.GetDatabase(a.Item1); var collection = db.GetCollection(a.Item2); var aggcollection = db.GetCollection(a.Item3); var aggstart = aggcollection.FindAll().SetLimit(1).SetSortOrder(SortBy.Descending("$natural")).FirstOrDefault(); DateTime?startTime = null; if (aggstart != null) { startTime = aggstart["_id"].AsDateTime.ToLocalTime(); } else { var start = collection.FindAll().SetLimit(1).SetSortOrder(SortBy.Ascending("$natural")).FirstOrDefault(); if (start != null) { startTime = start["_id"].AsDateTime.ToLocalTime(); } } if (startTime != null) { var endTime = startTime.Value.Add(a.Item4); while (endTime < DateTime.Now) { var query = Query.LT("_id", endTime).GTE(startTime); var data = collection.Find(query).ToList(); object v = 0; if (data.Count > 0) { Func <BsonDocument, long> func = b => { if (b["V"].IsInt64) { return(b["V"].AsInt64); } if (b["V"].IsInt32) { return(b["V"].AsInt32); } else { 
return(0); } }; switch (type) { case "Min": v = data.Select(func).Min(); break; case "Max": v = data.Select(func).Max(); break; case "Avg": v = Convert.ToInt32(data.Select(func).Average()); break; case "Sum": v = data.Select(func).Sum(); break; default: break; } } var doc = new BsonDocument().Add("V", BsonValue.Create(v)); doc.SetDocumentId(endTime.ToUniversalTime()); aggcollection.Insert(doc); startTime = endTime; endTime += a.Item4; } } } catch (Exception ex) { ex.Handle(string.Format("{3} PerformanceAggregator出错:{0} {1} {2}", dbName, colName, agg.Key, config.Name)); } } }, new Tuple <string, string, string, TimeSpan>(dbName, colName, aggcollectionname, agg.Value), TimeSpan.Zero, agg.Value); timers.Add(key, timer); LocalLoggingService.Info(string.Format("{3} PerformanceAggregator成功初始化:{0} {1} {2}", dbName, colName, agg.Key, config.Name)); } } } } }
// Flushes collected samples: for each app/item/sub-item bucket, reduces the
// List<int> of samples with the requested aggregate ("Min"/"Max"/"Avg"/"Sum";
// empty or null lists yield 0), ensures a capped target collection
// (100 MB / 1,000,000 docs) exists, and inserts one document { V: value } whose
// _id is the current time converted to UTC. The sample list is cleared in a
// finally block, so samples are dropped even when the insert fails.
// NOTE(review): the list is locked while aggregating and clearing but the keys
// are snapshotted up front — concurrent writers adding new keys mid-flush are
// not picked up until the next call; confirm that is acceptable.
private void HandleData(Dictionary <string, Dictionary <string, Dictionary <string, List <int> > > > data, string name, string type) { foreach (var appKey in new List <string>(data.Keys)) { var app = data[appKey]; foreach (var itemKey in new List <string>(app.Keys)) { var item = app[itemKey]; var db = server.GetDatabase(GetDatabaseName(name, appKey, itemKey)); foreach (var subItemKey in new List <string>(item.Keys)) { var subItem = item[subItemKey]; Int32 value = 0; if (subItem != null && subItem.Count > 0) { lock (subItem) { switch (type) { case "Min": value = subItem.Min(); break; case "Max": value = subItem.Max(); break; case "Avg": value = Convert.ToInt32(subItem.Average()); break; case "Sum": value = subItem.Sum(); break; default: break; } } } var colName = subItemKey; var col = db.GetCollection(colName); var options = CollectionOptions .SetCapped(true) .SetAutoIndexId(true) .SetMaxSize(1024 * 1024 * 100) .SetMaxDocuments(1000000); try { while (!db.CollectionExists(colName)) { db.CreateCollection(colName, options); Thread.Sleep(1000); } } catch (Exception ex) { ex.Handle("收集器创建表失败:" + colName); continue; } //AppInfoCenterService.LoggingService.Info(string.Format("收集器成功初始化表:{0} 参数:{1}", col.FullName, options.ToString())); try { var doc = new BsonDocument().Add("V", BsonValue.Create(value)); doc.SetDocumentId(DateTime.Now.ToUniversalTime()); col.Insert(doc); } catch (Exception ex) { ex.Handle(); } finally { lock (subItem) subItem.Clear(); } } } } }
// Component-level aggregator: pre-creates one capped collection per
// (component item, aggregate span) pair named "<item>__<spanKey>"
// (10 MB / 100,000 docs, no _id index), then starts one dedicated thread per
// span that repeatedly folds raw "V" samples into one document per period —
// Sum for TotalValue items, Average for StateValue items — with
// _id = the period's end time (stored as UTC). The resume point is the _id of
// the newest aggregated document, falling back to the oldest raw document when
// the aggregate collection is empty; the thread sleeps for the span length
// between passes.
// NOTE(review): the while/Thread.Sleep(200) creation-retry loop presumably
// absorbs visibility lag after CreateCollection; confirm.
private static void StartItem(KeyValuePair <string, ComponentConfiguration> item) { var value = item.Value; var key = item.Key; if (value != null && !string.IsNullOrWhiteSpace(value.Url)) { var db = server.GetDatabase(key); foreach (var agg in value.AggregateSpans) { foreach (var configItem in value.ComponentItems) { var aggcollectionname = string.Format("{0}__{1}", configItem.Key, agg.Key); while (!db.CollectionExists(aggcollectionname)) { var options = CollectionOptions .SetCapped(true) .SetAutoIndexId(false) .SetMaxSize(1024 * 1024 * 10) .SetMaxDocuments(100000); db.CreateCollection(aggcollectionname, options); Thread.Sleep(200); } } var timer = new Thread(state => { while (true) { var t = state as Tuple <KeyValuePair <string, TimeSpan>, ComponentConfiguration>; if (t != null) { var aggInfo = t.Item1; var config = t.Item2; try { foreach (var configItem in config.ComponentItems) { if (configItem.Value.ItemValueType == ItemValueType.TotalValue || configItem.Value.ItemValueType == ItemValueType.StateValue) { var collection = db.GetCollection(configItem.Key); var aggcollectionname = string.Format("{0}__{1}", configItem.Key, aggInfo.Key); var aggcollection = db.GetCollection(aggcollectionname); var aggstart = aggcollection.FindAll().SetLimit(1).SetSortOrder(SortBy.Descending("$natural")).FirstOrDefault(); DateTime?startTime = null; if (aggstart != null) { startTime = aggstart["_id"].AsDateTime.ToLocalTime(); } else { var start = collection.FindAll().SetLimit(1).SetSortOrder(SortBy.Ascending("$natural")).FirstOrDefault(); if (start != null) { startTime = start["_id"].AsDateTime.ToLocalTime(); } } if (startTime != null) { var endTime = startTime.Value.Add(aggInfo.Value); while (endTime < DateTime.Now) { var query = Query.LT("_id", endTime).GTE(startTime); var data = collection.Find(query).ToList(); object v = 0; if (data.Count > 0) { Func <BsonDocument, long> func = a => { if (a["V"].IsInt64) { return(a["V"].AsInt64); } if (a["V"].IsInt32) { return(a["V"].AsInt32); } 
else { return(0); } }; if (configItem.Value.ItemValueType == ItemValueType.TotalValue) { v = data.Select(func).Sum(); } if (configItem.Value.ItemValueType == ItemValueType.StateValue) { v = data.Select(func).Average(); } } var doc = new BsonDocument().Add("V", BsonValue.Create(v)); doc.SetDocumentId(endTime.ToUniversalTime()); aggcollection.Insert(doc); startTime = endTime; endTime += aggInfo.Value; } } } } } catch (Exception ex) { ex.Handle("聚合器" + key); } finally { Thread.Sleep(aggInfo.Value); } } else { Thread.Sleep(100); } } }); LocalLoggingService.Info(string.Format("聚合器成功初始化:{0} {1}", key, agg.Key)); timer.Start(new Tuple <KeyValuePair <string, TimeSpan>, ComponentConfiguration>(agg, value)); timers.Add(timer); } } }